text
stringlengths 29
850k
|
|---|
#!/usr/bin/python
import sys
import socket
import string
import datetime
import config as config
class Pynorcio:
    """Minimal IRC bot (Python 2): connects to a server, joins configured
    channels, and answers a few commands (ping/whois/help/time) when a
    message is addressed to its nick.

    NOTE(review): relies on the Python 2 `string` module functions
    (string.split / string.rstrip / string.find), which were removed in
    Python 3; socket sends also assume Python 2 byte-strings.
    """
    # Class-level placeholders; all are reassigned per instance.
    readbuffer = ""  # unparsed tail of the socket stream
    s = ""           # socket, created in connect()
    conf = ""        # config.Config instance, set in __init__
    def __init__(self):
        # Load configuration and immediately enter the (blocking) IRC loop.
        self.conf = config.Config()
        self.connect()
    def connect(self):
        """Connect and register with the server, join the configured
        channels, then loop forever reading lines and dispatching
        PING replies and PRIVMSGs addressed to the bot."""
        self.s = socket.socket()
        self.s.connect((self.conf.HOST, self.conf.PORT))
        self.s.send("NICK %s\r\n" % self.conf.NICK)
        self.s.send("USER %s %s bla :%s\r\n" % (self.conf.IDENT, self.conf.HOST, self.conf.REALNAME))
        for channel in self.conf.CHANNELS:
            self.s.send("JOIN " + channel + "\r\n")
        while 1:
            # Accumulate raw data; the trailing partial line stays buffered.
            self.readbuffer = self.readbuffer + self.s.recv(1024)
            temp = string.split(self.readbuffer, "\n")
            self.readbuffer = temp.pop( )
            for line in temp:
                msg = line  # keep the raw line; `line` is tokenized below
                line = string.rstrip(line)
                line = string.split(line)
                if (line[0] == "PING"):
                    # Keep-alive: echo the server's token back.
                    self.s.send("PONG %s\r\n" % line[1])
                elif ( (line[1] == "PRIVMSG") and line[3].startswith(":" + self.conf.NICK) ):
                    # Only react to messages that open by addressing the bot's nick.
                    self.read(line, msg)
    def read(self, line, msg):
        """Dispatch one PRIVMSG addressed to the bot.

        line -- whitespace-tokenized IRC line; msg -- the raw line text.
        """
        sndr = line[0][1:line[0].find("!")]  # nick part of the :nick!user@host prefix
        rcvr = line[2]                       # channel name or the bot's own nick
        rply = ""                            # NOTE(review): unused
        cmd = line[4]                        # first word after ":<botnick>"
        to = ""
        if (rcvr.startswith("#")):
            to = rcvr   # channel message: reply in the channel
        else:
            to = sndr   # private message: reply to the sender
        # NOTE(review): all four branches are identical; a single membership
        # test on the command name would behave the same.
        if (cmd == "ping"):
            self.write(to, self.query(line, msg))
        elif (cmd == "whois"):
            self.write(to, self.query(line, msg))
        elif (cmd == "help"):
            self.write(to, self.query(line, msg))
        elif (cmd == "time"):
            self.write(to, self.query(line, msg))
    def write(self, to, output):
        """Send each element of `output` as a PRIVMSG line to `to`."""
        for line in output:
            self.s.send("PRIVMSG " + to + " :" + line + "\r\n")
        return ""
    def query(self, line, msg):
        """Return the reply lines (list of strings) for a recognized command."""
        sndr = line[0][1:line[0].find("!")]
        rcvr = line[2]
        cmd = line[4]
        # Drop everything up to and including the second ":" (the message text).
        msg = msg[string.find(msg, ":", 1)+1:]
        rply = []     # NOTE(review): unused
        eof = "\r\n"  # NOTE(review): unused
        if (cmd == "ping"):
            return ["pong"]
        elif (cmd == "whois"):
            return ["My name is " + self.conf.REALNAME + ", and I am a python IRC bot."]
        elif (cmd == "help"):
            return [self.conf.NICK + " help command (commands : ping, whois)"]
        elif (cmd == "time"):
            return [datetime.datetime.now().time().isoformat()]
if __name__ == '__main__':
    # Constructing the bot connects and then blocks in the IRC read loop.
    bot = Pynorcio()
|
TECHNICAL CLAIMS MANAGER who has gained 12 years industry experience. Has an excellent track record of performance management, implementing change in an insurance based environment and client care. In last role, managed a team of 12 individuals, handling up to 1200 live RTA PI claims, managing the claims through the litigation process to a successful conclusion on behalf of the injured party. Ensured excellent customer service, managing complaints, driving performance and working towards key performance indicators that collectively helped the team achieve over £8million in fee income across the 2012-14 financial years, representing one of the highest performing teams in the firm. Due to redundancy, seeks a new challenge that has the potential to match his ambition and dedication moving forward.
Diploma in Accident Management and Personal Injury. Degree in Geography with History 2:1.
|
# coding=utf-8
## Modified from https://github.com/tommasolevato/CNN-Classification/blob/master/mjsynth.py
from os.path import isfile
import logging
import numpy as np
import os.path
import matplotlib.image as mpimg
from skimage.transform import resize
np.random.seed(1)
class MJSYNTH_CHARNET():
    """Loader for the MJSynth / Synth90k word-image dataset (Python 2).

    Collects `numExamples` image paths from an annotation file, loads each
    image as a mean/std-normalized 32x100 greyscale float32 array, and
    encodes the word embedded in each filename as 23 concatenated one-hot
    vectors of length 37 ('0'-'9', 'a'-'z', plus a final padding class).
    """
    classes = []  # NOTE(review): appears unused
    def __init__(self, which_set, numExamples):
        # Alphabet: digits, lowercase letters, and a final padding "space".
        self.output_char = [x for x in '0123456789abcdefghijklmnopqrstuvwxyz ']
        # One-hot template for the padding class (last index set).
        self.space_hot = [0]*37
        self.space_hot[-1] = 1
        # Zeroed one-hot template, copied per character.
        self.one_hot = [0]*37
        self.height = 32
        self.width = 100
        self.examples = []  # relative image paths, without the leading "./"
        self.img_shape = (1, self.height, self.width)
        self.numExamples = numExamples
        self.which_set = which_set
        if which_set == "train":
            self.fileToLoadFrom = "annotation_train.txt"
        elif which_set == "test":
            self.fileToLoadFrom = "annotation_test.txt"
        elif which_set == "valid":
            self.fileToLoadFrom = "annotation_val.txt"
        else:
            raise ValueError("Set not recognized")
        # Placeholder: must be edited to point at the dataset root.
        self.datapath = 'LOCATION OF SYNTH 90kDICT32px/ FOLDER'
        self.loadData()
    def findExamples(self):
        """Collect up to numExamples image paths from the annotation file."""
        with open(self.datapath + self.fileToLoadFrom) as f:
            for line in f:
                # Annotation line format: "<path> <class-id>".
                exampleClass = line.split(" ")[1].rstrip()  # NOTE(review): unused
                file = line.split(" ")[0].rstrip()
                try:
                    self.examples.append(file[2:len(file)])  # strip leading "./"
                    if len(self.examples) == self.numExamples:
                        break
                except KeyError:
                    # NOTE(review): no dict lookup happens here, so this
                    # handler can never fire.
                    pass
    def findOtherExamplesIfNeeded(self):
        """Top up self.examples with additional files until numExamples is met."""
        if len(self.examples) < self.numExamples:
            with open(self.datapath + self.fileToLoadFrom) as f:
                for line in f:
                    file = line.split(" ")[0].rstrip()
                    if file not in self.examples:
                        self.examples.append(file[2:len(file)])
                        if len(self.examples) == self.numExamples:
                            break
        assert len(self.examples) == self.numExamples
    def loadData(self):
        # Build the example list, then materialize images and labels.
        self.findExamples()
        self.findOtherExamplesIfNeeded()
        self.loadImages()
    def loadImages(self):
        """Fill self.x (N,1,32,100 float32) and self.labels from self.examples."""
        self.x = np.zeros((len(self.examples), 1, self.height, self.width), dtype=np.float32)
        i = 0
        tmp = []
        for example in self.examples:
            filename = self.datapath + example
            self.x[i, :, :, :] = self.loadImage(filename)
            classLabel = self.loadClassLabel(filename)
            tmp.append(classLabel)
            i += 1
        self.labels = np.array(tmp)
    def loadImage(self, filename):
        """Read, greyscale, resize to 32x100, and mean/std-normalize an image.

        Returns None (implicitly) when the file does not exist.
        """
        if not isfile(filename):
            print filename + "does not exist"  # NOTE(review): missing space before "does"
        else:
            img = mpimg.imread(filename)
            if len(img.shape) == 3 and img.shape[2] == 3:
                img = np.dot(img[...,:3], [0.2989, 0.5870, 0.1140]) # Convert to greyscale
            im = resize(img, (32,100), order=1, preserve_range=True)
            im = np.array(im,dtype=np.float32) # convert to single precision
            # Zero-mean normalization; the epsilon guards flat (zero-std) images.
            img = (im - np.mean(im)) / ( (np.std(im) + 0.0001) )
            return img
    def loadClassLabel(self, filename):
        """Encode the word embedded in the filename (..._WORD_...) as one-hot."""
        word = (filename.split("_")[1]).lower()
        #convert the word in the filename to a one-hot vector of length 37*23
        classLabel = []
        for i,c in enumerate(word):
            ind = self.output_char.index(c)
            tmp_hot = self.one_hot[:]
            tmp_hot[ind] = 1
            classLabel.extend(tmp_hot)
        # Pad the remaining character slots (out of 23) with the space class.
        # NOTE(review): indentation reconstructed from a whitespace-stripped
        # dump — padding assumed outside the loop so the label length is 23*37.
        classLabel.extend((23-(i+1))*self.space_hot)
        return classLabel
if __name__ == '__main__':
    # Smoke test: load 10 training examples and print the decoded labels.
    z = MJSYNTH_CHARNET("train",10)
    output_char = [x for x in '0123456789abcdefghijklmnopqrstuvwxyz ']
    for j in range(len(z.labels)):
        y = z.labels[j]
        for i in range(23):
            # Decode each 37-wide one-hot slice back to its character index.
            c = np.where(y[i*37:(i+1)*37]==1)[0][0]
            print output_char[c],
        print ''
|
Apricots don't get a lot of love and I can't figure out why. They have a delicious flavor that works in both sweet and savory recipes. On top of that, they are pretty inexpensive when they're in season and you can find them dried or frozen when they aren't. Looking for something new to make? I think something with apricots is the clear answer. Need more proof? Here you go!
These apricot wings are perfect for a party. People will eat them all up in no time!
These are a traditional dessert in many parts of the world and you are going to love them.
These bars take very little effort to make, but start your day off right.
You are going to be blown away by the huge flavor in these tiny little bites.
This mouth puckering lemonade recipe will be one you want to make all the time.
Doesn't this look delightful? It tastes great with steamed rice.
You can make these with any kind of jam, but apricot is a top choice.
This is perfect for jam thumbprint cookies or your morning slice of toast.
Looking for a gourmet dessert that is healthy? You have found it!
Indulge your love of this childhood favorite by whipping up a healthier version at home.
Get ready to be addicted! This cookie is definitely one that is hard to stop eating.
Just when you thought cheesecake couldn't get any better, it shows up with apricot.
Scones only look hard to make. They are actually really easy, this recipe included!
This fruity glazed ham is a superb thing to make for a celebration.
These can satisfy a candy craving without much work in the kitchen.
Pineapple shouldn't get to have all the fun. Apricots make a great upside down cake too!
These handheld pastries are a crowd pleaser, so make a lot of them.
Oatmeal cookies are great alone, but add some apricots and it will rock your world.
The surprise burst of flavor in these cookies will have you coming back for more until they are gone.
These soft and fruit filled cookies will satisfy a craving in no time.
Need something to get you going in the morning? These will do it for you!
If you love apple butter, you will adore apricot butter.
The flavors in this dish will make you feel like you're on a tropical vacay.
Served with coffee, this is something no one will be able to get enough of.
Fish pairs well with fruity flavors and this recipe is proof positive.
Apricots and strawberries make such an obvious choice for a smoothie. Yum!
Doesn't this look decadent? It pairs deliciously with vanilla ice cream.
Here's something else that tastes better served with vanilla ice cream.
Bar desserts are pretty popular and this one is going to get a top spot in your rotation.
What a perfect treat for a hot summer day!
I guarantee that this tastes just as good as it looks.
Never thought to put apricots in pie? Today is the day to change all that.
I bet you wish you could grab a square or two of this, don't you?
This ethnic meal is one you'll prepare all the time. Everyone loves it!
A straight apricot smoothie is a choice you can never go wrong with.
Do you like apricots? Are you going to try one of these tasty recipes today?
Would You Rather Eat a Burger or BBQ Chicken for the 4th of July?
|
# -*- coding: utf-8 -*-
__author__ = 'Nilk'
from qqbot import QQBotSlot as qqbotslot, RunBot
import csv
import tweepy
import datetime
import os
import json
import urllib2
import sys
import re as regex
import time
import logging
from bs4 import BeautifulSoup as BS
# Python 2 hack: make UTF-8 the implicit str<->unicode conversion codec.
reload(sys)
sys.setdefaultencoding('utf-8')
# WARNING(security/review): live API credentials are hard-coded below; they
# should be rotated and loaded from the environment or a config file.
CONSUMER_KEY = 'uWb94m6mwDnHOix6YAfMQ1ESt'
CONSUMER_SECRET = 'AHOrZYDUvskktLFIQRvXxnN7hDxtkaW8PZQsg1AatQfNGvbczQ'
ACCESS_TOKEN = '1936186141-P1P8jBW8gwcLVMOW3kzeSOoF8GXvkyCPYvq4uB9'
ACCESS_TOKEN_SECRET = 'aLyYUHTYXkS4VHdI8Wvf49ydOOWYjMldGrMeFSMukeWuU'
TULINGKEY = "0a3727130d754c8d95797977f8f61646"
TULINGURL = "http://www.tuling123.com/openapi/api?"  # Tuling chat-bot endpoint
TIME_ONEDAY = datetime.timedelta(1)  # tweets younger than this get forwarded
KCWIKI_DATA = "http://kcwikizh.github.io/kcdata/slotitem/poi_improve.json"  # Akashi improvement data
GROUP_NUMBER = '337545621'  # QQ group the bot serves
# Canned keyword -> reply table; first CSV column is the keyword.
with open('responses.csv', mode = 'r') as infile:
    reader = csv.reader(infile)
    responses = {rows[0]:rows[1] for rows in reader}
# Authenticated Twitter client used to forward official tweets.
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth)
# Repeat-chain ("复读") state shared by onQQMessage.
repCounter = 0
prevMsg = ''
# File logger that records every incoming message.
logger = logging.getLogger('shoukaku')
hdlr = logging.FileHandler('shoukaku.log')
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(logging.INFO)
def tail( f, lines=20 ):
    """Return the last `lines` lines of the seekable file object `f`.

    Reads fixed-size blocks backwards from the end until enough newlines
    have been seen, so large files are never read in full.

    f     -- seekable file object opened for reading
    lines -- number of trailing lines to return (default 20)

    FIX: the original mixed str literals ('\\n', '') with the stream's own
    data, which raises TypeError for binary streams on Python 3.  The
    newline and join seeds now follow the stream's data type, so both
    text (Python 2) and bytes streams work.  (On Python 3, binary mode is
    required anyway: text-mode files cannot seek relative to the end.)
    """
    total_lines_wanted = lines
    BLOCK_SIZE = 1024
    # Probe the stream's data type without consuming anything; read(0)
    # returns an empty str or bytes of the stream's own kind.
    probe = f.read(0)
    newline = b'\n' if isinstance(probe, bytes) else '\n'
    empty = probe  # empty str/bytes of the matching type, used for joining
    f.seek(0, 2)
    block_end_byte = f.tell()
    lines_to_go = total_lines_wanted
    block_number = -1
    blocks = []  # blocks of size BLOCK_SIZE, in reverse order starting
                 # from the end of the file
    while lines_to_go > 0 and block_end_byte > 0:
        if (block_end_byte - BLOCK_SIZE > 0):
            # Read the last block we haven't yet read.
            f.seek(block_number * BLOCK_SIZE, 2)
            blocks.append(f.read(BLOCK_SIZE))
        else:
            # File smaller than one block: read what remains from the start.
            f.seek(0, 0)
            blocks.append(f.read(block_end_byte))
        lines_found = blocks[-1].count(newline)
        lines_to_go -= lines_found
        block_end_byte -= BLOCK_SIZE
        block_number -= 1
    all_read_text = empty.join(reversed(blocks))
    return newline.join(all_read_text.splitlines()[-total_lines_wanted:])
@qqbotslot
def onQQMessage(bot, contact, member, content):
    """Main message slot: keyword-triggered features for the admin contact
    and the target group, with the Tuling chat API as the fallback.

    NOTE(review): indentation was reconstructed from a whitespace-stripped
    dump; the nesting of the trailing repeat-troll fallback is a
    best-effort guess and should be confirmed against the original file.
    """
    global api
    global logger
    logger.info(content)
    print(contact.qq)
    # Admin/direct-contact commands (QQ id of the bot owner).
    if (contact.qq == '1259276249'):
        # content = {'userid':'123456', 'info':content, 'key':TULINGKEY}
        # data = json.dumps(content)
        # req = urllib2.Request(TULINGURL, data, {'Content-Type': 'application'})
        # re = urllib2.urlopen(req)
        # re = re.read()
        # re_dict = json.loads(re)
        # text = re_dict['text']
        # bot.SendTo(contact, str(text.encode('utf-8', 'ignore')))
        if (content == '-stop'):
            bot.SendTo(contact, 'QQ Bot terminated')
            bot.Stop()
        if (content == '-log output'):
            # Send the last 30 lines of the bot's own log file.
            log_file = open('shoukaku.log')
            bot.SendTo(contact, tail(log_file, lines = 30))
        if('@正规空母翔鹤' in content):
            # Canned responses: first CSV keyword found in the message wins.
            for key, value in responses.iteritems():
                print(key)
                if key in content:
                    bot.SendTo(contact, value)
                    break
            return
        if('抓取官推' in content):
            # Forward FGO tweets posted within the last day.
            time_now = datetime.datetime.now()
            public_tweets = api.user_timeline('fgoproject')
            for tweet in public_tweets:
                if(time_now - tweet.created_at < TIME_ONEDAY):
                    time.sleep(1)  # crude rate limiting between sends
                    bot.SendTo(contact, str(tweet.text.encode('utf-8', 'ignore')))
            return
        if('改修' in content):
            # Today's Akashi improvement list from the kcwiki data dump.
            total_string = ''
            print('checking for akashi factory list')
            req = urllib2.Request(KCWIKI_DATA)
            re = urllib2.urlopen(req)
            re = re.read()
            equip_list = json.loads(re)
            # Shift into the game-day timezone, then map Python's
            # Monday-first weekday to the data's Sunday-first indexing.
            # NOTE(review): the +14h offset is assumed intentional — confirm.
            today_week = datetime.datetime.now() + datetime.timedelta(hours = 14)
            today_week = (today_week.weekday() + 1) % 7
            for equip in equip_list:
                list_of_secretary = []
                improvements = equip[u'improvement']
                # note: one equip can have different combination of secretary and weekdays,
                # also different improvement paths
                for current_improvement in improvements:
                    current_requirements = current_improvement[u'req']
                    for requirement in current_requirements:
                        days = requirement[u'day']
                        if(days[today_week]):
                            # add secretaries to the list
                            list_of_secretary.extend(requirement[u'secretary'])
                if(len(list_of_secretary) > 0):
                    info = '装备名称: '.encode('utf-8') + equip['name'] + ' 秘书舰: '.encode('utf-8')
                    for secretary in list_of_secretary:
                        info = info + secretary + ' '
                    total_string = total_string + ';' + info
            bot.SendTo(contact, total_string)
            return
    #testgroup '209127315' target 337545621
    if (contact.qq == GROUP_NUMBER and '@ME' in content): #info mode
        # Check the canned-response table first.
        for key, value in responses.iteritems():
            if key in content:
                bot.SendTo(contact, value)
                return
        if('攻略' in content or '配置' in content or '带路' in content):
            # Map guide request: build the kcwiki URL from the area prefix.
            area = ''
            if('1-' in content):
                area = urllib2.quote('镇守府海域')
            if('2-' in content):
                area = urllib2.quote('南西群岛海域')
            if('3-' in content):
                area = urllib2.quote('北方海域')
            if('4-' in content):
                area = urllib2.quote('西方海域')
            if('5-' in content):
                area = urllib2.quote('南方海域')
            if('6-' in content):
                area = urllib2.quote('中部海域')
            pattern = regex.compile(r'\d-\d')
            # NOTE(review): search() returns None when no "d-d" token is
            # present, which would raise AttributeError on .group().
            subarea = regex.search(pattern, content).group()
            print(subarea)
            html_content = urllib2.urlopen('https://zh.kcwiki.org/wiki/' + area + '/' + subarea).read()
            soup = BS(html_content)
            print(soup.title)
            flag = False
            # Strips HTML tags, <br>, and newline runs from each entry.
            pattern = regex.compile(r'</?\w+[^>]*>|<br\s*?/?>|\n+')
            for item in soup.find_all('div'):
                if(flag and item.ul is not None):
                    for entry in item.ul:
                        time.sleep(1)
                        bot.SendTo(contact, str(pattern.sub('',str(entry))).encode('utf-8'))
                    break
                # The div whose bold title carries the marker precedes the
                # list we want; flag it and send the next <ul> found.
                if(item.b is not None and '海域情报' in str(item.b)):
                    print(item.get('class'))
                    flag = True
            return
        if('FGO' in content and '情报' in content):
            # Forward FGO tweets posted within the last day.
            time_now = datetime.datetime.now()
            public_tweets = api.user_timeline('fgoproject')
            for tweet in public_tweets:
                if(time_now - tweet.created_at < TIME_ONEDAY):
                    time.sleep(1)
                    bot.SendTo(contact, str(tweet.text.encode('utf-8', 'ignore')))
            return
        if('舰' in content and '情报' in content):
            # Forward Kancolle staff tweets posted within the last day.
            time_now = datetime.datetime.now()
            public_tweets = api.user_timeline('KanColle_STAFF')
            for tweet in public_tweets:
                if(time_now - tweet.created_at < TIME_ONEDAY):
                    time.sleep(1)
                    bot.SendTo(contact, str(tweet.text.encode('utf-8', 'ignore')))
            return
        # Payment / top-up info (original comment: 氪金信息).
        if('充值' in content or '氪金' in content):
            print('check for current price')
            bot.SendTo(contact, 'FGO黑卡充值:'.encode('utf-8') + 'https://item.taobao.com/item.htm?spm=0.0.0.0.nBUIej&id=546772277736')
            bot.SendTo(contact, 'FGO白卡充值:'.encode('utf-8') + 'https://item.taobao.com/item.htm?spm=a1z0k.7628870.0.0.kayXcs&id=545942439642&_u=p2o03db0b500')
            bot.SendTo(contact, '舰娘氪金:'.encode('utf-8') + 'https://item.taobao.com/item.htm?spm=a1z10.5-c.w4002-15864276650.23.yejdE6&id=539141881167')
            return
        # if no keywords matched, turn to tuling123 api
        # the response categories: 100000 = text, 200000 = url, 302000 = news(return type is perhaps a list)
        # NOTE(review): this block duplicates the admin-path 改修 handler
        # above verbatim; a shared helper would remove the duplication.
        if('改修' in content):
            total_string = ''
            print('checking for akashi factory list')
            req = urllib2.Request(KCWIKI_DATA)
            re = urllib2.urlopen(req)
            re = re.read()
            equip_list = json.loads(re)
            today_week = datetime.datetime.now() + datetime.timedelta(hours = 14)
            today_week = (today_week.weekday() + 1) % 7
            for equip in equip_list:
                list_of_secretary = []
                improvements = equip[u'improvement']
                # note: one equip can have different combination of secretary and weekdays,
                # also different improvement paths
                for current_improvement in improvements:
                    current_requirements = current_improvement[u'req']
                    for requirement in current_requirements:
                        days = requirement[u'day']
                        if(days[today_week]):
                            # add secretaries to the list
                            list_of_secretary.extend(requirement[u'secretary'])
                if(len(list_of_secretary) > 0):
                    info = '装备名称: '.encode('utf-8') + equip['name'] + ' 秘书舰: '.encode('utf-8')
                    for secretary in list_of_secretary:
                        info = info + secretary + ' '
                    total_string = total_string + ';' + info
            bot.SendTo(contact, total_string)
            return
        # Strip the leading "@ME " marker (6 chars) before forwarding.
        pure_content = content.decode('utf8')[6:].encode('utf8')
        print('pure_content = ' + pure_content.encode('gb2312'))
        # NOTE(review): `content` is rebound to the request dict here; the
        # repeat-troll fallback below therefore compares and sends a dict,
        # not the original message text — likely a bug.
        content = {'userid':member.uin, 'info':pure_content, 'key':TULINGKEY}
        data = json.dumps(content)
        req = urllib2.Request(TULINGURL, data, {'Content-Type': 'application'})
        re = urllib2.urlopen(req)
        re = re.read()
        re_dict = json.loads(re)
        category = re_dict['code']
        print(category)
        if(category == 100000):
            # Plain text answer.
            text = re_dict['text']
            bot.SendTo(contact, str(text.encode('utf-8')))
        elif(category == 200000):
            # Text plus a link.
            text = re_dict['text']
            bot.SendTo(contact, str(text.encode('utf-8')))
            link = re_dict['url']
            bot.SendTo(contact, str(link.encode('utf-8')))
        elif(category == 308000): #the return type is a list
            # Recipe list: forward at most three entries.
            text = re_dict['text']
            bot.SendTo(contact, str(text.encode('utf-8')))
            return_list = re_dict['list']
            print(len(return_list))
            counter = 0
            for item in return_list:
                time.sleep(1)
                bot.SendTo(contact, item['name'].encode('utf-8') + '用料: '.encode('utf-8')
                           + item['info'].encode('utf-8') + ' 详细做法: '.encode('utf-8') + item['detailurl'].encode('utf-8'))
                counter+=1
                if(counter > 2):
                    break
        elif(category == 302000):
            # News list: forward at most three entries.
            text = re_dict['text']
            bot.SendTo(contact, str(text.encode('utf-8')))
            return_list = re_dict['list']
            print(len(return_list))
            counter = 0
            for item in return_list:
                time.sleep(1)
                bot.SendTo(contact, item['article'].encode('utf-8') + ' 消息来自: '.encode('utf-8')
                           + item['source'].encode('utf-8') + ' 详情请见: '.encode('utf-8') + item['detailurl'].encode('utf-8'))
                counter+=1
                if(counter > 2):
                    break
        else:
            # Trolling in chat: repeat-chain ("复读") detector.  Once the bot
            # has joined a chain it should not say anything more in that chain.
            global repCounter
            global prevMsg
            curMsg = content
            if(repCounter == 0):
                repCounter += 1
            else:
                if(curMsg == prevMsg):
                    repCounter += 1
                    print(repCounter)
                else:
                    # Chain broken: brag about its length, then reset.
                    if(repCounter > 3):
                        bot.SendTo(contact, '你们的复读坚持了' + str(repCounter + 1) + '次~人类的本质就是个复读机!')
                    repCounter = 0
            # Join the chain exactly once, when it reaches three repeats.
            if(repCounter == 3):
                bot.SendTo(contact, content)
            prevMsg = curMsg
@qqbotslot
def onInterval(bot):
    """Periodic slot (invoked roughly every 5 minutes): push scheduled
    reminders and daily tweet digests to the target group.

    Schedule (local time):
      00:50 / 12:50  Kancolle exercise reset reminder
      06:50          forward the last day's FGO and Kancolle staff tweets
      10:00          FGO daily quest / free friend-point summon reminder
      14:00          FGO login bonus reminder
      15:00          Kancolle daily quest reminder
    """
    # Use the shared GROUP_NUMBER constant instead of repeating the literal.
    test_group = bot.List('group', GROUP_NUMBER)[0]
    # 5-minute windows make each reminder fire once per scheduled slot even
    # though this callback runs repeatedly.
    time_now = datetime.datetime.time(datetime.datetime.now())
    if(time_now >= datetime.time(0,50,0,0) and time_now < datetime.time(0,55,0,0)):
        bot.SendTo(test_group, 'Kancolle 演习马上更新, 请各位提督不要忘记演习~'.encode('utf-8'))
    if(time_now >= datetime.time(12,50,0,0) and time_now < datetime.time(12,55,0,0)):
        bot.SendTo(test_group, 'Kancolle 演习马上更新, 请各位提督不要忘记演习~'.encode('utf-8'))
    if(time_now >= datetime.time(6,50,0,0) and time_now < datetime.time(6,55,0,0)):
        public_tweets = api.user_timeline('fgoproject')
        for tweet in public_tweets:
            if(datetime.datetime.now() - tweet.created_at < TIME_ONEDAY):
                bot.SendTo(test_group, str(tweet.text.encode('utf-8', 'ignore')))
        public_tweets = api.user_timeline('KanColle_STAFF')
        for tweet in public_tweets:
            # BUG FIX: the original subtracted tweet.created_at from
            # `time_now` (a datetime.time), which raises TypeError; use a
            # full datetime exactly as the loop above does.
            if(datetime.datetime.now() - tweet.created_at < TIME_ONEDAY):
                bot.SendTo(test_group, str(tweet.text.encode('utf-8', 'ignore')))
    if(time_now >= datetime.time(10,0,0,0) and time_now < datetime.time(10,5,0,0)):
        bot.SendTo(test_group, 'FGO日常任务以及免费友情点十连已经更新~'.encode('utf-8'))
    if(time_now >= datetime.time(14,0,0,0) and time_now < datetime.time(14,5,0,0)):
        bot.SendTo(test_group, 'FGO日常登录奖励大家不要错过哦~'.encode('utf-8'))
    if(time_now >= datetime.time(15,0,0,0) and time_now < datetime.time(15,5,0,0)):
        bot.SendTo(test_group, 'Kancolle每日任务已经更新~'.encode('utf-8'))
@qqbotslot
def onNewContact(bot, contact, owner):
    """Slot invoked when a new member joins `owner`; greets the newcomer
    in the served group."""
    # exec when there is new member joining owner
    print('onNewContact evoked')
    if(owner is None): return
    if(owner.qq == GROUP_NUMBER):
        # NOTE(review): both lookups below appear unused; they may exist
        # only to force-refresh the member list — confirm before removing.
        test_group = bot.List('group', GROUP_NUMBER)[0]
        new_member = bot.List(test_group, 'qq='+str(contact.qq))[0]
        bot.SendTo(owner, '欢迎新dalao~'.encode('utf-8'))
        bot.SendTo(owner, 'Hello '.encode('utf-8')+ contact.card.encode('utf-8')+'. 我是翔鹤,有什么问题可以at我,如果对于我的功能有什么建议的话请找nilk.'
                   .encode('utf-8'))
#open the info table
# Entry point: start the QQ bot event loop (blocks until bot.Stop()).
RunBot(qq='3407757156', user = 'Nilk')
# The two module-level strings below are design notes only; they are bare
# string expressions with no runtime effect.
'''
Goal:
1. Nilk will be the only authorized person who has the ability to edit the response of it.
2. Can troll in the group chat
3. When called out by @, provide proper info
'''
'''
TODO:
0. try to trim the @me before msg in group chat(done)
1.点歌,发url
3.氪金信息
4.crawl for info, instead of hard coded csv(done)
5.今日改修,今日修炼场,今日种火
6.定时提醒清本,上线清任务领奖励(done)
7.带33节奏
舰娘信息可以用kcwiki api
'''
|
From interior designers to stylists to colour consultants, design professionals can help transform your initial ideas into your perfect space. Working with a professional enables you to leverage their years of experience, streamline the project management process, and access their existing network of trades. Simply select your specialty area in the drop down menu, view the designers' portfolio and mood boards and contact them directly to discuss your next project.
Born in Sydney, Kelly’s connection with flare and design began at a young age in the studio of her mother’s wedding couture and dressmaking business. Her passion came to life designing, building and styling her own home, leading her to complete a Diploma in Interior Design and forming 13 Interiors-The 12 elements of design, with you in mind. Her inspiration is drawn from new materials, trends and products, unique spaces and happy clients. A love of great design, attention to detail and resourcefulness is the driving force behind the projects Kelly delivers and this is apparent in her expanding portfolio of projects.
Hi there, I want to share MY KIND OF BLISS wherever I find it, so check out my new website for blog posts, free mood board designs and new products added to MY BLISS LIST. I'm interiors obsessed and my passion is creating beautiful spaces and mood boards and sharing them with the world. My aim is to make it easier for you to design and shop for a new space or update an old one. Feel free to contact me for any questions or collaborations.
Katy Thomas is a Melbourne based Interior and Product Stylist with a love for creating beautiful spaces on an affordable budget. Having recently renovated her own home, Katy understands the importance of planning, designing and creating the perfect space. Her interiors business has enabled her to create beautiful, well thought out spaces for her clients. Whether you're selling your home or simply wanting to redecorate, Katy would love to help you with your next project.
Dot + Pop is the interiors blog of Eve Gunson, lover of all things interior, home ware, design, architecture and renovation. Dot + Pop began firstly as an inspiring blog and has now evolved into an online home wares store and interior design and styling business. Together with her partner Matt, a registered builder, they have completed 3 home renovations in Melbourne. Most recently their own home was featured in the Summer 17 Issue of Adore Magazine and Planted Magazine.
Qualified Interior Decorator based in Perth, WA. Specialising in affordable decorating E-Design and product styling.
Gina is the Editor of STYLE CURATOR, an award winning Australian blog about the pursuit of a stylish home. She is passionate about sharing her love of design, architecture, decorating and homewares.
Silvia Roldan Interiors is a boutique Design studio providing design, decorating and styling advice for residential, commercial and retail projects. We create unique, colour filled interiors with a focus on quality craftsmanship and original design. Our designs are tactile, warm and layered palettes of materials, finishes and furnishings that create the perfect interior. Our studio is available for consultation, design, styling and project management for projects of all sizes and budgets. "I describe my style as contemporary minimalist but with a focus on warmth, texture and layers. I like using unexpected colour in my designs. The spaces I design have all the elements that an interior needs to make it feel like a home. I believe every space needs a hero piece; whether that be furniture, lighting or art and my designs are based around this idea".
Our colour designers are fully qualified Interior Designers with the experience to help you achieve your dream home make over with confidence. •Begin with a consultation (1.5 hours on average) •Your colour designer will provide you with everything you need including a report detailing all products and colours for your project, and colour swatches •Receive a redemption voucher for Dulux premium paint (purchase your paint and you can redeem $1 per litre to a maximum of $100) •Gain complete confidence in your final colour selection!
|
"""
Define abstract conv2d interface
"""
import logging
import theano
from theano.tensor import (as_tensor_variable, patternbroadcast)
from theano.tensor import TensorType
from theano.gof import Apply, Op
from theano.gof import local_optimizer
from theano.tensor.opt import register_specialize_device
# Cpu implementation
from theano.tensor.nnet import conv2d as cpu_conv2d, ConvOp
from theano.tensor.nnet.ConvGrad3D import convGrad3D
from theano.tensor.nnet.ConvTransp3D import convTransp3D
__docformat__ = "restructuredtext en"
_logger = logging.getLogger("theano.tensor.nnet.conv2d")
def conv2d(input,
           filters,
           input_shape=None,
           filter_shape=None,
           border_mode='valid',
           subsample=(1, 1),
           filter_flip=True):
    """Build the symbolic graph for a 2D convolution over a mini-batch,
    modelled after Convolutional Neural Networks (CNN).

    Parameters
    ----------
    input : symbolic 4D tensor
        Mini-batch of feature map stacks, shaped
        (batch size, input channels, input rows, input columns).
    filters : symbolic 4D tensor
        Filter bank, shaped
        (output channels, input channels, filter rows, filter columns).
    input_shape : None or tuple/list of len 4 of int or Constant variable
        Static shape of ``input``; any element may be ``None`` when unknown
        at compile time.  Optional, possibly used to choose an optimal
        implementation.
    filter_shape : None or tuple/list of len 4 of int or Constant variable
        Static shape of ``filters``; same conventions as ``input_shape``.
    border_mode : str, int or tuple of two int
        One of:
        * ``'valid'``: apply the filter wherever it completely overlaps the
          input; output shape is input shape - filter shape + 1.
        * ``'full'``: apply the filter wherever it partly overlaps the
          input; output shape is input shape + filter shape - 1.
        * ``'half'``: pad with ``filter rows // 2`` rows and
          ``filter columns // 2`` columns of zeros, then do a valid
          convolution; odd-sized filters then preserve the input shape.
        * ``int``: symmetric zero padding of the given width, then a valid
          convolution.
        * ``(int1, int2)``: ``int1`` rows and ``int2`` columns of symmetric
          zero padding, then a valid convolution.
    subsample : tuple of len 2
        Factor by which to subsample the output; also called strides.
    filter_flip : bool
        If ``True`` (the default), flip the filter rows and columns before
        sliding them over the input — a true convolution.  If ``False``,
        the operation is a cross-correlation.

    Returns
    -------
    symbolic 4D tensor
        Feature maps shaped (batch size, output channels, output rows,
        output columns).
    """
    # Instantiate the abstract op; later graph optimizations substitute a
    # concrete implementation appropriate for the current device.
    abstract_op = AbstractConv2d(imshp=input_shape,
                                 kshp=filter_shape,
                                 border_mode=border_mode,
                                 subsample=subsample,
                                 filter_flip=filter_flip)
    return abstract_op(input, filters)
class BaseAbstractConv2d(Op):
    """
    Base class for AbstractConv
    Define an abstract convolution op that will be replaced with the appropriate implementation
    :type imshp: None, tuple/list of len 4 of int or Constant variable
    :param imshp: The shape of the input parameter.
        Optional, possibly used to choose an optimal implementation.
        You can give ``None`` for any element of the list to specify that this
        element is not known at compile time.
        imshp is defined w.r.t the forward conv.
    :type kshp: None, tuple/list of len 4 of int or Constant variable
    :param kshp: The shape of the filters parameter.
        Optional, possibly used to choose an optimal implementation.
        You can give ``None`` for any element of the list to specify that this
        element is not known at compile time.
        kshp is defined w.r.t the forward conv.
    :type border_mode: str, int or tuple of two int
    :param border_mode: Either of the following:
        * ``'valid'``: apply filter wherever it completely overlaps with the
          input. Generates output of shape: input shape - filter shape + 1
        * ``'full'``: apply filter wherever it partly overlaps with the input.
          Generates output of shape: input shape + filter shape - 1
        * ``'half'``: pad input with a symmetric border of ``filter rows // 2``
          rows and ``filter columns // 2`` columns, then perform a valid
          convolution. For filters with an odd number of rows and columns, this
          leads to the output shape being equal to the input shape.
        * ``int``: pad input with a symmetric border of zeros of the given
          width, then perform a valid convolution.
        * ``(int1, int2)``: pad input with a symmetric border of ``int1`` rows
          and ``int2`` columns, then perform a valid convolution.
    :type subsample: tuple of len 2
    :param subsample: factor by which to subsample the output.
        Also called strides elsewhere.
    :type filter_flip: bool
    :param filter_flip: If ``True``, will flip the filter rows and columns
        before sliding them over the input. This operation is normally referred
        to as a convolution, and this is the default. If ``False``, the filters
        are not flipped and the operation is referred to as a cross-correlation.
    """
    check_broadcast = False
    # These attributes define op equality/hashing for the optimizer.
    __props__ = ('border_mode', 'subsample', 'filter_flip', 'imshp', 'kshp')
    def __init__(self,
                 imshp=None, kshp=None,
                 border_mode="valid", subsample=(1, 1),
                 filter_flip=True):
        # Normalize a single int to a symmetric (rows, cols) padding pair.
        if isinstance(border_mode, int):
            border_mode = (border_mode, border_mode)
        if isinstance(border_mode, tuple):
            pad_h, pad_w = map(int, border_mode)
            border_mode = (pad_h, pad_w)
        # Accept only a non-negative pair or one of the named modes.
        if not ((isinstance(border_mode, tuple) and min(border_mode) >= 0) or
                border_mode in ('valid', 'full', 'half')):
            raise ValueError(
                'invalid border_mode {}, which must be either '
                '"valid", "full", "half", an integer or a pair of'
                ' integers'.format(border_mode))
        # Shapes are stored as tuples (hashable for __props__); a falsy
        # shape argument collapses to None.
        self.imshp = tuple(imshp) if imshp else None
        self.kshp = tuple(kshp) if kshp else None
        self.border_mode = border_mode
        self.filter_flip = filter_flip
        if len(subsample) != 2:
            raise ValueError("subsample must have two elements")
        self.subsample = subsample
    def flops(self, inp, outp):
        """ Useful with the hack in profilemode to print the MFlops"""
        # if the output shape is correct, then this gives the correct
        # flops for any direction, sampling, padding, and border mode
        inputs, filters = inp
        outputs, = outp
        assert inputs[1] == filters[1]  # input channel counts must agree
        # nb mul and add by output pixel
        flops = filters[2] * filters[3] * 2
        # nb flops by output image
        flops *= outputs[2] * outputs[3]
        # nb patch multiplied
        flops *= inputs[1] * filters[0] * inputs[0]
        return flops
class AbstractConv2d(BaseAbstractConv2d):
    """
    Abstract Op for the forward convolution.
    """
    def __init__(self, imshp=None, kshp=None, border_mode="valid",
                 subsample=(1, 1), filter_flip=True):
        super(AbstractConv2d, self).__init__(imshp, kshp, border_mode,
                                             subsample, filter_flip)
    def make_node(self, img, kern):
        # Both operands must be 4D: (batch, channels, rows, columns).
        if img.type.ndim != 4:
            raise TypeError('img must be 4D tensor')
        if kern.type.ndim != 4:
            raise TypeError('kern must be 4D tensor')
        # The output broadcasts over batch like the image and over output
        # channels like the kernel; spatial dims are never broadcastable.
        out_broadcastable = [img.broadcastable[0],
                             kern.broadcastable[0],
                             False, False]
        output = img.type.clone(broadcastable=out_broadcastable)()
        return Apply(self, [img, kern], [output])
    def perform(self, node, inp, out_):
        # This abstract Op is meant to be replaced by a concrete
        # implementation during graph optimization; reaching perform()
        # means that replacement never happened.
        raise NotImplementedError('AbstractConv2d theano optimization failed')
    def grad(self, inp, grads):
        bottom, weights = inp
        top, = grads
        grad_inputs_op = AbstractConv2d_gradInputs(self.imshp, self.kshp,
                                                   self.border_mode,
                                                   self.subsample,
                                                   self.filter_flip)
        grad_weights_op = AbstractConv2d_gradWeights(self.imshp, self.kshp,
                                                     self.border_mode,
                                                     self.subsample,
                                                     self.filter_flip)
        d_bottom = grad_inputs_op(weights, top, bottom.shape[-2:])
        d_weights = grad_weights_op(bottom, top, weights.shape[-2:])
        return d_bottom, d_weights
class AbstractConv2d_gradWeights(BaseAbstractConv2d):
    """Gradient wrt. filters for `AbstractConv2d`.
    :note: You will not want to use this directly, but rely on
           Theano's automatic differentiation or graph optimization to
           use it as needed.
    """
    def __init__(self,
                 imshp=None,
                 kshp=None,
                 border_mode="valid",
                 subsample=(1, 1),
                 filter_flip=True):
        super(AbstractConv2d_gradWeights, self).__init__(imshp, kshp,
                                                         border_mode, subsample, filter_flip)
    # Update shape/height_width
    def make_node(self, img, topgrad, shape):
        """Build the Apply node.

        `shape` carries the spatial (rows, columns) size of the filters
        to produce, as a symbolic tensor.
        """
        if img.type.ndim != 4:
            raise TypeError('img must be 4D tensor')
        if topgrad.type.ndim != 4:
            raise TypeError('topgrad must be 4D tensor')
        shape = as_tensor_variable(shape)
        # The filter gradient broadcasts over output channels like
        # `topgrad` and over input channels like `img`.
        broadcastable = [topgrad.broadcastable[1],
                         img.broadcastable[1],
                         False, False]
        output = img.type.clone(broadcastable=broadcastable)()
        return Apply(self, [img, topgrad, shape], [output])
    def perform(self, node, inp, out_):
        # Fixed: message previously said 'AbstractConv2d_gradWeight'
        # (missing the trailing 's'), misnaming the failing op.
        raise NotImplementedError('AbstractConv2d_gradWeights theano optimization failed')
    def grad(self, inp, grads):
        bottom, top = inp[:2]
        weights, = grads
        d_bottom = AbstractConv2d_gradInputs(self.imshp, self.kshp,
                                             self.border_mode,
                                             self.subsample,
                                             self.filter_flip)(weights, top, bottom.shape[-2:])
        d_top = AbstractConv2d(self.imshp,
                               self.kshp,
                               self.border_mode,
                               self.subsample,
                               self.filter_flip)(bottom, weights)
        # The symbolic `shape` input carries no gradient.
        d_height_width = (theano.gradient.DisconnectedType()(),)
        return (d_bottom, d_top) + d_height_width
    def connection_pattern(self, node):
        return [[1], [1], [0]]  # no connection to height, width
class AbstractConv2d_gradInputs(BaseAbstractConv2d):
    """Gradient wrt. inputs for `AbstractConv2d`.
    :note: You will not want to use this directly, but rely on
           Theano's automatic differentiation or graph optimization to
           use it as needed.
    """
    def __init__(self,
                 imshp=None,
                 kshp=None,
                 border_mode="valid",
                 subsample=(1, 1),
                 filter_flip=True):
        super(AbstractConv2d_gradInputs, self).__init__(imshp, kshp,
                                                        border_mode, subsample, filter_flip)
    # Update shape/height_width
    def make_node(self, kern, topgrad, shape):
        """Build the Apply node.

        `shape` carries the spatial (rows, columns) size of the input
        gradient to produce, as a symbolic tensor.
        """
        if kern.type.ndim != 4:
            raise TypeError('kern must be 4D tensor')
        if topgrad.type.ndim != 4:
            raise TypeError('topgrad must be 4D tensor')
        shape = as_tensor_variable(shape)
        # The input gradient broadcasts over batch like `topgrad` and over
        # input channels like the kernel.
        broadcastable = [topgrad.type.broadcastable[0],
                         kern.type.broadcastable[1],
                         False, False]
        output = kern.type.clone(broadcastable=broadcastable)()
        return Apply(self, [kern, topgrad, shape], [output])
    def perform(self, node, inp, out_):
        # Fixed: message previously said 'AbstractConv2d_gradWeight',
        # copied from the gradWeights op, which misidentified the op
        # that failed to be optimized.
        raise NotImplementedError('AbstractConv2d_gradInputs theano optimization failed')
    def grad(self, inp, grads):
        weights, top = inp[:2]
        bottom, = grads
        # Fixed: propagate `filter_flip` to both gradient ops. It was
        # previously omitted, so it silently defaulted to True and produced
        # wrong gradients for ops constructed with filter_flip=False.
        d_weights = AbstractConv2d_gradWeights(self.imshp, self.kshp,
                                               self.border_mode,
                                               self.subsample,
                                               self.filter_flip)(bottom, top, weights.shape[-2:])
        d_top = AbstractConv2d(self.imshp, self.kshp,
                               self.border_mode, self.subsample,
                               self.filter_flip)(bottom, weights)
        # The symbolic `shape` input carries no gradient.
        d_height_width = (theano.gradient.DisconnectedType()(),)
        return (d_weights, d_top) + d_height_width
    def connection_pattern(self, node):
        return [[1], [1], [0]]  # no connection to height, width
# Cpu Optmization
@local_optimizer([AbstractConv2d])
def local_conv2d_cpu(node):
    """Replace an `AbstractConv2d` node with the CPU `cpu_conv2d` Op.

    Returns None when the rewrite does not apply (non-tensor inputs,
    unsupported border mode, or unflipped filters).
    """
    if not isinstance(node.op, AbstractConv2d):
        return None
    img, kern = node.inputs
    if not (isinstance(img.type, TensorType) and
            isinstance(kern.type, TensorType)):
        return None
    if node.op.border_mode not in ['full', 'valid']:
        return None
    if not node.op.filter_flip:
        # Not tested yet
        return None
    return [cpu_conv2d(img, kern,
                       node.op.imshp, node.op.kshp,
                       border_mode=node.op.border_mode,
                       subsample=node.op.subsample)]
register_specialize_device(local_conv2d_cpu, 'fast_compile')
@local_optimizer([AbstractConv2d_gradWeights])
def local_conv2d_gradweight_cpu(node):
    """Replace an `AbstractConv2d_gradWeights` node with a concrete CPU
    implementation (`convGrad3D` for subsampled valid mode, otherwise a
    `ConvOp` configured to compute the weight gradient).

    Returns None when the rewrite does not apply (non-tensor inputs,
    unsupported border mode, unflipped filters, unsupported subsampling,
    or unknown shapes combined with subsampling).
    """
    img, topgrad, shape = node.inputs
    if ((not isinstance(img.type, TensorType) or
         not isinstance(topgrad.type, TensorType))):
        return None
    if node.op.border_mode not in ['full', 'valid']:
        return None
    if not node.op.filter_flip:
        # Not tested yet
        return
    if node.op.border_mode == 'valid' and \
            (node.op.subsample != (1, 1)):
        # Use the gradient as defined in conv3D, because the implementation
        # by Conv is slow (about 3x slower than conv3D, and probably 10x
        # slower than it could be), and incorrect when subsample > 2.
        # build a "node", that should be equivalent to the one given by
        # self.make_node, but using convGrad3D instead.
        # Reshape 4D (b, c, r, col) tensors into the 5D layout conv3D
        # expects, with a dummy depth axis of size 1.
        shuffled_img = img.dimshuffle(0, 2, 3, 'x', 1)
        shuffled_topgrad = topgrad.dimshuffle(0, 2, 3, 'x', 1)
        rval = convGrad3D(V=shuffled_img,
                          d=(node.op.subsample[0], node.op.subsample[1], 1),
                          WShape=(shuffled_topgrad.shape[4],
                                  shape[0], shape[1], 1,
                                  shuffled_img.shape[4]),
                          dCdH=shuffled_topgrad)
        # Collapse the dummy depth axis and restore the 4D layout.
        rval = theano.tensor.addbroadcast(rval, 3)
        rval = rval.dimshuffle(0, 4, 1, 2)
        # Flip back to convolution (rather than correlation) orientation.
        rval = rval[:, :, ::-1, ::-1]
        rval = patternbroadcast(rval, node.outputs[0].broadcastable)
        return [rval]
    dx, dy = node.op.subsample
    if dx not in (1, 2) or dy not in (1, 2):
        # Not implemented in the gradient of ConvOp
        return None
    # Normalize missing shape hints to tuples of None.
    if node.op.imshp is None:
        op_imshp = (None, None, None, None)
    else:
        op_imshp = node.op.imshp
    if node.op.kshp is None:
        op_kshp = (None, None, None, None)
    else:
        op_kshp = node.op.kshp
    if None in op_imshp or None in op_kshp:
        if (dx, dy) != (1, 1):
            # We cannot infer the shapes
            return None
    # Determine gradient on kernels
    assert len(op_imshp) == 4 and len(op_kshp) == 4
    outshp = ConvOp.getOutputShape(op_imshp[2:],
                                   op_kshp[2:], node.op.subsample,
                                   node.op.border_mode)
    fulloutshp = ConvOp.getOutputShape(op_imshp[2:],
                                       op_kshp[2:], (1, 1),
                                       node.op.border_mode)
    # Swap batch and channel axes: the weight gradient is itself a
    # convolution between the (transposed) image and the output gradient.
    newimg = img.dimshuffle((1, 0, 2, 3))
    newtopgrad = topgrad.dimshuffle((1, 0, 2, 3))
    if node.op.border_mode == 'valid':
        (img, filters) = (newimg, newtopgrad)
        kshp_logical = fulloutshp
        kshp_logical_top_aligned = False
        imshp_logical = None
        (bsize, nkern) = (op_imshp[1], op_kshp[0])
        imshp = (op_imshp[0], op_imshp[2], op_imshp[3])
        kshp = outshp
    elif node.op.border_mode == 'full':
        # In full mode the roles of image and output gradient swap.
        (img, filters) = (newtopgrad, newimg)
        kshp_logical = None
        kshp_logical_top_aligned = True
        imshp_logical = (op_imshp[0],
                         fulloutshp[0],
                         fulloutshp[1])
        (bsize, nkern) = (op_kshp[0], op_imshp[1])
        imshp = (op_imshp[0], outshp[0], outshp[1])
        kshp = op_imshp[2:]
    else:
        raise NotImplementedError(
            'Only [full,valid] modes are currently supported.')
    # Flip the kernels
    filters = filters[:, :, ::-1, ::-1]
    dw = ConvOp(imshp, kshp, nkern, bsize, 1, 1, output_mode='valid',
                unroll_batch=None, unroll_kern=None, unroll_patch=None,
                imshp_logical=imshp_logical,
                kshp_logical=kshp_logical,
                kshp_logical_top_aligned=kshp_logical_top_aligned,
                direction_hint='bprop weights')
    res = dw(img, filters)
    if node.op.border_mode == 'valid':
        # Undo the batch/channel swap and the kernel flip.
        res = res.dimshuffle((1, 0, 2, 3))
        res = res[:, :, ::-1, ::-1]
    res = patternbroadcast(res, node.outputs[0].broadcastable)
    return [res]
register_specialize_device(local_conv2d_gradweight_cpu, 'fast_compile')
@local_optimizer([AbstractConv2d_gradInputs])
def local_conv2d_gradinputs_cpu(node):
    """Replace an `AbstractConv2d_gradInputs` node with a concrete CPU
    implementation (`convTransp3D` for subsampled valid mode, otherwise a
    `ConvOp` configured to compute the input gradient).

    Returns None when the rewrite does not apply (non-tensor inputs,
    unsupported border mode, unflipped filters, unsupported subsampling,
    or unknown shapes combined with subsampling).
    """
    kern, topgrad, shape = node.inputs
    if ((not isinstance(kern.type, TensorType) or
         not isinstance(topgrad.type, TensorType))):
        return None
    if node.op.border_mode not in ['full', 'valid']:
        return None
    if not node.op.filter_flip:
        # Not tested yet
        return None
    # Conv 3d implementation, needed when subsample > 2
    if node.op.border_mode == 'valid' and node.op.subsample != (1, 1):
        # Flip to correlation orientation, then reshape into the 5D layout
        # conv3D expects, with a dummy depth axis of size 1.
        kern = kern[:, :, ::-1, ::-1]
        shuffled_kern = kern.dimshuffle(0, 2, 3, 'x', 1)
        shuffled_topgrad = topgrad.dimshuffle(0, 2, 3, 'x', 1)
        # convTransp3D requires a bias term; use zeros so it is a no-op.
        b = theano.tensor.zeros_like(shuffled_kern[0, 0, 0, 0, :])
        rval = convTransp3D(W=shuffled_kern, b=b,
                            d=(node.op.subsample[0], node.op.subsample[1], 1),
                            H=shuffled_topgrad,
                            RShape=(shape[0], shape[1], 1))
        # Collapse the dummy depth axis and restore the 4D layout.
        rval = theano.tensor.addbroadcast(rval, 3)
        rval = rval.dimshuffle(0, 4, 1, 2)
        rval = patternbroadcast(rval, node.outputs[0].broadcastable)
        return [rval]
    # Conv2d Implementation
    dx, dy = node.op.subsample
    if dx not in (1, 2) or dy not in (1, 2):
        # Not implemented in the gradient of ConvOp
        return None
    # Normalize missing shape hints to tuples of None.
    if node.op.imshp is None:
        op_imshp = (None, None, None, None)
    else:
        op_imshp = node.op.imshp
    if node.op.kshp is None:
        op_kshp = (None, None, None, None)
    else:
        op_kshp = node.op.kshp
    if None in op_imshp or None in op_kshp:
        if (dx, dy) != (1, 1):
            return None
    # The gradient of a valid convolution is a full convolution with the
    # transposed, flipped filters (and vice versa), hence the mode swap.
    mode = 'valid'
    if not node.op.border_mode == 'full':
        mode = 'full'
    filters = kern.dimshuffle((1, 0, 2, 3))
    filters = filters[:, :, ::-1, ::-1]
    outshp = ConvOp.getOutputShape(op_imshp[2:],
                                   op_kshp[2:], node.op.subsample,
                                   node.op.border_mode)
    fulloutshp = ConvOp.getOutputShape(op_imshp[2:],
                                       op_kshp[2:], (1, 1),
                                       node.op.border_mode)
    nkern = op_imshp[1]
    imshp = (op_kshp[0], outshp[0], outshp[1])
    imshp_logical = (op_kshp[0], fulloutshp[0], fulloutshp[1])
    din = ConvOp(imshp,
                 op_kshp[2:],
                 nkern,
                 op_imshp[0],
                 1, 1, output_mode=mode,
                 unroll_batch=None, unroll_kern=None,
                 unroll_patch=None,
                 imshp_logical=imshp_logical,
                 kshp_logical=None,
                 version=-1,
                 direction_hint='bprop inputs')
    din = din(topgrad, filters)
    din = patternbroadcast(din, node.outputs[0].broadcastable)
    return [din]
register_specialize_device(local_conv2d_gradinputs_cpu, 'fast_compile')
|
Now you can get complete details about Armenia visa application form including Armenia work visa application form, Armenia Business visa application form, Armenia study visa application form, the Armenia Visitor visa application form, Armenia family visa application form, Armenia settlement visa application form, Armenia Transit visa application form, Armenia permanent residence visa application form, Armenia temporary residence visa application form and Armenia Professional visa application form.
Going to Armenia needs some official steps. Such as, you have to apply for a visa. For applying to visa, you will be provided by Armenia visa application form. You have to fill that Armenia visa application form. The guideline will be provided to you for filling the Armenia visa application form. Such as there are some special boxes that are needed to be filled very carefully in Armenia visa application form.
Many people ask how to get an Armenia visa application form. The answer is that you can easily obtain the Armenia visa application form from the embassy. If you ask how to get the Armenia visa application once you have already applied for an Armenia visa, the answer is the same: you can get it either from your nearest embassy or from a consular office for Armenia. So choose the type of Armenia visa application form that suits your purpose and visit Armenia.
You have to fill that Armenia visa application form completely and have to do your official signatures on the form. Besides this, you have to attach some of your important documents on the Armenia visa application form such as passport sized photographs, cover letter, and many other official documents. After filling the Armenia visa application form, you have to submit that Armenia visa application to the embassy. You have to select what type of visa option on which you want to go, on the Armenia visa application form such as student visa, tourist visa, business visa or Armenia visa application form for work, etc.
Armenia Visa Application Form - Armenia Visa Application - How To Get Armenia Visa Application Form - How To Get Armenia Visa Application - Armenia Work Visa Application Form - Armenia Tourist Visa Application Form - Armenia Work Visa Application Form - Armenia Business Visa Application Form - Armenia Permanent Residence Visa Application Form - Armenia Temporary Residence Visa Application Form - Armenia Student Visa Application Form - Armenia Professional Visa Application Form - Armenia Transit Visa Application Form - Armenia Visa Application Form For Pakistani Now you can get complete details about Armenia visa application form including Armenia work visa application form, Armenia Business visa application form, Armenia study visa application form, the Armenia Visitor visa application form, Armenia family visa application form, Armenia settlement visa application form, Armenia Transit visa application form, Armenia permanent residence visa application form, Armenia temporary residence visa application form and Armenia Professional visa application form.
|
import os
from functools import reduce
from wtforms.validators import InputRequired, Length, Optional
from flask_wtf.file import FileAllowed
from wtforms import StringField, SelectField, SubmitField, BooleanField, TextAreaField, FileField
from wtforms_components import TimeField
from app.forms.base import LocalizedForm
from app.forms.constents import (FONT_SIZES, BTN_COLORS, DURATIONS, TOUCH_TEMPLATES, DISPLAY_TEMPLATES,
ANNOUNCEMENT_REPEATS, ANNOUNCEMENT_REPEAT_TYPE, VISUAL_EFFECTS,
VISUAL_EFFECT_REPEATS, BOOLEAN_SELECT_1, TICKET_TYPES,
TICKET_REGISTERED_TYPES, SLIDE_EFFECTS, SLIDE_DURATIONS, EVERY_OPTIONS)
from app.database import Media
from app.constants import SUPPORTED_MEDIA_FILES, SUPPORTED_LANGUAGES, PRINTED_TICKET_SCALES
from app.helpers import get_tts_safely
class TouchScreenForm(LocalizedForm):
    """Customization form for the touch (ticket-issuing) screen.

    The ``background`` and ``naudio`` select fields are extended at
    instantiation time with the media records currently in the database.
    """
    touch = SelectField('Select a template for Touch screen :',
                        coerce=int,
                        choices=TOUCH_TEMPLATES)
    title = StringField('Enter a title :',
                        validators=[InputRequired('Must enter at least 5 letters and Title '
                                                  'should be maximum of 300 letters'),
                                    Length(5, 300)])
    hsize = SelectField('Choose title font size :',
                        coerce=str,
                        choices=FONT_SIZES)
    hcolor = StringField('Select title font color :')
    hfont = StringField('choose a font for title :')
    hbg = StringField('Select heading background color :')
    tsize = SelectField('choose task font size :',
                        coerce=str,
                        choices=FONT_SIZES)
    tcolor = SelectField('choose tasks color :',
                         coerce=str,
                         choices=BTN_COLORS)
    tfont = StringField('choose tasks font :')
    msize = SelectField('choose message font size :',
                        coerce=str,
                        choices=FONT_SIZES)
    mcolor = StringField('Select message font color :')
    mfont = StringField('Choose message font :')
    mduration = SelectField('choose motion effect duration of appearing :',
                            coerce=str,
                            choices=DURATIONS)
    mbg = StringField('Select message background color :')
    message = TextAreaField('Enter a notification message :',
                            validators=[InputRequired('Must enter at least 5 letter and Message'
                                                      'should be maximum of 300 letters ..'),
                                        Length(5, 300)])
    bcolor = StringField('Select a background color : ')
    background = SelectField('Select background : ',
                             coerce=int,
                             choices=[(0, 'Use color selection')])
    naudio = SelectField('Select audio notification : ',
                         coerce=int,
                         choices=[(0, 'Disable audio notification')])
    submit = SubmitField('Apply')
    def __init__(self, *args, **kwargs):
        super(TouchScreenForm, self).__init__(*args, **kwargs)
        # Rebind `choices` to a new list instead of using `+=`, which
        # mutates the default list shared through the class-level field
        # definition and makes options accumulate duplicates across form
        # instantiations (i.e. across requests).
        self.background.choices = self.background.choices + [
            (m.id, f'{m.id}. {m.name}') for m in Media.get_all_images()]
        self.naudio.choices = self.naudio.choices + [
            (m.id, f'{m.id}. {m.name}') for m in Media.get_all_audios()]
class DisplayScreenForm(LocalizedForm):
    """Customization form for the waiting-room display screen.

    The ``background`` and ``naudio`` select fields are extended at
    instantiation time with the media records currently in the database,
    and one ``check<shortcode>`` BooleanField is generated per available
    text-to-speech language.
    """
    display = SelectField('Select a template for Display screen : ',
                          coerce=int,
                          choices=DISPLAY_TEMPLATES)
    title = StringField('Enter a title : ',
                        validators=[InputRequired('Title should be maximum of 300 letters'),
                                    Length(0, 300)])
    background = SelectField('Select a background : ',
                             coerce=int,
                             choices=[(0, 'Use color selection')])
    hsize = SelectField('Choose title font size : ',
                        coerce=str,
                        choices=FONT_SIZES)
    hcolor = StringField('Choose title font color : ')
    hfont = StringField('Choose title font : ')
    hbg = StringField('Choose title background color : ')
    tsize = SelectField('choose main heading office font size :',
                        coerce=str,
                        choices=FONT_SIZES)
    tcolor = StringField('choose main heading office color : ')
    tfont = StringField('choose main heading office font : ')
    h2color = StringField('choose main heading ticket color : ')
    h2size = SelectField('choose main heading ticket font size :',
                         coerce=str,
                         choices=FONT_SIZES)
    h2font = StringField('choose main heading ticket font : ')
    ssize = SelectField('choose secondary heading font size : ',
                        coerce=str,
                        choices=FONT_SIZES)
    scolor = StringField('choose secondary heading color : ')
    sfont = StringField('choose secondary heading font :')
    mduration = SelectField('choose motion effect duration of appearing : ',
                            coerce=str,
                            choices=DURATIONS)
    rrate = SelectField('choose page refresh rate : ',
                        coerce=str,
                        choices=DURATIONS)
    effect = SelectField('choose visual motion effect for notification : ',
                         coerce=str,
                         choices=VISUAL_EFFECTS)
    repeats = SelectField('choose motion effect number of repeats : ',
                          coerce=str,
                          choices=VISUAL_EFFECT_REPEATS)
    anr = SelectField('Number of announcement repeating : ',
                      coerce=int,
                      choices=ANNOUNCEMENT_REPEATS)
    anrt = SelectField('Type of announcement and notification repeating :',
                       coerce=str,
                       choices=ANNOUNCEMENT_REPEAT_TYPE)
    naudio = SelectField('Select audio notification : ',
                         coerce=int,
                         choices=[(0, 'Disable audio notification')])
    bgcolor = StringField('Select a background color : ')
    prefix = BooleanField('Attach prefix office letter: ')
    always_show_ticket_number = BooleanField('Always show ticket number: ')
    wait_for_announcement = BooleanField('Wait for announcement to finish:')
    hide_ticket_index = BooleanField('Hide ticket index number:')
    submit = SubmitField('Apply')
    # Dynamically add one checkbox per supported text-to-speech language.
    for shortcode in get_tts_safely().keys():
        locals()[f'check{shortcode}'] = BooleanField()
    def __init__(self, *args, **kwargs):
        super(DisplayScreenForm, self).__init__(*args, **kwargs)
        # Rebind `choices` to a new list instead of using `+=`, which
        # mutates the default list shared through the class-level field
        # definition and makes options accumulate duplicates across form
        # instantiations (i.e. across requests).
        self.background.choices = self.background.choices + [
            (m.id, f'{m.id}. {m.name}') for m in Media.get_all_images()]
        self.naudio.choices = self.naudio.choices + [
            (m.id, f'{m.id}. {m.name}') for m in Media.get_all_audios()]
        # Localize the label of each generated TTS checkbox.
        for shortcode, bundle in get_tts_safely().items():
            self[f'check{shortcode}'].label = self.translate(bundle.get('language'))
class SlideAddForm(LocalizedForm):
    """Form to add a slide to the display-screen slide show.

    The ``background`` select field is extended at instantiation time with
    the image records currently in the database.
    """
    title = StringField('Enter a slide title :')
    hsize = SelectField('Select a title font size :',
                        coerce=str,
                        choices=FONT_SIZES)
    hcolor = StringField('Select a title font color :')
    hfont = StringField('Select a title font :')
    hbg = StringField('Select title background color :')
    subti = StringField('Enter a subtitle :')
    tsize = SelectField('Select subtitle font size :',
                        coerce=str,
                        choices=FONT_SIZES)
    tcolor = StringField('Select sub title color :')
    tfont = StringField('Select subtitle font :')
    tbg = StringField('Select subtitle background color :')
    background = SelectField('Select background : ',
                             coerce=int,
                             choices=[(0, 'Use color selection')])
    bgcolor = StringField('Select background color : ')
    submit = SubmitField('Add a slide')
    def __init__(self, *args, **kwargs):
        super(SlideAddForm, self).__init__(*args, **kwargs)
        # Rebind `choices` to a new list instead of using `+=`, which
        # mutates the default list shared through the class-level field
        # definition and makes options accumulate duplicates across form
        # instantiations (i.e. across requests).
        self.background.choices = self.background.choices + [
            (m.id, f'{m.id}. {m.name}') for m in Media.get_all_images()]
class SlideSettingsForm(LocalizedForm):
    """Global settings form for the display-screen slide show."""
    # Enable/disable the whole slide show.
    status = SelectField('Disable or enable slide-show :',
                         coerce=int,
                         choices=BOOLEAN_SELECT_1)
    # Transition effect used when switching slides.
    effect = SelectField('Select transition effect :',
                         coerce=str,
                         choices=SLIDE_EFFECTS)
    navigation = SelectField('Slide navigation bars :',
                             coerce=int,
                             choices=BOOLEAN_SELECT_1)
    # How long each slide stays on screen before rotating.
    rotation = SelectField('Slide images rotation :',
                           coerce=str,
                           choices=SLIDE_DURATIONS)
    submit = SubmitField('Apply')
class MultimediaForm(LocalizedForm):
    """Upload form for multimedia files (images, audio, video)."""
    # Flatten the grouped extension lists into a single list of allowed
    # extensions. Replaces `reduce` with a lambda whose parameter shadowed
    # the builtin `sum`.
    mf = FileField('Select multimedia file :',
                   validators=[FileAllowed(
                       [ext for group in SUPPORTED_MEDIA_FILES for ext in group],
                       'make sure you followed the given conditions !')])
    submit = SubmitField('Upload')
class VideoForm(LocalizedForm):
    """Form to assign and configure a video on the display screen.

    The ``video`` select field is populated at instantiation time with the
    video records currently in the database.
    """
    video = SelectField('Select uploaded video to use : ',
                        coerce=int,
                        choices=[(0, 'Do not assign video')])
    enable = SelectField('Enable or disable video : ',
                         coerce=int,
                         choices=BOOLEAN_SELECT_1)
    ar = SelectField('Auto replaying the video : ',
                     coerce=int,
                     choices=BOOLEAN_SELECT_1)
    controls = SelectField('Enable or disable video controls : ',
                           coerce=int,
                           choices=BOOLEAN_SELECT_1)
    mute = SelectField('Mute sound : ',
                       coerce=int,
                       choices=BOOLEAN_SELECT_1)
    submit = SubmitField('Set video')
    def __init__(self, defLang='en', *args, **kwargs):
        super(VideoForm, self).__init__(*args, **kwargs)
        videos = Media.get_all_videos()
        if videos:
            # Rebind `choices` to a new list instead of `.append`, which
            # mutates the default list shared through the class-level field
            # definition and makes options accumulate duplicates across
            # form instantiations (i.e. across requests).
            self.video.choices = self.video.choices + [
                (v.id, f'{v.id}. {v.name}') for v in videos]
        else:
            self.video.choices = [(0, self.translate('No videos were found'))]
class TicketForm(LocalizedForm):
    """Form to configure how tickets are issued and printed.

    ``inspected_printers_from_view`` supplies the printers detected by the
    caller; on Windows (``os.name == 'nt'``) or with ``lp_printing`` they
    are plain printer names, otherwise they are USB descriptor dicts.
    """
    kind = SelectField('Select type of ticket to use : ',
                       coerce=int,
                       choices=TICKET_TYPES)
    value = SelectField('Select a value of registering : ',
                        coerce=int,
                        choices=TICKET_REGISTERED_TYPES)
    langu = SelectField('Select language of printed ticket : ',
                        choices=list(SUPPORTED_LANGUAGES.items()),
                        coerce=str)
    printers = SelectField('Select a usb printer : ',
                           coerce=str,
                           choices=[('00', 'No printers were found')])
    # `scale.choices` is filled in __init__ from PRINTED_TICKET_SCALES.
    scale = SelectField('Select font scaling measurement for printed tickets :',
                        coerce=int)
    header = StringField('Enter a text header : ')
    sub = StringField('Enter a text sub-header : ')
    submit = SubmitField('Set ticket')
    def __init__(self, inspected_printers_from_view, lp_printing, *args, **kwargs):
        super(TicketForm, self).__init__(*args, **kwargs)
        # NOTE: here so it won't be localized.
        self.scale.choices = [(i, f'x{i}') for i in PRINTED_TICKET_SCALES]
        if inspected_printers_from_view:
            self.printers.choices = []
            for printer in inspected_printers_from_view:
                if os.name == 'nt' or lp_printing:
                    # Named printers: the name is both value and label.
                    self.printers.choices.append((f'{printer}', f'Printer Name: {printer}'))
                else:
                    # USB printers: build an identifier from the descriptor.
                    # NOTE(review): assumes `printer` is a dict with
                    # 'vendor'/'product' and optional 'in_ep'/'out_ep'
                    # endpoint keys — confirm against the caller.
                    vendor, product = printer.get('vendor'), printer.get('product')
                    in_ep, out_ep = printer.get('in_ep'), printer.get('out_ep')
                    identifier = f'{vendor}_{product}'
                    if in_ep and out_ep:
                        identifier += f'_{in_ep}_{out_ep}'
                    self.printers.choices.append((identifier, f'Printer ID: {vendor}_{product}'))
class AliasForm(LocalizedForm):
    """Form to customize the display aliases of core terms (office, task,
    ticket, name, number) shown across the user interface."""
    # Shared validation message for all alias fields.
    _message = 'Alias must be at least of 2 and at most 10 letters'
    office = StringField('Enter alias for office : ',
                         validators=[InputRequired(_message), Length(2, 10)])
    task = StringField('Enter alias for task : ',
                       validators=[InputRequired(_message), Length(2, 10)])
    ticket = StringField('Enter alias for ticket : ',
                         validators=[InputRequired(_message), Length(2, 10)])
    name = StringField('Enter alias for name : ',
                       validators=[InputRequired(_message), Length(2, 10)])
    number = StringField('Enter alias for number : ',
                         validators=[InputRequired(_message), Length(2, 10)])
class BackgroundTasksForms(LocalizedForm):
    """Form to enable and schedule periodic background tasks
    (caching TTS announcements, deleting tickets)."""
    # Shared labels for the repeat-interval and execution-time fields.
    _every_message = 'Time range to repeat the task within :'
    _time_message = 'Specific time to execute the task in :'
    cache_tts_enabled = BooleanField('Enable caching text-to-speech announcements :')
    cache_tts_every = SelectField(_every_message,
                                  coerce=str,
                                  choices=[(o, o) for o in EVERY_OPTIONS])
    delete_tickets_enabled = BooleanField('Enable deleting tickets :')
    delete_tickets_every = SelectField(_every_message,
                                       coerce=str,
                                       choices=[(o, o) for o in EVERY_OPTIONS])
    # Optional: only used when a specific execution time is wanted.
    delete_tickets_time = TimeField(_time_message,
                                    validators=[Optional()])
|
The hydrating and nourishing formulation of the Intensive Cream offers optimal help in protecting the skin of the hands and body from the signs of time and atmospheric agents, with an anti-aging effect. The hydrating, nourishing, protective and anti-blemish properties are ensured by a concentrated mix of hyaluronic acid, Karité (shea) butter and Argan oil, elastin and marine collagen, allantoin and derivatives of vitamins A, E and C. Application during SPA treatments gives the skin a pleasant and lasting sensation of softness, elasticity and freshness. The sophisticated combination of Pear and Jasmine makes the treatment elegant and calming.
|
import logging
log = logging.getLogger(__name__)
import itertools
import copy
import uuid
from collections import deque
import numpy as np
from enaml.core.api import Declarative
class QueueEmptyError(Exception):
    """Raised when the next trial is requested but the queue has no trials left."""
    pass
class QueueBufferEmptyError(Exception):
    """Raised when the queue buffer cannot supply the requested samples.

    NOTE(review): not raised anywhere in this module — presumably kept for
    API compatibility with callers; confirm before removing.
    """
    pass
def as_iterator(x):
    """Coerce *x* to an iterator.

    ``None`` is treated as 0. Iterables are returned as a plain iterator;
    non-iterable scalars are repeated forever via ``itertools.cycle``.
    """
    value = 0 if x is None else x
    try:
        return iter(value)
    except TypeError:
        # Scalar: yield the same value indefinitely.
        return itertools.cycle([value])
class AbstractSignalQueue:
    """Base class for queues of signal sources to be played out.

    The sampling rate and queue start time are supplied after construction
    via `set_fs` and `set_t0`. Subclasses define the playout ordering policy
    by implementing `next_key`.
    """
    def __init__(self):
        self._delay_samples = 0
        self._data = {}      # key -> trial record (source, trials, delays, ...)
        self._ordering = []  # order of items added to queue
        self._source = None  # source currently being played out
        self._samples = 0    # total samples generated so far
        self._notifiers = []
    def set_fs(self, fs):
        # Sampling rate at which samples will be generated.
        self._fs = fs
    def set_t0(self, t0):
        # Sample at which queue was started relative to experiment acquisition
        # start.
        self._t0 = t0
    def _add_source(self, source, trials, delays, duration, metadata):
        """Register a source under a fresh UUID key and return the key."""
        key = uuid.uuid4()
        if duration is None:
            # Assume an array-like source; generators must pass `duration`.
            duration = source.shape[-1]/self._fs
        data = {
            'source': source,
            'trials': trials,
            'delays': as_iterator(delays),
            'duration': duration,
            'metadata': metadata,
        }
        self._data[key] = data
        return key
    def get_max_duration(self):
        """Return the longest source duration, in seconds."""
        def get_duration(source):
            try:
                return source.get_duration()
            except AttributeError:
                return source.shape[-1]/self._fs
        return max(get_duration(d['source']) for d in self._data.values())
    def connect(self, callback):
        """Register a callback invoked with trial info on each new trial."""
        self._notifiers.append(callback)
    def _notify(self, trial_info):
        for notifier in self._notifiers:
            notifier(trial_info)
    def insert(self, source, trials, delays=None, duration=None, metadata=None):
        """Add a source at the *front* of the queue ordering."""
        k = self._add_source(source, trials, delays, duration, metadata)
        # Bugfix: `list.insert` requires an index; the previous call
        # `self._ordering.insert(k)` raised TypeError. Index 0 places the
        # new key at the head of the queue, as `insert` implies.
        self._ordering.insert(0, k)
        return k
    def append(self, source, trials, delays=None, duration=None, metadata=None):
        """Add a source at the end of the queue ordering."""
        k = self._add_source(source, trials, delays, duration, metadata)
        self._ordering.append(k)
        return k
    def count_factories(self):
        """Number of distinct sources currently in the queue."""
        return len(self._ordering)
    def count_trials(self):
        """Total number of trials remaining across all sources."""
        return sum(v['trials'] for v in self._data.values())
    def is_empty(self):
        return self.count_trials() == 0
    def next_key(self):
        # Ordering policy; implemented by subclasses.
        raise NotImplementedError
    def pop_next(self, decrement=True):
        key = self.next_key()
        return key, self.pop_key(key, decrement=decrement)
    def pop_key(self, key, decrement=True):
        '''
        Removes one trial of specified key from queue and returns its data
        record (a dict with 'source', 'trials', 'delays', 'duration' and
        'metadata' entries).
        '''
        data = self._data[key]
        if decrement:
            self.decrement_key(key)
        return data
    def remove_key(self, key):
        '''
        Removes key from queue entirely, regardless of number of trials
        '''
        self._data.pop(key)
        self._ordering.remove(key)
    def decrement_key(self, key, n=1):
        """Consume `n` trials of `key`, removing it once exhausted."""
        if key not in self._ordering:
            raise KeyError('{} not in queue'.format(key))
        self._data[key]['trials'] -= n
        if self._data[key]['trials'] <= 0:
            self.remove_key(key)
    def _get_samples_waveform(self, samples):
        # Array-backed source: slice off the requested samples; `complete`
        # is True once the remaining waveform fits in the request.
        if samples > len(self._source):
            waveform = self._source
            complete = True
        else:
            waveform = self._source[:samples]
            self._source = self._source[samples:]
            complete = False
        return waveform, complete
    def _get_samples_generator(self, samples):
        # Generator-backed source: pull up to the remaining sample count.
        samples = min(self._source.get_remaining_samples(), samples)
        waveform = self._source.next(samples)
        complete = self._source.is_complete()
        return waveform, complete
    def next_trial(self, decrement=True):
        '''
        Setup the next trial
        This has immediate effect. If you call this (from external code), the
        current trial will not finish.
        '''
        key, data = self.pop_next(decrement=decrement)
        self._source = data['source']
        try:
            # Generator-style sources expose reset(); fall back to treating
            # the source as a plain waveform array otherwise.
            self._source.reset()
            self._get_samples = self._get_samples_generator
        except AttributeError:
            self._source = data['source']
            self._get_samples = self._get_samples_waveform
        delay = next(data['delays'])
        self._delay_samples = int(delay*self._fs)
        if self._delay_samples < 0:
            raise ValueError('Invalid option for delay samples')
        queue_t0 = self._samples/self._fs
        uploaded = {
            't0': self._t0 + queue_t0,     # Time re. acq. start
            'queue_t0': queue_t0,          # Time re. queue start
            'duration': data['duration'],  # Duration of token
            'key': key,                    # Unique ID
            'metadata': data['metadata'],  # Metadata re. token
        }
        self._notify(uploaded)
    def pop_buffer(self, samples, decrement=True):
        '''
        Return the requested number of samples
        Removes stack of waveforms in order determind by `pop`, but only returns
        requested number of samples. If a partial fragment of a waveform is
        returned, the remaining part will be returned on subsequent calls to
        this function.
        '''
        # TODO: This is a bit complicated and I'm not happy with the structure.
        # It should be simplified quite a bit. Cleanup?
        waveforms = []
        queue_empty = False
        # Load samples from current source
        if samples > 0 and self._source is not None:
            # That this is a dynamic function that is set when the next
            # source is loaded (see below in this method).
            waveform, complete = self._get_samples(samples)
            samples -= len(waveform)
            self._samples += len(waveform)
            waveforms.append(waveform)
            if complete:
                self._source = None
        # Insert intertrial interval delay
        if samples > 0 and self._delay_samples > 0:
            n_padding = min(self._delay_samples, samples)
            waveform = np.zeros(n_padding)
            samples -= n_padding
            self._samples += len(waveform)
            self._delay_samples -= n_padding
            waveforms.append(waveform)
        # Get next source
        if (self._source is None) and (self._delay_samples == 0):
            try:
                self.next_trial(decrement)
            except QueueEmptyError:
                # Pad out the rest of the request with silence.
                queue_empty = True
                waveform = np.zeros(samples)
                waveforms.append(waveform)
                log.info('Queue is now empty')
        # Recurse to fill any remainder from the newly-loaded trial.
        if (samples > 0) and not queue_empty:
            waveform, queue_empty = self.pop_buffer(samples, decrement)
            waveforms.append(waveform)
            samples -= len(waveform)
        waveform = np.concatenate(waveforms, axis=-1)
        return waveform, queue_empty
class FIFOSignalQueue(AbstractSignalQueue):
    '''
    Return waveforms based on the order they were added to the queue
    '''
    def next_key(self):
        # First-in, first-out: always serve the oldest remaining entry.
        if not self._ordering:
            raise QueueEmptyError
        return self._ordering[0]
class InterleavedFIFOSignalQueue(AbstractSignalQueue):
    '''
    Return waveforms based on the order they were added to the queue; however,
    trials are interleaved.
    '''
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._i = -1             # index of the last key served
        self._complete = False   # True once every key has 0 trials left
    def next_key(self):
        if self._complete:
            raise QueueEmptyError
        # Round-robin across all keys, including already-exhausted ones,
        # until every key is exhausted (hence max(..., 0) in count_trials).
        self._i = (self._i + 1) % len(self._ordering)
        return self._ordering[self._i]
    def decrement_key(self, key, n=1):
        # Unlike the base class, keys are never removed here; the queue is
        # only "complete" once *all* keys have run out of trials, so the
        # interleaving pattern stays intact.
        if key not in self._ordering:
            raise KeyError('{} not in queue'.format(key))
        self._data[key]['trials'] -= n
        # NOTE: `key` is deliberately reused as the loop variable below.
        for key, data in self._data.items():
            if data['trials'] > 0:
                return
        self._complete = True
    def count_trials(self):
        # Trials can go negative for exhausted keys (see next_key), so
        # clamp each count at zero.
        return sum(max(v['trials'], 0) for v in self._data.values())
class RandomSignalQueue(AbstractSignalQueue):
    '''
    Return waveforms in random order
    '''
    def next_key(self):
        # Draw a uniformly random entry on every call (sampling with
        # replacement, using the global numpy random state).
        if not self._ordering:
            raise QueueEmptyError
        index = np.random.randint(0, len(self._ordering))
        return self._ordering[index]
class BlockedRandomSignalQueue(InterleavedFIFOSignalQueue):
    """Serve every key once per block, in a freshly shuffled order each block.

    Uses a private, seeded RandomState so the block order is reproducible.
    """
    def __init__(self, seed=0, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._i = []  # indices remaining in the current block (overrides parent's int)
        self._rng = np.random.RandomState(seed)
    def next_key(self):
        if self._complete:
            raise QueueEmptyError
        if not self._i:
            # The blocked order is empty. Create a new set of random indices.
            i = np.arange(len(self._ordering))
            self._rng.shuffle(i)
            self._i = i.tolist()
        i = self._i.pop()
        return self._ordering[i]
class GroupedFIFOSignalQueue(FIFOSignalQueue):
    # FIFO queue that serves stimuli in groups of `group_size`: the first
    # `group_size` keys in the ordering are cycled through until every one
    # of them has exhausted its trials, then the whole group is removed at
    # once, exposing the next group.

    def __init__(self, group_size, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Index of the most recently served key within the current group.
        self._i = -1
        self._group_size = group_size

    def next_key(self):
        if len(self._ordering) == 0:
            raise QueueEmptyError
        # Cycle within the current group (the first group_size keys only).
        self._i = (self._i + 1) % self._group_size
        return self._ordering[self._i]

    def decrement_key(self, key, n=1):
        if key not in self._ordering:
            raise KeyError('{} not in queue'.format(key))
        self._data[key]['trials'] -= n

        # Check to see if the group is complete. Return from method if not
        # complete.
        for key in self._ordering[:self._group_size]:
            if self._data[key]['trials'] > 0:
                return

        # If complete, remove the keys
        # NOTE(review): self._i is not reset here, so the next group starts
        # mid-cycle rather than at its first key -- confirm this is intended.
        for key in self._ordering[:self._group_size]:
            self.remove_key(key)
# Registry mapping human-readable queue names to their implementations
# (e.g. for populating a selector in a GUI).
# NOTE(review): BlockedRandomSignalQueue and GroupedFIFOSignalQueue are not
# registered here -- confirm whether that is intentional.
queues = {
    'first-in, first-out': FIFOSignalQueue,
    'interleaved first-in, first-out': InterleavedFIFOSignalQueue,
    'random': RandomSignalQueue,
}
|
Today we’re going to be taking a look at the Kinect Bundle, available for digital download only for the Xbox One platform. The bundle includes Squid Hero and Boom Ball from Virtual Air Guitar Company, a studio known for delivering motion controlled experiences. The usual price of the bundle is £11.99, saving you £3.99 when purchasing the titles individually, which are priced at £7.99 each.
Currently the bundle is on sale from December 29th, 2015 through till January 4th, 2016 and offers a discount of up to 50%, depending on your membership. Right now you can pickup both titles during the sale for a lesser price of just one of the games when not in sale. The discounted price is £7.19 for Silver members and £6.00 for GOLD members.
You hit a virtual ball using your on-screen hands, which will cause it to bounce and ricochet around a semi-transparent room, breaking cubes in its path. You then have to volley the ball on its return or it’ll disappear off screen and you’ll lose the ball. Once all balls are lost you’ll have to retry the level. Upon completing a level you can progress to the next.
Fans of old school block breaking games will be happy to know that other classic mechanics make an appearance, such as multi-ball and new ones like portals.
I loved block breaking games as a child and that love hasn’t diminished with time. Unfortunately something has happened on the way to adulthood, because I was outplayed by my 8 year old son!
It’s a great little game that will be forever remembered as the first game that my son outdid me on, for a time. I fully intend to beat whatever scores he manages to achieve, like a father should.
You control Squids Hero’s tentacles and position on screen with your hands and are able to interact with ice by hovering a tentacle over it. Small movements allow you to manipulate the ice around the screen, whereas sharp movements will throw it in the direction of which your hand was traveling.
As you progress through the game new obstacles and challenges arise, such as mines and boss fights. The level checkpoints are forgiving and well placed which makes the game child friendly, offering frustration free retries.
Coins gathered from playing through the game can be used to purchase different hats to customise your squid, which adds another dimension for children to explore.
There’s also a 2 player co-op mode so you can bring a friend in on the fun.
The graphics are colorful and beautifully rendered making Squid Hero’s visuals very appealing, especially to young children, but that’s not to say that it isn’t for adults too.
I had a lot of fun playing it on my own and then watched my 5 year old daughter try to get to grips with the game. She grasped the general concept and did pretty well but completely forgot that she had a left hand. She tried to play it with just her right hand, which led to many mistakes, but she simply laughed because she was having such a good time on it.
Review code authorized and provided by Virtual Air Guitar Company.
Boom Ball is a lighthearted, full-body Kinect game that mixes tennis, Breakout, and gratuitous explosions. Throw and bounce balls to smash all the cubes and blow up all the bombs in 55 different levels. Guest starring: Explosions! Portals! Multiballs! More explosions! Ducks! Join the fun and go ballistic with Boom Ball for Kinect!
Squid Hero saves the planet from a new ice age, in this cheery and colorful adventure for all ages, using the power of Kinect to smash thousands of encroaching chunks of ice.
I’d definitely recommend this bundle to any Xbox One owner with Kinect, especially parents with young children. Just prepare yourself for the possibility that your children may be able to out play you on these titles.
Dave is somewhat of a reclusive creature who predominantly resides in what has come to be known as 'The Man Cave'. Self-proclaimed nemesis to the Sun and its warm embrace, he relies heavily on background radiation and tea for sustenance to fuel his insatiable lust for gaming.
The time has come to dig up and dust off our Kinect sensor bar so that we rock out with air guitars to become legend, as an Air Guitar Warrior! Developed by Virtual Air Guitar Company who has always and continues to support the Kinect – I for one could not wait to see what their latest title had in store.
We talk to Founder and Lead Designer of Virtual Air Guitar Company, Aki Kanerva, about the studios history, their future and upcoming game – Beatsplosion.
|
"""
<This library provides a Python interface for the Telegram Bot API>
Copyright (C) <2015> <Jacopo De Luca>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from Object.AUFile import AUFile
from Object.PhotoSize import PhotoSize
class Video(AUFile):
    """
    This object represents a video file.
    """

    def __init__(self, file_id, width, height, duration, thumb=None, mime_type="", file_size=0):
        """
        :param file_id: Unique identifier for this file
        :type file_id: str
        :param width: Video width as defined by sender
        :type width: int
        :param height: Video height as defined by sender
        :type height: int
        :param duration: Duration of the video in seconds as defined by sender
        :type duration: int
        :param thumb: Optional. Video thumbnail
        :type thumb: PhotoSize
        :param mime_type: Optional. Mime type of a file as defined by sender
        :type mime_type: str
        :param file_size: Optional. File size
        :type file_size: int
        """
        super().__init__(file_id, file_size, mime_type=mime_type)
        self.width = width
        self.height = height
        self.duration = duration
        self.thumb = thumb

    @staticmethod
    def build_from_json(jvideo):
        """
        Build a Video instance from a JSON-parsed Telegram API object.

        :param jvideo: A dictionary that contains JSON-parsed object
        :type jvideo: dict
        :rtype: Video
        """
        # 'thumb' is optional; build the nested PhotoSize only when present.
        # (Membership test on the dict itself -- no need for .keys().)
        thumb = None
        if 'thumb' in jvideo:
            thumb = PhotoSize.build_from_json(jvideo['thumb'])
        # Optional scalar fields fall back to the constructor defaults.
        mime_type = jvideo.get('mime_type', "")
        file_size = int(jvideo.get('file_size', 0))
        return Video(jvideo['file_id'], int(jvideo['width']), int(jvideo['height']),
                     int(jvideo['duration']), thumb, mime_type, file_size)
|
We've been providing gutter cleaning, gutter screening and gutter repairs to the Peekskill NY area for over 15 years. With an emphasis on customer convenience, it's our goal to gain and keep your trust through reliable and excellent gutter maintenance service.
Gutters clogged? Or in need of repair? Well, you’ve come to the right place. Westchester Gutter Cleaning is the company that Peekskill, New York residents have come to trust. Why risk injury? It’s just not worth it. We will handle your regular gutter cleanings and maintenance efficiently and thoroughly, at an affordable price. We have been providing gutter maintenance to the Peekskill area for over 15 years. We are a specialized service. All we do is gutter cleaning, gutter screening and gutter maintenance.
Give us a call for your no obligation phone estimate. We’ll set you up on a regularly occurring gutter cleaning and maintenance schedule so that you can avoid the damaging effects of improperly maintained or neglected gutters. Based on 4 to 6 questions we’ll give you an estimate over the phone to within a $25 range to clean the gutters on your Peekskill, NY home or business. We answer and return customer calls promptly. Our service technicians begin your service with a call on the morning of your service. We then neatly and carefully clear all roof and gutter areas by hand. The downspouts are then cleared and flushed of any clogs. Any mess made is completely cleaned up. For your convenience we then leave a postage paid envelope in your front door. Our goal is to provide you with an efficient and easy service.
Westchester Gutter Cleaning guarantees all work and is fully licensed, insured and bonded to provide you with the very best. Thank you for visiting our website. We look forward to hearing from you.
|
import re
from livestreamer.plugin import Plugin
from livestreamer.plugin.api import http, validate
from livestreamer.stream import HTTPStream, RTMPStream
from livestreamer.plugin.api.support_plugin import common_jwplayer as jwplayer
BASE_VOD_URL = "https://www.connectcast.tv"
SWF_URL = "https://www.connectcast.tv/jwplayer/jwplayer.flash.swf"
_url_re = re.compile("http(s)?://(\w+\.)?connectcast.tv/")
# Schema for the SMIL playlist: extracts the RTMP base URL from the
# <head>/<meta base="..."> element and the list of stream source paths
# from the <body>/<video src="..."> elements.
_smil_schema = validate.Schema(
    validate.union({
        "base": validate.all(
            validate.xml_find("head/meta"),
            validate.get("base"),
            validate.url(scheme="rtmp")
        ),
        "videos": validate.all(
            validate.xml_findall("body/video"),
            [validate.get("src")]
        )
    })
)
class ConnectCast(Plugin):
    """Livestreamer plugin for connectcast.tv live (RTMP) and VOD streams."""

    @classmethod
    def can_handle_url(cls, url):
        # First argument renamed self -> cls: this is a classmethod.
        return _url_re.match(url)

    def _get_smil_streams(self, url):
        """Yield ("live", RTMPStream) tuples parsed from a SMIL playlist."""
        # NOTE(review): verify=False disables TLS certificate validation --
        # presumably a workaround for a bad certificate; confirm still needed.
        res = http.get(url, verify=False)
        smil = http.xml(res, schema=_smil_schema)
        for video in smil["videos"]:
            stream = RTMPStream(self.session, {
                "rtmp": smil["base"],
                "playpath": video,
                "swfVfy": SWF_URL,
                "pageUrl": self.url,
                "live": True
            })
            yield "live", stream

    def _get_streams(self):
        """Resolve the page's JWPlayer playlist into named streams."""
        res = http.get(self.url)
        playlist = jwplayer.parse_playlist(res)
        if not playlist:
            return

        for item in playlist:
            for source in item["sources"]:
                filename = source["file"]
                if filename.endswith(".smil"):
                    # SMIL playlist -> one or more live RTMP streams.
                    # TODO: Replace with "yield from" when dropping Python 2.
                    for stream in self._get_smil_streams(filename):
                        yield stream
                elif filename.startswith("/"):
                    # Relative path -> a VOD file served over HTTP.
                    name = source.get("label", "vod")
                    url = BASE_VOD_URL + filename
                    yield name, HTTPStream(self.session, url)

            # Only the first playlist item is considered.
            break
|
Would you like to earn above average working part time?
Alpha Ltd has been in the business for 13 years, and now operates in 55 countries worldwide.
We are looking for individuals to earn an average of Ksh 15,000 weekly.
|
__description__ = \
"""
"""
__author__ = "Michael J. Harms"
__date__ = "2014-06-18"
import multiprocessing, time, random, copy
from rpyBot.messages import RobotMessage
class DeviceManager:
    """
    Class for asynchronous communication and integration between all of the
    devices attached to the robot. It runs on the main thread and spawns a
    separate process (multiprocessing.Process) for each device attached to
    the robot.
    """

    def __init__(self, device_list=None, poll_interval=0.1, verbosity=0):
        """
        Initialize.

        device_list: list of RobotDevice instances (default: empty list)
        poll_interval: how often to poll messaging queues (in seconds)
        verbosity: whether or not to spew messages to standard out
        """
        # Fresh list per instance: a mutable default argument ([]) would be
        # shared between every DeviceManager instance.
        self.device_list = device_list if device_list is not None else []
        self.poll_interval = poll_interval
        self.verbosity = verbosity

        self.queue = []
        self.loaded_devices = []
        self.loaded_devices_dict = {}
        self.device_processes = []

        # Random id identifies this manager when connecting to devices.
        self.manager_id = int(random.random()*1e9)

        self._run_loop = False

    def start(self):
        """
        Start the main loop running.
        """
        self._run_loop = True
        self._run()

    def stop(self):
        """
        Stop the main loop from running. Does not automatically unload devices
        or stop them.
        """
        self._run_loop = False

    def shutdown(self):
        """
        Shutdown all loaded devices (will propagate all the way down to cleanup
        of GPIO pins).
        """
        # Iterate over a snapshot: unload_device mutates self.loaded_devices,
        # and removing entries from the live list while iterating it would
        # skip every other device.
        for d in list(self.loaded_devices):
            self.unload_device(d.name)

    def load_device(self, d):
        """
        Load a device into the DeviceManager and start its process.
        """
        # NOTE(review): `exceptions` is referenced below but never imported in
        # this module, so a raised BotConnectionError would surface as a
        # NameError. Confirm the intended import (presumably from rpyBot).
        try:
            d.connect(self.manager_id)
            if d.name in self.loaded_devices_dict:
                message = "device {:s} already connected!".format(d.name)
                self._queue_message(message, destination_device="warn")
            else:
                self.loaded_devices.append(d)
                self.loaded_devices_dict[d.name] = len(self.loaded_devices) - 1
                self.device_processes.append(
                    multiprocessing.Process(target=self.loaded_devices[-1].start))
                self.device_processes[-1].start()
        except exceptions.BotConnectionError as err:
            self._queue_message(err, destination_device="warn")

    def unload_device(self, device_name):
        """
        Unload a device from the control of the DeviceManager.
        """
        try:
            index = self.loaded_devices_dict[device_name]

            # Stop the device, disconnect it from this device manager
            # instance, and then kill its process.
            self.loaded_devices[index].stop(self.manager_id)
            self.loaded_devices[index].disconnect()
            self.device_processes[index].terminate()

            # Re-index the name -> position mapping. Only devices stored
            # *after* the removed one shift down by one; the previous code
            # decremented every index, corrupting the mapping for devices
            # stored before the removed entry.
            for k, v in self.loaded_devices_dict.items():
                if v > index:
                    self.loaded_devices_dict[k] = v - 1

            self.loaded_devices.pop(index)
            self.loaded_devices_dict.pop(device_name)
            self.device_processes.pop(index)

        except KeyError:
            message = "device {} is not connected".format(device_name)
            self._queue_message(message, destination_device="warn")

    def _run(self):
        """
        Main polling loop: load the initial devices, then route queued
        messages to devices and harvest device output, sleeping
        poll_interval seconds between passes.
        """
        for d in self.device_list:
            self.load_device(d)

        self._queue_message("starting system")
        while self._run_loop:

            # Go through the queue and pipe messages to appropriate devices
            if len(self.queue) > 0:

                # Get the next message
                message = self._get_message()

                if message is None:
                    # A mangled message was discarded (warning already
                    # queued); nothing to dispatch this pass.
                    pass
                elif message.check_delay():
                    # Past its delay: send it to a device.
                    self._message_to_device(message)
                else:
                    # Not ready yet: stick it back into the queue.
                    self._queue_message(message)

            # Rotate through the loaded devices and see if any of them have
            # output ready. If so, put the output into the queue for the next
            # pass.
            for d in self.loaded_devices:
                msgs = d.get()
                for m in msgs:
                    self._queue_message(m)

            # Wait poll_interval seconds before checking queues again
            time.sleep(self.poll_interval)

    def _message_to_device(self, message):
        """
        Send a RobotMessage instance to the appropriate device.
        """
        # The virtual "warn" device forwards warnings to the controller.
        if message.destination_device == "warn":
            self.loaded_devices[self.loaded_devices_dict["controller"]].put(message)
            return

        try:
            self.loaded_devices[self.loaded_devices_dict[message.destination_device]].put(message)
        except KeyError:
            err = "device \"{}\" not loaded.".format(message.destination_device)
            self._queue_message(err, destination_device="warn")

    def _queue_message(self,
                       message="",
                       destination="robot",
                       destination_device="",
                       delay_time=0.0,
                       msg_string=None):
        """
        Append a RobotMessage instance to the message queue. If message is
        already a RobotMessage, pass it through without modification. If it
        is a string, construct the RobotMessage, setting source to "manager".
        """
        if not isinstance(message, RobotMessage):
            m = RobotMessage(destination=destination,
                             destination_device=destination_device,
                             source="manager",
                             source_device="",
                             delay_time=delay_time,
                             message=message)

            # If msg_string is set to something besides None, parse that
            # string and load it into the RobotMessage instance.
            if msg_string is not None:
                m.from_string(msg_string)

            message = m

        if self.verbosity > 0:
            message.pretty_print()

        self.queue.append(message)

    def _get_message(self):
        """
        Return the first message in the queue, or None if the queue is empty
        or the message could not be parsed.
        """
        if len(self.queue) == 0:
            return None

        message = self.queue.pop(0)

        # If this is a raw message string, convert it to a RobotMessage
        # instance
        if isinstance(message, str):
            try:
                m = RobotMessage()
                m.from_string(message)
                message = m
            # NOTE(review): `exceptions` is never imported in this module;
            # see load_device.
            except exceptions.BotMessageError as err:
                message = "Mangled message ({})".format(err.args[0])
                self._queue_message(message, destination_device="warn")
                return None

        if self.verbosity > 0:
            message.pretty_print()

        return message
|
Polygon Engineering Plastics Ltd (PEP) is a supplier of engineering plastics to injection moulding and plastic forming manufacturers. The majority of their annual sales value comes from their supply of raw materials to the many producers of the “Euro Pallet”, which is a standard size and specification pallet used universally across Europe and increasingly throughout the world for the transportation of goods by land, sea and air. The Euro Pallet is made by the injection moulding process and PEP supply the raw material in granular form to the pallet manufacturers.
PEP has just developed a new material which can be moulded in exactly the same manner as a conventional engineering plastic but has some useful properties. Using what PEP believe is a unique process, the density of the new material is 60% of the traditional plastic but when moulded into the final product using the same mould, and manufacturing techniques, it has improved mechanical properties. Hence the mass of the final product is 60% of the traditional pallet but the new pallet has increased strength. The exciting opportunity for PEP is that the cost of this ‘new’ material is only marginally more than the traditional engineering plastic used by the pallet manufacturers.
The Managing Director of PEP has to ensure that they maximise the opportunities offered by this new product as the company is experiencing difficult times. He has asked you for comment and advice regarding the selling price of the new material.
Through their customers, the pallet manufacturers, PEP Ltd supply over 90% of the raw material used in the euro pallet market. The pallet manufacturers are under pressure from their customers, the transport industry, to reduce the weight of the pallets, whilst maintaining the overall dimensions and load carrying capacity. This is driven by legislation with the overall aim of contributing to increasing fuel efficiency and reducing carbon emissions. Naturally, any increase in costs will be fiercely opposed by both the pallet manufacturers and the transport companies.
Polygon has a good relationship with the pallet manufacturers and it would be easy to convert them to the new material particularly as it would allow them to reduce the weight of their product from 10kg to 6kg.
So a significant opportunity for PEP. The accounts department has calculated the standard cost of producing the new material and has applied the Company’s usual method to set the selling price. The Director of Finance had persuaded the accounts department to ‘add a little extra’ to this figure as the additional margin would be useful. This made the selling price £1100 / tonne.
1. This is obviously an exciting new product for PEP Ltd. and managed correctly should help their competitive advantage. What is first thing you should do to ensure that the company secures and maximises the commercial success of the new material?
2. What type of pricing method was being used at PEP Ltd. and what leads you to your answer?
3. Suggest a better pricing method based on the data given in the case and justify your argument.
4. The accounts department has set a price of £1100 per tonne, what is your opinion regarding this price and what price would you offer this new material to PEP’s customers, the pallet manufacturers?
Discuss fully your considerations in deciding upon this particular price.
|
"""
This script creates a preprocessed dataset of image pairs
related by the defined transformation. The content of the
images is generated with a uniform distribution. This is
to show that the gating models do not depend on the
content but only on the relations.
"""
import itertools
import numpy
from pylearn2.datasets import preprocessing
from pylearn2.utils import serial
from pylearn2.datasets import dense_design_matrix
from pylearn2.utils.rng import make_np_rng
from pylearn2.datasets.vector_spaces_dataset import VectorSpacesDataset
from pylearn2.space import VectorSpace, CompositeSpace, Conv2DSpace
def generate(opc):
    """
    Summary (Generates a dataset with the chosen transformation).

    Builds `total` image pairs of uniform random noise related by either a
    random pixel shift or a random rotation, jointly GCN-normalizes both
    views, and serializes the resulting VectorSpacesDataset to
    'train_preprocessed.pkl' in this script's directory.

    Parameters
    ----------
    opc: string
        Only two options, shifts or rotations.
    """
    dim = 19  # outer square
    # A bigger image is used to avoid empty pixels in the
    # borders.
    reg = 13  # inner square
    total = 20000  # Number of training examples
    # First view, second view, and the per-pair transformation label.
    im1 = numpy.zeros((total, reg, reg, 1), dtype='float32')
    im2 = numpy.zeros((total, reg, reg, 1), dtype='float32')
    Y = numpy.zeros((total, 1), dtype='uint8')
    rng = make_np_rng(9001, [1, 2, 3], which_method="uniform")
    transformation = opc

    if transformation == 'shifts':
        # Shifts
        # only shifts between [-3, +3] pixels
        shifts = list(itertools.product(range(-3, 4), range(-3, 4)))
        t = 0
        while t < total:
            # Noise image in [1, 255]; the label is the index of the shift.
            x = rng.uniform(0, 1, (dim, dim))
            x = numpy.ceil(x * 255)
            im_x = x[3:16, 3:16][:, :, None]
            ind = rng.randint(0, len(shifts))
            Y[t] = ind
            txy = shifts[ind]
            tx, ty = txy
            # Second view: the same noise cropped at the shifted location,
            # so no border pixels are ever empty.
            im_y = x[(3 + tx):(16 + tx), (3 + ty):(16 + ty)][:, :, None]
            im1[t, :] = im_x
            im2[t, :] = im_y
            t += 1
    else:
        assert transformation == 'rotations'
        # Rotations
        # NOTE(review): `import Image` is the legacy PIL import; modern
        # Pillow installs expose it as `from PIL import Image` -- confirm
        # the intended environment.
        import Image
        # import cv2
        # 90 evenly spaced rotation angles over [0, 359] degrees.
        angs = numpy.linspace(0, 359, 90)
        t = 0
        while t < total:
            x = rng.uniform(0, 1, (dim, dim))
            x = numpy.ceil(x * 255)
            im_x = x[3:16, 3:16][:, :, None]
            ind = rng.randint(0, len(angs))
            Y[t] = ind
            ang = angs[ind]
            # Rotate the full 19x19 image, then crop the inner 13x13 below
            # so the rotated view has no empty corners.
            y = numpy.asarray(Image.fromarray(x).rotate(ang))
            # scale = 1
            # M1 = cv2.getRotationMatrix2D((dim/2, dim/2), ang, scale)
            # y = cv2.warpAffine(x, M1, (dim, dim))
            im_y = y[3:16, 3:16][:, :, None]
            im1[t, :] = im_x
            im2[t, :] = im_y
            t += 1

    view_converter = dense_design_matrix.DefaultViewConverter((reg, reg, 1))
    design_X = view_converter.topo_view_to_design_mat(im1)
    design_Y = view_converter.topo_view_to_design_mat(im2)

    # Normalize data:
    # Both views are stacked so a single GCN pass normalizes them jointly.
    pipeline = preprocessing.Pipeline()
    gcn = preprocessing.GlobalContrastNormalization(
        sqrt_bias=10., use_std=True)
    pipeline.items.append(gcn)
    XY = numpy.concatenate((design_X, design_Y), 0)
    XY_ImP = dense_design_matrix.DenseDesignMatrix(X=XY)
    XY_ImP.apply_preprocessor(preprocessor=pipeline, can_fit=True)

    # Split the jointly normalized matrix back into the two views.
    X1 = XY_ImP.X[0:design_X.shape[0], :]
    X2 = XY_ImP.X[design_X.shape[0]:, :]

    # As a Conv2DSpace
    topo_X1 = view_converter.design_mat_to_topo_view(X1)
    topo_X2 = view_converter.design_mat_to_topo_view(X2)
    axes = ('b', 0, 1, 'c')
    data_specs = (CompositeSpace(
        [Conv2DSpace((reg, reg), num_channels=1, axes=axes),
         Conv2DSpace((reg, reg), num_channels=1, axes=axes),
         VectorSpace(1)]),
        ('featuresX', 'featuresY', 'targets'))
    train = VectorSpacesDataset((topo_X1, topo_X2, Y), data_specs=data_specs)
    # As a VectorSpace
    # data_specs = (CompositeSpace(
    #     [VectorSpace(reg * reg),
    #      VectorSpace(reg * reg),
    #      VectorSpace(1)]),
    #     ('featuresX', 'featuresY', 'targets'))
    # train = VectorSpacesDataset(data=(X1, X2, Y), data_specs=data_specs)

    # Save next to this script so the invocation directory doesn't matter.
    import os
    save_path = os.path.dirname(os.path.realpath(__file__))
    serial.save(os.path.join(save_path, 'train_preprocessed.pkl'), train)
if __name__ == '__main__':
    # Define the desired transformation between views
    # (edit the argument to switch datasets).
    generate('shifts') # shifts or rotations
|
Heavy cash holdings in Asia are limiting portfolio returns and condemning retirees to an uncertain future, according to a survey. Indonesians are likely to suffer most, while Hongkongers face major asset shortfalls.
Asian investors will never achieve their financial goals using their current investment portfolios, according to a new report.
|
#!/usr/bin/env python
#
# PyGab - Python Jabber Framework
# Copyright (c) 2008, Patrick Kennedy
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import datetime
import random
import re
import shlex
import time
from common import argparse, const, mounts, utils
from common.ini import iMan
class Init(mounts.PluginInitializers):
    # Plugin initializer hook: loads this module's config plus the shared
    # roster INI file on startup, and unloads/deregisters itself on exit.
    name = __file__

    def initialize(self):
        iMan.load([utils.get_module(), 'roster'])

    def __exit__(self, *args):
        iMan.unload('roster')
        # Deregister this initializer so it does not run again.
        mounts.PluginInitializers.remove(self.__class__)
class Search(mounts.CommandMount):
    # Chat command "search": look up roster names by substring, prefix
    # (trailing '*') or suffix (leading '*') match.
    name = 'search'
    rank = const.RANK_USER
    file = __file__

    __doc__ = """Search for users containing a passed arg.
	Dan - Searches for all names containing 'dan'
	*Dan - Searches for all names ending with 'dan'
	Dan* - Searches for all names beginning with 'dan'"""

    def thread(self, user, sub, whisper):
        #if not self.parent.was_whispered and not utils.isadmin(user):
            #raise const.CommandHelp, 'Whisper Only Command'

        # Normalize the search term; roster keys are compared lowercased.
        sub = sub.lower().encode('utf-8', 'replace')
        base = str
        if len(sub) < 3:
            # NOTE: Python 2 raise syntax (this module predates Python 3).
            raise const.CommandHelp, 'Minimum 3 Letters'

        # Leading '*' = suffix match, trailing '*' = prefix match,
        # otherwise match the term anywhere in the name. The chosen unbound
        # str method is applied to each roster name below.
        if sub.startswith('*'):
            sub = sub[1:]
            func = base.endswith
        elif sub.endswith('*'):
            sub = sub[:-1]
            func = base.startswith
        else:
            func = base.count

        names = [name for name in iMan.roster if func(name, sub)]
        if names:
            reply = 'Matched Names (%s) - %s' % (len(names), ', '.join(names))
        else:
            reply = "I can't find anyone with your search parameters."

        # Reply privately if the command was whispered, otherwise broadcast.
        if self.parent.was_whispered:
            self.parent.sendto(user, reply)
        else:
            self.parent.sendtoall(reply)
|
The remains of the largest domestic Roman building in Britain is displayed inside a cover building, with many impressive mosaic floors, underfloor heating systems, corridors and courtyards. The story of the site is told in an audio-visual programme using computer generated imagery.
The museum gallery tells the history of the site and displays many artefacts found during excavations of the site. Outside, the formal garden has been re-planted to its original plan, based on excavated bedding trenches. This is supplemented by a plant display area and museum of Roman gardens.
The Collections Discovery Centre offers visitors the chance to look behind the scenes into the conservation laboratory, sensitive and bulk stores, where the reserve archaeological collections of the Roman Palace are stored. Guided tours and artefacts handling sessions are offered on a daily basis.
See here for more Historic Houses in Sussex.
From Portsmouth or Brighton A27, take A259 at western end of Chichester by-pass signed to Fishbourne, turn right in village & follow signs. From Midhurst A286, take B2178 at the roundabout on the north side of Chichester, then left into Fishbourne village at Salthill Road, following signs. By Rail: alight at Fishbourne Station (Portsmouth to Brighton line), turn right down the road, then left into Roman Way (c 500m). Buses 700, 56 and 11 stop at the end of Salthill Road.
|
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Contains tables and objects for authorization in Aquilon """
from datetime import datetime
from sqlalchemy import (Table, Column, Integer, DateTime, Sequence, String,
select, ForeignKey, PassiveDefault, UniqueConstraint)
from sqlalchemy.orm import relation, deferred
from aquilon.aqdb.model import Base, Role, Realm
class UserPrincipal(Base):
    """ Simple class for strings representing users kerberos credential """
    __tablename__ = 'user_principal'

    id = Column(Integer, Sequence('user_principal_id_seq'), primary_key=True)

    # Principal name: the part before the '@' in name@realm.
    name = Column(String(32), nullable=False)

    # Kerberos realm the principal belongs to.
    realm_id = Column(Integer, ForeignKey(
        'realm.id', name='usr_princ_rlm_fk'), nullable=False)

    # Authorization role; deleting a role cascades to its principals.
    role_id = Column(Integer, ForeignKey(
        'role.id', name='usr_princ_role_fk', ondelete='CASCADE'),
                     nullable=False)

    creation_date = deferred(Column(DateTime,
                                    nullable=False, default=datetime.now))
    comments = deferred(Column('comments', String(255), nullable=True))

    realm = relation(Realm, uselist=False)
    role = relation(Role, uselist=False)

    def __str__(self):
        # Render as the canonical principal string "name@realm".
        return '@'.join([self.name,self.realm.name])
# Table-level tweaks: name the implicit primary key and enforce that a
# principal name is unique within its realm.
user_principal = UserPrincipal.__table__
user_principal.primary_key.name='user_principal_pk'

user_principal.append_constraint(
    UniqueConstraint('name','realm_id',name='user_principal_realm_uk'))

table = user_principal
def populate(sess, *args, **kw):
    """
    Seed the user_principal table with an initial set of users.

    Only runs when the table is empty. Each user list is mapped onto its
    pre-existing Role; all users share the single Realm, which is expected
    to be 'is1.morgan'. kw must contain a 'log' logger.
    """
    # count() avoids materializing every row just to test emptiness.
    if sess.query(UserPrincipal).count() > 0:
        return

    log = kw['log']

    admin = sess.query(Role).filter_by(name='aqd_admin').one()
    eng = sess.query(Role).filter_by(name='engineering').one()
    ops = sess.query(Role).filter_by(name='operations').one()
    telco = sess.query(Role).filter_by(name='telco_eng').one()

    # (role, member names) pairs replace four near-identical loops.
    role_members = [
        (admin, ['cdb', 'aqdqa', 'njw', 'wesleyhe', 'daqscott', 'kgreen',
                 'benjones']),
        (eng, ['cesarg', 'jasona', 'dankb', 'goliaa', 'samsh', 'hagberg',
               'hookn', 'jelinker', 'kovacsk', 'lookerm', 'walkert', 'af',
               'lillied']),
        (ops, ['premdasr', 'bestc', 'chawlav', 'wbarnes', 'gleasob',
               'lchun', 'peteryip', 'richmoj', 'hardyb', 'martinva']),
        (telco, ['dalys', 'medinad', 'peikonb', 'kulawiak']),
    ]

    r = sess.query(Realm).first()
    assert r.name == 'is1.morgan'

    # Batch every insert into a single commit instead of one per user.
    for role, names in role_members:
        for nm in names:
            sess.add(UserPrincipal(name=nm, realm=r, role=role,
                                   comments='AutoPopulated'))
    sess.commit()

    cnt = sess.query(UserPrincipal).count()
    assert cnt > 0
    log.debug('created %s users' % cnt)
|
The following affidavit was executed by Lt. J. C. Day on May 7, 1964.
When testifying before the President's Commission, I stated I did not remember who returned the two spent 6.5 hulls and envelope to my possession on the night of November 22, 1963. Since returning to Dallas Detective C. N. Dhority has called my attention to the fact he brought the three hulls in the envelope to me and asked me to check them again for fingerprints even though I had checked them when they were picked up on the sixth floor of the Texas School Book Depository about 1:20 p.m. November 22, 1963 by Detective R. M. Sims and myself and placed in a manila envelope. Since talking to Dhority I remember now that he was the one who returned the shells to me about 10:00 p.m. and stated that his office wanted to retain one. He left me two shells and the envelope that Detective Sims and I had previously marked. It was then that I scratched my name on the two shells that were released at 11:45 p.m. Agent Vince Drain along with the rifle and other evidence.
Signed this 7th day of May 1964.
The following affidavit was executed by Lt. J. C. Day on June 23, 1964.
The following affidavit is made to clear up confusion regarding the three spent 6.5 hulls, commission numbers 543, 544, and 545, found by the 6th floor window of the Texas School Book Depository on November 22, 1963. The hulls were picked up by Detective R. M. Sims and Lieutenant J. C. Day and placed in an envelope. Detective R. L. Studebaker was also present. The envelope was marked and dated by Sims and Day. Detective Sims took the hulls after they were checked for fingerprints by Day. The third hull, commission number 545, was later released directly to the FBI by the Dallas Police Department Homicide Division. At 10:00 P.M. November 22, 1963, Detective C. N. Dhority brought the three hulls in the marked envelope back to Lieutenant Day in the Identification Bureau office to recheck for prints. Dhority retained one hull, commission number 545 and left the other two, commission numbers 543, 544 along with the envelope with me to be sent to the FBI.
Vince Drain, FBI agent, took custody at 11:45 A.M. the same day. When I appeared before the commission April 22, 1964, I could not find my name on one of the hulls, identified as commission number 543, and thought this was the hull that had been retained by Dhority.
On June 8, 1964, the three hulls, commission numbers 543, 544, and 545, were back in Dallas and were examined by Captain G. M. Doughty and myself at the local FBI office.
I can identify commission numbers 543, 544, and 545 from my name on them, as the three hulls found on the sixth floor of the Texas School Book Depository on November 22, 1963.
As to the time I scratched my name on the hulls, I do not remember whether it was at the window when picked up or at 10:00 P.M. November 22, 1963, when they were returned to me by Dhority in the marked envelope. It had to be one or the other, because this is the only time I had all three hulls in my possession.
Both Detective R. L. Studebaker and Detective R. M. Sims, who were present at the window when the hulls were picked up, state I marked them as they were found under the window.
Signed this 23d day of June 1964.
|
'''
Created on May 13, 2014
@author: utku, yusuf
'''
import json
import pprint
from src import jsonFile
def addToBookmarks2(jsonfile, path, name):
j = jsonFile.jsonFile()
toAdd = {'path' : path, 'name' : name}
readFromFile = j.fileToJson(jsonfile)
#pprint(readFromFile)
for paths in readFromFile:
if paths['path'] == path:
print 'already in bookmarks'
return
toWriteList = []
toWriteList.extend(readFromFile)
toWriteList.append(dict(toAdd))
if j.jsonToFile(toWriteList, jsonfile):
print 'added to bookmarks'
def checkBookmarkList(path, filePath):
    """Return True when *path* is already bookmarked in *filePath*."""
    stored_paths = getAllPaths(filePath)
    return path in stored_paths
'''def addToBookmarks(jsonfile, path, name):
if checkBookmarkList(path, jsonfile):
print 'already in bookmarks'
else:
with open(jsonfile, 'a') as datafile:
json.dump(createJSONObject(jsonfile, path, name), datafile)
print 'added to bookmarks'''
'''def createJSONObject(jsonfile, path, name):
data = {'id':idGenerator(jsonfile), 'path':str(path), 'name':name}
return data'''
def readBookmarks(jsonFile):
    """Parse the bookmarks file into a list of dicts.

    The file stores JSON objects back to back (``}{``) rather than a proper
    array, so the separators are patched and the whole text wrapped in
    brackets before handing it to the JSON parser.
    """
    raw = open(jsonFile).read()
    wrapped = '[%s]' % raw.replace('}{', '},{')
    return json.loads(wrapped)
def getAllPaths(jsonFile):
    """Collect the 'path' field of every bookmark stored in the file."""
    return [entry['path'] for entry in readBookmarks(jsonFile)]
def showAllBookmarks(jsonFile):
    """Return every bookmark entry stored in the file as a list."""
    return list(readBookmarks(jsonFile))
'''def idGenerator(jsonFile):
jsonObjj = readBookmarks(jsonFile)
if jsonObjj != []:
return jsonObjj[len(jsonObjj)-1]['id'] + 1
else:
return 0'''
|
With cooler temperatures comes comfort food! This ultimate round-up of comfort food recipes, printables, & decor is perfect for everyday cooking or the holidays!
Are you still trying to get back on track after the holiday season?
Mexican salsa doesn't get easier than this: you only need 5 ingredients to make this fresh, chunky, and spicy tomato salsa recipe.
Tater tot nachos, a.k.a. totchos, make great party food with very little effort.
Spice up your seafood with this Alaska cod with Moroccan stewed tomatoes and chickpeas for an easy, delicious and healthy weeknight meal.
NYT Cooking: Arguably one of the coziest autumnal dishes you can make in under an hour, this cheesy pumpkin pasta doesn't rely on a roux.
... recipe for a sweet (and savoury) 2019: Free Food & Cooking programs and workshops at the Library: https://t.co/bstjXoUZIi… https://t.co/cL5Kvj32XM"
When you think of dill dijon mustard sauce, you likely don't think of adding almond milk. But this version uses it to lessen carbs while keeping it creamy.
These healthy and light chicken stuffed sweet potatoes will leave you feeling energized and satisfied.
Chicken Gyro Wrap Lunch box idea - They don't include the recipe, but we could use canned chicken, homemade sauce, cucumbers, onions.
Best Budget Friendly Recipes – my most loved Breakfast, Lunch, and Dinner recipes that won't break the bank!
I know I didn't share as much as I did in times past – but get ready, in 2019 you'll see a lot more recipes and recipe collections from me.
Low carb meal plan for 7 days was a labor of love as it became extremely obvious to me why I fail when I don't plan my meals for the week.
These Cajun chicken tacos are loaded with fresh lettuce, tomatoes, cheese, juicy seasoned chicken, and a drizzle of ranch dressing.
Don't get stuck in a dinner rut! Check out my 20 Best Vegetarian Dinner Recipes for so many delicious options that your whole family will love!
We Californians wouldn't dream about drafting a Thanksgiving menu without a gorgeous salad, and these 15 Thanksgiving Salads might just outshine the turkey!
20 Easy Slow Cooker Recipes That Aren't Soup! Hearty slow cooker recipes your whole family will love! Weeknight cooking made easy!
|
"""Parallel workflow execution via SLURM
"""
import os
import sys
from .base import (GraphPluginBase, logger)
from ...interfaces.base import CommandLine
def node_completed_status(checknode):
    """Return True when *checknode*'s previous result can be reused.

    :param checknode: the node whose run status is checked
    :return: True when the node does not need to be re-run
    """
    # TODO: place this in the base.py file and refactor
    rerun_not_forced = (checknode.overwrite is False or
                        (checknode.overwrite is None and
                         not checknode._interface.always_run))
    try:
        previous_result_exists, _, _, _ = checknode.hash_exists()
    except Exception:
        previous_result_exists = False
    return (previous_result_exists and rerun_not_forced)
class SLURMGraphPlugin(GraphPluginBase):
    """Execute using SLURM

    The plugin_args input to run can be used to control the SGE execution.
    Currently supported options are:

    - template : template to use for batch job submission
    - qsub_args : arguments to be prepended to the job execution script in the
                  qsub call
    """
    _template = "#!/bin/bash"

    def __init__(self, **kwargs):
        # Establish safe defaults up front: the original only assigned these
        # inside the plugin_args branches, so _submit_graph raised
        # AttributeError whenever 'sbatch_args' (or plugin_args entirely)
        # was not supplied.
        self._sbatch_args = ''
        self._dont_resubmit_completed_jobs = False
        if 'plugin_args' in kwargs and kwargs['plugin_args']:
            if 'retry_timeout' in kwargs['plugin_args']:
                self._retry_timeout = kwargs['plugin_args']['retry_timeout']
            if 'max_tries' in kwargs['plugin_args']:
                self._max_tries = kwargs['plugin_args']['max_tries']
            if 'template' in kwargs['plugin_args']:
                self._template = kwargs['plugin_args']['template']
                if os.path.isfile(self._template):
                    # Read the template file via a context manager so the
                    # handle is closed (the original leaked it).
                    with open(self._template) as template_file:
                        self._template = template_file.read()
            if 'sbatch_args' in kwargs['plugin_args']:
                self._sbatch_args = kwargs['plugin_args']['sbatch_args']
            if 'dont_resubmit_completed_jobs' in kwargs['plugin_args']:
                self._dont_resubmit_completed_jobs = kwargs['plugin_args']['dont_resubmit_completed_jobs']
        super(SLURMGraphPlugin, self).__init__(**kwargs)

    def _submit_graph(self, pyfiles, dependencies, nodes):
        """Write one sbatch wrapper script per node plus a submit_jobs.sh
        that encodes the dependency graph, then execute it with bash.

        :param pyfiles: per-node python scripts to run
        :param dependencies: dict mapping node index -> indices it depends on
        :param nodes: the pipeline nodes, parallel to pyfiles
        """
        def make_job_name(jobnumber, nodeslist):
            """
            - jobnumber: The index number of the job to create
            - nodeslist: The name of the node being processed
            - return: A string representing this job to be displayed by SLURM
            """
            job_name = 'j{0}_{1}'.format(jobnumber, nodeslist[jobnumber]._id)
            # Condition job_name to be a valid bash identifier (i.e. - is invalid)
            job_name = job_name.replace('-', '_').replace('.', '_').replace(':', '_')
            return job_name

        batch_dir, _ = os.path.split(pyfiles[0])
        submitjobsfile = os.path.join(batch_dir, 'submit_jobs.sh')

        # Optionally mark nodes whose cached results (and those of all their
        # dependencies) are still valid so they are not resubmitted.
        cache_doneness_per_node = dict()
        if self._dont_resubmit_completed_jobs:
            for idx, pyscript in enumerate(pyfiles):
                node = nodes[idx]
                node_status_done = node_completed_status(node)
                # if the node itself claims done, then check to ensure all
                # dependencies are also done
                if node_status_done and idx in dependencies:
                    for child_idx in dependencies[idx]:
                        if child_idx in cache_doneness_per_node:
                            child_status_done = cache_doneness_per_node[child_idx]
                        else:
                            child_status_done = node_completed_status(nodes[child_idx])
                        node_status_done = node_status_done and child_status_done
                cache_doneness_per_node[idx] = node_status_done

        with open(submitjobsfile, 'wt') as fp:
            fp.writelines('#!/usr/bin/env bash\n')
            fp.writelines('# Condense format attempted\n')
            for idx, pyscript in enumerate(pyfiles):
                node = nodes[idx]
                if cache_doneness_per_node.get(idx, False):
                    continue
                else:
                    template, sbatch_args = self._get_args(
                        node, ["template", "sbatch_args"])

                    batch_dir, name = os.path.split(pyscript)
                    name = '.'.join(name.split('.')[:-1])
                    # The wrapper script is the template followed by the
                    # interpreter invocation of the node's python script.
                    batchscript = '\n'.join((template,
                                             '%s %s' % (sys.executable, pyscript)))
                    batchscriptfile = os.path.join(batch_dir,
                                                   'batchscript_%s.sh' % name)

                    batchscriptoutfile = batchscriptfile + '.o'
                    batchscripterrfile = batchscriptfile + '.e'

                    with open(batchscriptfile, 'wt') as batchfp:
                        batchfp.writelines(batchscript)

                    # Build an --dependency=afterok:... clause from the ids
                    # captured when the prerequisite jobs were submitted.
                    deps = ''
                    if idx in dependencies:
                        values = ''
                        for jobid in dependencies[idx]:
                            # Avoid dependencies on jobs that were skipped as done
                            if not self._dont_resubmit_completed_jobs or cache_doneness_per_node[jobid] == False:
                                values += "${{{0}}}:".format(make_job_name(jobid, nodes))
                        if values != '':  # i.e. if some jobs were added to dependency list
                            values = values.rstrip(':')
                            deps = '--dependency=afterok:%s' % values
                    jobname = make_job_name(idx, nodes)
                    # Do not use default output locations if they are set in self._sbatch_args
                    stderrFile = ''
                    if self._sbatch_args.count('-e ') == 0:
                        stderrFile = '-e {errFile}'.format(
                            errFile=batchscripterrfile)
                    stdoutFile = ''
                    if self._sbatch_args.count('-o ') == 0:
                        stdoutFile = '-o {outFile}'.format(
                            outFile=batchscriptoutfile)
                    # Capture the numeric job id printed by sbatch into a bash
                    # variable named after the job, for use in later deps.
                    full_line = '{jobNm}=$(sbatch {outFileOption} {errFileOption} {extraSBatchArgs} {dependantIndex} -J {jobNm} {batchscript} | awk \'/^Submitted/ {{print $4}}\')\n'.format(
                        jobNm=jobname,
                        outFileOption=stdoutFile,
                        errFileOption=stderrFile,
                        extraSBatchArgs=sbatch_args,
                        dependantIndex=deps,
                        batchscript=batchscriptfile)
                    fp.writelines(full_line)
        cmd = CommandLine('bash', environ=dict(os.environ),
                          terminal_output='allatonce')
        cmd.inputs.args = '%s' % submitjobsfile
        cmd.run()
        logger.info('submitted all jobs to queue')
|
What’s next for Timothy Zahn and Thrawn? As far as villainous characters go, Thrawn is certainly one of the most popular in the Star Wars galaxy. Surely we haven’t heard the last of him, right? Based on what we learned from Thrawn: Alliances and the end of Star Wars Rebels, here’s what we may have to look forward to… Punch it!
The video below has the main topical content of the episode, and the audio version above has an even wilder theory for your consideration!
Next Post: Episode 1,508: Is “Resistance” the “Deep Space Nine” of Star Wars TV?
|
# -*- coding: utf-8 -*-
"""
Python Interface for
Sartorius Serial Interface for
EA, EB, GD, GE, TE scales.
2010-2011 Robert Gieseke - robert.gieseke@gmail.com
See LICENSE.
"""
import serial
class Sartorius(serial.Serial):
    """Serial interface for Sartorius EA, EB, GD, GE and TE scales."""

    def __init__(self, com_port):
        """
        Initialise Sartorius device.

        Example:
        scale = Sartorius('COM1')
        """
        serial.Serial.__init__(self, com_port)
        # Fixed communication settings for these scales: 9600 baud,
        # 7 data bits, odd parity; short timeout so reads never block long.
        self.baudrate = 9600
        self.bytesize = 7
        self.parity = serial.PARITY_ODD
        self.timeout = 0.5

    def value(self):
        """
        Return displayed scale value as a float, or the string "NA" when
        the reply cannot be parsed.
        """
        try:
            if self.inWaiting() == 0:
                # ESC P requests the currently displayed value.
                self.write('\033P\n')
            answer = self.readline()
            if len(answer) == 16:  # menu code 7.1.1
                answer = float(answer[0:11].replace(' ', ''))
            else:  # menu code 7.1.2
                answer = float(answer[6:17].replace(' ', ''))
            return answer
        except (ValueError, IndexError, OSError):
            # Narrowed from a bare ``except`` (which also swallowed
            # KeyboardInterrupt/SystemExit); keep the legacy "NA" sentinel
            # for malformed, empty or failed replies.
            return "NA"

    def display_unit(self):
        """
        Return unit.
        """
        self.write('\033P\n')
        answer = self.readline()
        try:
            answer = answer[11].strip()
        except IndexError:
            # Reply shorter than expected -> no unit available.
            answer = ""
        return answer

    def tara_zero(self):
        """
        Tara and zeroing combined.
        """
        self.write('\033T\n')

    def tara(self):
        """
        Tara.
        """
        self.write('\033U\n')

    def zero(self):
        """
        Zero.
        """
        self.write('\033V\n')
|
Description: Opskrift Pirogger at the above 4608x3072 resolution, which is part of the Opskrifter directory. Download this image for free in HD resolution using the "download button" below. If you do not find the exact resolution you are looking for, then go for a native or higher resolution. Or, if you are interested in similar pictures of Opskrift Pirogger, you are free to browse through the search feature or the related post section below this post. You can bookmark our site to get more updates related to Opskrift Pirogger or any other topic.
Denne Opskrift Pirogger leveres kun til personlig brug som billede på computere, smartphones eller andre displayenheder. Hvis du fandt billeder, der er ophavsretligt beskyttet til dig, bedes du kontakte os, og vi vil fjerne det. Vi har ikke til hensigt at vise nogen copyrightbeskyttede billeder.
|
from ..main import db
import math
import logging
class Board(db.Model):
    # Persistent sudoku board owned by a user.
    # NOTE(review): this model mixes Flask-SQLAlchemy (db.Column /
    # db.relationship) with Django-style fields (models.CharField etc.);
    # `models`, `User`, `receiver` and `post_save` are not imported in this
    # module — confirm which ORM is actually intended before running this.
    #user = models.ForeignKey(User)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
    user = db.relationship('User', backref=db.backref('boards', lazy=True))
    # Django-style field declarations: display name, whether the board is
    # locked ("anchored"), and the current position in the undo history.
    name = models.CharField(max_length=255)
    anchored = models.BooleanField(default=False)
    history_loc = models.IntegerField(default=-1)
    shared_with = models.ManyToManyField(User, related_name='shared_with_me', through='Board_Share')
    logger = logging.getLogger('solver')

    class Meta:
        app_label = "solver"

    def context(self):
        # Serialize the board (rows ordered by row_index) for the client.
        return {'name' : self.name,
                'id' : self.id,
                'anchored' : self.anchored,
                'rows' : [row.context() for row in self.row_set.extra(order_by = ['row_index'])],
                'history' : self.history_context(),
                'history_loc' : self.history_loc
                }

    def history_context(self):
        # Map each history entry's order index to its serialized form.
        context = {}
        for e in sorted(self.historyentry_set.all()):
            context[e.order] = e.context()
        return context

    def update(self, data):
        # Rebuild name/anchored/history/rows from a client payload.
        # Returns False as soon as any row update fails, True on success.
        self.logger.debug('updating board')
        history = data['history']
        most_recent = data['most_recent']
        self.history_loc = data['history_loc']
        #self.logger.debug(history)
        self.logger.debug('loc = %d, most recent = %d' % (self.history_loc, most_recent))
        self.name = data['name']
        self.anchored = data['anchored']
        row_data = data['rows']
        # Drop all stored history, then re-create entries up to most_recent.
        [e.delete() for e in self.historyentry_set.all()]
        from solver.models.cell import Cell
        # NOTE(review): history keys appear to be strings; lexicographic
        # sorted() would order '10' before '2' — confirm key format.
        for entry_index in sorted(history):
            self.logger.debug('entry = %d' % (int(entry_index)))
            if(int(entry_index) > most_recent):
                break
            entry = history[entry_index]
            cell = Cell.objects.get(row__board=self.id, \
                row__row_index=entry['row_index'], cell_index=entry['col_index'])
            self.historyentry_set.create(cell=cell, choice=entry['choice'], \
                action=entry['action'], loc=entry_index, order=entry_index)
        for row in self.row_set.extra(order_by = ['row_index']):
            #self.logger.debug(row.row_index)
            if(not row.update(row_data.__getitem__(row.row_index))):
                return False
        self.save()
        return True
@receiver(post_save, sender=Board)
def build_board(sender, instance, **kwargs):
    # Django post_save signal handler: populate a freshly created board
    # with a 9x9 grid (9 rows of 9 cells — sudoku-sized).
    # NOTE(review): `receiver` and `post_save` are Django signal imports
    # that are not imported anywhere in this module — confirm wiring.
    if not kwargs['created']:
        # Only build the grid on initial creation, never on later saves.
        return
    from solver.models.row import Row
    from solver.models.cell import Cell
    for num in range(0,9):
        instance.row_set.create(row_index=num)
    for row in instance.row_set.all():
        for index in range(0,9):
            Cell.objects.create(row=row, cell_index=index)
|
Air Duct Cleaning Guys will be available for your requirements regarding Air Duct Cleaning in Brown City, MI. You expect the most advanced modern technology in the industry, and our workforce of qualified professionals can provide just that. Our products are of the highest quality and we have learned to help save costs. Contact us by dialing 800-376-4281 and we will be able to explore the options, reply to your concerns, and organize an appointment to start setting up your job.
Here at Air Duct Cleaning Guys, we know that you will need to stay within budget and reduce costs everywhere you're able to. Still you need to have superior services with Air Duct Cleaning in Brown City, MI, so you can trust our staff to save you money while still providing the finest quality services. Our initiatives to conserve your funds will never give up the excellent quality of our results. We use the very best solutions and products to make sure that your venture can withstand the test of time, and we save you money with strategies that do not modify the superior quality of any project. We'll achieve this by offering you the very best prices in the industry and eliminating costly blunders. Get in touch with Air Duct Cleaning Guys when you're needing the best quality solutions at a minimal rate. You'll be able to get in touch with our team by dialing 800-376-4281 to begin.
It is important to be knowledgeable when it comes to Air Duct Cleaning in Brown City, MI. We will ensure that you understand what to expect. You will not deal with any unexpected situations whenever you deal with Air Duct Cleaning Guys. Start by talking about your job with our customer care associates once you dial 800-376-4281. In this call, you'll get your concerns responded to, and we'll schedule a time to begin the work. Our team can arrive at the appointed time with all the appropriate equipment, and will work closely with you through the entire undertaking.
If you find yourself setting up a task for Air Duct Cleaning in Brown City, MI, there are many reasons to prefer Air Duct Cleaning Guys. Our products are of the highest quality, our cash saving practices are sensible and efficient, and our client satisfaction scores are unrivaled. We fully understand your needs and intentions, and we're there to serve you using our experience. Dial 800-376-4281 when you require Air Duct Cleaning in Brown City, and we're going to work together with you to systematically finish your job.
|
# coding=utf-8
import hexchat
import os
import sys
if sys.version_info[0] == 2:
import urllib2 as urllib_error
import urllib as urllib_request
else:
import urllib.error as urllib_error
import urllib.request as urllib_request
# hexchat plugin metadata (displayed by the plugin manager).
__module_name__ = "Twitch Emote Autoformat"
__module_author__ = "Poorchop"
__module_version__ = "0.8"
__module_description__ = "Automatically format TwitchTV emote names with proper capitalization"

# TODO: cross platform support
# TODO: emote unicode character support
# TODO: only load subscriber emotes for subscribed/specified channels

# change this value to False if you do not wish to use subscriber emotes
allow_sub_emotes = True
# Print events whose text receives emoji substitution in emote_cb.
events = ("Channel Message", "Channel Msg Hilight",
          "Channel Action", "Channel Action Hilight",
          "Your Message")
# Re-entrancy guard: emote_cb re-emits the event it handles and must not
# process its own emission a second time.
edited = False
# emote names taken from: http://twitchemotes.com/
# list last updated August 18, 2014
# emote names taken from: http://twitchemotes.com/
# Maps the lowercase form of each emote name to its properly-capitalized
# form. Lookups are done with word.lower(), so every key MUST be exactly
# the lowercase of its value. Fixed two typo'd keys that could never match:
# 'blargnaunt' -> 'blargnaut' and 'sonnerlater' -> 'soonerlater'.
emote_dict = {'4head': '4Head',
              'arsonnosexy': 'ArsonNoSexy',
              'asianglow': 'AsianGlow',
              'atgl': 'AtGL',
              'ativy': 'AtIvy',
              'atww': 'AtWW',
              'bcwarrior': 'BCWarrior',
              'bort': 'BORT',
              'batchest': 'BatChest',
              'biblethump': 'BibleThump',
              'bigbrother': 'BigBrother',
              'bionicbunion': 'BionicBunion',
              'blargnaut': 'BlargNaut',
              'bloodtrail': 'BloodTrail',
              'brainslug': 'BrainSlug',
              'brokeback': 'BrokeBack',
              'cougarhunt': 'CougarHunt',
              'daesuppy': 'DAESuppy',
              'dbstyle': 'DBstyle',
              'dansgame': 'DansGame',
              'datsheffy': 'DatSheffy',
              'dogface': 'DogFace',
              'eagleeye': 'EagleEye',
              'elegiggle': 'EleGiggle',
              'evilfetus': 'EvilFetus',
              'fpsmarksman': 'FPSMarksman',
              'fungineer': 'FUNgineer',
              'failfish': 'FailFish',
              'frankerz': 'FrankerZ',
              'freakinstinkin': 'FreakinStinkin',
              'fuzzyotteroo': 'FuzzyOtterOO',
              'gasjoker': 'GasJoker',
              'gingerpower': 'GingerPower',
              'grammarking': 'GrammarKing',
              'hassaanchop': 'HassaanChop',
              'hassanchop': 'HassanChop',
              'hotpokket': 'HotPokket',
              'itsboshytime': 'ItsBoshyTime',
              'jkanstyle': 'JKanStyle',
              'jebaited': 'Jebaited',
              'joncarnage': 'JonCarnage',
              'kapow': 'KAPOW',
              'kzassault': 'KZassault',
              'kzcover': 'KZcover',
              'kzguerilla': 'KZguerilla',
              'kzhelghast': 'KZhelghast',
              'kzowl': 'KZowl',
              'kzskull': 'KZskull',
              'kappa': 'Kappa',
              'keepo': 'Keepo',
              'kevinturtle': 'KevinTurtle',
              'kippa': 'Kippa',
              'kreygasm': 'Kreygasm',
              'mvgame': 'MVGame',
              'mechasupes': 'MechaSupes',
              'mrdestructoid': 'MrDestructoid',
              'nightbat': 'NightBat',
              'ninjatroll': 'NinjaTroll',
              'nonospot': 'NoNoSpot',
              'omgscoots': 'OMGScoots',
              'onehand': 'OneHand',
              'opieop': 'OpieOP',
              'optimizeprime': 'OptimizePrime',
              'pjharley': 'PJHarley',
              'pjsalt': 'PJSalt',
              'pmstwin': 'PMSTwin',
              'panicvis': 'PanicVis',
              'pazpazowitz': 'PazPazowitz',
              'peopleschamp': 'PeoplesChamp',
              'picomause': 'PicoMause',
              'pipehype': 'PipeHype',
              'pogchamp': 'PogChamp',
              'poooound': 'Poooound',
              'punchtrees': 'PunchTrees',
              'ralpherz': 'RalpherZ',
              'redcoat': 'RedCoat',
              'residentsleeper': 'ResidentSleeper',
              'ritzmitz': 'RitzMitz',
              'rulefive': 'RuleFive',
              'smorc': 'SMOrc',
              'smskull': 'SMSkull',
              'ssssss': 'SSSsss',
              'shazbotstix': 'ShazBotstix',
              'shazam': 'Shazam',
              'sobayed': 'SoBayed',
              'soonerlater': 'SoonerLater',
              'srihead': 'SriHead',
              'stonelightning': 'StoneLightning',
              'strawbeary': 'StrawBeary',
              'supervinlin': 'SuperVinlin',
              'swiftrage': 'SwiftRage',
              'tf2john': 'TF2John',
              'tehfunrun': 'TehFunrun',
              'theringer': 'TheRinger',
              'thetarfu': 'TheTarFu',
              'thething': 'TheThing',
              'thunbeast': 'ThunBeast',
              'tinyface': 'TinyFace',
              'toospicy': 'TooSpicy',
              'trihard': 'TriHard',
              'uleetbackup': 'UleetBackup',
              'unsane': 'UnSane',
              'unclenox': 'UncleNox',
              'volcania': 'Volcania',
              'wtruck': 'WTRuck',
              'wholewheat': 'WholeWheat',
              'winwaker': 'WinWaker',
              'youwhy': 'YouWHY',
              'aneleanele': 'aneleanele',
              'noscope420': 'noScope420',
              'shazamicon': 'shazamicon'}
def parse_sub_emotes(file_path):
    """Load subscriber emote names (one per line) from *file_path* into the
    module-level emote_dict, keyed by their lowercase form.

    :param file_path: path to the plain-text subscriber emote list
    """
    # ``with`` guarantees the handle is closed even if a line is malformed;
    # the original open()/close() pair leaked the descriptor on error.
    with open(file_path, "r") as f:
        for line in f:
            stripped_emote = line.replace("\n", "")
            emote_dict[stripped_emote.lower()] = stripped_emote
def download_emotes(file_path):
    """Fetch the subscriber emote list from the GitHub mirror into
    *file_path*, then load it into emote_dict."""
    url = "https://raw.githubusercontent.com/Poorchop/hexchat-scripts/master/twitch-sub-emotes.txt"
    try:
        urllib_request.urlretrieve(url, file_path)
    except urllib_error.HTTPError as e:
        hexchat.prnt("Could not retrieve subscriber emote list ({}), try downloading manually at {} and then reload "
                     "this script".format(e, url))
    else:
        hexchat.prnt("Successfully downloaded subscriber emote list")
        parse_sub_emotes(file_path)
if allow_sub_emotes:
    # Subscriber emotes are cached in a plain-text file inside the hexchat
    # addons directory: download it on first run, reuse it afterwards.
    file_path = os.path.join(hexchat.get_info("configdir"),
                             "addons", "twitch-sub-emotes.txt")
    if os.path.exists(file_path):
        parse_sub_emotes(file_path)
    else:
        download_emotes(file_path)
def is_twitch():
    """Return True when the currently focused server is a twitch.tv host."""
    server = hexchat.get_info("host")
    return bool(server and "twitch.tv" in server)
def keypress_cb(word, word_eol, userdata):
    """Key-press hook: rewrite lowercase emote names in the input box with
    their properly-capitalized form.

    Skips rewriting on ctrl+a (key "97", mod "4") and backspace ("65288")
    so the user can still edit text without it snapping back.
    """
    key = word[0]
    mod = word[1]
    if (key, mod) == ("97", "4") or key == "65288":
        return
    if not is_twitch():
        return
    msg = hexchat.get_info("inputbox")
    if not msg:
        return
    split_words = msg.split(" ")
    # Replace each word in place by position; the original used
    # list.index(), an O(n) scan per word that is fragile with duplicates.
    for i, w in enumerate(split_words):
        split_words[i] = emote_dict.get(w.lower(), w)
    new_msg = " ".join(split_words)
    hexchat.command("SETTEXT {}".format(new_msg))
    hexchat.command("SETCURSOR {}".format(len(new_msg)))
def emote_cb(word, word_eol, event):
    """Replace common emoticons/emote names in displayed messages with
    unicode characters, re-emit the rewritten event and eat the original."""
    # Pad the event word list to 4 entries so indexing below never raises.
    word = [(word[i] if len(word) > i else "") for i in range(4)]
    global edited
    # Guard: emit_print below re-triggers this callback; skip the echo.
    if edited:
        return
    if is_twitch():
        # word[1] is the message text.
        word[1] = word[1] \
            .replace(":)", "😊") \
            .replace(":(", "☹") \
            .replace(":z", "😴") \
            .replace("B)", "😎") \
            .replace(";)", "😉") \
            .replace(";p", "😜") \
            .replace(":p", "😛") \
            .replace(":D", "😄") \
            .replace(">(", "😠") \
            .replace("<3", "♥") \
            .replace("BionicBunion", "😺") \
            .replace("FrankerZ", "🐶") \
            .replace("ItsBoshyTime", "⚠") \
            .replace("Kappa", "😏") \
            .replace("KZskull", "💀")
        edited = True
        hexchat.emit_print(event, *word)
        edited = False
        return hexchat.EAT_ALL
hexchat.hook_print("Key Press", keypress_cb)
# Hook every message/action event at high priority so the substitution runs
# before other plugins see the text.
for event in events:
    hexchat.hook_print(event, emote_cb, event, priority=hexchat.PRI_HIGH)
hexchat.prnt(__module_name__ + " version " + __module_version__ + " loaded")
|
What is the Philadelphia Presbytery all about?
In planning for the life and ministry of the new Philadelphia Presbytery it is our conviction that we must take into consideration several balancing concerns affecting the health, growth, and functioning of a presbytery. These concerns reflect three main components of the role of Presbytery in the life of the Church: congregation, court, and mission.
§ Presbytery will function as a body effectively providing worship, edification, encouragement, oversight, and accountability to its members and churches.
§ Presbytery will function as a court, efficiently and proactively exercising spiritual justice with mercy and recognizing and confirming God’s calling to Gospel ministry.
§ Presbytery will function as a mission, mobilizing all members to promote kingdom expansion through mercy ministry, evangelism, and church planting.
As our Presbytery is effective as a congregation and efficient as a court, it is able to focus more fully on the fulfillment of the Great Commission and the Great Commandment.
Our vision is to see Philadelphia saturated with Gospel-centered churches so that many people come to Christ and together our communities are transformed.
5. The heart of our king also compels us to engage our communities in deeds of mercy and love, of justice and truth.
God's heart moves Him relentlessly to implement His holy agenda through His redeemed people, the Church. Isaiah tells us that when justice is driven back, when righteousness stands at a distance, when truth stumbles in the streets, when honesty cannot enter, when truth is nowhere to be found, when whoever shuns evil becomes a prey...when the Lord sees this and there is no one among His people to intervene, He is displeased and appalled (see Isa. 59:14-16).
Every leader learns this the hard way: you are just one limited person. You are just a man, just a woman. You can only touch a few lives and do a few things. When Moses tried to be "The Man," the only leader of a redeemed but unruly Israel coming out of centuries of slavery, he burned out. His father-in-law, Jethro, watched him work, and gave him wise advice: "What you are doing is not good...The work is too heavy for you; you cannot handle it alone...But select capable men from all the people...and appoint them as officials over thousands, hundreds, fifties, and tens...That will make your load lighter, because they will share it with you. If you do this...all these people will go home satisfied" (Ex. 18:17–23). Leaders multiplied at every level: the only way to lead the people and get them home to the promised land. So it is into the New Testament. Paul urges Timothy, his young apprentice who was a leader in the next generation: "And the things you have heard me say in the presence of many witnesses entrust to reliable men who will also be qualified to teach others" (2 Tim. 2:2). To fulfill our calling—to make disciples of the nations until Jesus returns—leaders must be replicated at every level of ministry. Because ministry takes place at every level of life, we need leaders there, too: trained to serve, unleashed to lead, and able to develop new leaders.
|
#!/usr/bin/python
# Copyright 2015 Andreas Mosburger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import json
import time
import argparse
#streams triples from a file to a streaming engine
parser = argparse.ArgumentParser(description='Stream triples read from capture_file to stdout')
parser.add_argument('capture_file', type=argparse.FileType('r'))
parser.add_argument('-d', '--delay', type=float, default=0)
args = parser.parse_args()

for line in args.capture_file:
    # Optional throttle between emitted triples (seconds).
    time.sleep(args.delay)
    fields = line.rstrip().split(" ")
    if len(fields) == 3:
        # simple triple, separated by blanks
        print(json.dumps(fields))
        sys.stdout.flush()
    elif len(fields) == 4:
        # simple triple, separated by blanks, timestamp in the front
        print(json.dumps(fields[1:4]))
        sys.stdout.flush()
    else:
        print('match error')
|
I did the second run for the Juicy print yesterday. I'm really excited about picking up screenprinting again- I've got some ideas brewin'.
I decided to entitle this print Juicy Jangles, which is Juicy's nickname when she wears her bell necklace. Yes, she has different names for most occasions.
Tags: "animal", "art", "cute", "juicy", "screenprint", "simple"
|
# James M. Coughlan
# Simple Belief Propagation stereo implementation using Shed Skin.
# Use speed-up technique from:
# J. Coughlan and H. Shen. "An Embarrassingly Simple Speed-Up of Belief Propagation with Robust Potentials." arXiv. 2010. http://arxiv.org/abs/1010.0012
import numpy as np
import pylab as pl
from scipy import interpolate
# above line must be imported *after* changing directory!
interp1d=interpolate.interp1d
from math import floor
import scipy
from scipy.misc import imread, imsave
from stereo import do_sweepsSS2
##############
# Input stereo pair (left/right images).
Lnam, Rnam='L.bmp','R.bmp'
nd=40 #number of disparities
dmin,dmax=0,19 #min, max disparities
Tu,Tb=15,3 #discontinuity thresholds (unary, binary potentials)
# Inverse-temperature weights for the unary and binary potentials.
unBeta,binBeta=1/5., 0.75
num_sweeps = 5
##############
#from integer index to real-valued disparity:
disps=[dmin + k/(nd-1.)*(dmax-dmin) for k in range(nd)]

pl.close('all')
pl.ion()
imL,imR=imread(Lnam)+0.,imread(Rnam)+0.
h,w=np.shape(imL)
print 'h,w:',h,w
# Process the full image; rlo/rhi/clo/chi allow cropping if desired.
rlo,rhi,clo,chi=0,h,0,w
h2,w2=h,w
print 'h2,w2:',h2,w2

#make unary potential:
# unPots[i,j,d] = exp(-unBeta * truncated matching error) between the left
# pixel shifted by disparity d and the right pixel at (i,j).
print 'calculating unPots'
unPots=np.zeros((h2,w2,nd),float) #unPots[i,j,d]
errors=np.zeros((h2,w2,nd),float)
x_sparse=np.arange(w)
for i in range(rlo,rhi):
    print 'row:',i,
    y_sparse=imL[i,:]
    for j in range(clo,chi):
        # NOTE(review): interp1d is rebuilt for every column although it only
        # depends on the row — hoisting it one loop up would be faster.
        func=interp1d(x_sparse,y_sparse)
        x_dense=np.clip(np.array([j+d for d in disps]),0.,w-1) #clip so that nothing is out of bounds
        y_dense=func(x_dense)
        # Truncated absolute difference (robust potential, threshold Tu).
        errors[i-rlo,j-clo,:]=np.array([min(abs(y-imR[i,j]),Tu) for y in y_dense])
unPots=np.exp(-unBeta*errors)
print

#make binary potential (homogeneous, and assume symmetric!):
# binPots[d0,d1] = exp(-binBeta * truncated |d0-d1|) — a robust smoothness
# prior shared by all neighboring pixel pairs.
print 'calculating binPots'
binPots=np.ones((nd,nd),float) #binPots[d0,d1]
f0=np.exp(-binBeta*Tb)
for d0 in range(nd):
    for d1 in range(nd):
        binPots[d0,d1]=np.exp(-binBeta*min(abs(d0-d1),Tb))

#make messages (Left, Right, Up, Down) and initialize to all ones:
#convention: all message indices [i,j] label ***source*** (not destination) of message
msgs={'L':np.ones((h2,w2,nd),float), 'R':np.ones((h2,w2,nd),float),
      'U':np.ones((h2,w2,nd),float), 'D':np.ones((h2,w2,nd),float)}
def getbeliefs(unPots,msgs):
    """Fuse unary potentials with all incoming messages into normalized
    per-pixel beliefs over disparities. Returns unBels[i,j,d]."""
    nrows, ncols, _ = np.shape(unPots)
    unBels = unPots + 0.  # work on a copy
    for r in range(nrows):
        for c in range(ncols):
            # Messages arriving at (r, c): from the pixel above ('D'ownward
            # message), below ('U'pward), left ('R'ightward), right ('L'eftward).
            sources = ((r - 1, c, 'D'), (r + 1, c, 'U'),
                       (r, c - 1, 'R'), (r, c + 1, 'L'))
            for (sr, sc, direc) in sources:
                if 0 <= sr < nrows and 0 <= sc < ncols:
                    unBels[r, c, :] *= msgs[direc][sr, sc, :]
            # Normalize so the beliefs at each pixel sum to one.
            unBels[r, c, :] /= np.sum(unBels[r, c, :])
    return unBels
def getwinners(unBels):
    #at each pixel, what is the winning disparity?
    """Return the (h, w) array of MAP disparity indices, i.e. the argmax of
    the beliefs over the disparity axis at each pixel.

    Vectorized with np.argmax instead of the original per-pixel double
    Python loop (same result, O(h*w) C-speed instead of interpreted).
    """
    return np.argmax(unBels, axis=2)
#(row,col) pixel ranges for each update direction, for use with range() function:
# NOTE(review): `ranges` is not referenced by do_sweeps below — it documents
# the sweep order the compiled Shed Skin version is expected to follow.
ranges={'L':[(0,h2,1),(w2-1,0,-1)],'R':[(0,h2,1),(0,w2-1,1)],'U':[(h2-1,0,-1),(0,w2,1)],'D':[(0,h2-1,1),(0,w2,1)]}
#note that range should go from right column to left column for 'L' update, etc.
#note: must be compatible with the SS version, which will work on messages padded on each side to eliminate special border cases
def do_sweeps(unPots, binPots, msgs, nsweeps):
    """Run *nsweeps* BP sweeps via the compiled Shed Skin routine do_sweepsSS2.

    Messages are zero-padded by one pixel on every side before the call so
    the compiled code needs no border special cases, then cropped back.
    NOTE(review): relies on module globals Tb and f0; `dir` shadows the
    builtin of the same name (harmless here, but worth renaming someday).
    """
    h,w,nd=np.shape(msgs['L'])
    h2,w2=h+2,w+2
    msgs2={}
    for dir in ['L','R','U','D']:
        msgs2[dir]=np.ones((h2,w2,nd),float)
        msgs2[dir][1:(h2-1),1:(w2-1),:]=msgs[dir]+0.
    # The Shed Skin extension works on plain nested lists, not ndarrays.
    msgs2['L'],msgs2['R'],msgs2['U'],msgs2['D']=do_sweepsSS2(unPots.tolist(), binPots.tolist(), msgs2['L'].tolist(),msgs2['R'].tolist(),msgs2['U'].tolist(),msgs2['D'].tolist(), nsweeps, h2,w2,nd, Tb, f0)
    for dir in ['L','R','U','D']:
        msgs2[dir]=np.array(msgs2[dir]) #convert from lists:
        # Strip the one-pixel padding again before returning.
        msgs2[dir]=msgs2[dir][1:(h2-1),1:(w2-1)][:]+0
    return msgs2
#do BP sweeps:
msgs=do_sweeps(unPots, binPots, msgs, num_sweeps)
unBels=getbeliefs(unPots,msgs)
winners=getwinners(unBels)
# Show the winning-disparity map; colorbar gives the disparity indices.
pl.figure();pl.imshow(winners,interpolation='nearest');pl.title('winners');pl.colorbar()
pl.show()
# Python 2 prompt keeping the figure window open until the user confirms.
raw_input('<press enter>')
|
Installation of the Putco Optic 360 Higher Electrical power LED Fog Lamp Bulbs on a 2005 GMC Sierra - Wholesaler From China Co., Ltd.
Today on the 2005 GMC Sierra Crew Cab, we're going to install part number P250010W. This is the Putco Optic 360 High Power LED Fog Lamp bulbs. For comparison, we'll go ahead and turn on the preexisting fog lights that are on the truck. Let's go ahead and test out our new ones. Now with the new light bulbs you can see how they provide substantially more illumination across a wider area than the stock bulbs did. Note that the end of the bulb actually has an optic lens on the tip to help disperse the light.
Note: Videos are provided as a guide only. Refer to the manufacturer's installation instructions and specifications for complete information.
|
"""
Magic functions for rendering vega/vega-lite specifications
"""
__all__ = ['vega', 'vegalite']
import json
import warnings
import IPython
from IPython.core import magic_arguments
import pandas as pd
import six
from toolz import pipe
from altair.vegalite import v1 as vegalite_v1
from altair.vegalite import v2 as vegalite_v2
from altair.vega import v2 as vega_v2
from altair.vega import v3 as vega_v3
try:
import yaml
YAML_AVAILABLE = True
except ImportError:
YAML_AVAILABLE = False
# Map (spec flavor, major version string) -> renderer class used to display
# the parsed spec in the notebook frontend.
RENDERERS = {
    'vega': {
        '2': vega_v2.Vega,
        '3': vega_v3.Vega,
    },
    'vega-lite': {
        '1': vegalite_v1.VegaLite,
        '2': vegalite_v2.VegaLite,
    }
}

# Map (spec flavor, major version string) -> data transformer registry used
# to convert pandas DataFrames into spec-compatible data.
TRANSFORMERS = {
    'vega': {
        # Vega doesn't yet have specific data transformers; use vegalite
        '2': vegalite_v1.data_transformers,
        '3': vegalite_v2.data_transformers,
    },
    'vega-lite': {
        '1': vegalite_v1.data_transformers,
        '2': vegalite_v2.data_transformers,
    }
}
def _prepare_data(data, data_transformers):
"""Convert input data to data for use within schema"""
if data is None or isinstance(data, dict):
return data
elif isinstance(data, pd.DataFrame):
return pipe(data, data_transformers.get())
elif isinstance(data, six.string_types):
return {'url': data}
else:
warnings.warn("data of type {0} not recognized".format(type(data)))
return data
def _get_variable(name):
    """Return the variable called *name* from the interactive namespace.

    Raises
    ------
    ValueError
        If not running inside an IPython environment.
    NameError
        If *name* is not defined in the user namespace.
    """
    ip = IPython.get_ipython()
    if ip is None:
        # fixed typo in user-facing message ("environemnt" -> "environment")
        raise ValueError("Magic command must be run within an IPython "
                         "environment, in which get_ipython() is defined.")
    if name not in ip.user_ns:
        raise NameError("argument '{0}' does not match the "
                        "name of any defined variable".format(name))
    return ip.user_ns[name]
@magic_arguments.magic_arguments()
@magic_arguments.argument(
    'data',
    nargs='*',
    help='local variable name of a pandas DataFrame to be used as the dataset')
@magic_arguments.argument('-v', '--version', dest='version', default='3')
@magic_arguments.argument('-j', '--json', dest='json', action='store_true')
def vega(line, cell):
    """Cell magic for displaying Vega visualizations in CoLab.

    %%vega [name1:variable1 name2:variable2 ...] [--json] [--version='3']

    Visualize the contents of the cell using Vega, optionally specifying
    one or more pandas DataFrame objects to be used as the datasets.

    Each dataset argument is either ``name`` (variable doubles as dataset
    name) or ``name:variable``.

    If --json is passed, then input is parsed as json rather than yaml.
    """
    args = magic_arguments.parse_argstring(vega, line)

    version = args.version
    if version not in RENDERERS['vega']:
        # explicit check instead of `assert`, which is stripped under -O
        raise ValueError("invalid vega version: {0!r}".format(version))
    Vega = RENDERERS['vega'][version]
    data_transformers = TRANSFORMERS['vega'][version]

    def namevar(s):
        """Split 'name:variable' (or bare 'name') into a (name, var) pair."""
        s = s.split(':')
        if len(s) == 1:
            return s[0], s[0]
        elif len(s) == 2:
            return s[0], s[1]
        else:
            raise ValueError("invalid identifier: '{0}'".format(s))

    try:
        data = list(map(namevar, args.data))
    except ValueError:
        raise ValueError("Could not parse arguments: '{0}'".format(line))

    if args.json:
        spec = json.loads(cell)
    elif not YAML_AVAILABLE:
        try:
            spec = json.loads(cell)
        except json.JSONDecodeError:
            raise ValueError("%%vega: spec is not valid JSON. "
                             "Install pyyaml to parse spec as yaml")
    else:
        # safe_load: the spec is plain data; yaml.load without an explicit
        # Loader can construct arbitrary Python objects and is deprecated
        spec = yaml.safe_load(cell)

    if data:
        # replace the spec's data entries with the prepared DataFrames
        spec['data'] = []
        for name, val in data:
            val = _get_variable(val)
            prepped = _prepare_data(val, data_transformers)
            prepped['name'] = name
            spec['data'].append(prepped)

    return Vega(spec)
@magic_arguments.magic_arguments()
@magic_arguments.argument(
    'data',
    nargs='?',
    help='local variablename of a pandas DataFrame to be used as the dataset')
@magic_arguments.argument('-v', '--version', dest='version', default='2')
@magic_arguments.argument('-j', '--json', dest='json', action='store_true')
def vegalite(line, cell):
    """Cell magic for displaying vega-lite visualizations in CoLab.

    %%vegalite [dataframe] [--json] [--version=2]

    Visualize the contents of the cell using Vega-Lite, optionally
    specifying a pandas DataFrame object to be used as the dataset.

    if --json is passed, then input is parsed as json rather than yaml.
    """
    args = magic_arguments.parse_argstring(vegalite, line)
    version = args.version
    if version not in RENDERERS['vega-lite']:
        # explicit check instead of `assert`, which is stripped under -O
        raise ValueError("invalid vega-lite version: {0!r}".format(version))
    VegaLite = RENDERERS['vega-lite'][version]
    data_transformers = TRANSFORMERS['vega-lite'][version]

    if args.json:
        spec = json.loads(cell)
    elif not YAML_AVAILABLE:
        try:
            spec = json.loads(cell)
        except json.JSONDecodeError:
            raise ValueError("%%vegalite: spec is not valid JSON. "
                             "Install pyyaml to parse spec as yaml")
    else:
        # safe_load: the spec is plain data; yaml.load without an explicit
        # Loader can construct arbitrary Python objects and is deprecated
        spec = yaml.safe_load(cell)

    if args.data is not None:
        data = _get_variable(args.data)
        spec['data'] = _prepare_data(data, data_transformers)

    return VegaLite(spec)
|
b. 4 MAY 1669 Hampton, Rockingham, NH.
b. ABT. AUG 1870 Monson, Mass.
d. 18 AUG 1935 West Springfield, Mass.
Marriage: 8 MAY 1893 Belchertown, Mass.
b. 30 AUG 1869 Wilbraham, Mass.
d. 17 MAR 1937 West Springfield, Mass.
b. 2 MAR 1768 Sutton, Worcester, Mass.
d. 4 JUL 1839 Dudley, Mass.
d. 6 JUN 1838 Dudley, Mass.
b. 16 DEC 1810 Dudley, Mass.
Marriage: 31 OCT 1823 Dudley, Mass.
b. ABT. 1797 Thompson, CT.
b. JUN 1669 Salem, Mass.
d. NOV 1748 Sutton, Worcester, Mass.
b. 15 OCT 1671 Salem, Mass.
d. BEF. SEP 1717 Salem, Mass.
b. 25 JAN 1696/97 Salem, Mass.
b. 1 JUN 1699 Salem, Mass.
b. 11 FEB 1699/00 Salem, Mass.
b. 16 DEC 1702 Salem, Mass.
b. 15 OCT 1705 Salem, Mass.
b. 17 JUN 1739 Sutton, Worcester, Mass.
Marriage: 18 JUN 1772 Sutton, Worcester, Mass.
b. 9 MAY 1748 Sutton, Worcester, Mass.
d. 2 JAN 1822 Leicester, Mass.
b. 19 FEB 1774 Leicester, Mass.
b. 27 FEB 1779 Leicester, Mass.
b. 8 JUN 1783 Leicester, Mass.
Marriage: 9 OCT 1800 East Haddam, CT.
b. 14 MAR 1777 East Haddam, CT.
d. 30 MAY 1848 East Haddam, CT.
b. 23 JAN 1782 Richmond, Berkshire, Mass.
d. 15 AUG 1832 East Haddam, CT.
b. 11 DEC 1778 East Haddam, CT.
d. 2 SEP 1824 East Haddam, CT.
b. 1810 East Haddam, CT.
b. ABT. JUL 1812 East Haddam, CT.
b. ABT. JUL 1815 East Haddam, CT.
b. ABT. JUL 1818 East Haddam, CT.
d. 14 DEC 1821 East Haddam, CT.
b. 13 FEB 1789 East Haddam, CT.
Marriage: 1 OCT 1807 East Haddam, CT.
b. 11 FEB 1785 East Haddam, CT.
b. 5 DEC 1809 East Haddam, CT.
b. 23 JUN 1811 East Haddam, CT.
b. 21 APR 1814 East Haddam, CT.
b. 19 MAY 1787 East Haddam, CT.
b. 17 OCT 1791 East Haddam, CT.
Marriage: 12 MAR 1814 East Haddam, CT.
b. 22 DEC 1793 East Haddam, CT.
b. 17 DEC 1738 Lebanon, CT.
d. 27 JUN 1808 Lebanon, CT.
b. 6 JUL 1780 Lebanon, CT.
b. 10 SEP 1732 Lebanon, CT.
b. 1735 Martha's Vineyard, Mass.
d. 12 NOV 1789 Sharon, CT.
b. 8 APR 1730 Lebanon, CT.
b. 17 APR 1758 Lebanon, CT.
b. 26 NOV 1759 Lebanon, CT.
b. 26 JUL 1734 Lunenburg, Mass.
Marriage: 11 MAY 1758 Lebanon, CT.
b. 16 OCT 1734 Lebanon, CT.
b. 22 AUG 1668 Barnstable, Mass.
d. AFT. 1746 Edgartown, Mass.
b. NOV 1715 Topsfield, Mass.
Marriage: 3 SEP 1733 Topsfield, Mass.
b. 5 APR 1711 Topsfield, Mass.
b. 12 APR 1687 Bradford, Mass.
b. 22 APR 1762 Lunenburg, Mass.
b. 19 SEP 1759 Topsfield, Mass.
Marriage: 26 NOV 1772 Rindge, Cheshire, NH.
b. 22 DEC 1749 Lunenburg, Mass.
d. 31 JAN 1835 Rindge, Cheshire, NH.
b. 24 NOV 1774 Rindge, Cheshire, NH.
b. 20 OCT 1771 Orford, NH.
d. 19 FEB 1884 Orford, NH.
Marriage: 10 APR 1798 Framington, Mass.
b. 30 MAY 1773 Rindge, Cheshire, NH.
d. 12 DEC 1857 Orford, NH.
b. 19 DEC 1802 Orford, NH.
b. 26 JUL 1804 Orford, NH.
b. 20 FEB 1806 Orford, NH.
b. 6 SEP 1807 Orford, NH.
b. 22 APR 1809 Orford, NH.
b. 13 MAY 1813 Orford, NH.
b. 15 NOV 1814 Orford, NH.
b. 14 OCT 1677 Boston, Mass.
d. 5 MAY 1765 York, ME.
d. OCT 1676 Malden, Mass.
d. 28 OCT 1661 Malden, Mass.
d. 3 OCT 1658 Malden, Mass.
b. 27 DEC 1652 Malden, Mass.
d. MAR 1673/74 Malden, Mass.
Marriage: 7 FEB 1673/74 Malden, Mass.
b. ABT. 1649 Malden, Mass.
d. 24 APR 1674 Malden, Mass.
d. 17 NOV 1717 Malden, Mass.
b. ABT. 1656 Malden, Mass.
d. 19 FEB 1672/73 Malden, Mass.
d. 14 JUL 1667 Reading, Mass.
d. 28 JAN 1675/76 Reading, Mass.
b. 20 JUL 1660 Reading, Mass.
d. 4 NOV 1660 Reading, Mass.
b. 14 JUL 1638 Salem, Mass.
Marriage: 11 JUN 1670 Salem, Mass.
b. 1 OCT 1651 Reading, Mass.
Marriage: 22 JAN 1677/78 Reading, Mass.
b. 16 JAN 1653/54 Reading, Mass.
d. 21 JUL 1695 Reading, Mass.
d. 7 DEC 1849 Shoreham, VT.
b. 7 JAN 1868 Scotland, CT.
b. 28 NOV 1748 Windham, CT.
d. 27 DEC 1834 Scotland, CT.
Marriage: 13 DEC 1769 Windham, CT.
b. 26 OCT 1746 Scotland, CT.
d. 6 MAR 1814 Scotland, CT.
b. 7 OCT 1726 Norwich, CT.
d. 18 JAN 1789 Windham, CT.
b. 19 JUL 1725 Windham, CT.
d. 10 SEP 1810 Windham, CT.
b. 22 APR 1750 Windham, CT.
b. 11 OCT 1754 Windham, CT.
b. 20 NOV 1756 Windham, CT.
d. 3 OCT 1834 Windham, CT.
b. 5 OCT 1758 Windham, CT.
b. 5 AUG 1760 Windham, CT.
b. 10 SEP 1762 Windham, CT.
d. 30 JUL 1864 Syracuse, NY.
b. 15 AUG 1766 Windham, CT.
b. 20 JUN 1770 Windham, CT.
b. 21 APR 1768 Windham, CT.
b. 13 NOV 1747 Windham, CT.
Marriage: 14 NOV 1793 Canterbury, CT.
b. 9 MAY 1769 Canterbury, CT.
b. 21 FEB 1796 Canterbury, CT.
b. 16 JUN 1734 Canterbury, CT.
Marriage: 25 APR 1753 Canterbury, CT.
d. 12 MAY 1795 Canterbury, CT.
b. 25 JAN 1754 Canterbury, CT.
b. 4 SEP 1755 Canterbury, CT.
b. 16 JAN 1757 Canterbury, CT.
b. 21 NOV 1758 Canterbury, CT.
b. 21 SEP 1760 Canterbury, CT.
b. 27 AUG 1762 Canterbury, CT.
b. 3 OCT 1764 Canterbury, CT.
b. 7 NOV 1766 Canterbury, CT.
b. MAR 1771 Canterbury, CT.
b. 17 SEP 1773 Canterbury, CT.
b. 22 AUG 1775 Canterbury, CT.
Marriage: 19 APR 1798 West Springfield, Mass.
b. 24 OCT 1768 Springfield, Mass.
d. 24 DEC 1842 West Springfield, Mass.
b. 25 AUG 1809 West Springfield, Mass.
b. 18 DEC 1802 Darien, Fairfield, CT.
Marriage: 12 OCT 1824 West Springfield, Mass.
b. AUG 1807 West Springfield, Mass.
d. 19 JAN 1715/16 Stamford, CT.
b. 9 APR 1775 Stamford, CT.
b. 9 SEP 1766 Stamford, CT.
Marriage: 4 JAN 1786 Stamford, CT.
b. 18 APR 1763 Stamford, CT.
d. 27 NOV 1828 Greenwich, Fairfield, CT.
Marriage: 18 OCT 1789 Stamford, CT.
b. 15 SEP 1766 Stamford, CT.
b. 11 FEB 1744/45 Stamford, CT.
Marriage: 22 SEP 1763 Stamford, CT.
b. 21 DEC 1740 Stamford, CT.
b. 11 JUL 1768 Stamford, CT.
b. 28 DEC 1774 Stamford, CT.
b. 11 DEC 1766 Stamford, CT.
d. 18 JUN 1831 Stamford, CT.
b. 17 APR 1772 Stamford, CT.
Marriage: 9 FEB 1792 Stamford, CT.
b. 5 JAN 1772 Stamford, CT.
b. 14 APR 1779 Stamford, CT.
Marriage: 24 DEC 1799 Stamford, CT.
b. 27 MAR 1777 Stamford, CT.
Marriage: 27 APR 1709 Stamford, CT.
b. 2 DEC 1681 Stamford, CT.
d. APR 1746 Stamford, CT.
b. 19 APR 1710 Stamford, CT.
b. 23 JUL 1711 Stamford, CT.
b. 2 DEC 1716 Stamford, CT.
b. ABT. 1718 Stamford, CT.
Marriage: 5 JAN 1748/49 Greenwich, CT.
b. ABT. 1724 Stamford, CT.
b. 15 JUL 1750 Stamford, CT.
d. 19 MAY 1812 New Canaan, CT.
b. 24 APR 1723 Stratfield, CT.
Marriage: 7 NOV 1745 Greenwich, CT.
b. 2 SEP 1746 Stamford, CT.
b. 10 MAY 1754 Stamford, CT.
b. 25 SEP 1758 Stamford, CT.
b. 11 MAY 1784 Stamford, CT.
d. 9 DEC 1864 Stamford, CT.
b. 26 APR 1780 Stamford, CT.
b. 26 MAR 1808 Long Ridge, Fairfield, CT.
b. 10 JAN 1810 Long Ridge, Fairfield, CT.
b. 28 SEP 1811 Long Ridge, Fairfield, CT.
d. 23 SEP 1812 Long Ridge, Fairfield, CT.
b. 7 JUL 1813 Long Ridge, Fairfield, CT.
b. 21 JAN 1768 Greenwich, Fairfield, CT.
d. 7 FEB 1816 Stamford, CT.
Marriage: 25 FEB 1796 Stamford, CT.
b. 25 OCT 1777 Stamford, CT.
d. 16 JUL 1814 Stamford, CT.
b. 20 JUN 1811 Stamford, CT.
b. ABT. 1814 Stamford, CT.
Marriage: 17 SEP 1746 New Canaan, CT.
b. 2 JUL 1731 Stamford, CT.
b. 5 MAY 1748 Stamford, CT.
b. 8 SEP 1750 Stamford, CT.
b. 8 JAN 1726/27 Stamford, CT.
d. ABT. 1758 Stamford, CT.
Marriage: 14 APR 1751 Stamford, CT.
b. 14 JUL 1733 Stamford, CT.
b. 9 JUL 1754 Stamford, CT.
b. 10 JAN 1756 Stamford, CT.
b. 8 DEC 1698 Stamford, CT.
b. 30 NOV 1690 Stamford, CT.
d. 10 JUN 1778 Stamford, CT.
b. 2 NOV 1739 Stamford, CT.
b. 23 JUL 1754 New Canaan, CT.
b. 6 SEP 1765 Stamford, CT.
b. 27 OCT 1717 Stamford, CT.
Marriage: 22 OCT 1741 Stamford, CT.
b. 1 JUN 1721 Stamford, CT.
b. 15 SEP 1744 Stamford, CT.
b. 9 AUG 1746 Stamford, CT.
b. 23 NOV 1748 Stamford, CT.
b. 2 FEB 1749/50 Stamford, CT.
b. 6 NOV 1751 Stamford, CT.
b. 6 NOV 1753 Stamford, CT.
Marriage: 3 OCT 1712 Stamford, CT.
b. 13 NOV 1719 Stamford, CT.
b. 27 OCT 1732 Stamford, CT.
b. 18 NOV 1675 Stamford, CT.
Marriage: 11 AUG 1698 Stamford, CT.
d. 20 NOV 1706 Stamford, CT.
b. 3 JUN 1699 Stamford, CT.
b. 9 NOV 1701 Stamford, CT.
b. 12 MAR 1702/03 Stamford, CT.
Marriage: 19 FEB 1764 Stamford, CT.
b. 2 DEC 1745 Stamford, CT.
b. 12 JUN 1747 Stratford, Fairfield, CT.
d. 28 DEC 1808 New Canaan, CT.
Marriage: 7 DEC 1769 Stamford, CT.
b. 26 FEB 1772 Stamford, CT.
b. 23 OCT 1773 Stamford, CT.
b. 30 JUN 1775 Stamford, CT.
b. 29 JUL 1779 Stamford, CT.
b. 22 FEB 1782 Stamford, CT.
b. 29 FEB 1784 Stamford, CT.
b. 11 MAY 1790 Stamford, CT.
b. 26 MAY 1792 Stamford, CT.
Marriage: 8 JUL 1792 Canaan, CT.
b. 13 DEC 1795 Canaan, CT.
b. 30 MAR 1798 Canaan, CT.
b. 23 APR 1804 Canaan, CT.
d. 29 NOV 1852 Cornwall, CT.
b. 13 APR 1791 Canaan, CT.
d. 6 NOV 1862 Cornwall, CT.
b. 2 MAR 1659/60 Taunton, Mass.
d. 10 JUN 1743 Taunton, Mass.
b. 3 MAR 1696/97 Mansfield, CT.
b. 10 NOV 1678 Preston, CT.
Marriage: 6 JAN 1695/96 Preston, CT.
d. ABT. 1724 Pomfret, CT.
b. 6 JAN 1699/00 Preston, CT.
b. 9 AUG 1709 Norwich, CT.
b. 4 JUL 1709 Pomfret, CT.
d. 17 APR 1777 Brooklyn, CT.
Marriage: 12 FEB 1728/29 Pomfret, CT.
b. 20 JAN 1702/03 Preston, CT.
d. 21 JUL 1757 Brooklyn, CT.
b. 12 MAR 1710/11 Lebanon, CT.
b. 5 APR 1711 Mansfield, CT.
b. 25 APR 1714 Mansfield, CT.
b. 12 JAN 1814 Sherburne, Chenango, NY.
d. 16 SEP 1898 Stamford, CT.
b. 6 AUG 1806 Long Ridge, Fairfield, CT.
b. 22 JUL 1843 Long Ridge, Fairfield, CT.
b. 24 JUN 1845 Long Ridge, Fairfield, CT.
b. 5 SEP 1847 Long Ridge, Fairfield, CT.
b. 5 JUN 1850 Long Ridge, Fairfield, CT.
b. 3 AUG 1852 Long Ridge, Fairfield, CT.
b. 26 MAR 1856 Long Ridge, Fairfield, CT.
d. 29 MAY 1825 Lisle, Broome, NY.
Marriage: 9 MAY 1808 Sherburne, NY.
b. 16 MAR 1759 Rocky Hill, CT.
Marriage: 25 SEP 1787 Wethersfield, CT.
b. 25 APR 1764 Wethersfield, CT.
d. 1 JUN 1839 Sherburne, Chenango, NY.
b. 9 JUN 1768 Middletown, CT.
b. 7 MAR 1761 Rocky Hill, CT.
d. 2 JAN 1839 Jamestown, Chautauqua, NY.
b. 18 APR 1815 Long Ridge, Fairfield, CT.
b. ABT. 1810 Stamford, CT.
Marriage: 14 FEB 1847 Stamford, CT.
b. 12 OCT 1820 Long Ridge, Fairfield, CT.
b. 25 SEP 1812 New Canaan, CT.
d. 13 MAY 1859 Stamford, CT.
Marriage: 8 APR 1844 Stamford, CT.
b. 1 APR 1822 Long Ridge, Fairfield, CT.
d. 20 SEP 1855 South Norwalk, CT.
d. 6 DEC 1825 New Canaan, CT.
d. 5 SEP 1833 New Canaan, CT.
Marriage: 22 APR 1821 New Canaan, CT.
b. 8 APR 1730 Stamford, CT.
d. 6 MAY 1810 New Canaan, CT.
Marriage: 19 JUL 1760 New Canaan, CT.
b. 23 JUL 1735 New Canaan, CT.
d. 1818 New Canaan, CT.
b. 12 NOV 1761 Stamford, CT.
b. 9 NOV 1763 Stamford, CT.
b. 26 MAY 1766 Stamford, CT.
b. 1 DEC 1768 Stamford, CT.
b. 26 DEC 1770 Stamford, CT.
b. 5 APR 1772 Stamford, CT.
b. 22 NOV 1773 Stamford, CT.
b. 26 MAY 1775 Stamford, CT.
b. 21 FEB 1777 Stamford, CT.
b. 17 OCT 1778 Stamford, CT.
|
# -*- coding: utf-8 -*-
# @COPYRIGHT_begin
#
# Copyright [2010-2014] Institute of Nuclear Physics PAN, Krakow, Poland
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @COPYRIGHT_end
"""@package src.wi.forms.cm
@author Krzysztof Danielowski
@author Piotr Wójcik
@date 03.12.2010
"""
from django import forms
from django.utils.translation import ugettext_lazy as _
from wi.utils.forms import PasswordForm, attrs_dict
from wi.utils.regexp import regexp, regexp_text
class EditCMForm(forms.Form):
    """
    Form for <b>editing a CM</b> entry: its name, address and port.
    """
    # CM display name, validated against the project's device-name regexp
    name = forms.RegexField(regex=regexp['dev_name'],
                            max_length=40,
                            label=_("Name"),
                            widget=forms.TextInput(attrs=attrs_dict),
                            error_messages={'invalid': regexp_text['dev_name']})
    # host address of the CM; maxlength 45 fits a full IPv6 literal
    address = forms.CharField(widget=forms.TextInput(attrs=dict(attrs_dict, maxlength=45)),
                              label=_("Address"))
    # TCP port the CM listens on
    port = forms.IntegerField(label=_("Port"))
class CreateCMForm(EditCMForm, PasswordForm):
    """
    Form for <b>creating a CM</b>: all EditCMForm fields (name, address,
    port) plus the password fields inherited from PasswordForm.
    """
|
Senate Republican Whip John Cornyn, of Texas, is having a little "buyer's remorse" over his decision to vote in favor of confirming Hillary as Secretary of State back in 2009.
Cornyn was one of the Republicans that originally held up Hillary's confirmation over concerns that the Clinton Foundation created an insurmountable "conflict of interest." That said, Republicans, including Cornyn, ended up breaking party lines and confirmed Clinton after the Foundation entered into a "memorandum of understanding" that called for, among other things, transparency around donations, particularly those from foreign governments.
“When I put a hold on Mrs. Clinton’s nomination as Secretary of State, she reassured me that they would take appropriate steps. As seems to be usual for the Clintons, they crossed the line and all the concerns that she reassured me would not occur did in fact occur."
The MOU, included below in its entirety for your reading pleasure, stated that, among other things, the Clinton Foundation would publish the names of all its existing contributors as well as the names of all new contributors. The MOU was executed in December 2008 between the Clinton Foundation and President Obama's office.
The Parties seek to ensure that the activities of the Foundation, however beneficial, do not create conflicts or the appearance of conflicts for Senator Clinton as Secretary of State.
In anticipation of Senator Clinton's nomination and confirmation as Secretary of State, the Foundation will publish its contributors this year. During any service by Senator Clinton as Secretary of State, the Foundation will publish annually the names of new contributors.
Should an existing contributing country elect to increase materially its commitment, or should a new contributor country elect to support CHAI, the Foundation will share such countries and the circumstances of the anticipated contribution with the State Department designated agency ethics official for review, and as appropriate, the State Department's designated agency ethics official will submit the matter for review by a designated official in the White House Counsel's office. In the event the State Department or White House has concerns about a proposed contribution that are related to Senator Clinton's service as Secretary of State, those concerns will be conveyed to her and to the Clinton Foundation for appropriate action. For purposes of this paragraph, an agency or department of a foreign country, as well as a government-owned corporation, will be treated as a foreign country.
But it wasn't long before the Clinton Foundation was found to be in breach of the MOU.
Per The Hill, the Clinton Health Access Initiative, a fund within the foundation, did not meet its reporting requirements from 2009 to 2013. Moreover, Clinton Foundation officials acknowledged to The Washington Post last year that they made a mistake by not seeking prior approval from the State Department ethics office for a $500,000 donation from the Algerian government.
In addition, State Department records obtained by the conservative group Judicial Watch and made public last month showed that Doug Band, a senior executive at the Clinton Foundation, helped set up a meeting between Crown Prince Salman of Bahrain with Clinton in 2009 after the prince’s efforts to reach out through normal channels failed. Band described Salman as a “good friend of ours.” By 2010, a scholarship fund set up by Salman gave $32 million to the Clinton Global Initiative, according to Judicial Watch.
Finally, just yesterday we called out the "curious" timing of a meeting between Dow Chemical CEO, Andrew Liveris, and Hillary back in July 2009 (see "Did Foundation Donor Dow Chemical Seek Hillary "Favor" To Settle $9 Billion Lawsuit With Kuwait?"). Per the email below from Huma, apparently Bill was really eager for Hillary to meet up with the CEO of the large Clinton Foundation donor. That said, we're sure it had nothing to do with open litigation initiated by Dow Chemical against Kuwait (another large Clinton Foundation donor) for backing out of the $17 billion K-Dow joint venture that would have netted Dow Chemical $9 billion in cash.
Wjc wants to be sure hrc sees Andrew Liveris, ceo of dow tomorrow night. Apparently he is head of us china business council. Is he definitely going to be there?
But we urge Senator Cornyn to relax. While there seems to be a lot of smoke here, Hillary has assured us that these are all simple, honest mistakes and there "is no fire."
|
import shutil
import tempfile
import time
import logging
import sys
import spv
import pyethereum
import pyethereum.db as db
import pyethereum.opcodes as opcodes
from pyethereum.slogging import get_logger, LogRecorder, configure_logging
serpent = None
u = pyethereum.utils
t = pyethereum.transactions
b = pyethereum.blocks
pb = pyethereum.processblock
vm = pyethereum.vm
accounts = []
keys = []
# ten deterministic test keypairs: private key i is sha3(str(i)),
# the matching address is derived from that key
for i in range(10):
    keys.append(u.sha3(str(i)))
    accounts.append(u.privtoaddr(keys[-1]))
# convenience aliases: k0..k9 are private keys, a0..a9 the matching addresses
k0, k1, k2, k3, k4, k5, k6, k7, k8, k9 = keys[:10]
a0, a1, a2, a3, a4, a5, a6, a7, a8, a9 = accounts[:10]
seed = 3 ** 160

# Pseudo-RNG (deterministic for now for testing purposes)
def rand():
    """Advance the global state by squaring modulo 2**512 and return
    its low 256 bits as a deterministic pseudo-random integer."""
    global seed
    seed = pow(seed, 2, 1 << 512)
    return seed & ((1 << 256) - 1)
class state():
    """Lightweight in-memory chain for testing contracts.

    Wraps a genesis block backed by a temporary database, pre-funding the
    module-level test accounts a0..a9.  Provides helpers to compile (via
    serpent) and deploy contracts, send transactions, profile gas usage,
    build/verify SPV proofs, trace execution and mine blocks.
    """

    def __init__(self, num_accounts=len(keys)):
        """Create a fresh chain funding the first *num_accounts* accounts
        with 10**24 wei each."""
        global serpent
        if not serpent:
            # imported lazily so importing this module does not require serpent
            serpent = __import__('serpent')

        self.temp_data_dir = tempfile.mkdtemp()
        self.db = db.DB(u.db_path(self.temp_data_dir))

        o = {}
        for i in range(num_accounts):
            o[accounts[i]] = 10 ** 24
        self.block = b.genesis(self.db, o)
        self.blocks = [self.block]
        self.block.timestamp = 1410973349  # fixed timestamp for determinism
        self.block.coinbase = a0
        self.block.gas_limit = 10 ** 9

    def __del__(self):
        # best-effort cleanup of the temporary database directory
        shutil.rmtree(self.temp_data_dir)

    def contract(self, code, sender=k0, endowment=0):
        """Compile serpent *code*, deploy it, and return the contract address."""
        evm = serpent.compile(code)
        print('>>> contract() evm type = {}'.format(type(evm)))
        print('>>> contract() evm = {}'.format(evm))
        o = self.evm(evm, sender, endowment)
        assert len(self.block.get_code(o)), "Contract code empty"
        return o

    def contract_from_evm(self, evm, sender=k0, endowment=0):
        """Deploy pre-compiled *evm* bytecode and return the contract address."""
        print('>>> contract_from_evm() evm type = {}'.format(type(evm)))
        o = self.evm(evm, sender, endowment)
        assert len(self.block.get_code(o)), "Contract code empty"
        return o

    def abi_contract(me, code, sender=k0, endowment=0):
        """Deploy serpent *code* and return a proxy object that exposes each
        ABI function as a plain Python method."""
        class _abi_contract():

            def __init__(self, _state, code, sender=k0, endowment=0):
                evm = serpent.compile(code)
                self.address = me.evm(evm, sender, endowment)
                assert len(me.block.get_code(self.address)), \
                    "Contract code empty"
                sig = serpent.mk_signature(code)
                sig = sig[sig.find('[')+1:sig.rfind(']')].split(',')
                for i, s in enumerate(sig):
                    fun = s[:s.find(':')].strip()
                    funsig = s[s.find(':')+1:].strip()

                    # BUGFIX: funid must be passed into the factory.  The
                    # previous code closed over the loop variable ``i`` inside
                    # ``kall`` (late binding), so every generated method ended
                    # up calling the *last* function id.
                    def kall_factory(fun, funsig, funid):

                        def kall(*abi, **kwargs):
                            if len(funsig) != len(abi):
                                raise Exception("Wrong number of arguments!")
                            # funsig is a string of type codes: i(nt), s(tring), a(rray)
                            for typ, val in zip(funsig, abi):
                                typ2 = 'i' if isinstance(val, (int, long)) else \
                                    's' if isinstance(val, (str, unicode)) else \
                                    'a' if isinstance(val, list) else 'err'
                                if typ != typ2:
                                    raise Exception('Type mismatch!')
                            return _state.send(kwargs.get('sender', k0),
                                               self.address,
                                               kwargs.get('value', 0),
                                               funid=funid, abi=abi)
                        return kall

                    vars(self)[fun] = kall_factory(fun, funsig, i)

        return _abi_contract(me, code, sender, endowment)

    def evm(self, evm, sender=k0, endowment=0):
        """Deploy *evm* bytecode via a contract-creation transaction and
        return the new contract's address."""
        sendnonce = self.block.get_nonce(u.privtoaddr(sender))
        tx = t.contract(sendnonce, 1, gas_limit, endowment, evm)
        tx.sign(sender)
        (s, a) = pb.apply_transaction(self.block, tx)
        if not s:
            raise Exception("Contract creation failed")
        return a

    def send(self, sender, to, value, data=[], funid=None, abi=None):
        """Send a transaction and return its decoded output datalist, with
        values mapped into the signed 256-bit range."""
        sendnonce = self.block.get_nonce(u.privtoaddr(sender))
        if funid is not None:
            evmdata = serpent.encode_abi(funid, *abi)
        else:
            evmdata = serpent.encode_datalist(*data)
        tx = t.Transaction(sendnonce, 1, gas_limit, to, value, evmdata)
        self.last_tx = tx
        tx.sign(sender)
        (s, r) = pb.apply_transaction(self.block, tx)
        if not s:
            raise Exception("Transaction failed")
        o = serpent.decode_datalist(r)
        # interpret outputs as two's-complement signed 256-bit integers
        return map(lambda x: x - 2 ** 256 if x >= 2 ** 255 else x, o)

    def profile(self, sender, to, value, data=[], funid=None, abi=None):
        """Like send(), but also report wall-clock time and gas used,
        excluding the intrinsic per-byte transaction-data cost."""
        tm, g = time.time(), self.block.gas_used
        o = self.send(sender, to, value, data, funid, abi)
        zero_bytes = self.last_tx.data.count(chr(0))
        non_zero_bytes = len(self.last_tx.data) - zero_bytes
        intrinsic_gas_used = opcodes.GTXDATAZERO * zero_bytes + \
            opcodes.GTXDATANONZERO * non_zero_bytes
        return {
            "time": time.time() - tm,
            "gas": self.block.gas_used - g - intrinsic_gas_used,
            "output": o
        }

    def mkspv(self, sender, to, value, data=[], funid=None, abi=None):
        """Build and return an SPV proof for the given transaction."""
        sendnonce = self.block.get_nonce(u.privtoaddr(sender))
        if funid is not None:
            evmdata = serpent.encode_abi(funid, *abi)
        else:
            evmdata = serpent.encode_datalist(*data)
        tx = t.Transaction(sendnonce, 1, gas_limit, to, value, evmdata)
        self.last_tx = tx
        tx.sign(sender)
        return spv.mk_transaction_spv_proof(self.block, tx)

    def verifyspv(self, sender, to, value, data=[],
                  funid=None, abi=None, proof=[]):
        """Verify *proof* against the given transaction; return the result."""
        sendnonce = self.block.get_nonce(u.privtoaddr(sender))
        if funid is not None:
            evmdata = serpent.encode_abi(funid, *abi)
        else:
            evmdata = serpent.encode_datalist(*data)
        tx = t.Transaction(sendnonce, 1, gas_limit, to, value, evmdata)
        self.last_tx = tx
        tx.sign(sender)
        return spv.verify_transaction_spv_proof(self.block, tx, proof)

    def trace(self, sender, to, value, data=[]):
        """Run send() and return every log record it emitted."""
        # collect log events (independent of loglevel filters)
        recorder = LogRecorder()
        self.send(sender, to, value, data)
        return recorder.pop_records()

    def mine(self, n=1, coinbase=a0):
        """Finalize the head block and mine *n* successors with
        pseudo-random timestamp increments."""
        for i in range(n):
            self.block.finalize()
            # renamed from ``t`` to avoid shadowing the transactions alias
            ts = self.block.timestamp + 6 + rand() % 12
            self.block = b.Block.init_from_parent(self.block, coinbase, '', ts)
            self.blocks.append(self.block)

    def snapshot(self):
        """Serialize the head block for a later revert()."""
        return self.block.serialize()

    def revert(self, data):
        """Restore the head block from a snapshot() payload."""
        self.block = b.Block.deserialize(self.db, data)
# logging
def set_logging_level(lvl=1):
    """Configure VM logging verbosity.

    ``lvl`` indexes ``trace_lvl_map`` from quietest (0, info only) to most
    verbose (4, including storage and memory traces).
    """
    trace_lvl_map = [
        ':info',
        'eth.vm.log:trace',
        ':info,eth.vm.log:trace,eth.vm.exit:trace',
        ':info,eth.vm.log:trace,eth.vm.op:trace,eth.vm.stack:trace',
        ':info,eth.vm.log:trace,eth.vm.op:trace,eth.vm.stack:trace,' +
        'eth.vm.storage:trace,eth.vm.memory:trace'
    ]
    configure_logging(config_string=trace_lvl_map[lvl])
    print 'Set logging level: %d' % lvl
def set_log_trace(logger_names=[]):
    """
    sets all named loggers to level 'trace'
    attention: vm.op.* are only active if vm.op is active
    """
    # BUGFIX: the module only imports specific names from pyethereum.slogging,
    # so the bare name ``slogging`` used below was undefined (NameError at
    # call time).  Import the submodule locally.
    from pyethereum import slogging
    for name in logger_names:
        assert name in slogging.get_logger_names()
        slogging.set_level(name, 'trace')
def enable_logging():
    """Switch to the default verbose logging level (1: eth.vm.log traces)."""
    set_logging_level(1)
def disable_logging():
    """Silence trace logging (level 0: ':info' only)."""
    set_logging_level(0)
gas_limit = 1000000
|
In 01995 two astronomers observed a slight wobble of the star 51 Pegasi. And with that the centuries of speculation about the existence of planets beyond our solar system (extrasolar planets or "exoplanets") began to be scientifically observable fact.
In the two decades since, new science and technology have accelerated exoplanet discovery. Beyond mapping their existence, a more sophisticated search can look for planets similar to our own, with conditions that might support life as we know it. Franck Marchis, the Chair of Exoplanet Research at the SETI Institute, tells us about how he and others are searching, from this distant vantage point, for another "Pale Blue Dot."
Join Long Now to support this series and for free access to live video of this and other Long Now events among many benefits.
Dr. Franck Marchis is a Principal Investigator at the Carl Sagan Center of the SETI Institute since July 2007. Over the past 15 years, he has dedicated his research to the study of our solar system using mainly ground-based telescopes equipped with adaptive optics. He made the first ground-based observations of the volcanoes on the jovian moon Io, using the first Adaptive Optics (AO) systems available on the European Southern Observatory (ESO) 3.6 m telescope at Chile’s La Silla Observatory.
Recently he has been working on a new generation of AOs; developing algorithms to process and enhance the quality of images, both astronomical and biological, using fluorescence microscopy; and developing the Gemini Planet Imager, an extreme AO system for the Gemini South telescope which will image and record spectra of exoplanets orbiting around nearby stars.
He holds a PhD from Toulouse III university in his native France. His doctoral research described the application of adaptive optics to the study of the solar system. The asteroid 6639 Marchis was named in his honor in 02007.
|
def set_1D_boundary_condition(settings):
    """Define the field boundary value from the initial field at z = zmin,
    propagated analytically via exp(F(zmin) * z)."""
    from expresso.pycas import exp
    box = settings.simulation_box
    pde = settings.partial_differential_equation
    u0_at_zmin = pde.u0.subs(box.z, box.zmin)
    F_at_zmin = pde.F.subs(box.z, box.zmin)
    pde.u_boundary = u0_at_zmin * exp(F_at_zmin * box.z)
def set_plane_wave_initial_conditions(settings):
    """Set the initial conditions to a plane wave with intensity 1.

    The boundary values are then derived from the index of refraction at
    z = zmin via set_1D_boundary_condition().
    """
    # removed unused local binding of settings.simulation_box
    pe = settings.partial_differential_equation
    pe.u0 = 1
    set_1D_boundary_condition(settings)
def add_padding(array, factor, mode='edge', **kwargs):
    """Return a copy of *array* padded on every side by ``factor`` times its
    size along each axis, with the coordinate bounds extended to match.

    Extra keyword arguments are forwarded to numpy.pad.
    """
    import numpy as np
    from ..coordinate_ndarray import CoordinateNDArray
    # number of padding points per side, for each dimension
    pad_width = [[int(n * factor)] * 2 for n in array.data.shape]
    padded = np.pad(array.data, pad_width, mode, **kwargs)
    # grow each coordinate bound by (step size * padding points)
    growth = [step * p[0] for step, p in zip(array._dbounds, pad_width)]
    new_bounds = [(lo - g, hi + g)
                  for g, (lo, hi) in zip(growth, array.bounds)]
    return CoordinateNDArray(padded, new_bounds, array.axis, array.evaluate)
def set_initial(settings,initial_array):
    """Use *initial_array* as the initial field u0 of the simulation.

    Accepts an array over axes (x,) or (x, y).  When a CoordinateNDArray is
    given, the simulation box extents (xmin/xmax/sx and, in 2D, ymin/ymax/sy)
    are re-derived from the array's bounds and locked against further edits.

    NOTE(review): the code reads ``initial_array.axis`` even when the input
    is not a CoordinateNDArray -- confirm that plain arrays passed here
    actually carry an ``axis`` attribute.
    """
    import expresso.pycas as pc
    from ..coordinate_ndarray import CoordinateNDArray

    # wrap the raw data in a pycas array symbol named "initial"
    if isinstance(initial_array,CoordinateNDArray):
        initial = pc.array("initial",initial_array.data)
    else:
        initial = pc.array("initial",initial_array)

    sb = settings.simulation_box

    if tuple(initial_array.axis) == (sb.x,):
        # 1D: index by the x grid index only
        settings.partial_differential_equation.u0 = initial(sb.xi)
    elif tuple(initial_array.axis) == (sb.x,sb.y):
        # 2D: note the (yi, xi) index order
        settings.partial_differential_equation.u0 = initial(sb.yi,sb.xi)
        sb.Ny = initial_array.shape[1]
        if isinstance(initial_array,CoordinateNDArray):
            # re-derive the y extent from the array bounds, then lock it
            sb.unlock('ymin')
            sb.unlock('ymax')
            sb.unlock('sy')
            sb.ymin = initial_array.bounds[1][0]
            sb.ymax = initial_array.bounds[1][1]
            sb.sy = sb.ymax - sb.ymin
            sb.lock('ymin','defined by initial array')
            sb.lock('ymax','defined by initial array')
            sb.lock('sy','defined by ymin and ymax')
    else:
        raise ValueError('initial array axis must be (x,) or (x,y)')

    sb.Nx = initial_array.shape[0]
    if isinstance(initial_array,CoordinateNDArray):
        # re-derive the x extent from the array bounds, then lock it
        sb.unlock('xmin')
        sb.unlock('xmax')
        sb.unlock('sx')
        sb.xmin = initial_array.bounds[0][0]
        sb.xmax = initial_array.bounds[0][1]
        sb.sx = sb.xmax - sb.xmin
        sb.lock('xmin','defined by initial array')
        sb.lock('xmax','defined by initial array')
        sb.lock('sx','defined by xmin and xmax')
|
Keep the files that create mess in the room by spreading here and there in an arranged manner with a file holder. Holders that are designed to store the files in a systematic way find wide applications in homes, offices and other places to avoid the problem of cluttering. Jute file holders have become the first and foremost choice of those who want an eco friendly solution to keep their files arranged. Jute File Holders designed from jute are available to provide the user with a better option to store their files along with enhancing the beauty of the space. They are manufactured from the organic material to add natural features to them.
These jute file holders are a blend of high utility and beauty that provide a long lasting impression on the guests with elegance and style. They are highly durable and strong that ensures their long lasting life as well as performance with no maintenance. Compact in nature, jute file holder can be kept any corner of the interior. They are light in weight, handy as well as portable that make a user able to take them anywhere with him or her. After use, they can be folded easily without any wear and tear.
As jute file holders are designed from the natural fiber, they are 100% environment friendly and biodegradable. A user can decompose them when they are in no need. With their eco friendly nature, they help in keeping the environment safe and clean with no pollution. These highly valued holders loaded with unique features are available in plethora of shapes, designs and styles. Jute file holders are manufactured in simple as well as decorative styles. To meet the different demands, they are available decorated with bamboo sheets, as well as other material. Besides natural look, they are also embroidered, painted and dyed in various vibrant colors.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Export a Wiki category into a cohort.
The aim of this script is to allow program leaders to export a category
filled with User pages into a WikiMetrics cohort CSV file in order to
perform their evaluation analysis.
Test:
python cat2cohort.py -l fr -c "Utilisateur participant au projet Afripédia"
"""
import mw_api
import mw_util
def api_url(lang):
    """Return the MediaWiki API endpoint for the given Wikipedia language code."""
    return "https://{0}.wikipedia.org/w/api.php".format(lang)
def list_users(mw, category, lang):
    """Yield (username, lang) pairs for every member page of a wiki category.

    mw       -- mw_api.MwWiki instance used to run the query
    category -- full category title (e.g. "Category:...")
    lang     -- language code, passed through unchanged into each pair
    """
    list_query = mw_api.MwApiQuery(properties={
        "list": "categorymembers",
        "cmtitle": category,
        "cmprop": "ids|title|timestamp",
        "cmtype": "page",
        "cmsort": "sortkey",
        "cmdir": "asc",
        "cmlimit": "max"
    })
    for page in mw.process_query(list_query):
        if ":" in page['title']:
            # Split on the FIRST colon only: the namespace prefix ends there,
            # but usernames may themselves contain colons. The previous
            # split(":")[1] silently truncated such usernames.
            username = page['title'].split(":", 1)[1]
            yield (username, lang)
def cat_to_cohort(language, category):
    """Return the CSV cohort built from the given category and language."""
    wiki = mw_api.MwWiki(url_api=api_url(language))
    users = list_users(wiki, mw_util.str2cat(category), language)
    return _userlist_to_CSV_cohort(users)
def _userlist_to_CSV_cohort(user_list):
    """Return the given (username, language) pairs as one CSV cohort string."""
    lines = [_make_CSV_line(username, language)
             for (username, language) in user_list]
    return '\n'.join(lines)
def _make_CSV_line(username, language):
"""Return a WikiMetrics compatible CSV line."""
return "%s, %swiki" % (username, language)
def main():
    """Entry point: parse CLI arguments and print the cohort CSV to stdout."""
    from argparse import ArgumentParser

    description = "Export a Wiki category into a cohort"
    parser = ArgumentParser(description=description)
    parser.add_argument("-c", "--category",
                        type=str,
                        dest="category",
                        metavar="CAT",
                        required=True,
                        help="The wiki category to export")
    parser.add_argument("-l", "--lang",
                        type=str,
                        dest="lang",
                        metavar="LANG",
                        required=True,
                        help="Wiki language")
    args = parser.parse_args()
    # print(expr) is valid in both Python 2 (a parenthesised print statement)
    # and Python 3 (a function call); the former bare `print expr` was
    # Python-2-only syntax and broke the script under Python 3.
    print(cat_to_cohort(args.lang, args.category))


if __name__ == "__main__":
    main()
|
Orange Congregational Church is an Open and Affirming Congregation, located at 205 Meeting House Lane in Orange. On Sunday December 24th, the Fourth Sunday of Advent, there will be a ‘Christmas Eve’ morning service at 10 a.m. in the Sanctuary; our Crib Room will also be open for all birth to Pre-K children. Please note, there will be no regular coffee hour following the Christmas Eve morning service. Later in the day, there will be two ‘Christmas Eve’ evening services in our Sanctuary: a family service entitled, “The Message of St. Nicholas” will take place at 5 p.m. This service will include music from the Junior Choir. A traditional Candlelight service of Lessons and Carols will take place at 10 p.m. with music from the Senior Choir. On New Year’s Eve we will have regular Sunday morning worship services at 8 a.m. in the Chapel and 10 a.m. in the Sanctuary, with fellowship hour and coffee following each. For more information or any questions, please call the church office at 203-795-9749.
|
# -*- coding: utf-8 -*-
import numpy as np
from pandas import DataFrame, Series
# Demo: reindexing pandas Series and DataFrame objects.
print("## Redesignate index and order:")
obj = Series([4.5, 7.2, -5.3, 3.6], index=['d', 'b', 'a', 'c'])
print(obj)
# Labels absent from the original index come back as NaN.
obj2 = obj.reindex(['a', 'b', 'd', 'c', 'e'])
print(obj2)
print("### fill with specified value if the index not exist:")
obj3 = obj.reindex(['a', 'b', 'd', 'c', 'e'], fill_value=0)
print(obj3)
print()
print("## Redesignate index and fill method:")
obj4 = Series(['blue', 'purple', 'yellow'], index=[0, 2, 4])
print(obj4)
# 'ffill' propagates the last valid value forward into the new labels.
print(obj4.reindex(range(6), method='ffill'))
print()
print("## Redesignate index of DataFrame:")
frame = DataFrame(np.arange(9).reshape(3, 3),
                  index=['a', 'c', 'd'],
                  columns=['Ohio', 'Texas', 'California'])
print(frame)
frame2 = frame.reindex(['a', 'b', 'c', 'd'])
print(frame2)
print()
print("## Redesignate column:")
states = ['Texas', 'Utah', 'California']
print(frame.reindex(columns=states))
print()
print("## Redesignate index of DataFrame and fill method:")
# `method=` requires a monotonic index on the axis being filled; the target
# column labels are not monotonic, so forward-fill the rows first and select
# the columns in a second reindex (passing both at once raises on modern pandas).
print(frame.reindex(index=['a', 'b', 'c', 'd'], method='ffill')
      .reindex(columns=states))
# DataFrame.ix was removed in pandas 1.0; label-based reindexing with labels
# that may be missing is now spelt with .reindex (.loc would raise KeyError).
print(frame.reindex(index=['a', 'b', 'd', 'c'], columns=states))
|
This is how to Unlock your LG G Flex 2 cell phone from any GSM carrier such as Rogers, Telus, and AT&T.
When you insert a SIM card that is not from the original carrier, your device will ask for the "SIM Network Unlock PIN". Simply enter the Unlock Code we email to you and this will permanently unlock your LG G Flex 2 to be used with any GSM carrier.
|
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of SickGear.
#
# SickGear is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickGear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickGear. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import time
import traceback
import threading
import datetime
import sickbeard
from sickbeard import db, logger, common, exceptions, helpers, network_timezones, generic_queue, search, \
failed_history, history, ui, properFinder
from sickbeard.search import wantedEpisodes
# Single lock shared by callers that need exclusive access to the search queue.
search_queue_lock = threading.Lock()

# Action identifiers assigned to the different queue item types.
BACKLOG_SEARCH = 10
RECENT_SEARCH = 20
FAILED_SEARCH = 30
MANUAL_SEARCH = 40
PROPER_SEARCH = 50

# Rolling history of recently executed manual/failed searches (see fifo()).
MANUAL_SEARCH_HISTORY = []
MANUAL_SEARCH_HISTORY_SIZE = 100
class SearchQueue(generic_queue.GenericQueue):
    """Queue that serialises the episode search jobs (recent, proper,
    backlog, manual and failed-retry) so only one runs at a time."""

    def __init__(self):
        generic_queue.GenericQueue.__init__(self)
        self.queue_name = 'SEARCHQUEUE'

    def is_in_queue(self, show, segment):
        # True when a backlog item for this exact show/segment is pending.
        with self.lock:
            return any(isinstance(item, BacklogQueueItem)
                       and show == item.show and segment == item.segment
                       for item in self.queue)

    def is_ep_in_queue(self, segment):
        # True when a manual or failed-retry item for this segment is pending.
        with self.lock:
            return any(isinstance(item, (ManualSearchQueueItem, FailedQueueItem))
                       and segment == item.segment
                       for item in self.queue)

    def is_show_in_queue(self, show):
        # `show` is an indexer id here, not a show object.
        with self.lock:
            return any(isinstance(item, (ManualSearchQueueItem, FailedQueueItem))
                       and show == item.show.indexerid
                       for item in self.queue)

    def get_all_ep_from_queue(self, show):
        # `show` is an indexer id as a string; returns False when nothing matches.
        with self.lock:
            matches = [item for item in self.queue
                       if isinstance(item, (ManualSearchQueueItem, FailedQueueItem))
                       and show == str(item.show.indexerid)]
            if matches:
                return matches
            return False

    def pause_backlog(self):
        # Raising min_priority above backlog (LOW) priority stops backlog
        # items from being picked up.
        with self.lock:
            self.min_priority = generic_queue.QueuePriorities.HIGH

    def unpause_backlog(self):
        with self.lock:
            self.min_priority = 0

    def is_backlog_paused(self):
        # backlog priorities are NORMAL, this should be done properly somewhere
        with self.lock:
            return self.min_priority >= generic_queue.QueuePriorities.NORMAL

    def _is_in_progress(self, itemType):
        # Checks the pending items plus the one currently being processed.
        with self.lock:
            return any(isinstance(item, itemType)
                       for item in self.queue + [self.currentItem])

    def is_manualsearch_in_progress(self):
        # Only referenced in webserve.py, only current running manualsearch or failedsearch is needed!!
        return self._is_in_progress((ManualSearchQueueItem, FailedQueueItem))

    def is_backlog_in_progress(self):
        return self._is_in_progress(BacklogQueueItem)

    def is_recentsearch_in_progress(self):
        return self._is_in_progress(RecentSearchQueueItem)

    def is_propersearch_in_progress(self):
        return self._is_in_progress(ProperSearchQueueItem)

    def is_standard_backlog_in_progress(self):
        with self.lock:
            return any(isinstance(item, BacklogQueueItem) and item.standard_backlog
                       for item in self.queue + [self.currentItem])

    def type_of_backlog_in_progress(self):
        """Describe the backlog items present: 'Limited', 'Full' and/or
        'On Demand' (comma separated), or 'None' when there are none."""
        limited = full = other = False
        with self.lock:
            for item in self.queue + [self.currentItem]:
                if not isinstance(item, BacklogQueueItem):
                    continue
                if not item.standard_backlog:
                    other = True
                elif item.limited_backlog:
                    limited = True
                else:
                    full = True

        types = [label for label, flag in (('Limited', limited),
                                           ('Full', full),
                                           ('On Demand', other)) if flag]
        return ', '.join(types) if types else 'None'

    def queue_length(self):
        """Summarise the queue: counters for recent/proper searches, detail
        lists for backlog/manual/failed items."""
        summary = {'backlog': [], 'recent': 0, 'manual': [], 'failed': [], 'proper': 0}
        with self.lock:
            for item in [self.currentItem] + self.queue:
                if isinstance(item, RecentSearchQueueItem):
                    summary['recent'] += 1
                elif isinstance(item, BacklogQueueItem):
                    summary['backlog'].append([item.show.indexerid, item.show.name, item.segment,
                                               item.standard_backlog, item.limited_backlog, item.forced])
                elif isinstance(item, ProperSearchQueueItem):
                    summary['proper'] += 1
                elif isinstance(item, ManualSearchQueueItem):
                    summary['manual'].append([item.show.indexerid, item.show.name, item.segment])
                elif isinstance(item, FailedQueueItem):
                    summary['failed'].append([item.show.indexerid, item.show.name, item.segment])
            return summary

    def add_item(self, item):
        """Queue `item` unless an equivalent search is already pending."""
        if isinstance(item, (RecentSearchQueueItem, ProperSearchQueueItem)):
            # recent and proper searches
            generic_queue.GenericQueue.add_item(self, item)
        elif isinstance(item, BacklogQueueItem) and not self.is_in_queue(item.show, item.segment):
            # backlog searches
            generic_queue.GenericQueue.add_item(self, item)
        elif isinstance(item, (ManualSearchQueueItem, FailedQueueItem)) and not self.is_ep_in_queue(item.segment):
            # manual and failed searches
            generic_queue.GenericQueue.add_item(self, item)
        else:
            logger.log(u'Not adding item, it\'s already in the queue', logger.DEBUG)
class RecentSearchQueueItem(generic_queue.QueueItem):
    """Scheduled search that finds and snatches recently wanted episodes
    across all non-paused shows."""

    def __init__(self):
        # success stays None until run() decides; episodes is filled in run()
        self.success = None
        self.episodes = []
        generic_queue.QueueItem.__init__(self, 'Recent Search', RECENT_SEARCH)

    def run(self):
        """Promote newly aired episodes, refresh provider caches, then search
        for and snatch every wanted episode found."""
        generic_queue.QueueItem.run(self)

        try:
            # First flip UNAIRED episodes whose air time has passed to WANTED/SKIPPED
            self._change_missing_episodes()

            # Refresh every recent-search-enabled provider's cache before searching
            self.update_providers()

            # Collect wanted episodes from every non-paused show; fromDate of
            # ordinal 1 effectively means "any air date"
            show_list = sickbeard.showList
            fromDate = datetime.date.fromordinal(1)
            for curShow in show_list:
                if curShow.paused:
                    continue

                self.episodes.extend(wantedEpisodes(curShow, fromDate))

            if not self.episodes:
                logger.log(u'No search of cache for episodes required')
                self.success = True
            else:
                num_shows = len(set([ep.show.name for ep in self.episodes]))
                logger.log(u'Found %d needed episode%s spanning %d show%s'
                           % (len(self.episodes), helpers.maybe_plural(len(self.episodes)),
                              num_shows, helpers.maybe_plural(num_shows)))

                try:
                    logger.log(u'Beginning recent search for episodes')
                    found_results = search.searchForNeededEpisodes(self.episodes)

                    if not len(found_results):
                        logger.log(u'No needed episodes found')
                    else:
                        for result in found_results:
                            # just use the first result for now
                            logger.log(u'Downloading %s from %s' % (result.name, result.provider.name))
                            # NOTE(review): success reflects only the LAST snatch attempt
                            self.success = search.snatchEpisode(result)

                            # give the CPU a break
                            time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])

                except Exception:
                    logger.log(traceback.format_exc(), logger.DEBUG)

                if self.success is None:
                    self.success = False

        finally:
            self.finish()

    @staticmethod
    def _change_missing_episodes():
        """Change the status of UNAIRED episodes whose air time has passed to
        WANTED (or SKIPPED when the show is paused)."""
        if not network_timezones.network_dict:
            network_timezones.update_network_dict()

        # With timezone data available look one day ahead; without it, play it
        # safe and lag two days behind
        if network_timezones.network_dict:
            curDate = (datetime.date.today() + datetime.timedelta(days=1)).toordinal()
        else:
            curDate = (datetime.date.today() - datetime.timedelta(days=2)).toordinal()

        curTime = datetime.datetime.now(network_timezones.sb_timezone)

        myDB = db.DBConnection()
        sqlResults = myDB.select('SELECT * FROM tv_episodes WHERE status = ? AND season > 0 AND airdate <= ?',
                                 [common.UNAIRED, curDate])

        sql_l = []      # batched episode UPDATE statements
        show = None     # cached show object to avoid repeated lookups
        wanted = False  # becomes True when at least one episode turns WANTED
        for sqlEp in sqlResults:
            try:
                if not show or int(sqlEp['showid']) != show.indexerid:
                    show = helpers.findCertainShow(sickbeard.showList, int(sqlEp['showid']))

                # for when there is orphaned series in the database but not loaded into our showlist
                if not show:
                    continue

            except exceptions.MultipleShowObjectsException:
                logger.log(u'ERROR: expected to find a single show matching ' + str(sqlEp['showid']))
                continue

            try:
                end_time = network_timezones.parse_date_time(sqlEp['airdate'], show.airs, show.network) + datetime.timedelta(minutes=helpers.tryInt(show.runtime, 60))
                # filter out any episodes that haven't aired yet
                if end_time > curTime:
                    continue
            except:
                # if an error occurred assume the episode hasn't aired yet
                continue

            ep = show.getEpisode(int(sqlEp['season']), int(sqlEp['episode']))
            with ep.lock:
                # Now that it is time, change state of UNAIRED show into expected or skipped
                ep.status = (common.WANTED, common.SKIPPED)[ep.show.paused]
                result = ep.get_sql()
                if None is not result:
                    sql_l.append(result)

                wanted |= (False, True)[common.WANTED == ep.status]
        else:
            # NOTE(review): this is a for/else — the loop contains no `break`,
            # so this branch runs after EVERY pass (even when episodes were
            # marked wanted); confirm whether `if not sqlResults:` was intended
            logger.log(u'No unaired episodes marked wanted')

        if 0 < len(sql_l):
            myDB = db.DBConnection()
            myDB.mass_action(sql_l)

        if wanted:
            logger.log(u'Found new episodes marked wanted')

    @staticmethod
    def update_providers():
        """Refresh the caches of all active, recent-search-enabled providers,
        one thread per provider, and wait for them all to finish."""
        origThreadName = threading.currentThread().name
        threads = []

        logger.log('Updating provider caches with recent upload data')

        providers = [x for x in sickbeard.providers.sortedProviderList() if x.is_active() and x.enable_recentsearch]
        for curProvider in providers:
            # spawn separate threads for each provider so we don't need to wait for providers with slow network operation
            threads.append(threading.Thread(target=curProvider.cache.updateCache,
                                            name=origThreadName + ' :: [' + curProvider.name + ']'))
            # start the thread we just created
            threads[-1].start()

        # wait for all threads to finish
        for t in threads:
            t.join()

        logger.log('Finished updating provider caches')
class ProperSearchQueueItem(generic_queue.QueueItem):
    """High-priority queue item that runs the proper/repack finder."""

    def __init__(self):
        generic_queue.QueueItem.__init__(self, 'Proper Search', PROPER_SEARCH)
        self.priority = generic_queue.QueuePriorities.HIGH
        self.success = None

    def run(self):
        generic_queue.QueueItem.run(self)
        try:
            properFinder.searchPropers()
        finally:
            # always release the queue slot, even if the search raised
            self.finish()
class ManualSearchQueueItem(generic_queue.QueueItem):
    """User-triggered, high-priority search for a single episode."""

    def __init__(self, show, segment):
        generic_queue.QueueItem.__init__(self, 'Manual Search', MANUAL_SEARCH)
        self.priority = generic_queue.QueuePriorities.HIGH
        self.name = 'MANUAL-' + str(show.indexerid)
        self.success = None
        self.show = show
        self.segment = segment
        self.started = None

    def run(self):
        generic_queue.QueueItem.run(self)

        try:
            logger.log('Beginning manual search for: [' + self.segment.prettyName() + ']')
            self.started = True

            results = search.searchProviders(self.show, [self.segment], True)
            if not results:
                ui.notifications.message('No downloads were found',
                                         'Couldn\'t find a download for <i>%s</i>' % self.segment.prettyName())
                logger.log(u'Unable to find a download for: [' + self.segment.prettyName() + ']')
            else:
                # just use the first result for now
                logger.log(u'Downloading ' + results[0].name + ' from ' + results[0].provider.name)
                self.success = search.snatchEpisode(results[0])

                # give the CPU a break
                time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])

        except Exception:
            logger.log(traceback.format_exc(), logger.DEBUG)

        finally:
            # record this search in the bounded manual-search history
            fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)

            if self.success is None:
                self.success = False
            self.finish()
class BacklogQueueItem(generic_queue.QueueItem):
    """Low-priority search over a segment of episodes for one show."""

    def __init__(self, show, segment, standard_backlog=False, limited_backlog=False, forced=False):
        generic_queue.QueueItem.__init__(self, 'Backlog', BACKLOG_SEARCH)
        self.priority = generic_queue.QueuePriorities.LOW
        self.name = 'BACKLOG-' + str(show.indexerid)
        self.success = None
        self.show = show
        self.segment = segment
        # flags describing how this backlog run was initiated
        self.standard_backlog = standard_backlog
        self.limited_backlog = limited_backlog
        self.forced = forced

    def run(self):
        generic_queue.QueueItem.run(self)

        try:
            logger.log('Beginning backlog search for: [' + self.show.name + ']')
            found = search.searchProviders(self.show, self.segment, False)

            if not found:
                logger.log(u'No needed episodes found during backlog search for: [' + self.show.name + ']')
            else:
                for result in found:
                    # just use the first result for now
                    logger.log(u'Downloading ' + result.name + ' from ' + result.provider.name)
                    search.snatchEpisode(result)

                    # give the CPU a break
                    time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])

        except Exception:
            logger.log(traceback.format_exc(), logger.DEBUG)

        finally:
            self.finish()
class FailedQueueItem(generic_queue.QueueItem):
    """Mark the episodes in `segment` as failed downloads, then retry them."""

    def __init__(self, show, segment):
        generic_queue.QueueItem.__init__(self, 'Retry', FAILED_SEARCH)
        self.priority = generic_queue.QueuePriorities.HIGH
        self.name = 'RETRY-' + str(show.indexerid)
        self.show = show
        self.segment = segment
        self.success = None
        self.started = None

    def run(self):
        generic_queue.QueueItem.run(self)
        self.started = True

        try:
            # register each episode's failure, then revert it so it can be retried
            for episode in self.segment:
                logger.log(u'Marking episode as bad: [' + episode.prettyName() + ']')

                failed_history.markFailed(episode)

                (release, provider) = failed_history.findRelease(episode)
                if release:
                    failed_history.logFailed(release)
                    history.logFailed(episode, release, provider)

                failed_history.revertEpisode(episode)
                logger.log('Beginning failed download search for: [' + episode.prettyName() + ']')

            results = search.searchProviders(self.show, self.segment, True)
            if results:
                for result in results:
                    # just use the first result for now
                    logger.log(u'Downloading ' + result.name + ' from ' + result.provider.name)
                    search.snatchEpisode(result)

                    # give the CPU a break
                    time.sleep(common.cpu_presets[sickbeard.CPU_PRESET])

        except Exception:
            logger.log(traceback.format_exc(), logger.DEBUG)

        finally:
            # record this retry in the bounded manual-search history
            fifo(MANUAL_SEARCH_HISTORY, self, MANUAL_SEARCH_HISTORY_SIZE)

            if self.success is None:
                self.success = False
            self.finish()
def fifo(myList, item, maxSize=100):
    """Append `item` to `myList`, evicting the oldest entries so the list
    never exceeds `maxSize` elements (a simple bounded FIFO history).

    The previous implementation popped at most one element per call, so a
    list already longer than `maxSize` could stay over the cap forever; the
    while-loop enforces the bound unconditionally.
    """
    while len(myList) >= maxSize:
        myList.pop(0)
    myList.append(item)
|
In a recent DUI (driving under the influence of alcohol or drugs) case south of Jacksonville Florida, the defendant was driving on I-95 and failed to stop at an open weigh station. The defendant was driving a large pickup truck, and the police officer believed that the defendant was required to stop his truck at the weigh station. When the defendant did not stop, the police officer pulled him over and told him to drive back to the weigh station. Apparently, according to the police officer, the defendant committed a traffic violation on the drive back to the weigh station so the police officer stopped him. The police officer indicated that he smelled alcohol on the defendant and initiated a DUI investigation. The police officer ultimately arrested the defendant for DUI and searched his vehicle. The police officer found marijuana in the vehicle and arrested the defendant for possession of marijuana in addition to DUI.
The criminal defense lawyer moved to have all of the evidence obtained by the police officer after the stop suppressed based on an illegal stop. This would include all of the police officer’s observations related to the DUI, including the field sobriety tests and the breathalyzer results, as well as the evidence of the marijuana in the vehicle. Once the criminal defense lawyer files a motion to suppress the evidence, the state has the burden of proving the stop was valid. In this case, the state could not establish that the defendant was required to stop at the weigh station with his big truck. Therefore, the state could not establish that the police officer had a right to stop the defendant. When the state cannot establish that the traffic stop was legal, the evidence of criminal activity obtained after the stop is typically thrown out. As a result, all evidence of the DUI and possession of marijuana charge was thrown out, and the charges were ultimately dismissed.
|
# -*- coding: utf-8 -*-
from django.forms import ModelForm
from apps.registro.models import ExtensionAulica, Establecimiento
from apps.registro.forms import ExtensionAulicaCreateForm
from django.core.exceptions import ValidationError
from django import forms
class ExtensionAulicaModificarCueForm(ExtensionAulicaCreateForm):
    """Form used to edit the CUE of an existing ExtensionAulica.

    Only the CUE components are exposed; `norma_creacion` is made optional
    because the creation act is not required when just changing the CUE.
    """

    class Meta:
        model = ExtensionAulica
        fields = ['codigo_tipo_unidad_educativa', 'cue', 'codigo_jurisdiccion']

    def __init__(self, *args, **kwargs):
        super(ExtensionAulicaModificarCueForm, self).__init__(*args, **kwargs)
        self.fields['norma_creacion'].required = False

    def clean(self):
        """Assemble the full CUE as <jurisdiccion> + <cue> + <unit type>."""
        cleaned_data = self.cleaned_data
        try:
            body = str(cleaned_data['cue'])
            prefix = str(cleaned_data['codigo_jurisdiccion'])
            suffix = str(cleaned_data['codigo_tipo_unidad_educativa'])
        except KeyError:
            # A component failed field-level validation; leave data untouched.
            return cleaned_data
        cleaned_data['cue'] = prefix + body + suffix
        return cleaned_data
|
I’m not an avid NFL fan, but I do have a few favorites (Saints, Chiefs, and Dolphins). I pick who’s going to win games (I was second in a group going into this week) and I monitor results, but that’s mostly it. This is my only other NFL Blog this year: “How I Would Re-Align the NFL”. I just don’t get into it enough to blog about it generally.
I’m writing this because even though I’m pretty good with this stuff, I had some trouble figuring out the playoff scenarios with today’s results in mind, so I thought I’d share what I found out. I’m sorry if this comes across as too pedantic, but I try to write it so that people who might not be extremely familiar with the process can still follow along.
I’ll go into more details about the divisional spots at the end (at least one team makes the playoffs from each division regardless of how they compare to teams in other divisions).
If you didn’t know, New York (Giants), Washington, Detroit, Minnesota, Atlanta, Tampa Bay, and St. Louis have all been eliminated.
Less complicated for the top 5 seeds here, since we at least know who the 5 teams are. Four teams are competing for one wild card spot. All four division winners are known: New England, Cincinnati, Indianapolis, Denver. Kansas City has clinched the #5 seed (top wild card).
I guess it depends on how your brain works, but I’m going to explain it in a less formulaic way first. Baltimore wins a two-way tie with Miami or a three- or four-way tie that includes the Jets. Miami wins any tie that (1) doesn’t involve the Jets or (2) isn’t a two-way tie with Baltimore. Pittsburgh would win a five-way tie that includes the Jets. San Diego wins no relevant ties, but they can finish with the #6 AFC record outright.
I won’t be addressing what happens if a relevant game is tied next week, so keep the previous paragraph in mind. Since Pittsburgh is the only team still alive with 8 losses, they would be eliminated if they tie next week regardless of any other results. The Jets have the same record as the Steelers do (7-8), but they have been eliminated due to their 4-7 record in conference.
If you didn’t know, New York (Jets), Buffalo, Cleveland, Tennessee, Jacksonville, Houston, and Oakland have all been eliminated.
I’m putting this last because I thought people would worry more about who’s going to be completely eliminated from the playoffs first.
As of right now, Seattle and Carolina have clinched the playoffs in the NFC; but no division has been clinched.
NFC East – The Eagles win the East by beating or tying the Cowboys. If the Cowboys win, they win the East.
NFC North – The Bears win the North by beating or tying the Packers. If not, the Packers win the North.
NFC South – If the Saints win and the Panthers lose, the Saints win the South. If not, the Panthers win the South.
NFC West – If Seattle beats (or ties) the Rams or San Francisco loses (or ties) either remaining game, the Seahawks win. San Francisco only wins the division with wins in both remaining games and a Seattle loss.
I’m not one for letting my fandom show excessively; but there is a time and a place for everything, and it’s called college football.
Before I get into that, I just wanted to note that I will have a lot to cover as far as my post-regular season, pre-bowl thoughts; but we have a couple weeks until there are any big games again, so I’ll spread them out over a few blog posts. At the least I’ll do some kind of review of the major BCS results and another look at the conferences. I might also do something about LSU’s history against the Big Ten, something like what I did with the Pac-12.
Anyway, onto being a fan, I have to say that once again I’m disappointed with LSU’s bowl selection. I guess the addition of Texas A&M and Missouri to the SEC means that for Cotton Bowl purposes, there are 8 teams in the SEC West. Maybe Auburn will be the exception in the other direction one year, but I don’t remember that happening before.
My bowl projections didn’t turn out too well because a couple of bowls didn’t pick the way I thought they should have, so there was a domino effect all the way down the list. I guess having “insider” knowledge helps after all.
The top non-BCS SEC team is supposed to go to the CapitalOne Bowl, which also pays the most. Then the next-best West team normally goes to the Cotton and the next-best East team normally goes to the Outback. Missouri won the SEC East and finished ahead of South Carolina in the BCS, so they would have been the “normal” CapitalOne pick. Last season, the SEC made sure that Georgia got to go to the CapitalOne Bowl (multiple reports said that the CapOne preferred A&M, who ended up in the Cotton Bowl ahead of LSU, a team that beat the Aggies and tied them in the standings for the year).
I think the difference this season was that Missouri wanted to go to the Cotton Bowl anyway, so they didn’t care about the CapitalOne picking South Carolina instead. Anyway, the result was that LSU couldn’t go to the Cotton Bowl, which is where the top available team in the SEC West is supposed to go after the CapitalOne Bowl makes its selection. So I’m not exactly expecting an inspired performance from the Fighting Tigers, but Iowa isn’t as good as Clemson was last year, so maybe the game will turn out better. LSU still should have won last year. I’m still annoyed with that even though I didn’t even care that much about the game at first.
You would have thought a great defensive coach like Saban could have told a couple guys to run back toward the goal line ahead of the Iowa receivers, but I guess he was busy daydreaming about going to Miami.
To put a positive spin on this year’s bowl game, having a replacement quarterback might actually be something that generates excitement, with no disrespect to Zach Mettenberger intended. It could be something along the lines of Matt Flynn’s performance in the Peach Bowl in 2005 (a 40-3 win over Miami) or at least Matt Mauck’s or Ryan Perrilloux’s wins in the SEC Championships in 2001 and 2007, respectively. In all three games, the quarterback of the future was suddenly made the starter, and LSU won the game. Mauck and Flynn would have championships in other seasons, so it was a preview of things to come (two years later in both cases: Mauck was hurt in 2002, and Flynn had a whole season of backup duty to JaMarcus Russell left); but that win was Perrilloux’s one big moment in the sun at LSU. Of course, I’m hoping Anthony Jennings is more like Mauck or Flynn but without having to wait on the sidelines next season.
Anyway, my incorrect guess as to what the CapitalOne would do caused me to be wrong about both the SEC Cotton and the SEC Outback picks.
My incorrect guess that the Orange would take Oklahoma caused me to be wrong several other slots. The first was the Sugar Bowl: had I known Oklahoma would still be available, I would have picked them for the Sugar Bowl. Then, since I thought the Sugar would take Oregon, this caused me to be wrong about all the Pac-12 bowls I picked. Everyone got knocked down a peg when only one team from the conference made a BCS bowl. Also the ACC teams moved one slot higher than I thought they would be. Had Clemson not made it, they probably would have returned to the Chik-fil-A (Peach) Bowl. Duke was my next choice had I known Clemson was going to the Orange.
Aside from the remaining obvious BCS picks, I was right about a few things: Wisconsin is in the CapitalOne Bowl, Texas is in the Alamo Bowl, Texas Tech is in the Holiday Bowl, Georgia is in the Gator Bowl, and Texas A&M is in the Peach Bowl. I’m not sure why the Outback didn’t prefer Nebraska to Iowa. If they had, I would have been right about both teams in that one.
I also think the bowls I had would have been better games. Alabama-Oregon, Ohio St.-Oklahoma, and Clemson-Texas A&M sounds much better to me than Alabama—Oklahoma, Ohio St.—Clemson, and Duke—Texas A&M. Georgia—Iowa and LSU—Nebraska (with our old friend Bo Pelini) also seems much better than the games we got. I was right about Nebraska going to Florida instead of to Arizona, where a lot of people had them. I just thought it would be Outback instead of Gator. That was really the only secondary pick I got wrong that wasn’t influenced by the two big ones.
There was a change at #3 and yet another at #4. All of the top four will be going to Pasadena.
Not much to say about the rankings. I think this might be the first time every top 25 team was either in the AP top 25 or the BCS top 25. I guess it will be the last time too, since this is the last BCS top 25 ever. I’m wondering if they’ll give us some kind of standings to look at next season.
There was one team in both the AP and BCS top 25s that I don’t have, and that team is Texas A&M. The Aggies have one too many losses at this point without the corresponding quality wins necessary. It also doesn’t help that their only meaningful win since late October was over Mississippi St. I still think they’re one of the top 25 best, but the system is designed so that one-loss teams like Northern Illinois and Fresno St. don’t go too low, since if there is a Boise St. or Utah (like the ones who won major bowl games in recent years) with one or two strong wins they can be up into the top 10 somewhere. Not everyone deserves a shot at the top 2 either though.
The only other thing I thought was interesting was the switch at #25. USC and U. Miami were both idle, but it’s interesting that USC played 9 conference games as well as playing Notre Dame, which happened to have played both Stanford and Arizona St. Since the Cardinal and the Sun Devils strengthened their own schedules by playing one another, they also strengthened the rest of the Pac-12 and Notre Dame. Also, it helped that Notre Dame beat Michigan St., which of course added its most impressive victory of the season by beating Ohio St.
The final BCS championship will be decided on January 7.
The BCS Standings have not been released yet, so I’m going to use my own rankings.
(1) #1 plays #2, based on the BCS Standings. There is almost no doubt the two teams will be Florida St. and Auburn. Florida St. is the only unbeaten team in major college football. Auburn has clearly had the best schedule among one-loss teams and won the SEC over Alabama and Missouri, who would both be in the top 5 had their seasons ended before playing Auburn.
(2) If the #1 team would otherwise be designated for a certain bowl game, that bowl gets a replacement pick. I’m pretty sure the #1 team will remain Florida St. since they were #1 going in and won convincingly. So that means the Orange would get to pick first. I think they’ll pick Ohio St. That’s the best team not in the BCS Championship or Rose Bowl, and they bring fans to games pretty well. There is an agreement not to pick the #2 team from another conference, so I don’t think Alabama would be going here.
(3) If the #2 team would otherwise be designated for a certain bowl game, that bowl gets a replacement pick. The second bowl to pick will be the Sugar Bowl. Despite a great season by Missouri, Alabama is a better draw. Most people already regarded Alabama as the better team before Missouri’s loss to Auburn. Unfortunately, there are only two teams allowed per conference.
(4) Remaining slots will be selected based upon the rotation for a given year. In order to pick an “at-large” team, slots must be available for the automatic bids.
(4a) Orange My guess is the Orange will jump at an opportunity to recall the old days of picking Big 8 champions and go with Oklahoma. They could pick Clemson instead, but other than not losing to anyone but South Carolina and Florida St., what has Clemson really done? Also, I know South Carolina is a crazy football state, but they’re just not on the same level.
(4b) Sugar You might remember that a week ago people were saying that for the second selection, the Sugar would have to decide between UCF and Northern Illinois. That would have been true had Northern Illinois won; but since the Huskies lost, that opens up another at-large slot. Clemson might have a good argument here, but I think people would be more excited about Oregon. It wasn’t too long ago that Alabama was #1 and Oregon was #2. I think that would be a more exciting match-up. Ducks fans probably wouldn’t travel as well as Clemson Tiger fans though, so I’m not 100% sold on this.
(4c) Fiesta Unless the Orange or Sugar pick Central Florida, the Fiesta Bowl will have to. Baylor automatically goes as a result of winning the Big XII.
As to the non-BCS Bowls, there is more leeway.
Last year, a very good SEC team that lost the Championship for its second loss went to the CapitalOne Bowl, which is meant to be the top SEC non-BCS bowl. I expect the same this year.
Although conceivably Missouri could lobby to be in the Cotton instead of LSU, usually the Cotton goes to an SEC West team; and I think the Cotton would be happy with LSU, which has been a good draw in Arlington a few times recently, including playing Texas A&M in the Cotton a few years ago and playing Oregon and TCU to start the year in 2011 and 2013, respectively. Les Miles vs. Oklahoma St. would of course add some intrigue to the game.
Although Texas was one win away from the Fiesta Bowl, I think it would only be fair if it is the #4 Big XII team selected with its two non-conference losses. So that slot is the Alamo Bowl, which is of course in Texas and should be happy to have them. Arizona St. is pretty close for a Pac-12 school, so they’ll be a good pick to play the Longhorns. The Pac-12 #2 (or top available after the BCS) has gone to the Holiday Bowl before, but that’s not the way it works now from what I understand.
South Carolina always seems to end up in the Outback Bowl, but it’s a result of being in the SEC East and not one of the top 3 selections when there are two SEC teams in BCS bowls. I don’t see another logical place to put them. There is some talk of LSU going to the Outback Bowl if Missouri goes to the Cotton and frees up the CapitalOne Bowl for South Carolina, but I don’t think that’s fair to either the CapitalOne Bowl or to LSU. LSU was brushed aside for the Cotton last year in favor of A&M despite having beaten the Aggies. But Texas A&M wasn’t given the option of the “better” CapitalOne Bowl, which got Georgia. Iowa beat Nebraska, but they’re both 5-3 and I would think the Outback would go with Nebraska for its well-known supportive fan base.
I mentioned the Holiday Bowl earlier. They may pass on UCLA to vary things up a bit, but I suspect they won’t. It just seems to make too much sense. It’s also possible that the Alamo and Holiday could switch Pac-12 teams, but I don’t think that would be an improvement for either. If Oregon does not get into a BCS bowl, they may bump either UCLA or Arizona St. to the Sun Bowl. The pickings in the Big XII start to get slim, but Texas Tech looks like the best possibility. Kansas St. is another.
Texas A&M has been slotted for the Chick-fil-A bowl for a while, particularly by those prognosticators who didn’t pick them to beat Missouri. Since the time it was the Peach Bowl, this has been known for fun, competitive games, and I don’t think Clemson would disappoint. The Tigers won by a single point last year, but I don’t think the bowl would be enticed by Duke. It’s basketball season at that school.
Georgia has had an up-and-down year and they play in Jacksonville every year anyway, but I still think the Gator makes a lot of sense given its proximity to the state of Georgia. I think enough Georgia fans live far enough away to get hotel rooms and so forth, but they’ll still show up in big numbers. I’ve seen Michigan projected here, and they’re always a good draw too. I’m hoping the politics don’t allow the Wolverines to pass up Iowa and Minnesota, who both finished with better records. It wouldn’t shock me though.
Sorry for getting this out late. I was waiting on responses longer than usual (there are only 5 entries this week), and it just wasn’t possible to do this yesterday.
It’s contentious toward the top. Voters that really like Auburn seem not to like Ohio St. and vice-versa. Florida St. still got a majority of the #1 votes and got #2 votes from both Ohio St. supporters for the top spot. No. 4 was interesting because two voters had Alabama two spots below Missouri, and another had the Tide two spots ahead of the Tigers. The other two had Alabama one vote ahead. So three of the five did think the Tide should go in front. That’s why they’re on top in the image above.
The Pac-12 seemed to benefit the most from the absent voters as all three teams moved up significantly even though they were all supposed to win and Oregon didn’t even look good doing it.
LSU was “jumped” by Central Florida and Oklahoma, who also gained four spots. LSU remained #17 though.
Duke went up the most, improving by six spots. Clemson and Wisconsin fell eight spots apiece after losses. Fresno St. only fell six spots.
Georgia also gained several spots to get into the top 25 (although they’re still significantly behind those other Bulldogs in points), in part because of Auburn’s win over Alabama. Georgia is ranked for the first time since Week 7. Texas rejoined the top 25 after a two-week absence. It didn’t take much to become ranked this week, as there was a lot of disagreement over who the last couple of teams should be.
Very little separates the top four, especially the top two.
In case you missed it, I wrote a blog yesterday about the changes to the Thanksgiving rivalries.
The first part of this is based on the national-championship race, then I’ll get into the other changes to the rankings.
I’ll just get to the point. If there weren’t championship games, I would absolutely want Florida St. to play Auburn for the title. If Auburn beats Missouri, I’m not changing my mind about that one, but I’ll wait and see on Ohio St. I don’t factor in margin of victory in my ratings; but as a fan, I lean heavily toward Florida St. because no one has given them a game yet. It would be a shame to do everything they could be expected to do and not get to play for a title, assuming they beat Duke.
The main thing I wanted to do was just to give a summary of where the big opponents (defined as the top 60 teams in the FBS) of the four major title competitors rank in my new ratings.
Ohio St. has more wins from #30 to #60, but I think based on what I’d call good opponents (top 30), there are more teams that can go undefeated against Ohio St.’s schedule than Florida St.’s schedule. I think even 7-5 Michigan was perfectly capable of going undefeated against Ohio St.’s schedule so far. Ohio St. didn’t play Michigan St. (the only team to beat Michigan convincingly) or Nebraska (who beat Michigan by 4), and Michigan only lost by a field goal apiece to Penn St. (in OT) and Iowa. Michigan did beat Notre Dame, which I think is about as good as Wisconsin, Ohio St.’s best win.
You can see why I wouldn’t necessarily endorse Missouri over an undefeated team. Even with the win over Auburn, that’s still just one top-20 win. Two top-25 wins, but Florida St. would have three. If you start talking about teams below the top 25, that might be an advantage in arguing against Florida St., but it would be a disadvantage in arguing for Missouri against Ohio St. I just think it should be a clearer case before you put a team with a loss in instead of an undefeated team. Auburn qualifies; Missouri probably doesn’t.
This is another way of looking at it that I saw on another blog.
I had a feeling I was going to end up regretting that the 4-team playoff didn’t start this year (Alabama would be the natural fourth, although I have to admit I’m personally glad they seem to be out of the running); but if Ohio St. or Florida St. ends up losing, then it would be the perfect two-team outcome. If they both lose, then maybe we can get another all-SEC championship.
To get to the rest of the top 10, Alabama stayed in the top 4 in the major polls, but computer formulas treat a loss as just a loss. It also doesn’t help that the Tide’s SEC East opponents were Tennessee and Kentucky. Arizona St. also edged in front by beating Arizona. It’s likely that Stanford would take Arizona St.’s place in front of Alabama with a win. Northern Illinois could pass up one or two of the conference-championship losers. Michigan St. of course could gain a few spots by beating Ohio St. Baylor can also capitalize on others’ losses if they get past Texas.
Below the top 10, Oklahoma St. fell a few spots by being idle. The same principle applies to them as to Baylor except that OSU plays a better team. One team they may pass up is South Carolina, which jumped five spots by beating Clemson, which went down six spots. Oregon went up six spots after beating Oregon St. UCLA and Duke also went up considerably after winning their rivalry games.
Fresno St., Wisconsin, and Notre Dame all fell a bit by losing. USC and Texas A&M were in the last few teams last week, so they couldn’t stay in the top 25 with their losses. Georgia beat Georgia Tech, and it also helped out that Auburn and Missouri won. Miami’s win over Pittsburgh, combined with losses by others, put the Hurricanes back into the top 25.
I did make a small adjustment to how losses to FCS teams are treated, but of course all the relevant teams are outside of the top 25.
|
from HTMLParser import HTMLParser
from htmlentitydefs import name2codepoint
class Tag(object):
    """One node in the tag tree built by MyHTMLParser.

    The integer 0 doubles as the "no node" sentinel for the link
    attributes (parent, first_child, next_sibling).
    """
    name = ''
    text = ''
    first_child = 0
    parent = 0
    next_sibling = 0
    closed = 0
    depth = 0

    def get_tag_info_str(self):
        """Return a human-readable multi-line summary of this node."""
        # Render sentinel (0) links as the literal string 'none'.
        p = self.parent.name if self.parent != 0 else 'none'
        c = self.first_child.name if self.first_child != 0 else 'none'
        s = self.next_sibling.name if self.next_sibling != 0 else 'none'
        return "name = {}, text = {}\nParent = {}, First Child = {}, Next Sibling = {}\nClosed = {}, Depth = {}\n".format(self.name, self.text, p, c, s, self.closed, self.depth)
class MyHTMLParser(HTMLParser):
    """Event-driven HTML parser that flattens the document into tag_list.

    Each handled tag becomes a Tag object linked to its parent, first
    child and next sibling, so the flat list doubles as a simple tree.
    previous_tag ('none'/'start'/'end'/'startend') records the last event
    seen and drives the linking logic; depth tracks nesting level.

    NOTE(review): tag_list, depth, previous_tag and mode are class
    attributes, so parser state is shared across instances -- confirm
    that only a single parser instance is used at a time.
    """
    tag_list = []
    depth = 0
    previous_tag = 'none'
    # Any mode other than 'silent' echoes parser events to stdout.
    mode = 'silent'

    def handle_starttag(self, tag, attrs):
        """Record an opening tag and link it into the tree."""
        if self.mode != 'silent':
            print "Start tag:", tag
            for attr in attrs:
                print "     attr:", attr
        self.depth = self.depth + 1
        t = Tag()
        t.name = tag
        t.depth = self.depth
        if self.previous_tag == 'start':
            # current tag is a first child of the last tag
            t.parent = self.tag_list[len(self.tag_list)-1]
            self.tag_list[len(self.tag_list)-1].first_child = t
        elif self.previous_tag == 'end':
            # current tag is next sibling of the last tag
            # (scan backwards for the most recent tag at the same depth)
            for x in reversed(self.tag_list):
                if x.depth == self.depth:
                    x.next_sibling = t
                    if t.parent == 0:
                        t.parent = x.parent
                    break
        elif self.previous_tag == 'startend':
            # current tag is the next sibling of the previous tag
            t.parent = self.tag_list[len(self.tag_list)-1].parent
            self.tag_list[len(self.tag_list)-1].next_sibling = t
        self.tag_list.append(t)
        self.previous_tag = 'start'

    def handle_endtag(self, tag):
        """Mark the matching open tag as closed and step back up a level."""
        if self.mode != 'silent':
            print "End tag  :", tag
        # Close the nearest still-open tag with this name.
        for x in reversed(self.tag_list):
            if x.name == tag and x.closed == 0:
                x.closed = 1
                break
        self.depth = self.depth - 1
        self.previous_tag = 'end'

    def handle_startendtag(self, tag, attrs):
        """Record a self-closing tag (e.g. <br/>); it is closed immediately."""
        if self.mode != 'silent':
            print "Start/End tag :", tag
            for attr in attrs:
                print "     attr:", attr
        t = Tag()
        self.depth = self.depth + 1
        t.name = tag
        t.depth = self.depth
        t.closed = 1
        if self.previous_tag == 'start':
            # current tag is first child of the last tag
            t.parent = self.tag_list[len(self.tag_list)-1]
            self.tag_list[len(self.tag_list)-1].first_child = t
        elif self.previous_tag == 'startend':
            # current tag is next sibling of last tag
            t.parent = self.tag_list[len(self.tag_list)-1].parent
            self.tag_list[len(self.tag_list)-1].next_sibling = t
        elif self.previous_tag == 'end':
            #current tag is next sibling of a previous tag of depth=self.depth
            for x in reversed(self.tag_list):
                if x.depth == self.depth:
                    x.next_sibling = t
                    if t.parent == 0:
                        t.parent = x.parent
                    break
        self.tag_list.append(t)
        # A self-closing tag does not change the nesting level overall.
        self.depth = self.depth - 1
        self.previous_tag = 'startend'

    def handle_data(self, data):
        """Attach text content to the tag that encloses it."""
        if self.mode != 'silent':
            print "Data     :", data
        self.depth = self.depth + 1
        # add data to last tag in list with depth = current depth - 1
        for x in reversed(self.tag_list):
            if x.depth == self.depth - 1:
                # Accumulate stripped text chunks separated by one space.
                x.text = (x.text + ' ' + data.strip(' \n\t')).strip(' \n\t')
                break
        self.depth = self.depth - 1

    def handle_comment(self, data):
        """Comments are only echoed in verbose mode; they are not stored."""
        if self.mode != 'silent':
            print "Comment  :", data

    def handle_entityref(self, name):
        """Named entity (e.g. &amp;) -- echoed in verbose mode only."""
        if self.mode != 'silent':
            c = unichr(name2codepoint[name])
            print "Named ent:", c

    def handle_charref(self, name):
        """Numeric character reference (decimal or 'x'-prefixed hex) -- echo only."""
        if self.mode != 'silent':
            if name.startswith('x'):
                c = unichr(int(name[1:], 16))
            else:
                c = unichr(int(name))
            print "Num ent  :", c

    def handle_decl(self, data):
        """Doctype or other declaration -- echoed in verbose mode only."""
        if self.mode != 'silent':
            print "Decl     :", data

    def print_tag_list(self, u):
        """Print the info string of every recorded tag.

        NOTE(review): parameter *u* appears unused -- confirm with callers
        before removing it.
        """
        for l in self.tag_list:
            print l.get_tag_info_str()

    def clear_tag_list(self):
        """Empty tag_list in place.

        NOTE(review): __delslice__ is Python-2-only; `del self.tag_list[:]`
        is the portable equivalent.
        """
        self.tag_list.__delslice__(0, len(self.tag_list))

    def pretty_print_tags(self):
        """Print tag names as an outline indented by each tag's depth."""
        for t in self.tag_list:
            s = ''
            s = s + self.get_indent_str(t.depth-1)
            s = s + self.get_tag_str(t.name)
            print s

    def get_indent_str(self, n):
        """Return a string of *n* spaces."""
        s = ''
        while(n != 0):
            s = s + ' '
            n = n - 1
        return s

    def get_tag_str(self, name):
        """Return the tag name wrapped in angle brackets."""
        return '<{}>'.format(name)

    def find_first_tag(self, name):
        """Return the first recorded Tag called *name*, or 0 if absent."""
        r = 0
        for t in self.tag_list:
            if t.name == name:
                r = t
                break
        return r

    def print_first_tag_info(self, name):
        """Print info for the first tag called *name*, or a not-found note."""
        t = self.find_first_tag(name)
        if t == 0:
            print "Tag: {} not found".format(name)
        else:
            print t.get_tag_info_str()
|
Due to the high level of interest in this group we are not able to take any more children. A waiting list is being managed.
In the first instance, please email Liz Flitcroft who will contact you when we are able to offer your child a place. Please include your child’s name and date of birth when you e-mail.
Please check on here frequently for details of changed or cancelled training sessions.
Training session for new starters is Tuesdays 5-6pm, with progression to the main club training times when appropriate.
Main Club training sessions are every Tuesday and Thursday from 6:30pm to 7:30pm.
If you come to training without at least two (preferably three) layers, we will send you home.
From the start of September it can be cold and it will be dark so you must have layers you can remove and put back on easily.
In addition, your child should always bring a drink (preferably water), and please try to ensure they have been to the toilet before arriving at training.
At all times your child should have layers of clothes on that they can take off as they warm up during a training session. In particular, long trousers over shorts and long sleeve top over t-shirt, jackets or sweatshirts.
Trainers with laces are better than slip-on trainers.
In the winter we take part in the Destination Basingstoke Junior Cross Country league – School year 3 to Year 9 inclusive.
There are also open competitions run by our own club and other athletic clubs, and information will be here on how to enter yourselves.
More details on Fixtures page.
We also have our Junior Club Championships in September and all juniors are encouraged to take part.
|
#!/usr/bin/python
# Copyright 2010-2012 RethinkDB, all rights reserved.
import os, sys, socket, random, time
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, "common")))
import memcached_workload_common
from line import *
key_padding = ''.zfill(20)

def gen_key(prefix, num):
    """Build a fixed-width key: prefix + 20 zeros + zero-padded number."""
    suffix = str(num).zfill(6)
    return '{}{}{}'.format(prefix, key_padding, suffix)
value_padding = ''.zfill(240)
large_value_padding = ''.zfill(512)

def gen_value(prefix, num):
    """Build a test value; every fifth key (num % 5 == 4) gets the large padding."""
    padding = large_value_padding if num % 5 == 4 else value_padding
    return '{}{}{}'.format(prefix, padding, str(num).zfill(6))
def sock_readline(sock_file):
    """Read from *sock_file* until a CRLF-terminated line is completed.

    Accumulates partial reads and returns them joined, including the
    final '\r\n' terminator.
    """
    pieces = []
    while True:
        piece = sock_file.readline()
        pieces.append(piece)
        if piece.endswith('\r\n'):
            break
    return ''.join(pieces)
# Matches one memcached "VALUE <key> <flags> <length>" header line from an
# rget response; parse_line() yields a dict with those named fields (or a
# falsy value when the line does not match).
value_line = line("^VALUE\s+([^\s]+)\s+(\d+)\s+(\d+)\r\n$", [('key', 's'), ('flags', 'd'), ('length', 'd')])

def get_results(s):
    """Drain an rget response from socket *s*.

    Reads VALUE-header/value line pairs until the terminating 'END\r\n'
    line, validating each value against its declared length, and returns
    a list of {'key': ..., 'value': ...} dicts.

    Raises ValueError on an unparseable header or a length mismatch.
    """
    res = []
    f = s.makefile()
    while True:
        l = sock_readline(f)
        if l == 'END\r\n':
            break
        val_def = value_line.parse_line(l)
        if not val_def:
            raise ValueError("received unexpected line from rget: %s" % l)
        # The value itself is on the following line; strip the CRLF.
        val = sock_readline(f).rstrip()
        if len(val) != val_def['length']:
            raise ValueError("received value of unexpected length (expected %d, got %d: '%s')" % (val_def['length'], len(val), val))
        res.append({'key': val_def['key'], 'value': val})
    return res
class TimeoutException(Exception):
    """Raised when a socket operation exceeds its allotted time."""
# Parse the standard memcached-workload command line (host/port, etc.).
op = memcached_workload_common.option_parser_for_socket()
opts = op.parse(sys.argv)

# Test:
# start rget query, then start a write concurrently (but after rget got to the bottom of the tree)
# if the write blocks, then we do not do copy-on-write/snapshotting/etc.
# Also check that rget gets consistent data (i.e. is not affected by concurrent write), and that
# the write actually updates the data
rget_keys = 10000
# The single key/value pair mutated while the rget is paused mid-flight.
updated_key_id = rget_keys-1
updated_key = gen_key('foo', updated_key_id)
updated_value = gen_value('changed', updated_key_id)
orig_value = gen_value('foo', updated_key_id)
host, port = opts["address"]
with memcached_workload_common.MemcacheConnection(host, port) as mc:
    print "Creating test data"
    for i in range(0, rget_keys):
        mc.set(gen_key('foo', i), gen_value('foo', i))
    with memcached_workload_common.make_socket_connection(opts) as s:
        print "Starting rget"
        # Range query over all 'foo' keys ('fop' is the exclusive upper
        # bound); the response is deliberately left unread for now.
        s.send('rget %s %s %d %d %d\r\n' % (gen_key('foo', 0), gen_key('fop', 0), 0, 1, rget_keys))
        print "Started rget"
        # we don't read the data, we just stop here, preventing rget from proceding
        # rget is slow to start, so we need to wait a bit, before it locks down the path.
        # This is a crude way, but is probably the simplest
        time.sleep(5)
        with memcached_workload_common.make_socket_connection(opts) as us:
            print "Starting concurrent update"
            us.setblocking(1)
            print "    Sending concurrent set"
            us.send('set %s 0 0 %d\r\n%s\r\n' % (updated_key, len(updated_value), updated_value))
            uf = us.makefile()
            us.settimeout(10.0)
            print "    Waiting for set result"
            set_res = sock_readline(uf).rstrip()
            if set_res != 'STORED':
                raise ValueError("update failed: %s" % set_res)
            print "    Concurrent set finished"
            # The set must be visible immediately, even while the rget is
            # still outstanding (i.e. the write did not block).
            v = mc.get(updated_key)
            if v != updated_value:
                raise ValueError("update didn't take effect")
        # Now drain the rget response; it must reflect the pre-update
        # snapshot, proving the query was isolated from the write.
        res = get_results(s)
        if len(res) != rget_keys:
            raise ValueError("received unexpected number of results from rget (expected %d, got %d)" % (rget_keys, len(res)))
        if res[updated_key_id]['value'] != orig_value:
            raise ValueError("rget results are not consistent (update changed the contents of a part of running rget query)")
        # The update must still be in place after the rget finished.
        v = mc.get(updated_key)
        if v != updated_value:
            raise ValueError("update didn't take effect")
print "Done"
|
This Comexim bra is absolutely gorgeous. This was my second order from Comexim and I was pretty excited to get it because I also decided to get the matching briefs. The color of the lace is a subtle black-brown with brownish-red on top of a cream base. The cream fabric underneath the lace has a matte finish. I tried to get photos of the bra in natural light so you could see how lovely it is when the light hits it.
The cut of the Geisha is very much like the other Comexim "plunge" styles I have. Unlike plunges in other brands, the cups feel very supportive and my breasts feel very enclosed, like they're not going anywhere. The gore comes up slightly higher than most plunge style bras. It is similar to Ewa Michalak's S line. The cups are nice and deep: if you are looking for a narrow cup that offers good projection, then this is the bra for you. The tops of the cup encase the breast tissue up there without cutting into the tissue. The underwires don't stab me in the underarm. Actually, if they were ANY lower, they would be too low. The placement of the straps is also ideal for my shoulders.
This is a comfortable bra with a nice firm band. It stretches to just about 31" so I don't know if that is quite true to size. I wouldn't say the band is super-stretchy because the lace pattern extends to the band area (which is a nice touch). I'd say the Comexim Magic Blue's band is slightly stretchier.
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2016-2018 script.module.python.twitch
This file is part of script.module.python.twitch
SPDX-License-Identifier: GPL-3.0-only
See LICENSES/GPL-3.0-only for more information.
"""
import re
import logging
import copy
# logging.NullHandler is absent on very old Pythons; fall back to a
# minimal no-op handler so library logging stays silent unless the host
# application configures handlers itself.
try:
    from logging import NullHandler
except ImportError:
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

# xbmc is only importable when running inside Kodi; outside Kodi it is
# set to None and the stdlib logging path is used instead.
try:
    import xbmc
except ImportError:
    xbmc = None
def _mask(message):
mask = '*' * 11
masked_message = re.sub(r'((?:OAuth|Bearer)\s)[^\'"]+', r'\1' + mask, message)
masked_message = re.sub(r'(["\']email["\']:\s*[\'"])[^\'"]+', r'\1' + mask, masked_message)
masked_message = re.sub(r'(USER-IP=[\'"])[^\'"]+', r'\1' + mask, masked_message)
masked_message = re.sub(r'(["\']client_secret["\']:\s*[\'"])[^\'"]+', r'\1' + mask, masked_message)
masked_message = re.sub(r'(client_secret=).+?(&|$|\|)', r'\1' + mask + r'\2', masked_message)
masked_message = re.sub(r'(\\*["\']user_ip\\*["\']:\\*["\']).+?(\\*["\'])', r'\1' + mask + r'\2', masked_message)
masked_message = re.sub(r'(["\'](?:nauth)*sig["\']: ["\'])[^\'"]+', r'\1' + mask, masked_message)
return masked_message
def _add_leader(message):
    """Prefix *message* with the add-on identifier when running inside Kodi."""
    if not xbmc:
        return message
    return 'script.module.python.twitch: %s' % message
def prep_log_message(message):
    """Mask sensitive values in *message* and add the Kodi log prefix.

    The previous implementation called ``copy.deepcopy`` on the incoming
    string first; strings are immutable, so that was a no-op and has
    been removed.
    """
    message = _mask(message)
    message = _add_leader(message)
    return message
class Log:
    """Logging facade for the add-on.

    Inside Kodi messages are written through ``xbmc.log`` with the
    matching Kodi log-level constant; elsewhere they go to a stdlib
    'twitch' logger fitted with a NullHandler, so output stays silent
    unless the embedding application configures logging. All messages
    pass through prep_log_message() to mask secrets first.
    """

    def __init__(self):
        if xbmc:
            self._log = xbmc.log
        else:
            self._log = logging.getLogger('twitch')
            self._log.addHandler(NullHandler())

    def info(self, message):
        """Log *message* at INFO level."""
        message = prep_log_message(message)
        if xbmc:
            self._log(message, xbmc.LOGINFO)
        else:
            self._log.info(message)

    def debug(self, message):
        """Log *message* at DEBUG level."""
        message = prep_log_message(message)
        if xbmc:
            self._log(message, xbmc.LOGDEBUG)
        else:
            self._log.debug(message)

    def warning(self, message):
        """Log *message* at WARNING level."""
        message = prep_log_message(message)
        if xbmc:
            self._log(message, xbmc.LOGWARNING)
        else:
            # Bug fix: this previously called self._log.debug(), silently
            # demoting warnings to DEBUG outside of Kodi.
            self._log.warning(message)

    def error(self, message):
        """Log *message* at ERROR level."""
        message = prep_log_message(message)
        if xbmc:
            self._log(message, xbmc.LOGERROR)
        else:
            self._log.error(message)

    def critical(self, message):
        """Log *message* at CRITICAL/FATAL level."""
        message = prep_log_message(message)
        if xbmc:
            self._log(message, xbmc.LOGFATAL)
        else:
            self._log.critical(message)

    def deprecated_query(self, old, new=None):
        """Warn that query *old* is deprecated, suggesting *new* if given."""
        if new:
            self.warning('DEPRECATED call to |{0}| detected, please use |{1}| instead'.format(old, new))
        else:
            self.warning('DEPRECATED call to |{0}| detected, no alternatives available'.format(old))

    def deprecated_endpoint(self, old):
        """Warn that endpoint *old* is deprecated."""
        self.warning('DEPRECATED call to |{0}| endpoint detected'.format(old))

    def deprecated_api_version(self, old, new, eol_date):
        """Warn that API version *old* is deprecated in favour of *new*."""
        self.warning('API version |{0}| is deprecated, update to |{1}| by |{2}|'.format(old, new, eol_date))
log = Log()
|
Healthy and Gluten Free: The trouble with "Nightshades"
The nightshade family is often a problem for people with Celiac disease. Nightshades contain alkaloids which act as a natural pesticide for the plants. Many people can tolerate nightshades, but it's difficult for anyone with damaged intestines. The most common nightshades are tomatoes, potatoes, peppers, chili and egg plant (aubergine). There are theories that if Celiac disease is undiagnosed it often leads to extreme sensitivity and potentially leaky gut syndrome, where the toxins in nightshades can pass into the blood and cause a reaction. If this is the case then when the intestines recover there is a chance that alkaloids can be eaten again with no reaction.
Unfortunately potato flour is perhaps the savior of gluten free foods. It is what makes them work. You'll notice it in almost everything. To get a flavour and texture that can help to imitate wheat, potato flour is the magic ingredient. So for those with a problem with nightshades the downside is an upset, inflamed stomach and possibly other symptoms too.
Thanks for posting this! I have an issue with nightshades, as I'm learning.
Glad to hear that it's helpful.
|
"""
Copyright (c) 2012-2013 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.db import models
from storageadmin.models import Pool
class PoolBalance(models.Model):
    """Records one balance operation run against a storage Pool."""
    pool = models.ForeignKey(Pool)
    # Current state of the balance run; defaults to 'started'.
    status = models.CharField(max_length=10, default='started')
    #django ztask uuid
    tid = models.CharField(max_length=36, null=True)
    # Optional free-form detail (e.g. error text) for this run.
    message = models.CharField(max_length=1024, null=True)
    # NOTE(review): auto_now re-stamps this field on every save(); for a
    # start timestamp auto_now_add=True is usually intended -- confirm.
    start_time = models.DateTimeField(auto_now=True)
    end_time = models.DateTimeField(null=True)
    # Progress of the balance as an integer percentage.
    percent_done = models.IntegerField(default=0)

    class Meta:
        app_label = 'storageadmin'
|
WASHINGTON, D.C., Dec. 6, 2017 -- Microwave ovens are often a fast way of warming food and have become a staple cooking appliance in both household kitchens and restaurants alike. If you have looked closely at the microwave's warnings or have experienced an accidental explosion, you know that certain foods pose a risk due to an increase in their internal pressure. Potatoes and hard-boiled eggs are among the most common culprits of potentially dangerous explosions. While both potatoes and eggs might explode, their mechanisms of bursting are different.
Anthony Nash and Lauren von Blohn, from Charles M. Salter Associates, will present their research on the sound pressures generated by exploding eggs at the 174th Meeting of the Acoustical Society of America, being held Dec. 4-8, 2017, in New Orleans, Louisiana.
Nash and von Blohn explored the mechanism of exploding eggs as part of expert witness testimony for litigation where a plaintiff allegedly suffered severe burns and hearing damage at a restaurant after a microwaved hard-boiled egg exploded in his mouth.
"We needed to quantify the peak sound pressures from an exploding egg so we could compare it to hearing damage risk criteria," said Nash. "At one foot away, the peak sound pressure levels from microwaved eggs covered a wide range from 86 up to 133 decibels. Only 30% of the tested eggs survived the microwave heating cycle and exploded when pierced by a sharp object. On a statistical basis, the likelihood of an egg exploding and damaging someone's hearing is quite remote. It's a little bit like playing egg roulette."
Because there was little scientific literature on the subject, the investigators initially took an unorthodox approach by reviewing YouTube's collection of microwave explosions.
"Those experiments had been done by non-scientists who were casually detonating eggs in a microwave," said Nash. Since their experiments seemed to be more for personal entertainment than for scientific exploration, they did not control for a number of important variables, including measurement of sound levels or internal temperatures, or documentation of the various kinds and sizes of eggs.
For Nash and von Blohn's experiments, they did account for these variables, which were highly controlled. First, selected hard-boiled eggs were placed in a water bath and heated for three minutes, and the temperature of the water bath was then measured both at the middle and end of the heating cycle. Finally, the eggs were removed from the water bath, placed on the floor and pierced with a fast-acting meat thermometer to induce an explosion.
"For both the exploded eggs and eggs that didn't explode, we would probe the inside of the yolk with the thermometer," said Nash. "We discovered that the yolk's temperature was consistently higher than the surrounding water bath."
The implication is that the egg yolk is more receptive to microwave radiation than is pure water (water constitutes about half the weight of an egg yolk). The duo hypothesized that the egg's protein matrix traps small pockets of water within the yolk, causing the pockets to superheat well above the nominal boiling temperature of ordinary tap water. When these superheated pockets are disturbed by a penetrating device, or if one attempts to bite into the egg yolk, the water pockets all boil in a furious chain reaction leading to an explosion-like phenomenon.
Applications of this research may extend past the obvious warnings by manufacturers of microwave ovens and contribute to the growing understanding of impulsive sound sources that cause hearing damage.
We will grant free registration to credentialed journalists and professional freelance journalists. If you are a reporter and would like to attend, contact Julia Majors (jmajors@aip.org, 301-209-3090), who can also help with setting up interviews and obtaining images, sound clips or background information.
A press briefing will be webcast live from the conference Tuesday, Dec. 5, 2017, in room Studio 1 at the New Orleans Marriott. Time to be announced. Register at https://www1.webcastcanada.ca/webcast/registration/asa617.php to watch the live webcast.
|
from django import template
import random
register = template.Library()
@register.filter
def shuffle(arg):
    """Template filter: return a new list of *arg*'s items in random order.

    The original copied twice (``list(arg)[:]``); ``list()`` already
    produces a fresh list, so the extra slice copy has been dropped.
    """
    aux = list(arg)
    random.shuffle(aux)
    return aux
@register.assignment_tag(takes_context=True)
def get_request_parameters(context, exclude=None):
    """Return the current request's GET parameters as one '&key=value'
    string, omitting the parameter named by *exclude*."""
    request = context['request']
    pairs = ['&{key}={value}'.format(key=name, value=value)
             for name, value in request.GET.items()
             if name != exclude]
    return ''.join(pairs)
@register.assignment_tag(takes_context=True)
def get_site_root(context):
    """Return the root Page of the current request's site.

    Note this is the generic Page, not the implementation-specific model,
    so object-comparison to a specific subclass instance will be False.
    :rtype: `wagtail.wagtailcore.models.Page`
    """
    request = context['request']
    return request.site.root_page
@register.inclusion_tag('cms/tags/main_menu.html', takes_context=True)
def main_menu(context, root, current_page=None):
    """Render the main menu from *root*'s live, show-in-menus children.

    Each entry (and root itself) gets an ``active`` flag: root is active
    on an exact URL match, a child when the current page's URL starts
    with the child's URL.
    """
    menu_pages = root.get_children().live().in_menu()
    has_current = bool(current_page)
    root.active = has_current and current_page.url == root.url
    for entry in menu_pages:
        entry.active = has_current and current_page.url.startswith(entry.url)
    return {'request': context['request'], 'root': root,
            'current_page': current_page, 'menu_pages': menu_pages}
@register.inclusion_tag('cms/tags/footer_menu.html', takes_context=True)
def footer_menu(context, root, current_page=None):
    """Render the footer menu from *root*'s live, show-in-menus children.

    Same active-flag logic as the main menu: exact URL match for root,
    prefix match for each child.
    """
    menu_pages = root.get_children().live().in_menu()
    has_current = bool(current_page)
    root.active = has_current and current_page.url == root.url
    for entry in menu_pages:
        entry.active = has_current and current_page.url.startswith(entry.url)
    return {'request': context['request'], 'root': root,
            'current_page': current_page, 'menu_pages': menu_pages}
|
It’s a scary day for S.H.Figuarts Mario figures everywhere… If he wasn’t traumatized enough by the enemies from our last Diorama Play Set D, he will when these formidable fellows come around. Introducing the ever menacing Diorama Play Set E! Neither the ground nor the sky is safe with Hammer Bro, Spiny and Lakitu coming your way. This set also includes a display stage and support parts.
£37.99 Estimated Release: June 2019.
£30.99 Estimated Release: June 2019.
£43.99 Estimated Release: June 2019.
£124.99 Estimated Release: June 2019.
|
#
# Copyright 2001 - 2006 Ludek Smid [http://www.ospace.net/]
#
# This file is part of IGE - Outer Space.
#
# IGE - Outer Space is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# IGE - Outer Space is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with IGE - Outer Space; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
import pygameui as ui
from osci import client, gdata, res
class ChangeQtyDlg:
    """Modal dialog asking the user for a new (integer) quantity.

    The entered value ends up in ``self.quantity`` (``None`` when the
    dialog is cancelled); an optional callback supplied to display() is
    invoked after a successful confirmation.
    """
    def __init__(self, app):
        self.app = app
        self.createUI()
    def display(self, quantity, confirmAction = None):
        # Show the dialog pre-filled with *quantity*; *confirmAction*
        # (if given) is called once the user confirms a valid value.
        self.confirmAction = confirmAction
        self.win.vQuantity.text = str(quantity)
        self.win.show()
        self.app.setFocus(self.win.vQuantity)
        # register for updates
        if self not in gdata.updateDlgs:
            gdata.updateDlgs.append(self)
    def hide(self):
        self.win.setStatus(_("Ready."))
        self.win.hide()
        # unregister updates
        if self in gdata.updateDlgs:
            gdata.updateDlgs.remove(self)
    def update(self):
        # NOTE(review): self.show() is not defined on this class (only
        # display()/hide() exist), so an update triggered via
        # gdata.updateDlgs would raise AttributeError -- confirm whether
        # self.win.show() was intended.
        self.show()
    def onOK(self, widget, action, data):
        # Validate the entry as an integer; on bad input keep the dialog
        # open and show a status hint instead.
        try:
            self.quantity = int(self.win.vQuantity.text)
        except ValueError:
            self.win.setStatus(_("Specify quantity (1, 2, 3, ...)."))
            return
        self.hide()
        if self.confirmAction:
            self.confirmAction()
    def onCancel(self, widget, action, data):
        # Cancelled: signal "no value" to the caller via quantity = None.
        self.quantity = None
        self.hide()
    def createUI(self):
        # Build the (initially hidden) modal window, centred on screen.
        w, h = gdata.scrnSize
        self.win = ui.Window(self.app,
            modal = 1,
            escKeyClose = 1,
            movable = 0,
            title = _('Change quantity'),
            rect = ui.Rect((w - 264) / 2, (h - 104) / 2, 264, 104),
            layoutManager = ui.SimpleGridLM(),
        )
        # creating dialog window
        self.win.subscribeAction('*', self)
        ui.Label(self.win,
            text = _("New quantity:"),
            align = ui.ALIGN_E,
            layout = (0, 1, 6, 1)
        )
        ui.Entry(self.win, id = 'vQuantity',
            align = ui.ALIGN_W,
            layout = (6, 1, 3, 1),
            #text = self.quantity,
        )
        ui.Title(self.win, layout = (0, 3, 3, 1))
        ui.TitleButton(self.win, layout = (3, 3, 5, 1), text = _("Cancel"), action = "onCancel")
        okBtn = ui.TitleButton(self.win, layout = (8, 3, 5, 1), text = _("OK"), action = 'onOK')
        # Pressing Enter activates the OK button.
        self.win.acceptButton = okBtn
|
Best match results for dancing classes & lessons in Canada + 30km.
CanadianPartyPlanning is creating the most comprehensive online marketplace in Canada for party and event planning including experts and professionals for all types of parties, celebration, events, weddings and birthdays. If you are a business providing dancing classes & lessons, please add your details to CanadianPartyPlanning.
|
import binascii
import os
from uuid import UUID
import txaio
txaio.use_twisted()
from autobahn.twisted.component import Component, run
from autobahn.twisted.util import sleep
from autobahn.twisted.xbr import SimpleSeller
from autobahn.wamp.types import PublishOptions
# Market maker address as a hex string (overridable via the environment),
# then converted to raw bytes for the seller API.
market_maker_adr = os.environ.get('XBR_MARKET_MAKER_ADR', '0x3e5e9111ae8eb78fe1cc3bb8915d5d461f3ef9a9')
print('market_maker_adr', market_maker_adr)
# strip the leading "0x" before hex-decoding
market_maker_adr = binascii.a2b_hex(market_maker_adr[2:])
# Seller private key: same hex-string -> raw-bytes treatment.
seller_priv_key = os.environ.get('XBR_SELLER_PRIVKEY', '0xadd53f9a7e588d003326d1cbf9e4a43c061aadd9bc938c843a79e7b4fd2ad743')
print('seller_priv_key', seller_priv_key)
seller_priv_key = binascii.a2b_hex(seller_priv_key[2:])
# WAMP component: transport/realm come from the environment with local
# defaults; the decoded key material reaches the session via `extra`.
comp = Component(
    transports=os.environ.get('XBR_INSTANCE', 'ws://edge1:8080/ws'),
    realm=os.environ.get('XBR_REALM', 'realm1'),
    extra={
        'market_maker_adr': market_maker_adr,
        'seller_privkey': seller_priv_key,
    }
)
# Flipped by the on_join/on_leave handlers to control the publish loop.
running = False
@comp.on_join
async def joined(session, details):
    """On session join: start a SimpleSeller offering one API on one topic,
    then publish encrypted events once a second until ``running`` clears."""
    print('Seller session joined', details)
    global running
    running = True
    # market_maker_adr = binascii.a2b_hex(session.config.extra['market_maker_adr'][2:])
    # The address was already hex-decoded at module level and travels in
    # session.config.extra as raw bytes.
    market_maker_adr = session.config.extra['market_maker_adr']
    print('Using market maker adr:', session.config.extra['market_maker_adr'])
    # seller_privkey = binascii.a2b_hex(session.config.extra['seller_privkey'][2:])
    seller_privkey = session.config.extra['seller_privkey']
    # Fixed API id (as raw UUID bytes) and topic this seller offers.
    api_id = UUID('627f1b5c-58c2-43b1-8422-a34f7d3f5a04').bytes
    topic = 'io.crossbar.example'
    counter = 1
    seller = SimpleSeller(market_maker_adr, seller_privkey)
    # 35 XBR, expressed in base units (10**18 per XBR -- the same factor
    # used when displaying the balance below).
    price = 35 * 10 ** 18
    # NOTE(review): interval is presumably a rotation interval in seconds
    # -- confirm against SimpleSeller.add() before relying on it.
    interval = 10
    seller.add(api_id, topic, price, interval, None)
    balance = await seller.start(session)
    # Convert base units back to whole XBR for display.
    balance = int(balance / 10 ** 18)
    print("Remaining balance: {} XBR".format(balance))
    # Publish loop: wrap (encrypt) each payload via the seller, publish
    # with acknowledgement, then sleep a second.
    while running:
        payload = {'data': 'py-seller', 'counter': counter}
        key_id, enc_ser, ciphertext = await seller.wrap(api_id,
                                                        topic,
                                                        payload)
        pub = await session.publish(topic, key_id, enc_ser, ciphertext,
                                    options=PublishOptions(acknowledge=True))
        print('Published event {}: {}'.format(pub.id, payload))
        counter += 1
        await sleep(1)
@comp.on_leave
def left(session, details):
    """Stop the publish loop when the WAMP session goes away."""
    global running
    running = False
    print('Seller session left', details)
# Script entry point: run the component (blocks until it stops).
if __name__ == '__main__':
    run([comp])
|
As a restaurant manager or operator, you know that food cost is one of your biggest challenges. You’re under continuing pressure to keep this expense as low as possible, despite external forces — such as weather events that affect crops and can drive up prices — that are well beyond your control. The good news, however, is that there’s something you can control: choosing a solid inventory management solution to get a grip on your food costs. Learn why this technology has become indispensable for many restaurants today.
Take a good look at your sales history. This is the jumping off point to understand exactly what is happening with the inventory coming through your establishment and the menu items emerging from your kitchen. Perhaps the chicken parm sells out every time but the seared tuna, not so much.
By understanding trends and sales patterns, you can take corrective measures to right-size your inventory. This will help to reduce costs by eliminating as much food waste as possible. If you have too much of an ingredient on hand, you may have to mark down a particular dish or otherwise risk spoilage and waste. And if you have too little of an ingredient used in a popular dish, you could be missing out on sales.
Many restaurants use an inventory management solution to simplify the daily task of taking stock of the ingredients on hand. Instead of manually reviewing the contents of your fridge, freezer and pantry, tap into your inventory management system to get a quick status of your ingredients. With these insights, you can quickly fill in any holes to avoid lost revenue for the day.
Automating your processes is perhaps the secret weapon for reducing food cost. This is where an inventory management solution really does the heavy lifting. When you automate your processes, you save time, maximize labor output, boost productivity and of course — start to turn food waste around.
Take advantage of an inventory management solution’s wealth of features. Reduce the burden on you and your staff by programming automatic ordering, which leverages up-to-the-minute sales and inventory data to determine when a particular ingredient needs to be replenished.
Another way to decrease food waste is to improve order accuracy. For example, a kitchen display system with a user-friendly screen and interface improves order communication, preventing some of the human errors that occur when illegible handwriting is involved. Wait staff equipped with tablets can take orders more quickly and effectively and send them to the kitchen while still tableside. And restaurants that choose self-service kiosks allow customers to customize their orders and translate them seamlessly to the kitchen, circumventing anything getting “lost in translation.” All of these processes reduce errors and decrease food waste. Automating these processes also provides an opportunity to integrate them with your inventory management system, enabling you to have a real-time view of inventory and place orders as stock falls below a certain amount.
If you haven’t yet deployed an inventory management solution, you’re missing out on the opportunity to reduce food waste, boost productivity and simplify your operations. What is it costing you to not have tight control over inventory and operations?
|
# -*- coding: UTF-8 -*-
# Copyright 2014-2015 Luc Saffre
# This file is part of Lino Welfare.
#
# Lino Welfare is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Lino Welfare is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with Lino Welfare. If not, see
# <http://www.gnu.org/licenses/>.
"""
"""
from __future__ import unicode_literals
from __future__ import print_function
import logging
logger = logging.getLogger(__name__)
from django.db import models
from django.utils.translation import ugettext_lazy as _
from lino.api import dd
from lino import mixins
# Shortcut to this plugin's configuration (supplies holder_model below).
config = dd.plugins.badges
class Badge(mixins.BabelNamed):
    """A kind of badge that can be awarded to holders (see Award).
    Naming comes from the BabelNamed mixin."""
    class Meta:
        verbose_name = _("Badge")
        verbose_name_plural = _("Badges")
class Badges(dd.Table):
    """Site-wide table of all Badge rows; restricted to site staff."""
    model = 'badges.Badge'
    required_roles = dd.login_required(dd.SiteStaff)
class Award(dd.Model):
    """The fact that a given badge was awarded to a given holder on a
    given date."""
    class Meta:
        verbose_name = _("Badge Award")
        verbose_name_plural = _("Badge Awards")
    # Who received the badge; the target model is configurable through
    # the plugin's holder_model setting.
    holder = dd.ForeignKey(
        config.holder_model,
        verbose_name=_("Holder"))
    badge = dd.ForeignKey('badges.Badge')
    # Defaults to "today" when the award is recorded.
    date = models.DateField(
        _("Date"), default=dd.today)
    # Free-text outcome and remark fields, both optional.
    result = models.CharField(
        _("Result"),
        blank=True, max_length=200)
    remark = models.CharField(
        _("Remark"),
        blank=True, max_length=200)
class Awards(dd.Table):
    """Site-wide table of all Award rows; restricted to site staff."""
    model = 'badges.Award'
    required_roles = dd.login_required(dd.SiteStaff)
class AwardsByHolder(Awards):
    """Awards received by a given holder (slave table on ``holder``);
    visible to any authenticated user."""
    label = _("Awards")
    required_roles = dd.login_required()
    master_key = 'holder'
    column_names = 'date badge result remark'
    auto_fit_column_widths = True
class AwardsByBadge(Awards):
    """Awards of a given badge (slave table on ``badge``); visible to
    any authenticated user."""
    label = _("Awards")
    required_roles = dd.login_required()
    master_key = 'badge'
    column_names = 'date holder result remark'
    auto_fit_column_widths = True
|
One of East Africa’s leading apparel and household goods retailers, Deacons East Africa (EA), posted a net loss after tax of KSh180.4 million for the first half of 2017, widening from the KSh52.6 million loss reported last year.
Deacons’ revenue increased five per cent to KSh1.07 billion compared to Sh1.03 billion in a similar period last year, while net operating profit shrunk by 32 per cent.
Deacons EA chief executive Muchiri Wahome attributed the loss to Kenya’s elections, which reduced consumer demand and spending.
It should be noted that Centum Investments is set to buy a 5.53 per cent stake in the firm following an agreement to purchase Aureos East Africa Fund’s entire stake.
Deacons has more than 40 stores spread across the East African region including Kenya, Rwanda, Uganda, and Mauritius. It has also been going through financial turmoil with an operating loss of Ksh 385 million for the 2016 Financial Year from an operating profit of Ksh141.6 million in the 2015 Financial Year.
|
'''
Rogentos IrcBot: Irc Bot
Copyright (C) Blaga Florentin Gabriel
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
#!/usr/bin/env python
import socket
import sys
import datetime
from time import sleep
# IRC nicks (compared lower-cased) allowed to use the admin-only commands.
root_admins = ["bgs", "blacknoxis"]
class BotCore:
    """Minimal IRC bot (Python 2): connects to a server, greets users who
    join its home channel and answers dot-prefixed commands, some of
    which are restricted to the nicks in root_admins."""
    def __init__(self, host, port, nick, channel, password=""):
        # NOTE(review): *password* is accepted but never used below --
        # confirm whether server/NickServ auth was intended.
        self.irc_host = host
        self.irc_port = port
        self.irc_nick = nick
        self.irc_channel = channel
        self.joined_channels = []
        self.irc_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.is_connected = False
        self.reconnect = False
        self.command = ""
        # Connects immediately; connect() ends in the blocking listen()
        # loop, so the constructor never returns during normal operation.
        self.connect()
    def connect(self):
        """Open the socket, register with the server, join the home
        channel and enter the receive loop."""
        self.reconnect = True
        try:
            self.irc_sock.connect (( self.irc_host, self.irc_port))
        except:
            # NOTE(review): bare except -- a failed connect is only
            # printed and the code still proceeds to send/listen on the
            # unconnected socket.
            print ("Error: Could not connect to IRC; Host: %s Port: %s" % (self.irc_host, self.irc_port))
        print ("Connected to: %s:%s" %(self.irc_host,self.irc_port))
        # Standard IRC registration: USER, NICK, then JOIN.
        self.irc_sock.send("USER %s %s %s :This bot belongs to BGS.\n" % (self.irc_nick, self.irc_nick, self.irc_nick))
        self.irc_sock.send("NICK %s\n" % self.irc_nick)
        self.irc_sock.send("JOIN %s\n" % self.irc_channel)
        self.is_connected = True
        self.listen()
    def listen(self):
        """Receive loop: answer PINGs, greet JOINs on the home channel
        and dispatch dot-prefixed PRIVMSG commands."""
        while self.is_connected:
            recv = self.irc_sock.recv(4096)
            recv = recv.strip('\n\r')
            print recv
            # Keep-alive: any payload containing "PING" gets a PONG back.
            if str(recv).find ("PING") != -1:
                self.irc_sock.send("PONG :pingis\n")
            if str(recv).find ("JOIN") != -1:
                # ":nick!user@host JOIN #chan" -> extract nick and channel.
                irc_user_nick = str(recv).split()[0].split('!')[0].split(':')[1]
                channel = str(recv).split()[2]
                if channel == self.irc_channel and irc_user_nick != self.irc_nick:
                    # Romanian greeting: "Welcome <nick> to channel <chan>!"
                    self.send_message_to_channel("""Bine ai venit %s pe canalul %s! \n""" % (irc_user_nick, channel) , channel)
            if str(recv).find ("PRIVMSG") != -1:
                self.irc_user_nick = str(recv).split()[0].split('!')[0].split(':')[1]
                irc_user_message = self.message_to_data(str(recv))
                print ( self.irc_user_nick + ": " + ''.join(irc_user_message))
                try:
                    # Commands start with "."; when the bot is addressed
                    # privately the reply target is the home channel,
                    # otherwise the channel the message came from.
                    if (''.join(irc_user_message)[0] == "."):
                        if str(recv).split()[2] == self.irc_nick:
                            self.command = ''.join(irc_user_message)[1:]
                            self.process_command(self.irc_user_nick.lower(), self.irc_channel)
                        else:
                            self.command = ''.join(irc_user_message)[1:]
                            self.process_command(self.irc_user_nick.lower(), ((str(recv)).split()[2]))
                except IndexError:
                    # Empty message body -- nothing to dispatch.
                    pass
        if self.reconnect:
            # NOTE(review): connect() calls listen() again, so every
            # reconnect deepens the call stack -- confirm this recursion
            # is intended.
            self.connect()
    def message_to_data(self, message):
        """Strip the IRC prefix/verb/target words and return the message
        payload, split on ':'."""
        data = message.split()
        data = ' '.join(data[3:]).split(':')[1:]
        return data
    def send_message_to_channel(self,message,channel):
        """Echo locally, then PRIVMSG *message* to *channel* (a channel
        name or a nick)."""
        print (( "%s: %s") % (self.irc_nick, message))
        self.irc_sock.send( (("PRIVMSG %s :%s\r\n") % (channel, message)).encode() )
    def process_command(self, user, channel):
        """Dispatch one dot-command; the first group of commands is only
        honoured when *user* is in root_admins."""
        if (len(self.command.split()) == 0):
            return
        command = (self.command).lower()
        command = command.split()
        if (user in root_admins):
            if (command[0] == 'help'):
                # Admin help is sent privately to the requester, with
                # small delays between lines (anti-flood).
                self.send_message_to_channel("""Available Admin Only Commands:\n""", self.irc_user_nick)
                sleep(0.5)
                self.send_message_to_channel(""".jchs chan1 chan2 chan3 chan4 \n""", self.irc_user_nick)
                sleep(0.5)
                self.send_message_to_channel("""Join speciffied channels.\n""", self.irc_user_nick)
                sleep(0.5)
                self.send_message_to_channel(""".gmsg <message> \n""", self.irc_user_nick)
                sleep(0.5)
                self.send_message_to_channel("""Send a global message to joined channels! \n""", self.irc_user_nick)
                sleep(0.5)
                self.send_message_to_channel(""".say <message>""", self.irc_user_nick)
                sleep(0.5)
                self.send_message_to_channel("""Write message on channel.""", self.irc_user_nick)
            # .say: repeat the rest of the command on the reply target.
            if (command[0] == "say"):
                self.send_message_to_channel( ' '.join(command[1:]), channel)
            # .jchs: join every listed channel and remember it for .gmsg.
            if (command[0] == 'jchs'):
                channels = command[1:]
                for c in channels:
                    self.irc_sock.send("JOIN %s\n" % c)
                    self.joined_channels.append(c)
            # .gmsg: broadcast to all channels joined via .jchs.
            if (command[0] == 'gmsg'):
                for c in self.joined_channels:
                    self.send_message_to_channel( ' '.join(command[1:]), c )
        # Unprivileged commands follow.
        if (command[0] == "hello"):
            self.send_message_to_channel("Hello to you too, %s . Today is : %s" % (user, datetime.datetime.now().strftime("%Y-%m-%d %H:%M")), channel)
        if (command[0] == "help"):
            self.send_message_to_channel("""Available Unprivileged Commands:\n""", self.irc_user_nick)
            self.send_message_to_channel(""".hello""", self.irc_user_nick)
            self.send_message_to_channel("""Say hi!""", self.irc_user_nick)
        if (command[0] == "owner"):
            self.send_message_to_channel("""I belong to %s.""" % root_admins[0] , channel)
# Script entry point: connect the bot (BotCore.__init__ blocks in listen()).
if __name__ == '__main__':
    BotCore("irc.freenode.net", 6667, "DarthNoxis", "#rogentos-dezvoltare")
|
Your product not on there? Not a problem –we can probably still pack it! Just get in touch to find out.
|
from dependencies.dependency import ClassSecurityInfo
from dependencies.dependency import schemata
from dependencies import atapi
from dependencies.dependency import registerType
from dependencies.dependency import getToolByName
from lims.browser.bika_listing import BikaListingView
from lims.config import PROJECTNAME
from lims import bikaMessageFactory as _
from lims.utils import t
from lims.content.bikaschema import BikaFolderSchema
from lims.interfaces import IContainerTypes
from dependencies.dependency import IViewView
from dependencies.dependency import IFolderContentsView
from dependencies.folder import ATFolder, ATFolderSchema
from dependencies.dependency import implements
from operator import itemgetter
class ContainerTypesView(BikaListingView):
    """Folder listing of ContainerType setup items.

    Offers Active / Dormant / All filter tabs, an "Add" action, and
    activate/deactivate transitions on the listed items.
    """
    implements(IFolderContentsView, IViewView)

    def __init__(self, context, request):
        super(ContainerTypesView, self).__init__(context, request)
        self.catalog = 'bika_setup_catalog'
        self.contentFilter = {'portal_type': 'ContainerType',
                              'sort_on': 'sortable_title'}
        self.context_actions = {_('Add'):
                                {'url': 'createObject?type_name=ContainerType',
                                 'icon': '++resource++bika.lims.images/add.png'}}
        self.title = self.context.translate(_("Container Types"))
        self.icon = self.portal_url + "/++resource++bika.lims.images/container_big.png"
        self.description = ""
        self.show_sort_column = False
        self.show_select_row = False
        self.show_select_column = True
        self.pagesize = 25
        self.columns = {
            'Title': {'title': _('Container Type'),
                      'index': 'sortable_title'},
            'Description': {'title': _('Description'),
                            'index': 'description',
                            'toggle': True},
        }
        # One entry per filter tab; 'contentFilter' is merged into the
        # catalog query, 'transitions' lists the workflow actions offered.
        self.review_states = [
            {'id': 'default',
             'title': _('Active'),
             'contentFilter': {'inactive_state': 'active'},
             'transitions': [{'id': 'deactivate'}, ],
             'columns': ['Title',
                         'Description']},
            {'id': 'inactive',
             'title': _('Dormant'),
             'contentFilter': {'inactive_state': 'inactive'},
             'transitions': [{'id': 'activate'}, ],
             'columns': ['Title',
                         'Description']},
            {'id': 'all',
             'title': _('All'),
             'contentFilter': {},
             'columns': ['Title',
                         'Description']},
        ]

    def folderitems(self):
        """Post-process the base listing items: fill in the Description
        column and render the title as a link to the object."""
        items = BikaListingView.folderitems(self)
        # Iterate the items directly (mutating the dicts in place) instead
        # of the original `for x in range(len(items))` index loop, and use
        # `'obj' in item` instead of Python-2-only dict.has_key().
        for item in items:
            if 'obj' not in item:
                continue
            obj = item['obj']
            item['Description'] = obj.Description()
            item['replace']['Title'] = "<a href='%s'>%s</a>" % \
                (item['url'], item['Title'])
        return items
schema = ATFolderSchema.copy()
class ContainerTypes(ATFolder):
    """Folderish setup container that holds ContainerType objects."""
    implements(IContainerTypes)
    # Hide the default 'Contents' tab; listing is provided by
    # ContainerTypesView instead.
    displayContentsTab = False
    schema = schema
# Finalize the copied schema as a folderish type with discussion moved
# off, then register the content class with Archetypes for this product.
schemata.finalizeATCTSchema(schema, folderish = True, moveDiscussion = False)
atapi.registerType(ContainerTypes, PROJECTNAME)
|
E xperiential marketing goes far beyond just a brand activation or event. The idea of “experiential” can be (and should be) implemented in every touchpoint of a consumer experience with any brand, whether it be a restaurant or a car shop. However, how do you define a touchpoint and what exactly makes something be experiential?
Have you ever been to a car repair or car wash where you didn’t want to touch a single thing in sight? After that, you may have tried another place where they greeted you warmly then invited you inside to wait for your car, where you had cozy modern furniture to sit on, a coffee machine with snacks, a play area for your kids and an option to be dropped off somewhere for the long waits. What a completely different experience, right? After all, both places are giving the same service of washing or repairing your car, but which one would you go back to next time?
There is not a specific set of standards to make something “experiential.” Creating an experience for your customer varies based on what your brand, product or service is and how your customer interacts with it. An experiential interaction for an online brand will be hugely different than an experiential interaction for a hospitality or retail brand.
One way to consider how to create an experience for your customer rather than just an interaction or transaction is to define your customer touchpoints and create an enhanced experience at each one. A little “cheat sheet” for creating an experience is stimulating as many of the five senses as you can with each interaction – smell, sight, sound, touch and taste. A touchpoint is the different ways your customer interacts with you or the brand through their purchase journey. A purchase journey is not just the actual purchase of a product or service but rather what led the customer to the product or service, the actual purchase or experience with it, and the interaction after the sale or transaction was made.
One way to illustrate customer touchpoints and experiential interactions is to show an example of someone who absolutely killed it with experiential with their brand.
I would like to introduce you to one of my favorite restaurants in Cabo called Acre. It also provides lodging in tree houses and a spa, but for most people, it is known as a must-try farm-to-table restaurant. Below I will illustrate the different touchpoints in the dining experience followed by how they created experiential components in each one. Remember – a touchpoint is your customer’s interaction with your brand through different parts of the purchase (in this case dining) journey.
After a long drive and a few winding dirt roads up through a desert hill, we finally pulled up to this highly recommended restaurant. If you have ever watched “Breaking Bad,” it somewhat reminded me of a random house in the desert that cannot possibly have anything good inside of it. We certainly wondered what the hype was about and whether we were overdressed or not. We parked the car and found our way around a cemented wall into a narrow walkway. That is when the experience really started. The long desert drive was part of the shock and delight experience we were about to have.
The narrow walkway was covered with lush trees and greenery everywhere. The air smelled fresh and tropical from the plants, and beautiful lighting led us down the right pathway. There were smaller dirt walkways off the main one that led into a little “jungle,” but we figured we should keep following the main one or else we would be totally lost. We learned that the smaller dirt walkways take you to the tree-house bungalows for overnight guests – how cool!
Beautifully lit, yet still dark and sultry, walkways led you from the entrance to the restaurant.
The walk to the main entrance was long but very mesmerizing, building excitement for what else was in store for us. Although the restaurant/hotel is very popular, there was certainly a sense of serenity and peace. You could not see the tree-house bungalows or really any people while walking in, so it made for a cool solo experience. Once we approached the main entrance with a grandiose wall and a well-lit stairway, we were greeted by a friendly smile from the hotel hostesses who showed us the way to the restaurant. The restaurant was much more brightly lit than the hotel and walkway. It was also buzzing with energy from a packed house of guests. The smell inside was also very pleasant – not like food but rather an inviting, fresh perfumed aroma.
The hostess walked us to our table where we saw the different sitting areas of the restaurant, each with its own décor and energy, from the bar to the dining room to the brick oven area. There was live music from a musician on stage closer to the bar, and we had a view of the bright big fire in the brick oven and a peek into the open kitchen just across the room from us. To the right of our table was more lush greenery that we went to explore later.
While our table was in a more intimate area, the musician could still be heard as we peeked at him through the decor.
Senses explored and stimulated: smell, sight and sound.
Once we took in all our surroundings, we were hooked and loved this place already, but the experience at the table seriously knocked my marketing geek socks off! For starters, the farm-to-table restaurant had fresh picked wildflowers at each table centerpiece – adorable. It certainly makes you think your food will be freshly picked too. Each table also had a bottle of filtered fresh water set, which is a hot commodity in Mexico considering that tap water is not drinkable (and the bottled water is never free!).
My favorite part, however, was the menus. As a restaurant marketer by experience, I appreciated the detail that went into their menus. Still illustrating the whole organic and farm-to-table thing, the menu boards were made of wood with their logo burned into the back. The menu itself was printed daily on paper to feature the changing menu, and the cocktail menu was straight on wood with each word and detail burned into the wood.
The drink coaster was also the same wood with cork on top, and the napkins were a beautiful gray linen. Everything in the table décor looked organic, just like their menu presents itself to be. It was a beautiful presentation.
Adorable table setting, complete with differentiated menus and wildflowers that fit their theme.
Sometimes even the most common things can have small touches and details that set it apart from the norm – THAT is what building an experience is all about. For example, the bread basket was served to us in a cute basket with a napkin and aromatic fresh herbs surrounding the bread.
Even the bread basket presentation offered something new and unique, stimulating multiple senses.
The cocktail I ordered was served with a burning lime half that was theatrically lit as the drink was presented – a feast for the eyes! The lime was then removed before the drink was handed over…safety first!
ACRE created an experience with the cocktail presentation instead of just dropping it on the table.
The entrée was served artistically on a wooden cutting board and arranged perfectly. The pita bread was on the side, the fresh, garden-grown veggies were in the middle, and the juicy protein on the other side. It not only looked gorgeous, but it SMELLED and TASTED delicious. The presentation also made it perfect for sharing (rather than having it on traditional plates).
This kind of presentation encourages sharing, another key component to a restaurant experience.
Senses explored and stimulated: sight, smell and taste.
Post dinner, we decided to check out the grounds. Everywhere you walk you see lush greenery coupled with romantically lit walkways. The first stop was to the bathroom, which did NOT disappoint. It felt like a jungle outhouse that was better than your best powder room at home. You entered through a cement wall/walkway to a ceiling made of wooden sticks and hanging vines.
Not a single detail was missing, not even in the bathrooms, to create a full 360 degree experience at ACRE.
The agave plants planted around the grounds are the largest I have ever seen – almost as tall as me! They fit in perfectly with the tequila I drank all night and the Mexican surroundings we came to see! Of course, I had to touch them and get plenty of pictures with them.
The agave plants were as tall as me, and they were EVERYWHERE. Is it bad that I kept touching them?
As we made our way around, we ran into a gorgeous open space filled with mango trees – yes, fresh mangos! All I could think about is the awesome events I could throw for someone there. In case you are wondering, I am indeed keeping a list of awesome venues for incredible, out of the ordinary, events in the future!
The grounds were gorgeous, and I can only imagine the potential of fabulous events among the mango trees!
Senses explored and stimulated: sight and touch (and the fresh air and lush plants smell!).
The key to creating an experience, or making your marketing experiential rather than traditional, is to build as many of the five senses into every customer touchpoint as they take a journey with your brand or product.
As a refresh, an experience stimulates as many of the five senses as possible with each interaction – smell, sight, sound, touch and taste – to create an association or memory to the consumer.
A touchpoint is the different ways your customer interacts with you or the brand through their purchase journey.
A purchase journey is what led the customer to the product or service, the actual purchase or experience with it, and the interaction after the sale or transaction was made. Touching on every part of that journey from the beginning (whether it started with a social media ad or by stumbling upon it physically) to the end with experiential elements will differentiate your brand from the rest, much like Acre stood out to us from many other nice restaurants in Cabo!
If you are interested in learning more about creating a memorable trip or event, please reach out on our contact form. Thank you to Acre for the amazing dining experience!
|
# import gc
# import os
# import hashlib
# import inspect
# import shutil
# import zipfile
# from juju.errors import CharmError, FileNotFound
# from juju.charm.errors import InvalidCharmFile
# from juju.charm.metadata import MetaData
# from juju.charm.directory import CharmDirectory
# from juju.charm.bundle import CharmBundle
# from juju.lib import serializer
# from juju.lib.filehash import compute_file_hash
# from juju.charm import tests
# from juju.charm.tests.test_repository import RepositoryTestBase
# sample_directory = os.path.join(
# os.path.dirname(
# inspect.getabsfile(tests)), "repository", "series", "dummy")
# class DirectoryTest(RepositoryTestBase):
# def setUp(self):
# super(DirectoryTest, self).setUp()
# # Ensure the empty/ directory exists under the dummy sample
# # charm. Depending on how the source code is exported,
# # empty directories may be ignored.
# empty_dir = os.path.join(sample_directory, "empty")
# if not os.path.isdir(empty_dir):
# os.mkdir(empty_dir)
# def copy_charm(self):
# dir_ = os.path.join(self.makeDir(), "sample")
# shutil.copytree(sample_directory, dir_)
# return dir_
# def delete_revision(self, dir_):
# os.remove(os.path.join(dir_, "revision"))
# def set_metadata_revision(self, dir_, revision):
# metadata_path = os.path.join(dir_, "metadata.yaml")
# with open(metadata_path) as f:
# data = serializer.yaml_load(f.read())
# data["revision"] = 999
# with open(metadata_path, "w") as f:
# f.write(serializer.yaml_dump(data))
# def test_metadata_is_required(self):
# directory = self.makeDir()
# self.assertRaises(FileNotFound, CharmDirectory, directory)
# def test_no_revision(self):
# dir_ = self.copy_charm()
# self.delete_revision(dir_)
# charm = CharmDirectory(dir_)
# self.assertEquals(charm.get_revision(), 0)
# with open(os.path.join(dir_, "revision")) as f:
# self.assertEquals(f.read(), "0\n")
# def test_nonsense_revision(self):
# dir_ = self.copy_charm()
# with open(os.path.join(dir_, "revision"), "w") as f:
# f.write("shifty look")
# err = self.assertRaises(CharmError, CharmDirectory, dir_)
# self.assertEquals(
# str(err),
# "Error processing %r: invalid charm revision 'shifty look'" % dir_)
# def test_revision_in_metadata(self):
# dir_ = self.copy_charm()
# self.delete_revision(dir_)
# self.set_metadata_revision(dir_, 999)
# log = self.capture_logging("juju.charm")
# charm = CharmDirectory(dir_)
# self.assertEquals(charm.get_revision(), 999)
# self.assertIn(
# "revision field is obsolete. Move it to the 'revision' file.",
# log.getvalue())
# def test_competing_revisions(self):
# dir_ = self.copy_charm()
# self.set_metadata_revision(dir_, 999)
# log = self.capture_logging("juju.charm")
# charm = CharmDirectory(dir_)
# self.assertEquals(charm.get_revision(), 1)
# self.assertIn(
# "revision field is obsolete. Move it to the 'revision' file.",
# log.getvalue())
# def test_set_revision(self):
# dir_ = self.copy_charm()
# charm = CharmDirectory(dir_)
# charm.set_revision(123)
# self.assertEquals(charm.get_revision(), 123)
# with open(os.path.join(dir_, "revision")) as f:
# self.assertEquals(f.read(), "123\n")
# def test_info(self):
# directory = CharmDirectory(sample_directory)
# self.assertTrue(directory.metadata is not None)
# self.assertTrue(isinstance(directory.metadata, MetaData))
# self.assertEquals(directory.metadata.name, "dummy")
# self.assertEquals(directory.type, "dir")
# def test_make_archive(self):
# # make archive from sample directory
# directory = CharmDirectory(sample_directory)
# f = self.makeFile(suffix=".charm")
# directory.make_archive(f)
# # open archive in .zip-format and assert integrity
# from zipfile import ZipFile
# zf = ZipFile(f)
# self.assertEqual(zf.testzip(), None)
# # assert included
# included = [info.filename for info in zf.infolist()]
# self.assertEqual(
# set(included),
# set(("metadata.yaml", "empty/", "src/", "src/hello.c",
# "config.yaml", "hooks/", "hooks/install", "revision")))
# def test_as_bundle(self):
# directory = CharmDirectory(self.sample_dir1)
# charm_bundle = directory.as_bundle()
# self.assertEquals(type(charm_bundle), CharmBundle)
# self.assertEquals(charm_bundle.metadata.name, "sample")
# self.assertIn("sample-1.charm", charm_bundle.path)
# total_compressed = 0
# total_uncompressed = 0
# zip_file = zipfile.ZipFile(charm_bundle.path)
# for n in zip_file.namelist():
# info = zip_file.getinfo(n)
# total_compressed += info.compress_size
# total_uncompressed += info.file_size
# self.assertTrue(total_compressed < total_uncompressed)
# def test_as_bundle_file_lifetime(self):
# """
# The temporary bundle file created should have a life time
# equivalent to that of the directory object itself.
# """
# directory = CharmDirectory(self.sample_dir1)
# charm_bundle = directory.as_bundle()
# gc.collect()
# self.assertTrue(os.path.isfile(charm_bundle.path))
# del directory
# gc.collect()
# self.assertFalse(os.path.isfile(charm_bundle.path))
# def test_compute_sha256(self):
# """
# Computing the sha256 of a directory will use the bundled
# charm, since the hash of the file itself is needed.
# """
# directory = CharmDirectory(self.sample_dir1)
# sha256 = directory.compute_sha256()
# charm_bundle = directory.as_bundle()
# self.assertEquals(type(charm_bundle), CharmBundle)
# self.assertEquals(compute_file_hash(hashlib.sha256,
# charm_bundle.path),
# sha256)
# def test_as_bundle_with_relative_path(self):
# """
# Ensure that as_bundle works correctly with relative paths.
# """
# current_dir = os.getcwd()
# os.chdir(self.sample_dir2)
# self.addCleanup(os.chdir, current_dir)
# charm_dir = "../%s" % os.path.basename(self.sample_dir1)
# directory = CharmDirectory(charm_dir)
# charm_bundle = directory.as_bundle()
# self.assertEquals(type(charm_bundle), CharmBundle)
# self.assertEquals(charm_bundle.metadata.name, "sample")
# def test_charm_base_inheritance(self):
# """
# get_sha256() should be implemented in the base class,
# and should use compute_sha256 to calculate the digest.
# """
# directory = CharmDirectory(self.sample_dir1)
# bundle = directory.as_bundle()
# digest = compute_file_hash(hashlib.sha256, bundle.path)
# self.assertEquals(digest, directory.get_sha256())
# def test_as_directory(self):
# directory = CharmDirectory(self.sample_dir1)
# self.assertIs(directory.as_directory(), directory)
# def test_config(self):
# """Validate that ConfigOptions are available on the charm"""
# from juju.charm.tests.test_config import sample_yaml_data
# directory = CharmDirectory(sample_directory)
# self.assertEquals(directory.config.get_serialization_data(),
# sample_yaml_data)
# def test_file_type(self):
# charm_dir = self.copy_charm()
# os.mkfifo(os.path.join(charm_dir, "foobar"))
# directory = CharmDirectory(charm_dir)
# e = self.assertRaises(InvalidCharmFile, directory.as_bundle)
# self.assertIn("foobar' Invalid file type for a charm", str(e))
# def test_internal_symlink(self):
# charm_path = self.copy_charm()
# external_file = self.makeFile(content='baz')
# os.symlink(external_file, os.path.join(charm_path, "foobar"))
# directory = CharmDirectory(charm_path)
# e = self.assertRaises(InvalidCharmFile, directory.as_bundle)
# self.assertIn("foobar' Absolute links are invalid", str(e))
# def test_extract_symlink(self):
# charm_path = self.copy_charm()
# external_file = self.makeFile(content='lorem ipsum')
# os.symlink(external_file, os.path.join(charm_path, "foobar"))
# directory = CharmDirectory(charm_path)
# e = self.assertRaises(InvalidCharmFile, directory.as_bundle)
# self.assertIn("foobar' Absolute links are invalid", str(e))
|
Critically acclaimed singer, songwriter and storyteller Josh Wilson will release his fourth project with Sparrow Records, 'Carry Me' on 9th April 2013.
Sharing about a very personal health issue dealing with anxiety and panic attacks, his new songs ultimately spread the message that through life's difficult trials, it is only God who can carry us through it all.
"Philippians 4 says, 'Do not be anxious about anything... the peace of God will guard your hearts and minds in Christ Jesus,'" Wilson shares. "I prayed for that and each day would say, 'God, I can't make it without you. I need you to carry me.' And He did, and He still does. He carried me through each day even before I experienced the anxiety, but I just didn't realize it. It took these difficult moments for me to realize how much I need Jesus."
With production by Matt Bronleewe (Chris Tomlin), each of the dozen songs stands out for its rhythmic contributions coupled with the addition of strings and woodwinds to Wilson's assuring voice.
Josh Wilson is currently on the road with Third Day and Colton Dixon for The Miracle Tour.
|
# Copyright (c) 2014 by Ecreall under licence AGPL terms
# available on http://www.gnu.org/licenses/agpl.html
# licence: AGPL
# author: Amen Souissi
from pyramid.view import view_config
from dace.processinstance.core import DEFAULTMAPPING_ACTIONS_VIEWS
from pontus.default_behavior import Cancel
from pontus.form import FormView
from pontus.schema import select
from novaideo.connectors.google.content.behaviors import CreateConnector
from novaideo.connectors.google import GoogleConnectorSchema, GoogleConnector
from novaideo.content.novaideo_application import NovaIdeoApplication
from novaideo import _
@view_config(
    name='creategoogleconnector',
    context=NovaIdeoApplication,
    renderer='pontus:templates/views_templates/grid.pt',
    )
class CreateGoogleConnectorView(FormView):
    """Form view for adding a Google connector to the application root.

    Registered on NovaIdeoApplication under the 'creategoogleconnector'
    view name; submitting runs the CreateConnector behavior.
    """
    title = _('Add a Google connector')
    # Expose only the 'auth_conf' field of the connector schema in the form.
    schema = select(GoogleConnectorSchema(factory=GoogleConnector, editable=True),
                    ['auth_conf'])
    behaviors = [CreateConnector, Cancel]
    formid = 'formcreategoogleconnector'
    name = 'creategoogleconnector'
    css_class = 'panel-transparent'
# Route the CreateConnector behavior to this form view in the global
# action-to-view mapping.
DEFAULTMAPPING_ACTIONS_VIEWS.update(
    {CreateConnector: CreateGoogleConnectorView})
|
An absolutely incredible year Class 6. I am so proud of you all and wish you all the happiness and success that I know is coming your way. Dream big!
Although I am sad that I didn't get to see you all go off last night to start the next step in your journey, I know that you had a fabulous time and understand that I had a personal commitment that I had to attend.
|
# Copyright 1999-2012 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from _emerge.SubProcess import SubProcess
import sys
from portage.cache.mappings import slot_dict_class
import portage
from portage import _encodings
from portage import _unicode_encode
from portage import os
from portage.const import BASH_BINARY
import fcntl
import errno
import gzip
class SpawnProcess(SubProcess):
    """
    Constructor keyword args are passed into portage.process.spawn().
    The required "args" keyword argument will be passed as the first
    spawn() argument.
    """
    # Attribute names forwarded verbatim to portage.process.spawn()
    # whenever they are set (not None) on the instance.
    _spawn_kwarg_names = ("env", "opt_name", "fd_pipes",
        "uid", "gid", "groups", "umask", "logfile",
        "path_lookup", "pre_exec")
    __slots__ = ("args",) + \
        _spawn_kwarg_names + ("_log_file_real", "_selinux_type",)
    # File handles tracked for the lifetime of the child process:
    # 'log' (capture file), 'process' (read end of the output pipe),
    # 'stdout' (dup of our stdout, used to echo in foreground mode).
    _file_names = ("log", "process", "stdout")
    _files_dict = slot_dict_class(_file_names, prefix="")

    def _start(self):
        # Set up stdio plumbing and optional log capture, spawn the
        # child, then register an output handler with the scheduler.
        if self.fd_pipes is None:
            self.fd_pipes = {}
        fd_pipes = self.fd_pipes
        self._files = self._files_dict()
        files = self._files
        master_fd, slave_fd = self._pipe(fd_pipes)
        # Parent reads the child's output without blocking.
        fcntl.fcntl(master_fd, fcntl.F_SETFL,
            fcntl.fcntl(master_fd, fcntl.F_GETFL) | os.O_NONBLOCK)
        files.process = master_fd
        logfile = None
        if self._can_log(slave_fd):
            logfile = self.logfile
        null_input = None
        if not self.background or 0 in fd_pipes:
            # Subclasses such as AbstractEbuildProcess may have already passed
            # in a null file descriptor in fd_pipes, so use that when given.
            pass
        else:
            # TODO: Use job control functions like tcsetpgrp() to control
            # access to stdin. Until then, use /dev/null so that any
            # attempts to read from stdin will immediately return EOF
            # instead of blocking indefinitely.
            null_input = os.open('/dev/null', os.O_RDWR)
            fd_pipes[0] = null_input
        # Fall back to the parent's stdio for any fd not supplied.
        fd_pipes.setdefault(0, sys.__stdin__.fileno())
        fd_pipes.setdefault(1, sys.__stdout__.fileno())
        fd_pipes.setdefault(2, sys.__stderr__.fileno())
        # flush any pending output
        stdout_filenos = (sys.__stdout__.fileno(), sys.__stderr__.fileno())
        for fd in fd_pipes.values():
            if fd in stdout_filenos:
                sys.__stdout__.flush()
                sys.__stderr__.flush()
                break
        if logfile is not None:
            # Tee mode: route the child's stdout/stderr through our pipe
            # so _output_handler can both log and (when foregrounded)
            # echo to the original stdout.
            fd_pipes_orig = fd_pipes.copy()
            fd_pipes[1] = slave_fd
            fd_pipes[2] = slave_fd
            files.log = open(_unicode_encode(logfile,
                encoding=_encodings['fs'], errors='strict'), mode='ab')
            if logfile.endswith('.gz'):
                # Keep a handle on the raw file so _unregister can close
                # it explicitly (GzipFile does not close its fileobj).
                self._log_file_real = files.log
                files.log = gzip.GzipFile(filename='', mode='ab',
                    fileobj=files.log)
            portage.util.apply_secpass_permissions(logfile,
                uid=portage.portage_uid, gid=portage.portage_gid,
                mode=0o660)
            if not self.background:
                files.stdout = os.dup(fd_pipes_orig[1])
            output_handler = self._output_handler
        else:
            # Create a dummy pipe so the scheduler can monitor
            # the process from inside a poll() loop.
            # NOTE(review): _dummy_pipe_fd is provided elsewhere
            # (base class or subclass) — confirm.
            fd_pipes[self._dummy_pipe_fd] = slave_fd
            if self.background:
                fd_pipes[1] = slave_fd
                fd_pipes[2] = slave_fd
            output_handler = self._dummy_handler
        # Collect the spawn() kwargs from instance attributes that are set.
        kwargs = {}
        for k in self._spawn_kwarg_names:
            v = getattr(self, k)
            if v is not None:
                kwargs[k] = v
        kwargs["fd_pipes"] = fd_pipes
        kwargs["returnpid"] = True
        # 'logfile' is handled here, not by spawn() itself.
        kwargs.pop("logfile", None)
        self._reg_id = self.scheduler.register(files.process,
            self._registered_events, output_handler)
        self._registered = True
        retval = self._spawn(self.args, **kwargs)
        # The child owns slave_fd now; release the parent's copies.
        os.close(slave_fd)
        if null_input is not None:
            os.close(null_input)
        if isinstance(retval, int):
            # spawn failed
            self._unregister()
            self._set_returncode((self.pid, retval))
            self.wait()
            return
        self.pid = retval[0]
        # This object tracks the pid itself, so remove it from the
        # module-level bookkeeping list.
        portage.process.spawned_pids.remove(self.pid)

    def _can_log(self, slave_fd):
        # Subclass hook: return False to suppress log file creation.
        return True

    def _pipe(self, fd_pipes):
        """
        @type fd_pipes: dict
        @param fd_pipes: pipes from which to copy terminal size if desired.
        """
        return os.pipe()

    def _spawn(self, args, **kwargs):
        # Spawn the child, optionally wrapped for SELinux domain
        # transition; returns whatever portage.process.spawn returns
        # (a pid list with returnpid=True, or an int on failure).
        spawn_func = portage.process.spawn
        if self._selinux_type is not None:
            spawn_func = portage.selinux.spawn_wrapper(spawn_func,
                self._selinux_type)
            # bash is an allowed entrypoint, while most binaries are not
            if args[0] != BASH_BINARY:
                args = [BASH_BINARY, "-c", "exec \"$@\"", args[0]] + args
        return spawn_func(args, **kwargs)

    def _output_handler(self, fd, event):
        # Relay child output to the log file and, when foregrounded,
        # echo it to our saved stdout; unregister on EOF.
        files = self._files
        while True:
            buf = self._read_buf(fd, event)
            if buf is None:
                # not a POLLIN event, EAGAIN, etc...
                break
            if not buf:
                # EOF
                self._unregister()
                self.wait()
                break
            else:
                if not self.background:
                    write_successful = False
                    failures = 0
                    while True:
                        try:
                            if not write_successful:
                                os.write(files.stdout, buf)
                                write_successful = True
                            break
                        except OSError as e:
                            if e.errno != errno.EAGAIN:
                                raise
                            del e
                            failures += 1
                            if failures > 50:
                                # Avoid a potentially infinite loop. In
                                # most cases, the failure count is zero
                                # and it's unlikely to exceed 1.
                                raise
                            # This means that a subprocess has put an inherited
                            # stdio file descriptor (typically stdin) into
                            # O_NONBLOCK mode. This is not acceptable (see bug
                            # #264435), so revert it. We need to use a loop
                            # here since there's a race condition due to
                            # parallel processes being able to change the
                            # flags on the inherited file descriptor.
                            # TODO: When possible, avoid having child processes
                            # inherit stdio file descriptors from portage
                            # (maybe it can't be avoided with
                            # PROPERTIES=interactive).
                            fcntl.fcntl(files.stdout, fcntl.F_SETFL,
                                fcntl.fcntl(files.stdout,
                                fcntl.F_GETFL) ^ os.O_NONBLOCK)
                files.log.write(buf)
                files.log.flush()
        self._unregister_if_appropriate(event)
        return True

    def _dummy_handler(self, fd, event):
        """
        This method is mainly interested in detecting EOF, since
        the only purpose of the pipe is to allow the scheduler to
        monitor the process from inside a poll() loop.
        """
        while True:
            buf = self._read_buf(fd, event)
            if buf is None:
                # not a POLLIN event, EAGAIN, etc...
                break
            if not buf:
                # EOF
                self._unregister()
                self.wait()
                break
        self._unregister_if_appropriate(event)
        return True

    def _unregister(self):
        super(SpawnProcess, self)._unregister()
        if self._log_file_real is not None:
            # Avoid "ResourceWarning: unclosed file" since python 3.2.
            self._log_file_real.close()
            self._log_file_real = None
|
The memorial Bulgakov museum is located in the legendary flat 50 on Bolshaya Sadovaya Street, 10, where he lived in 1921-1924. This place is considered to be the real prototype for the wicked flat from his world-famous Master and Margarita novel. Today the museum exposition includes the richest collection of items connected both with Bulgakov and the epoch he lived in. Apart from the major exposition, various exhibitions, excursions and concerts take place here.
If you have not read the Master and Margarita novel yet, do it after visiting the museum with the Moscow CityPass!
The Moscow CityPass recommends visiting both Bulgakov museums: the Mikhail Bulgakov Museum and the Bulgakov's House Museum-Theatre.
12:00 - 19:00 Ticket office closes 15 min before the Museum closing time.
14:00 - 21:00 Ticket office closes 15 min before the Museum closing time.
|
import math
import copy
import json
from OpenGL.GL import *
from Box2D import *
from utils.opengl import *
from utils import *
from rocket.object import Missile
class Player():
    """The player-controlled rocket: a Box2D dynamic body plus keyboard
    handling (rotation, thrust, missile launch) and OpenGL rendering."""

    def __init__(self, keyAdapter, mouse, world, level, position):
        self.keyAdapter = keyAdapter
        self.world = world
        self.level = level
        self.mouse = mouse
        self.fuel = 500.0  # thrust budget; drained by update(), clamped at 0
        self.possessions = []
        self.rockets = 100  # missile ammunition
        self.elapsed_since_rocket = 0  # seconds since last launch (1s cooldown)
        w = 1.0
        h = 2.5
        # Five-sided hull polygon (pointed top) instead of a plain box.
        self.body = self.world.CreateDynamicBody(
            position=position,
            userData=self,
            fixtures=b2FixtureDef(
                #shape=b2PolygonShape(box=(1.0, 1.5)),
                shape=b2PolygonShape(vertices=[(-1, -1.5), (1, -1.5), (1, 0), (0, 1.5), (-1, 0)]),
                density=1,
                angularDamping=50,
                friction=5,
                #linearDamping=0.1,
                )
            )
        # Sprite quad is scaled larger than the physics fixture.
        w*=2.5
        h*=1.5
        # (texcoord, local-space vertex) pairs for the textured quad.
        self.size = [([0.0, 1.0], [-w*0.5, h*0.5]),
                     ([0.0, 0.0], [-w*0.5, -h*0.5-1]),
                     ([1.0, 0.0], [ w*0.5, -h*0.5-1]),
                     ([1.0, 1.0], [ w*0.5, h*0.5])]
        self.wasRotating = False

    def update(self, dt):
        """Advance player state by dt seconds based on current input."""
        pressed = lambda x: x in self.keyAdapter.pressed
        self.elapsed_since_rocket += dt
        # Rotation: torque impulses capped at angular speed 3.0; rotation
        # stops dead (angularVelocity = 0) when the keys are released.
        if pressed("right") or pressed("d"):
            if abs(self.body.angularVelocity) <= 3.0:
                self.body.ApplyAngularImpulse(-5.0, True)
            self.wasRotating = True
        elif pressed("left") or pressed("a"):
            if abs(self.body.angularVelocity) <= 3.0:
                self.body.ApplyAngularImpulse(5.0, True)
            self.wasRotating = True
        elif self.wasRotating:
            self.body.angularVelocity = 0
            self.wasRotating = False
        # Thrust along the body's local +Y; shift = afterburner
        # (5x force, 3x fuel burn).
        if self.fuel > 0.0 and (pressed("up") or pressed("w")):
            if pressed("shift"):
                f = self.body.GetWorldVector(localVector=(0.0, 500.0*0.5))
                self.fuel -= 3 * dt
            else:
                f = self.body.GetWorldVector(localVector=(0.0, 100.0*0.5))
                self.fuel -= 1 * dt
            p = self.body.GetWorldPoint(localPoint=(0.0, 0.0))
            self.body.ApplyForce(f, p, True)
            self.fuel = max(0.0, self.fuel)
        # Missile launch: spacebar, 1-second cooldown, limited ammo,
        # fired toward the mouse cursor in world coordinates.
        if pressed(" ") and self.elapsed_since_rocket > 1.0 and self.rockets > 0:
            self.rockets -= 1
            self.elapsed_since_rocket = 0
            position = self.body.transform.position
            #dir = Vec2d(0.0, 1.0)
            #dir.rotate(self.body.transform.angle * 180.0/3.14)
            #dir = b2Vec2(dir.x, dir.y)
            target = self.mouse["wpos"]()
            target = b2Vec2(target[0], target[1])
            dir = target - position
            dir.Normalize()
            # Spawn the missile 3 units ahead so it clears our own hull.
            position = position + dir * 3
            missile = Missile.launch(world=self.world, target=None, position=position, velocity=dir * 50)
            self.level.objects.append(missile)

    def render(self):
        """Draw the rocket sprite, choosing a texture by thrust state."""
        pressed = lambda x: x in self.keyAdapter.pressed
        texture = "textures/rocket-idle"
        if pressed("up") or pressed("left") or pressed("right") or pressed("a") or pressed("w") or pressed("d"):
            texture = "textures/rocket"
            if pressed("shift"):
                texture = "textures/rocket-afterburner"
        glColor3f(1.0, 1.0, 1.0)
        TextureManager.instance()[texture].bind()
        glEnable(GL_BLEND)
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
        glEnable(GL_TEXTURE_2D)
        glPushMatrix()
        glTranslatef(self.body.transform.position.x, self.body.transform.position.y, 0.0)
        # NOTE(review): 180.0/3.14 approximates degrees-per-radian;
        # math.degrees would be exact — confirm before changing output.
        glRotatef(self.body.transform.angle * 180.0/3.14, 0.0, 0.0, 1.0)
        glBegin(GL_QUADS)
        for v in self.size:
            glTexCoord2fv(v[0]) ; glVertex2fv(v[1])
        glEnd()
        glPopMatrix()
        glDisable(GL_TEXTURE_2D)
|
Italy's caretaker prime minister was assembling a cabinet lineup on Tuesday despite almost certain rejection by populist parties whose bid for power collapsed at the weekend.
Carlo Cottarelli, a former IMF economist known as "Mr Scissors", was tasked with naming a technocrat government on Monday after President Sergio Mattarella blocked a cabinet proposed by the far-right League and anti-establishment Five Star Movement (M5S).
Cottarelli said Italy would face new elections "after August" if parliament did not endorse his team, a near certainty given that Five Star and the League together hold a majority.
Elections could benefit League leader Matteo Salvini, however, as recent polling by IndexResearch put the League at 22 percent, five points up from its vote share in the March 4 ballot.
However, Di Maio said that Mattarella, a former constitutional court judge, had "gone beyond his legal prerogatives".
He said an impeachment trial for Mattarella, 76, would be "almost a certainty".
|
'''
Created on 8 juin 2014
@author: Romain
'''
from src.com.mailsystem.orm import Address
class AddressService:
    """CRUD helpers for ``Address`` rows.

    All methods are static and receive the ``db_users`` database facade
    explicitly; no state is kept on the class.
    """

    @staticmethod
    def listAll(db_users):
        """Return every Address row."""
        s = db_users.session()
        ret = s.query(Address).all()
        s.close()
        return ret

    @staticmethod
    def selectById(db_users, idadd):
        """Return the Address with primary key *idadd*, or None."""
        s = db_users.session()
        ret = s.query(Address).get(idadd)
        s.close()
        return ret

    @staticmethod
    def add(db_users, name):
        """Insert a new Address named *name*.

        Returns the new primary key, or -1 if the insert failed.
        """
        insertStatement = db_users.statement(Address, "insert")\
            .values(name = name)
        result = db_users.execute(insertStatement)
        if result is not None:
            return result.inserted_primary_key[0]
        return -1

    @staticmethod
    def update(db_users, idadd, name):
        """Rename the Address *idadd* to *name*; return True on success."""
        updateStatement = db_users.statement(Address, "update")\
            .where(Address.__table__.c.idaddress == idadd)\
            .values(name = name)
        # Bug fix: was ``db_user.execute`` — an undefined name that raised
        # NameError on every call; the parameter is ``db_users``.
        result = db_users.execute(updateStatement)
        return result is not None
|
Joe W. Kidd is a licensed CPA located in Gainesville, GA 30506. Joe W. Kidd has been issued a Georgia license number CPA002032. All CPAs, including Joe W. Kidd, have at the minimum an undergraduate degree in accounting, passed a rigorous national exam and adhere to mandated continuing education requirements of their states in which they are licensed. CPAs can work in private industry, education or government but most people think of CPAs during tax season as the experts in tax preparation. Their overall training in business and knowledge in principles of general law and taxation provide CPAs with the skills to help individuals with both personal and business financial decisions.
Looking for the best accountants located in Gainesville, GA? Joe W. Kidd is a CPA located in Gainesville, GA. Joe W. Kidd, CPA and other public accountants located in Gainesville, GA will help you with tax preparation, tax planning, bookkeeping, accounting services, estate and trust taxes, and much more.
Are you Joe W. Kidd, a CPA from Gainesville, GA? Upgrade your basic listing to a featured listing with client reviews and ratings.
|
# coding: utf-8
""" API to access remote Nuxeo documents for synchronization. """
import os
import unicodedata
import urllib2
from collections import namedtuple
from datetime import datetime
from dateutil import parser
from nxdrive.client.base_automation_client import BaseAutomationClient
from nxdrive.client.common import NotFound, safe_filename
from nxdrive.logging_config import get_logger
from nxdrive.options import Options
log = get_logger(__name__)
# Make the following an optional binding configuration
FILE_TYPE = 'File'
FOLDER_TYPE = 'Folder'
# Document types considered for synchronization queries.
DEFAULT_TYPES = ('File', 'Note', 'Workspace', 'Folder')

# Maximum number of children fetched per folder query; hitting this limit
# is treated as an error in get_children_info().
MAX_CHILDREN = 1000

# Data transfer objects

BaseNuxeoDocumentInfo = namedtuple('NuxeoDocumentInfo', [
    'root',  # ref of the document that serves as sync root
    'name',  # title of the document (not guaranteed to be locally unique)
    'uid',   # ref of the document
    'parent_uid',  # ref of the parent document
    'path',  # remote path (useful for ordering)
    'folderish',  # True if it can host child documents
    'last_modification_time',  # last update time
    'last_contributor',  # last contributor
    'digest_algorithm',  # digest algorithm of the document's blob
    'digest',  # digest of the document's blob
    'repository',  # server repository name
    'doc_type',  # Nuxeo document type
    'version',  # Nuxeo version
    'state',  # Nuxeo lifecycle state
    'has_blob',  # If this doc has blob
    'filename',  # Filename of document
    'lock_owner',  # lock owner
    'lock_created',  # lock creation time
    'permissions',  # permissions
])
class NuxeoDocumentInfo(BaseNuxeoDocumentInfo):
    """Data Transfer Object for doc info on the Remote Nuxeo repository"""

    # Consistency with the local client API
    def get_digest(self):
        """Return the digest of the document's blob (may be None)."""
        return self.digest
class RemoteDocumentClient(BaseAutomationClient):
    """Nuxeo document oriented Automation client

    Uses Automation standard document API. Deprecated in NuxeDrive
    since now using FileSystemItem API.
    Kept here for tests and later extraction of a generic API.
    """

    # Override constructor to initialize base folder
    # which is specific to RemoteDocumentClient
    def __init__(self, server_url, user_id, device_id, client_version,
                 proxies=None, proxy_exceptions=None,
                 password=None, token=None, repository=Options.remote_repo,
                 base_folder=None, timeout=20, blob_timeout=None,
                 cookie_jar=None, upload_tmp_dir=None, check_suspended=None):
        super(RemoteDocumentClient, self).__init__(
            server_url, user_id, device_id, client_version,
            proxies=proxies, proxy_exceptions=proxy_exceptions,
            password=password, token=token, repository=repository,
            timeout=timeout, blob_timeout=blob_timeout,
            cookie_jar=cookie_jar, upload_tmp_dir=upload_tmp_dir,
            check_suspended=check_suspended)

        # Fetch the root folder ref
        self.set_base_folder(base_folder)

    def set_base_folder(self, base_folder):
        """Resolve *base_folder* (path or uid) and cache its uid and path."""
        if base_folder is not None:
            base_folder_doc = self.fetch(base_folder)
            self._base_folder_ref = base_folder_doc['uid']
            self._base_folder_path = base_folder_doc['path']
        else:
            self._base_folder_ref, self._base_folder_path = None, None

    #
    # API common with the local client API
    #

    def get_info(self, ref, raise_if_missing=True, fetch_parent_uid=True,
                 use_trash=True, include_versions=False):
        """Return a NuxeoDocumentInfo for *ref*.

        Returns None (or raises NotFound when *raise_if_missing*) if the
        document does not exist on the server.
        """
        if not self.exists(ref, use_trash=use_trash,
                           include_versions=include_versions):
            if raise_if_missing:
                raise NotFound("Could not find '%s' on '%s'" % (
                    self._check_ref(ref), self.server_url))
            return None
        return self.doc_to_info(self.fetch(self._check_ref(ref)),
                                fetch_parent_uid=fetch_parent_uid)

    def get_content(self, ref):
        """
        Download and return the binary content of a document

        Beware that the content is loaded in memory.
        """
        if not isinstance(ref, NuxeoDocumentInfo):
            ref = self._check_ref(ref)
        return self.get_blob(ref)

    # TODO: allow getting content by streaming the response to an output file
    # See RemoteFileSystemClient.stream_content

    def get_children_info(self, ref, types=DEFAULT_TYPES, limit=MAX_CHILDREN):
        """List the non-deleted, checked-out children of *ref* as doc infos."""
        ref = self._check_ref(ref)
        query = (
            "SELECT * FROM Document"
            " WHERE ecm:parentId = '%s'"
            " AND ecm:primaryType IN ('%s')"
            " AND ecm:currentLifeCycleState != 'deleted'"
            " AND ecm:isCheckedInVersion = 0"
            " ORDER BY dc:title, dc:created LIMIT %d"
        ) % (ref, "', '".join(types), limit)

        entries = self.query(query)[u'entries']
        if len(entries) == MAX_CHILDREN:
            # TODO: how to best handle this case? A warning and return an empty
            # list, a dedicated exception?
            # Bug fix: the two adjacent literals used to concatenate without a
            # separating space ("themaximum").
            raise RuntimeError("Folder %r on server %r has more than the "
                               "maximum number of children: %d" % (
                                   ref, self.server_url, MAX_CHILDREN))

        return self._filtered_results(entries)

    def make_folder(self, parent, name, doc_type=FOLDER_TYPE):
        """Create a folderish document named *name* under *parent*."""
        # TODO: make it possible to configure context dependent:
        # - SocialFolder under SocialFolder or SocialWorkspace
        # - Folder under Folder or Workspace
        # This configuration should be provided by a special operation on the
        # server.
        parent = self._check_ref(parent)
        doc = self.create(parent, doc_type, name=name,
                          properties={'dc:title': name})
        return doc[u'uid']

    def make_file(self, parent, name, content=None, doc_type=FILE_TYPE):
        """Create a document of the given type with the given name and content

        Creates a temporary file from the content then streams it.
        """
        parent = self._check_ref(parent)
        properties = {'dc:title': name}
        # Bug fix: compare strings with '==' / '!=' rather than identity
        # ('is'), which only worked by accident through literal interning.
        if doc_type == 'Note' and content is not None:
            properties['note:note'] = content
        doc = self.create(parent, doc_type, name=name, properties=properties)
        ref = doc[u'uid']
        if doc_type != 'Note' and content is not None:
            self.attach_blob(ref, content, name)
        return ref

    def stream_file(self, parent, name, file_path, filename=None,
                    mime_type=None, doc_type=FILE_TYPE):
        """Create a document by streaming the file with the given path"""
        ref = self.make_file(parent, name, doc_type=doc_type)
        self.execute_with_blob_streaming("Blob.Attach", file_path,
                                         filename=filename, document=ref,
                                         mime_type=mime_type)
        return ref

    def update_content(self, ref, content, filename=None):
        """Update a document with the given content

        Creates a temporary file from the content then streams it.
        """
        if filename is None:
            filename = self.get_info(ref).name
        self.attach_blob(self._check_ref(ref), content, filename)

    def stream_update(
        self,
        ref,
        file_path,
        filename=None,
        mime_type=None,
        apply_versioning_policy=False,
    ):
        """Update a document by streaming the file with the given path"""
        ref = self._check_ref(ref)
        op_name = ('NuxeoDrive.AttachBlob'
                   if self.is_nuxeo_drive_attach_blob()
                   else 'Blob.Attach')
        params = {'document': ref}
        if self.is_nuxeo_drive_attach_blob():
            params.update({'applyVersioningPolicy': apply_versioning_policy})
        self.execute_with_blob_streaming(
            op_name, file_path, filename=filename, mime_type=mime_type, **params)

    def delete(self, ref, use_trash=True):
        """Trash (*use_trash*) or permanently delete the document *ref*."""
        op_input = "doc:" + self._check_ref(ref)
        if use_trash:
            try:
                return self.execute("Document.SetLifeCycle", op_input=op_input,
                                    value='delete')
            except urllib2.HTTPError as e:
                # Some servers answer 500 when the 'delete' transition is not
                # available; fall back to a hard delete in that case.
                if e.code == 500:
                    return self.execute("Document.Delete", op_input=op_input)
                raise
        else:
            return self.execute("Document.Delete", op_input=op_input)

    def undelete(self, ref):
        """Restore a trashed document by firing the 'undelete' transition."""
        op_input = "doc:" + self._check_ref(ref)
        return self.execute("Document.SetLifeCycle", op_input=op_input,
                            value='undelete')

    def delete_content(self, ref, xpath=None):
        """Remove the blob stored at *xpath* (default blob when None)."""
        return self.delete_blob(self._check_ref(ref), xpath=xpath)

    def exists(self, ref, use_trash=True, include_versions=False):
        # type: (unicode, bool, bool) -> bool
        """
        Check if a document exists on the server.

        :param ref: Document reference (UID).
        :param use_trash: Filter documents inside the trash.
        :param include_versions:
        :rtype: bool
        """
        ref = self._check_ref(ref)
        id_prop = 'ecm:path' if ref.startswith('/') else 'ecm:uuid'
        if use_trash:
            lifecycle_pred = "AND ecm:currentLifeCycleState != 'deleted'"
        else:
            lifecycle_pred = ""
        if include_versions:
            version_pred = ""
        else:
            version_pred = "AND ecm:isCheckedInVersion = 0"

        query = ("SELECT * FROM Document WHERE %s = '%s' %s %s"
                 " LIMIT 1") % (
            id_prop, ref, lifecycle_pred, version_pred)
        results = self.query(query)
        return len(results[u'entries']) == 1

    def check_writable(self, ref):
        # TODO: which operation can be used to perform a permission check?
        return True

    def _check_ref(self, ref):
        """Rebase a path ref on the configured base folder; ids pass through."""
        if ref.startswith('/') and self._base_folder_path is not None:
            # This is a path ref (else an id ref)
            if self._base_folder_path.endswith('/'):
                ref = self._base_folder_path + ref[1:]
            else:
                ref = self._base_folder_path + ref
        return ref

    def doc_to_info(self, doc, fetch_parent_uid=True, parent_uid=None):
        """Convert Automation document description to NuxeoDocumentInfo"""
        props = doc['properties']
        name = props['dc:title']
        filename = None
        folderish = 'Folderish' in doc['facets']
        try:
            last_update = datetime.strptime(doc['lastModified'],
                                            "%Y-%m-%dT%H:%M:%S.%fZ")
        except ValueError:
            # no millisecond?
            last_update = datetime.strptime(doc['lastModified'],
                                            "%Y-%m-%dT%H:%M:%SZ")
        last_contributor = props['dc:lastContributor']

        # TODO: support other main files
        has_blob = False
        if folderish:
            digest_algorithm = None
            digest = None
        else:
            blob = props.get('file:content')
            if blob is None:
                note = props.get('note:note')
                if note is None:
                    digest_algorithm = None
                    digest = None
                else:
                    import hashlib
                    # Notes have no server-side digest: compute an md5 of the
                    # note text so change detection still works.
                    m = hashlib.md5()
                    m.update(note.encode('utf-8'))
                    digest = m.hexdigest()
                    digest_algorithm = 'md5'
                    ext = '.txt'
                    mime_type = props.get('note:mime_type')
                    if mime_type == 'text/html':
                        ext = '.html'
                    elif mime_type == 'text/xml':
                        ext = '.xml'
                    elif mime_type == 'text/x-web-markdown':
                        ext = '.md'
                    if not name.endswith(ext):
                        filename = name + ext
                    else:
                        filename = name
            else:
                has_blob = True
                digest_algorithm = blob.get('digestAlgorithm')
                if digest_algorithm is not None:
                    digest_algorithm = digest_algorithm.lower().replace('-', '')
                digest = blob.get('digest')
                filename = blob.get('name')

        # Lock info
        lock_owner = doc.get('lockOwner')
        lock_created = doc.get('lockCreated')
        if lock_created is not None:
            lock_created = parser.parse(lock_created)

        # Permissions
        permissions = doc.get('contextParameters', {}).get('permissions', None)

        # XXX: we need another roundtrip just to fetch the parent uid...
        if parent_uid is None and fetch_parent_uid:
            parent_uid = self.fetch(os.path.dirname(doc['path']))['uid']

        if 'uid:major_version' in props and 'uid:minor_version' in props:
            version = str(props['uid:major_version']) + '.' + str(props['uid:minor_version'])
        else:
            version = None

        # Normalize using NFC to make the tests more intuitive
        if name is not None:
            name = unicodedata.normalize('NFC', name)
        return NuxeoDocumentInfo(
            self._base_folder_ref, name, doc['uid'], parent_uid,
            doc['path'], folderish, last_update, last_contributor,
            digest_algorithm, digest, self.repository, doc['type'],
            version, doc['state'], has_blob, filename,
            lock_owner, lock_created, permissions)

    def _filtered_results(self, entries, fetch_parent_uid=True,
                          parent_uid=None):
        # Filter out filenames that would be ignored by the file system client
        # so as to be consistent.
        filtered = []
        for info in [self.doc_to_info(d, fetch_parent_uid=fetch_parent_uid,
                                      parent_uid=parent_uid)
                     for d in entries]:
            name = info.name.lower()
            if (name.endswith(Options.ignored_suffixes)
                    or name.startswith(Options.ignored_prefixes)):
                continue
            filtered.append(info)
        return filtered

    #
    # Generic Automation features reused from nuxeolib
    #

    # Document category
    def create(self, ref, doc_type, name=None, properties=None):
        """Create a new document of *doc_type* under *ref*."""
        name = safe_filename(name)
        return self.execute("Document.Create", op_input="doc:" + ref,
                            type=doc_type, name=name, properties=properties)

    def update(self, ref, properties=None):
        """Update document *ref* with the given property dict."""
        return self.execute("Document.Update", op_input="doc:" + ref,
                            properties=properties)

    def set_property(self, ref, xpath, value):
        """Set a single property (*xpath*) on document *ref*."""
        return self.execute("Document.SetProperty", op_input="doc:" + ref,
                            xpath=xpath, value=value)

    def get_children(self, ref):
        return self.execute("Document.GetChildren", op_input="doc:" + ref)

    def get_parent(self, ref):
        return self.execute("Document.GetParent", op_input="doc:" + ref)

    def is_locked(self, ref):
        """Return True when document *ref* carries lock metadata."""
        data = self.fetch(ref, extra_headers={'fetch-document': 'lock'})
        return 'lockCreated' in data

    def lock(self, ref):
        return self.execute("Document.Lock", op_input="doc:" + self._check_ref(ref))

    def unlock(self, ref):
        return self.execute("Document.Unlock", op_input="doc:" + self._check_ref(ref))

    def create_user(self, user_name, **kwargs):
        return self.execute('User.CreateOrUpdate', username=user_name, **kwargs)

    def move(self, ref, target, name=None):
        return self.execute("Document.Move",
                            op_input="doc:" + self._check_ref(ref),
                            target=self._check_ref(target), name=name)

    def copy(self, ref, target, name=None):
        return self.execute("Document.Copy",
                            op_input="doc:" + self._check_ref(ref),
                            target=self._check_ref(target), name=name)

    def create_version(self, ref, increment='None'):
        """Snapshot *ref* as a new version; returns the version's uid."""
        doc = self.execute("Document.CreateVersion",
                           op_input="doc:" + self._check_ref(ref),
                           increment=increment)
        return doc['uid']

    def get_versions(self, ref):
        """Return [(uid, versionLabel)] for every version of *ref*."""
        extra_headers = {'X-NXfetch.document': 'versionLabel'}
        versions = self.execute(
            'Document.GetVersions',
            op_input='doc:' + self._check_ref(ref),
            extra_headers=extra_headers)
        return [(v['uid'], v['versionLabel']) for v in versions['entries']]

    def restore_version(self, version):
        """Restore the given version; returns the live document uid."""
        doc = self.execute("Document.RestoreVersion",
                           op_input="doc:" + self._check_ref(version))
        return doc['uid']

    def block_inheritance(self, ref, overwrite=True):
        """Grant Administrator everything, then deny Everyone, on *ref*."""
        op_input = "doc:" + self._check_ref(ref)
        self.execute("Document.SetACE",
                     op_input=op_input,
                     user="Administrator",
                     permission="Everything",
                     overwrite=overwrite)
        self.execute("Document.SetACE",
                     op_input=op_input,
                     user="Everyone",
                     permission="Everything",
                     grant="false",
                     overwrite=False)

    # These ones are special: no 'op_input' parameter
    def fetch(self, ref, **kwargs):
        try:
            return self.execute("Document.Fetch", value=ref, **kwargs)
        except urllib2.HTTPError as e:
            if e.code == 404:
                raise NotFound("Failed to fetch document %r on server %r" % (
                    ref, self.server_url))
            # Re-raise preserving the original traceback (was `raise e`).
            raise

    def query(self, query, language=None):
        return self.execute("Document.Query", query=query, language=language)

    # Blob category
    def get_blob(self, ref, file_out=None):
        """Download the main blob of *ref* (doc info or ref string).

        Note documents have no blob: their text is returned (and optionally
        written to *file_out* as UTF-8) instead.
        """
        if isinstance(ref, NuxeoDocumentInfo):
            doc_id = ref.uid
            if not ref.has_blob and ref.doc_type == "Note":
                doc = self.fetch(doc_id)
                content = doc['properties'].get('note:note')
                if file_out is not None and content is not None:
                    with open(file_out, 'wb') as f:
                        f.write(content.encode('utf-8'))
                return content
        else:
            doc_id = ref
        return self.execute("Blob.Get", op_input="doc:" + doc_id,
                            timeout=self.blob_timeout, file_out=file_out)

    def attach_blob(self, ref, blob, filename):
        """Attach *blob* (bytes) to document *ref* via a temporary file."""
        file_path = self.make_tmp_file(blob)
        try:
            return self.execute_with_blob_streaming(
                'Blob.Attach', file_path, filename=filename, document=ref)
        finally:
            os.remove(file_path)

    def delete_blob(self, ref, xpath=None):
        return self.execute("Blob.Remove", op_input="doc:" + ref, xpath=xpath)

    def log_on_server(self, message, level='WARN'):
        """ Log the current test server side. Helpful for debugging. """
        return self.execute('Log', message=message, level=level.lower())

    #
    # Nuxeo Drive specific operations
    #

    def get_roots(self):
        entries = self.execute('NuxeoDrive.GetRoots')['entries']
        return self._filtered_results(entries, fetch_parent_uid=False)

    def get_update_info(self):
        return self.execute('NuxeoDrive.GetClientUpdateInfo')

    def register_as_root(self, ref):
        self.execute(
            'NuxeoDrive.SetSynchronization',
            op_input='doc:' + self._check_ref(ref),
            enable=True)
        return True

    def unregister_as_root(self, ref):
        self.execute(
            'NuxeoDrive.SetSynchronization',
            op_input='doc:' + self._check_ref(ref),
            enable=False)
        return True
|
On the first day it rained and we arrived at the alpine hut all wet.
On the second day we got caught in a lightning storm, just five minutes away from the next hut. We hid under some rocks for some time, not wanting to take any risks of getting struck by lightning. There a French guy that had been running on the mountain that day sat next to us, shivering from the cold rain and loud thunder. After 20 minutes we all left—the French runner ran straight down the valley, and we pushed on to the hut, soaking wet for the second time in two days.
The next eight days our luck changed along with the weather. It was all sun and beautiful weather from there on out.
"I would not trust any photo on the internet. You have to see that beauty with your own eyes."
The Tour du Mont Blanc trail has been in the back of my mind for a very long time, though it never materialized until this year, when together with my girlfriend and another friend of ours we decided to commit. It took a lot of planning, phone calls to each of the huts, and plenty of map studying.
But no amount of research could ever prepare us for the terrain we saw and things we did on this trail.
We never thought that on this trail there would be so many nice people, from Canada all the way to New Zealand. People that love mountains and nature more than anything, people that will join you on the trail and share their food with you.
And the food...well, I thought I would lose some weight from 10 days on the mountains. But that was not the case. The food is extraordinary in every hut on the trail.
And the landscape...there is nothing to be said. I would not trust any photo on the internet, even my own. You have to go out there and see that beauty with your own eyes.
|
'''
Copyright (c) 2008 Georgios Giannoudovardis, <vardis.g@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
import logging
from pano.actions import builtinActions
class GameActions:
    """Registry and dispatcher for named game actions.

    Actions are objects exposing ``getName()`` and
    ``execute(game, params)``; the built-in actions are registered at
    construction time.
    """

    def __init__(self, game):
        self.log = logging.getLogger('pano.actions')
        self.game = game
        # Maps action name -> action object.
        self.actions = { }
        builtinActions.registerBultins(self)

    def getAction(self, name):
        """Return the action registered as *name* (raises KeyError if absent)."""
        return self.actions[name]

    def registerAction(self, action):
        """Register *action* under its own name, replacing any previous one."""
        self.actions[action.getName()] = action

    def unregisterAction(self, name):
        """Remove the action registered as *name* (raises KeyError if absent)."""
        del self.actions[name]

    def execute(self, name, *params):
        """Execute the action *name* with *params*; errors are logged, not raised."""
        # Lazy %-style args: the message is only formatted if DEBUG is enabled.
        self.log.debug('executing action %s', name)
        try:
            act = self.actions[name]
            if act is not None:
                act.execute(self.game, params)
        # Bug fix: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt. Unknown action names (KeyError) and action
        # failures are still logged and suppressed, as before.
        except Exception:
            self.log.exception('unexpected error')

    def isAction(self, name):
        """
        Returns True if the given name corresponds to a known and registered action.
        """
        # `in` replaces the Python-2-only dict.has_key().
        return name in self.actions

    def builtinNames(self):
        return builtinActions.BuiltinActionsNames
|
I was wondering if you could help, I've downloaded and used these g4tw sims downloads many times but I'm completely stumped! I've followed the how to install guide but I've just got a new laptop so now want to put sims 4 on it, I've added all the suggested files to my anti-virus software but I'm still getting the dynamic library rld.dll problem.
Before I remember using the ultimate fix but I see that's now obsolete so I don't know what else to try, I hope that someone can help me.
Also my new laptop is windows 10 and my other one was windows 7 if that makes any difference?
Thank you for any help, it's very much appreciated!
What's the error message exactly? And what have you installed and downloaded so far? "rld.dll" was present only in the first 4 G4TW releases (base game, Get to Work, Get Together and City Living).
I downloaded the games from the original website — are they old links? I'm not at home right now, but as soon as I am, I'll post the error message.
Should I try installing the rest of the expansion packs to see if it overrides it or is that not going to help?
IMPORTANT NOTE: we have abandoned the "games4theworld.org" website; the current G4TW team does NOT have any access to either that old website or its associated domain anymore. You can treat the old website, however, as a sort of archive. Please follow the Forum here for more up-to-date information on our work. Thank you!
The links are just not updated anymore. But they should link to the same torrents. There's "All Games4theworld downloads" button at the top of the forum, you should download from there.
@Jo-Jo wrote: I downloaded the games from the original website are they old links?
These torrents are all the same; the website just hasn't been updated in ages.
Thank you very much for your help, I followed your advice and installed all the expansion packs and it's now working correctly!
Everyone has always been so helpful here and quick to respond so thanks again!
|
#!/usr/bin/env python3
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
from peacock.Input.InputFile import InputFile
from peacock.utils import Testing
from peacock import PeacockException
from PyQt5 import QtWidgets
from pyhit import hit
class Tests(Testing.PeacockTester):
    """Tests for opening and parsing input files via peacock's InputFile."""
    qapp = QtWidgets.QApplication([])

    def setUp(self):
        super(Tests, self).setUp()
        self.tmp_file = "tmp_input.i"
        self.tmp_bad_file = "tmp_input"
        self.basic_input = "[foo]\n[./bar]\ntype = bar\nother = 'bar'[../]\n[./foobar]\nactive = 'bar'\n[../]\n[]\n"

    def tearDown(self):
        for leftover in (self.tmp_file, self.tmp_bad_file):
            Testing.remove_file(leftover)

    def writeFile(self, s, fname):
        """Write string *s* to the file *fname*."""
        with open(fname, "w") as out:
            out.write(s)

    def testFailures(self):
        input_file = InputFile()
        # Nonexistent path and a directory both raise.
        for bad_path in ("/no_exist", "/"):
            with self.assertRaises(PeacockException.PeacockException):
                input_file.openInputFile(bad_path)
        with self.assertRaises(PeacockException.PeacockException):
            self.writeFile(self.basic_input, self.tmp_bad_file)
            input_file.openInputFile(self.tmp_bad_file)
        with self.assertRaises(PeacockException.PeacockException):
            # simulate a duplicate section in the input file
            # which should throw an exception
            self.writeFile(self.basic_input * 2, self.tmp_file)
            input_file.openInputFile(self.tmp_file)

    def testSuccess(self):
        self.writeFile(self.basic_input, self.tmp_file)
        input_file = InputFile(self.tmp_file)
        self.assertNotEqual(input_file.root_node, None)
        self.assertEqual(input_file.changed, False)

        sections = list(input_file.root_node.children(node_type=hit.NodeType.Section))
        self.assertEqual(len(sections), 1)
        top = sections[0]
        self.assertEqual(top.path(), "foo")

        subsections = list(top.children(node_type=hit.NodeType.Section))
        self.assertEqual(len(subsections), 2)

        bar = subsections[0]
        self.assertEqual(bar.path(), "bar")
        bar_fields = list(bar.children(node_type=hit.NodeType.Field))
        self.assertEqual(len(bar_fields), 2)

        foobar = subsections[1]
        self.assertEqual(foobar.path(), "foobar")
        foobar_fields = list(foobar.children(node_type=hit.NodeType.Field))
        self.assertEqual(len(foobar_fields), 1)
# Allow running this test module directly (outside the full test harness).
if __name__ == '__main__':
    Testing.run_tests()
|
A court issued an unprecedented order Wednesday for a nuclear reactor in western Japan to stop operating and ordered a second one to stay offline.
The Otsu District Court that issued the injunction said the emergency response plans and equipment designs at the two reactors have not been sufficiently upgraded after the 2011 Fukushima nuclear disaster.
The order requires Kansai Electric Power Co. to shut down the No. 3 reactor and keep the No. 4 offline at the Takahama plant in Fukui prefecture, home to about a dozen reactors.
The two reactors restarted this year after a high court in December reversed an earlier injunction by another court. The No. 3 reactor, which uses a riskier plutonium-based MOX fuel, resumed operation in late January, while the No. 4 reactor had to be shut down late last month after operating just three days because of a series of technical problems.
Kansai Electric said it will abide by the decision and start the shutdown procedures for No. 3 reactor Thursday morning. The utility, meanwhile, said the decision was “disappointing” and planned to appeal.
The decision reflects Japan’s divisive views on nuclear safety and leaves only two of the country’s 43 reactors in operation.
Judge Yoshihiko Yamamoto said the operator has not fully explained how exactly it has upgraded safety features at the two Takahama reactors under the post-Fukushima safety standards. The utility has not fully explained its design philosophy, its measures to mitigate power loss and how to carry out evacuation plans in case of a severe accident and a massive tsunami, he said in the ruling.
The decision also shakes the credibility of the stricter safety requirements made after Fukushima. Wednesday’s ruling supported concerns by residents and experts that the stricter standards still do not require utilities to have adequate evacuation plans before applying to restart reactors.
Nuclear Regulation Authority Chairman Shunichi Tanaka declined to comment on the ruling, but defended the new requirements, which incorporated lessons learned from the Fukushima disaster.
Chief Cabinet Secretary Yoshihide Suga told reporters that the government stands by the regulators’ standards and that plans to restart Takahama and other reactors deemed safe are unchanged.
Prime Minister Shinzo Abe’s government wants to restart as many reactors as possible. It says nuclear energy should remain a key power source for Japan, which has few natural resources to fuel its economy.
Lawyers for the plaintiffs welcomed the ruling as “fair, calm and wise,” raising questions over the utility’s safety culture and the regulators’ safety standards.
|
from calendar import monthrange
import datetime
from django.core.exceptions import ObjectDoesNotExist
from survey.models import Question, HouseholdHead, UnknownDOBAttribute
from survey.models.households import HouseholdMember
from survey.ussd.ussd import USSD
class USSDRegisterHousehold(USSD):
    # Menu choices offered when deciding whether the person being
    # registered is the household head or an ordinary member.
    HEAD_ANSWER = {
        'HEAD': '1',
        'MEMBER': '2'
    }
    # Accumulates question-text -> answer pairs for the member currently
    # being registered; mirrored into the investigator cache between
    # stateless USSD requests.
    REGISTRATION_DICT = {}
    # Sentinel answer value meaning "unknown" (e.g. unknown age/DOB).
    UNKNOWN = 99
    def __init__(self, investigator, request):
        """Initialise registration state, then restore any in-progress
        session data (current question, answer form, selected member and
        head/member flags) from the session and investigator caches."""
        super(USSDRegisterHousehold, self).__init__(investigator, request)
        self.question = None
        self.household_member = None
        self.is_head = None
        self.is_selecting_member = False
        self.set_question()
        self.set_form_in_cache()
        self.set_household_member()
        self.set_head_in_cache()
        self.set_is_selecting_member()
def set_question(self):
try:
question = self.get_from_session('QUESTION')
if question:
self.question = question
except KeyError:
pass
    def set_form_in_cache(self):
        """Sync REGISTRATION_DICT with the investigator cache: seed the
        cache on first use, otherwise reload previously saved answers."""
        try:
            if not self.investigator.get_from_cache('registration_dict'):
                self.investigator.set_in_cache('registration_dict', self.REGISTRATION_DICT)
            else:
                # NOTE: rebinds the name on the instance, shadowing the
                # class-level REGISTRATION_DICT.
                self.REGISTRATION_DICT = self.investigator.get_from_cache('registration_dict')
        except KeyError:
            pass
def set_head_in_cache(self):
try:
is_head = self.investigator.get_from_cache('is_head')
if is_head is not None:
self.is_head = is_head
except KeyError:
pass
    def set_is_selecting_member(self):
        """Restore the 'currently choosing head vs member' flag from the
        cache, initialising it to False when the cache has no entry."""
        try:
            is_selecting_member = self.investigator.get_from_cache('is_selecting_member')
            if is_selecting_member is not None:
                self.is_selecting_member = is_selecting_member
        except KeyError:
            self.investigator.set_in_cache('is_selecting_member', False)
    def set_head(self, answer):
        """Record whether the investigator chose to register the household
        head ('1') or an ordinary member, then leave selection mode."""
        if self.is_head is None or not self.is_head:
            if answer == self.HEAD_ANSWER['HEAD']:
                self.investigator.set_in_cache('is_head', True)
            else:
                self.investigator.set_in_cache('is_head', False)
        # Sync the instance flag from the cache and exit member selection.
        self.is_head = self.investigator.get_from_cache('is_head')
        self.investigator.set_in_cache('is_selecting_member', False)
    def start(self, answer):
        """Entry point for one USSD request during household registration.

        Processes *answer*, persists the current question for the next
        request and returns the (action, response) pair for the gateway."""
        self.register_households(answer)
        self.set_in_session('QUESTION', self.question)
        return self.action, self.responseString
def render_questions_based_on_head_selection(self, answer):
if self.household.get_head():
self.render_questions_or_member_selection(answer)
else:
self.render_select_member_or_head()
def validate_house_selection(self):
if self.is_invalid_response():
self.get_household_list()
else:
self.investigator.set_in_cache('HOUSEHOLD', self.household)
    def register_households(self, answer):
        """Top-level state machine for household/member registration.

        Three states: browsing the household list, a household already
        selected (render questions / head-member choice), or selecting a
        household (fresh selection or resuming a previous survey)."""
        if not self.household and self.is_browsing_households_list(answer):
            self.get_household_list()
        elif self.household:
            if self.is_selecting_member:
                self.set_head(answer)
            response = self.render_registration_options(answer)
            if not response is None:
                self.responseString += response
        else:
            if not self.is_resuming_survey:
                self.select_household(answer)
                self.validate_house_selection()
            else:
                # Resuming: reuse the household cached by a previous session.
                self.household = self.investigator.get_from_cache('HOUSEHOLD')
            if self.household:
                self.render_questions_based_on_head_selection(answer)
    def render_select_member_or_head(self):
        """Enter selection mode and prompt: register head or member?"""
        self.investigator.set_in_cache('is_selecting_member', True)
        self.responseString = self.MESSAGES['SELECT_HEAD_OR_MEMBER'] % str(self.household.random_sample_number)
    def render_questions_or_member_selection(self, answer):
        """If the household already has a head, continue with ordinary
        member registration; otherwise ask head-or-member first."""
        if self.household.get_head():
            self.investigator.set_in_cache('is_head', False)
            self.responseString = USSD.MESSAGES['HEAD_REGISTERED']
            self.responseString += self.render_questions(answer)
        else:
            self.render_select_member_or_head()
    def render_questions(self, answer):
        """Return the USSD text for the current registration question,
        advancing to the next question when *answer* applies."""
        all_questions = Question.objects.filter(group__name="REGISTRATION GROUP").order_by('order')
        if not self.question:
            # First question of the registration flow: reset invalid-answer
            # tracking and start from the lowest-ordered question.
            self.investigator.set_in_cache('INVALID_ANSWER', [])
            self.question = all_questions[0]
        else:
            self.question = self.process_registration_answer(answer)
        page = self.get_from_session('PAGE')
        self.add_question_prefix()
        return self.question.to_ussd(page) if self.question else None
    def render_registration_options(self, answer):
        """After a member has been registered, handle the 'register
        another?' prompt; otherwise delegate to question rendering."""
        if self.household_member:
            if answer == self.ANSWER['YES']:
                # Register another member of the same household.
                self.household = self.investigator.get_from_cache('HOUSEHOLD')
                self.render_questions_or_member_selection(answer)
            if answer == self.ANSWER['NO']:
                self.investigator.clear_interview_caches()
                self.set_in_session('HOUSEHOLD', None)
                self.responseString = self.render_menu()
            # Forget the just-registered member either way.
            self.set_in_session('HOUSEHOLD_MEMBER', None)
        else:
            return self.render_questions(answer)
    def process_registration_answer(self, answer):
        """Validate the incoming answer and return the question to ask next.

        Re-asks the current question on a blank answer or a pagination
        command, re-asks the age question when the year of birth contradicts
        the earlier age answer, and otherwise advances the flow.
        """
        answer = int(answer) if answer.isdigit() else answer
        if not answer and answer != 0:
            # Blank reply (0 is a legitimate numeric answer): flag and re-ask.
            self.investigator.invalid_answer(self.question)
            return self.question
        if self.question.is_multichoice() and self.is_pagination_option(answer):
            # The reply was a page-scroll command, not an answer.
            self.set_current_page(answer)
            self.investigator.remove_ussd_variable('INVALID_ANSWER', self.question)
            return self.question
        age_question = Question.objects.get(text__startswith="Please Enter the age")
        if self.is_year_question_answered() and not self.age_validates(answer):
            # Year of birth disagrees with the age reported earlier.
            self.investigator.invalid_answer(age_question)
            return age_question
        return self.get_next_question(answer)
def get_next_question(self, answer):
try:
next_question = self.next_question_by_rule(answer)
except ObjectDoesNotExist, e:
self.save_in_registration_dict(answer)
next_question = self.next_question_by_order()
self.save_in_registration_dict(answer)
return next_question
def next_question_by_rule(self, answer):
answer_class = self.question.answer_class()
if self.question.is_multichoice():
answer = self.question.get_option(answer, self.investigator)
if not answer:
return self.question
_answer = answer_class(answer=answer)
next_question = self.question.get_next_question_by_rule(_answer, self.investigator)
if next_question != self.question:
next_question.order = self.question.order
return next_question
def next_question_by_order(self):
next_questions = Question.objects.filter(group__name="REGISTRATION GROUP",
order__gte=self.question.order + 1).order_by('order')
if not next_questions:
self.save_member_and_clear_cache()
return None
return next_questions[0]
def save_in_registration_dict(self, answer):
self.REGISTRATION_DICT[self.question.text] = answer
self.investigator.set_in_cache('registration_dict', self.REGISTRATION_DICT)
    def save_member_and_clear_cache(self):
        """Persist the newly registered member, reset the per-interview
        caches (keeping the registration flag), re-cache the household and
        queue the end-of-registration message.
        """
        self.save_member_object()
        self.investigator.clear_all_cache_fields_except('IS_REGISTERING_HOUSEHOLD')
        self.investigator.set_in_cache('HOUSEHOLD', self.household)
        self.responseString = USSD.MESSAGES['END_REGISTRATION']
def process_member_attributes(self):
member_dict = {}
name_question = Question.objects.get(text__startswith="Please Enter the name")
age_question = Question.objects.get(text__startswith="Please Enter the age")
gender_question = Question.objects.get(text__startswith="Please Enter the gender")
month_of_birth_question = Question.objects.get(text__startswith="Please Enter the month of birth")
month_of_birth = self.REGISTRATION_DICT[month_of_birth_question.text]
member_dict['surname'] = self.REGISTRATION_DICT[name_question.text]
member_dict['male'] = self.format_gender_response(gender_question)
member_dict['date_of_birth'] = self.format_age_to_date_of_birth(age_question, month_of_birth)
year_of_birth_question = Question.objects.get(text__startswith="Please Enter the year of birth")
year_of_birth = self.REGISTRATION_DICT[year_of_birth_question.text]
attributes = {'MONTH': month_of_birth,
'YEAR': year_of_birth}
return member_dict, attributes
    def save_member_object(self):
        """Build the member fields from collected answers, create the row,
        record unknown-DOB markers and stash the member in the session.
        """
        member_dict, unknown_attributes = self.process_member_attributes()
        member = self.save_member(member_dict)
        self.save_unknown_dob_attributes(member, unknown_attributes)
        self.set_in_session('HOUSEHOLD_MEMBER', member)
def save_unknown_dob_attributes(self, member, unknown_attributes):
for type_, attribute in unknown_attributes.items():
self.save_attribute(type_, attribute, member)
def save_attribute(self, type_, attribute, member):
if attribute == self.UNKNOWN:
UnknownDOBAttribute.objects.create(household_member=member, type=type_)
def save_member(self, member_dict):
object_to_create = HouseholdHead if self.is_head else HouseholdMember
return object_to_create.objects.create(surname=member_dict['surname'], male=member_dict['male'],
date_of_birth=str(member_dict['date_of_birth']), household=self.household)
    def format_age_to_date_of_birth(self, age_question, month_of_birth):
        """Infer a date of birth from the reported age.

        Starts from today's date shifted back by the given number of years;
        when a month of birth was supplied, the month is applied and the day
        clamped via monthrange so e.g. the 31st does not map into a shorter
        month.
        NOTE(review): today.replace(year=...) raises ValueError when today
        is 29 February and the target year is not a leap year -- confirm
        whether that edge case matters here.
        """
        age = self.REGISTRATION_DICT[age_question.text]
        today = datetime.date.today()
        date_of_birth = today.replace(year=(today.year - int(age)))
        if month_of_birth != self.UNKNOWN:
            year = date_of_birth.year
            month = int(month_of_birth)
            # Clamp the day to the last valid day of the birth month.
            day = min(today.day, monthrange(year, month)[1])
            date_of_birth = datetime.date(year=year, month=month, day=day)
        return date_of_birth
def format_gender_response(self, question):
return self.REGISTRATION_DICT[question.text] == 1
def is_year_question_answered(self):
return "year of birth" in self.question.text
def age_validates(self, answer):
if answer != self.UNKNOWN:
age_question = Question.objects.get(text__startswith="Please Enter the age")
given_age = self.REGISTRATION_DICT[age_question.text]
inferred_year_of_birth = datetime.date.today().year - int(given_age)
return inferred_year_of_birth == int(answer)
return True
|
What is an “At-Home Dad”?
For the purpose of its mission, the National At-Home Dad Network defines an “at-home dad” as any father who is the regular primary caregiver of his children.
In general, he cooks, cleans and cares for his children most days of the week, while his partner works outside the home as the family’s main breadwinner. Because many at-home dads also provide some income to the family, whether by working an evening or weekend shift full-time, working part-time inside or outside the home, or doing odd jobs when it works into the family’s schedule, we believe that a man’s position as an “at-home dad” is best defined by his role as a caregiver, rather than by his employment or income status. We also find that most at-home dads are in the role by choice (over 70% according to this 2012 study), and not due to job loss or an inability to find employment.
While these men are doing what used to be almost exclusively done by moms, they are not “Mr. Mom.” They are at-home dads.
The majority of at-home dads CHOOSE to be home (over 70% according to a study by Boston College Center for Work & Family in 2012). The typical dad chooses to be home because his spouse makes more income, has better benefits and has better long-term career opportunities and he and his spouse value having a parent home to care for their children.
Isolation – Being a stay-at-home dad can be an isolating experience which can lead to depression. Every at-home parent deals with isolation but this is magnified for at-home dads because our society is still unfamiliar with men taking this role. On the playground, in the grocery store, or at pre-school, at-home dads often feel ignored or sometimes, feared, by the people they encounter. This makes it difficult for at-home dads to feel comfortable in their role or make many adult friends. While the number of men choosing to stay home with their children has more than doubled in the last 10 years, at-home dads still have many difficulties finding and connecting with other at-home dads.
Identity – At-home dads struggle with their identity as men. Society still believes that childcare and household chores are “a woman’s work” so at-home dads often can feel unsure of their manliness. This is further complicated by friends, family and even their own spouses not supporting their decision to be at-home dads. For some at-home dads, this identity struggle is too great and the family suffers from a man who becomes depressed and frustrated about his perceived failings to “be a man” or he returns to work. Fortunately, most at-home dads come to enjoy their unique role and get comfortable changing diapers and folding laundry instead of “bringing home the bacon.” These at-home dads are redefining the very definition of masculinity.
In order to improve one’s life as an at-home dad, and that of his family, he must be brought out of isolation and re-form his identity to become confident in this boundary-breaking role. One of the best ways, according to recent research by Dr. Aaron Rochlen of the University of Texas-Austin, is to connect with other at-home dads. However, finding other at-home dads nearby can be a great challenge. The National At-Home Dad Network’s mission is to bridge this gap by offering a resource for at-home dads to communicate and connect with other at-home dads, start or find a local at-home dad group and gather for the annual HomeDadCon.
At-home dads have a lot in common with at-home moms, but they are not moms. Men have different interests and styles of communication. Men parent differently. There is something that men gain from fellowship with other men that would be difficult to find in a mom’s group. In previous roles, an at-home dad found male friends through work or school. But at-home dads are typically much more isolated as noted above. The National At-Home Dad Network, our local at-home dad groups, annual convention and discussion forums offer a place for them to connect with new friends who are in the same situation.
|
"""
Django settings for api project.
Generated by 'django-admin startproject' using Django 1.9.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to the repository; rotate it and load
# it from the environment before deploying to production.
SECRET_KEY = '=pe$t0a2dgf%ghj(b$suu2=4vi0x^uq6=l82qn1fx=fe52uym5'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Empty is acceptable while DEBUG is True; production must list the hosts
# this site serves.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework',
]
# MIDDLEWARE_CLASSES is the pre-Django-1.10 name of this setting (the
# project targets Django 1.9, per the generated header above).
MIDDLEWARE_CLASSES = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'api.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'api.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
# SQLite file database -- fine for development, not for production traffic.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
|
As an employer, you have a responsibility to look after your employees’ health and well-being. You also have a requirement to train your staff in first aid.
Scubaducks can provide all the training you need to meet the current regulations.
Our professional staff can deliver React Right and CPR training that is also suitable for training an HSE First Aid appointed person at work.
First Aid and CPR are valuable skills that everyone should have. Who knows when you may be first on the scene when someone needs help quickly? React Right is an innovative CPR and first aid programme. Primary Care (CPR) teaches participants how to deal with emergencies that are immediately life threatening. Your staff will learn how to give Primary Care through a combination of knowledge development, skill development and realistic scenario practice. Secondary Care (first aid) trains them to help those in need when Emergency Medical Services are either delayed or unavailable.
We will provide all equipment necessary. We can provide this training either at our dive centre, located conveniently in Aylesbury, or on-site at your own facility.
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates C++ source files from a mojom.Module."""
import mojom.generate.generator as generator
import mojom.generate.module as mojom
import mojom.generate.pack as pack
from mojom.generate.template_expander import UseJinja
# Maps mojom value/handle kinds to the C++ type spelled in generated code.
_kind_to_cpp_type = {
  mojom.BOOL: "bool",
  mojom.INT8: "int8_t",
  mojom.UINT8: "uint8_t",
  mojom.INT16: "int16_t",
  mojom.UINT16: "uint16_t",
  mojom.INT32: "int32_t",
  mojom.UINT32: "uint32_t",
  mojom.FLOAT: "float",
  mojom.HANDLE: "mojo::Handle",
  mojom.DCPIPE: "mojo::DataPipeConsumerHandle",
  mojom.DPPIPE: "mojo::DataPipeProducerHandle",
  mojom.MSGPIPE: "mojo::MessagePipeHandle",
  mojom.SHAREDBUFFER: "mojo::SharedBufferHandle",
  # Nullable handle kinds map to the same C++ handle types as their
  # non-nullable counterparts; nullability is handled separately.
  mojom.NULLABLE_HANDLE: "mojo::Handle",
  mojom.NULLABLE_DCPIPE: "mojo::DataPipeConsumerHandle",
  mojom.NULLABLE_DPPIPE: "mojo::DataPipeProducerHandle",
  mojom.NULLABLE_MSGPIPE: "mojo::MessagePipeHandle",
  mojom.NULLABLE_SHAREDBUFFER: "mojo::SharedBufferHandle",
  mojom.INT64: "int64_t",
  mojom.UINT64: "uint64_t",
  mojom.DOUBLE: "double",
}
# Suffixes appended to numeric literals of these kinds so generated C++
# constants have the intended type (unsigned / float / unsigned long long).
_kind_to_cpp_literal_suffix = {
  mojom.UINT8: "U",
  mojom.UINT16: "U",
  mojom.UINT32: "U",
  mojom.FLOAT: "f",
  mojom.UINT64: "ULL",
}
def ConstantValue(constant):
  """Returns the C++ expression text for a mojom constant's value."""
  return ExpressionToText(constant.value, kind=constant.kind)
def DefaultValue(field):
  """Returns the C++ default-value expression for a struct field, or ""
  when the field declares no explicit default."""
  if not field.default:
    return ""
  if mojom.IsStructKind(field.kind):
    # Struct fields may only default to "default", i.e. a fresh instance.
    assert field.default == "default"
    return "%s::New()" % GetNameForKind(field.kind)
  return ExpressionToText(field.default, kind=field.kind)
def NamespaceToArray(namespace):
  """Splits a dotted mojom namespace into its components ([] when empty)."""
  if not namespace:
    return []
  return namespace.split('.')
def GetNameForKind(kind, internal=False):
  """Returns the fully qualified C++ name for a mojom kind, optionally in
  the internal (wire-format) namespace."""
  parts = []
  if kind.imported_from:
    parts += NamespaceToArray(kind.imported_from["namespace"])
  if internal:
    parts += ["internal"]
  if kind.parent_kind:
    parts += [kind.parent_kind.name]
  parts += [kind.name]
  return "::".join(parts)
def GetCppType(kind):
  """Returns the C++ wire-format type used for |kind| inside generated
  internal *_Data structs."""
  if mojom.IsStructKind(kind):
    return "%s_Data*" % GetNameForKind(kind, internal=True)
  if mojom.IsAnyArrayKind(kind):
    # Arrays are encoded as pointers to Array_Data of the element type.
    return "mojo::internal::Array_Data<%s>*" % GetCppType(kind.kind)
  if mojom.IsInterfaceKind(kind) or mojom.IsInterfaceRequestKind(kind):
    # Interfaces travel as raw message pipe handles on the wire.
    return "mojo::MessagePipeHandle"
  if mojom.IsEnumKind(kind):
    return "int32_t"
  if mojom.IsStringKind(kind):
    return "mojo::internal::String_Data*"
  return _kind_to_cpp_type[kind]
def GetCppPodType(kind):
  """Returns the plain-old-data C++ type for |kind|; strings map to char*."""
  return "char*" if mojom.IsStringKind(kind) else _kind_to_cpp_type[kind]
def GetCppArrayArgWrapperType(kind):
  """Returns the C++ wrapper type used for |kind| as an array element."""
  if mojom.IsEnumKind(kind):
    return GetNameForKind(kind)
  if mojom.IsStructKind(kind):
    return "%sPtr" % GetNameForKind(kind)
  if mojom.IsAnyArrayKind(kind):
    # The trailing space makes nested templates render as "> >" rather
    # than the (pre-C++11) ill-formed ">>".
    return "mojo::Array<%s> " % GetCppArrayArgWrapperType(kind.kind)
  if mojom.IsInterfaceKind(kind):
    raise Exception("Arrays of interfaces not yet supported!")
  if mojom.IsInterfaceRequestKind(kind):
    raise Exception("Arrays of interface requests not yet supported!")
  if mojom.IsStringKind(kind):
    return "mojo::String"
  if mojom.IsHandleKind(kind):
    return "mojo::ScopedHandle"
  if mojom.IsDataPipeConsumerKind(kind):
    return "mojo::ScopedDataPipeConsumerHandle"
  if mojom.IsDataPipeProducerKind(kind):
    return "mojo::ScopedDataPipeProducerHandle"
  if mojom.IsMessagePipeKind(kind):
    return "mojo::ScopedMessagePipeHandle"
  if mojom.IsSharedBufferKind(kind):
    return "mojo::ScopedSharedBufferHandle"
  return _kind_to_cpp_type[kind]
def GetCppResultWrapperType(kind):
  """Returns the C++ wrapper type used for |kind| in client-facing result
  (callback parameter) positions."""
  dispatch = (
      (mojom.IsEnumKind, lambda: GetNameForKind(kind)),
      (mojom.IsStructKind, lambda: "%sPtr" % GetNameForKind(kind)),
      (mojom.IsAnyArrayKind,
       lambda: "mojo::Array<%s>" % GetCppArrayArgWrapperType(kind.kind)),
      (mojom.IsInterfaceKind, lambda: "%sPtr" % GetNameForKind(kind)),
      (mojom.IsInterfaceRequestKind,
       lambda: "mojo::InterfaceRequest<%s>" % GetNameForKind(kind.kind)),
      (mojom.IsStringKind, lambda: "mojo::String"),
      (mojom.IsHandleKind, lambda: "mojo::ScopedHandle"),
      (mojom.IsDataPipeConsumerKind,
       lambda: "mojo::ScopedDataPipeConsumerHandle"),
      (mojom.IsDataPipeProducerKind,
       lambda: "mojo::ScopedDataPipeProducerHandle"),
      (mojom.IsMessagePipeKind, lambda: "mojo::ScopedMessagePipeHandle"),
      (mojom.IsSharedBufferKind, lambda: "mojo::ScopedSharedBufferHandle"),
  )
  # First matching predicate wins; the order mirrors the other type-mapping
  # helpers in this file.
  for matches, cpp_name in dispatch:
    if matches(kind):
      return cpp_name()
  return _kind_to_cpp_type[kind]
def GetCppWrapperType(kind):
  """Returns the C++ wrapper type used for |kind| as a struct field or
  local variable.  Same mapping as GetCppResultWrapperType except that
  InterfaceRequest fields are rejected."""
  if mojom.IsEnumKind(kind):
    return GetNameForKind(kind)
  if mojom.IsStructKind(kind):
    return "%sPtr" % GetNameForKind(kind)
  if mojom.IsAnyArrayKind(kind):
    return "mojo::Array<%s>" % GetCppArrayArgWrapperType(kind.kind)
  if mojom.IsInterfaceKind(kind):
    return "%sPtr" % GetNameForKind(kind)
  if mojom.IsInterfaceRequestKind(kind):
    raise Exception("InterfaceRequest fields not supported!")
  if mojom.IsStringKind(kind):
    return "mojo::String"
  if mojom.IsHandleKind(kind):
    return "mojo::ScopedHandle"
  if mojom.IsDataPipeConsumerKind(kind):
    return "mojo::ScopedDataPipeConsumerHandle"
  if mojom.IsDataPipeProducerKind(kind):
    return "mojo::ScopedDataPipeProducerHandle"
  if mojom.IsMessagePipeKind(kind):
    return "mojo::ScopedMessagePipeHandle"
  if mojom.IsSharedBufferKind(kind):
    return "mojo::ScopedSharedBufferHandle"
  return _kind_to_cpp_type[kind]
def GetCppConstWrapperType(kind):
  """Returns the C++ type used for |kind| as a const method parameter."""
  if mojom.IsStructKind(kind):
    return "%sPtr" % GetNameForKind(kind)
  if mojom.IsAnyArrayKind(kind):
    return "mojo::Array<%s>" % GetCppArrayArgWrapperType(kind.kind)
  if mojom.IsInterfaceKind(kind):
    return "%sPtr" % GetNameForKind(kind)
  if mojom.IsInterfaceRequestKind(kind):
    return "mojo::InterfaceRequest<%s>" % GetNameForKind(kind.kind)
  if mojom.IsEnumKind(kind):
    return GetNameForKind(kind)
  if mojom.IsStringKind(kind):
    return "const mojo::String&"
  if mojom.IsHandleKind(kind):
    return "mojo::ScopedHandle"
  if mojom.IsDataPipeConsumerKind(kind):
    return "mojo::ScopedDataPipeConsumerHandle"
  if mojom.IsDataPipeProducerKind(kind):
    return "mojo::ScopedDataPipeProducerHandle"
  if mojom.IsMessagePipeKind(kind):
    return "mojo::ScopedMessagePipeHandle"
  if mojom.IsSharedBufferKind(kind):
    return "mojo::ScopedSharedBufferHandle"
  if not kind in _kind_to_cpp_type:
    # Python 2 print statement: debugging aid before the KeyError below.
    print "missing:", kind.spec
  return _kind_to_cpp_type[kind]
def GetCppFieldType(kind):
  """Returns the C++ type used for |kind| as a field of an internal
  wire-format *_Data struct."""
  if mojom.IsStructKind(kind):
    return ("mojo::internal::StructPointer<%s_Data>" %
        GetNameForKind(kind, internal=True))
  if mojom.IsAnyArrayKind(kind):
    return "mojo::internal::ArrayPointer<%s>" % GetCppType(kind.kind)
  if mojom.IsInterfaceKind(kind) or mojom.IsInterfaceRequestKind(kind):
    return "mojo::MessagePipeHandle"
  if mojom.IsEnumKind(kind):
    return GetNameForKind(kind)
  if mojom.IsStringKind(kind):
    return "mojo::internal::StringPointer"
  return _kind_to_cpp_type[kind]
def IsStructWithHandles(struct):
  """True when any packed field of |struct| carries a handle kind."""
  return any(mojom.IsAnyHandleKind(packed_field.field.kind)
             for packed_field in struct.packed.packed_fields)
def TranslateConstants(token, kind):
  """Renders a constant token (named value, builtin value or raw literal)
  as C++ source text, applying the literal suffix for |kind|."""
  if isinstance(token, mojom.NamedValue):
    # Both variable and enum constants are constructed like:
    # Namespace::Struct::CONSTANT_NAME
    # For enums, CONSTANT_NAME is ENUM_NAME_ENUM_VALUE.
    name = []
    if token.imported_from:
      name.extend(NamespaceToArray(token.namespace))
    if token.parent_kind:
      name.append(token.parent_kind.name)
    if isinstance(token, mojom.EnumValue):
      name.append(
          "%s_%s" % (generator.CamelCaseToAllCaps(token.enum.name), token.name))
    else:
      name.append(token.name)
    return "::".join(name)
  if isinstance(token, mojom.BuiltinValue):
    # Special float/double values map to the C99 macros from <math.h>.
    if token.value == "double.INFINITY" or token.value == "float.INFINITY":
      return "INFINITY";
    if token.value == "double.NEGATIVE_INFINITY" or \
       token.value == "float.NEGATIVE_INFINITY":
      return "-INFINITY";
    if token.value == "double.NAN" or token.value == "float.NAN":
      return "NAN";
  if (kind is not None and mojom.IsFloatKind(kind)):
    # Append 'f' unless the literal is a bare integer.
    return token if token.isdigit() else token + "f";
  return '%s%s' % (token, _kind_to_cpp_literal_suffix.get(kind, ''))
def ExpressionToText(value, kind=None):
  """Converts a mojom constant expression to its C++ source text."""
  return TranslateConstants(value, kind)
def ShouldInlineStruct(struct):
  """Heuristic: inline the C++ wrapper only for small structs that have no
  move-only fields."""
  # TODO(darin): Base this on the size of the wrapper class.
  if len(struct.fields) > 4:
    return False
  return not any(mojom.IsMoveOnlyKind(field.kind) for field in struct.fields)
def GetArrayValidateParams(kind):
  """Returns the C++ ArrayValidateParams instantiation for |kind|, built
  recursively for nested arrays; non-array, non-string kinds need none."""
  if not (mojom.IsAnyArrayKind(kind) or mojom.IsStringKind(kind)):
    return "mojo::internal::NoValidateParams"
  if mojom.IsStringKind(kind):
    # Strings validate as unsized arrays of non-nullable elements.
    num_elements = 0
    nullable_elements = False
    nested_params = "mojo::internal::NoValidateParams"
  else:
    num_elements = generator.ExpectedArraySize(kind)
    nullable_elements = mojom.IsNullableKind(kind.kind)
    nested_params = GetArrayValidateParams(kind.kind)
  # The trailing space keeps nested template instantiations well-formed.
  return "mojo::internal::ArrayValidateParams<%d, %s,\n%s> " % (
      num_elements,
      'true' if nullable_elements else 'false',
      nested_params)
# Byte size of the header preceding every serialized mojo struct; added to
# the packed payload size by the struct_size filter below.
_HEADER_SIZE = 8
class Generator(generator.Generator):
  """mojom -> C++ generator: renders module.h, module-internal.h and
  module.cc from the cpp_templates/ Jinja templates."""

  # Filters exposed to the Jinja templates; each maps a mojom entity to the
  # text emitted in generated C++.
  cpp_filters = {
    "constant_value": ConstantValue,
    "cpp_const_wrapper_type": GetCppConstWrapperType,
    "cpp_field_type": GetCppFieldType,
    "cpp_pod_type": GetCppPodType,
    "cpp_result_type": GetCppResultWrapperType,
    "cpp_type": GetCppType,
    "cpp_wrapper_type": GetCppWrapperType,
    "default_value": DefaultValue,
    "expected_array_size": generator.ExpectedArraySize,
    "expression_to_text": ExpressionToText,
    "get_array_validate_params": GetArrayValidateParams,
    "get_name_for_kind": GetNameForKind,
    "get_pad": pack.GetPad,
    "has_callbacks": mojom.HasCallbacks,
    "should_inline": ShouldInlineStruct,
    "is_any_array_kind": mojom.IsAnyArrayKind,
    "is_enum_kind": mojom.IsEnumKind,
    "is_move_only_kind": mojom.IsMoveOnlyKind,
    "is_any_handle_kind": mojom.IsAnyHandleKind,
    "is_interface_kind": mojom.IsInterfaceKind,
    "is_interface_request_kind": mojom.IsInterfaceRequestKind,
    "is_nullable_kind": mojom.IsNullableKind,
    "is_object_kind": mojom.IsObjectKind,
    "is_string_kind": mojom.IsStringKind,
    "is_struct_with_handles": IsStructWithHandles,
    "struct_size": lambda ps: ps.GetTotalSize() + _HEADER_SIZE,
    "struct_from_method": generator.GetStructFromMethod,
    "response_struct_from_method": generator.GetResponseStructFromMethod,
    "stylize_method": generator.StudlyCapsToCamel,
    "to_all_caps": generator.CamelCaseToAllCaps,
  }

  def GetJinjaExports(self):
    """Returns the context dict shared by all three template renders."""
    return {
      "module": self.module,
      "namespace": self.module.namespace,
      "namespaces_as_array": NamespaceToArray(self.module.namespace),
      "imports": self.module.imports,
      "kinds": self.module.kinds,
      "enums": self.module.enums,
      "structs": self.GetStructs(),
      "interfaces": self.module.interfaces,
    }

  @UseJinja("cpp_templates/module.h.tmpl", filters=cpp_filters)
  def GenerateModuleHeader(self):
    """Renders the public C++ header."""
    return self.GetJinjaExports()

  @UseJinja("cpp_templates/module-internal.h.tmpl", filters=cpp_filters)
  def GenerateModuleInternalHeader(self):
    """Renders the internal (wire-format) C++ header."""
    return self.GetJinjaExports()

  @UseJinja("cpp_templates/module.cc.tmpl", filters=cpp_filters)
  def GenerateModuleSource(self):
    """Renders the C++ implementation file."""
    return self.GetJinjaExports()

  def GenerateFiles(self, args):
    """Writes the .h, -internal.h and .cc outputs for the module."""
    self.Write(self.GenerateModuleHeader(), "%s.h" % self.module.name)
    self.Write(self.GenerateModuleInternalHeader(),
        "%s-internal.h" % self.module.name)
    self.Write(self.GenerateModuleSource(), "%s.cc" % self.module.name)
|
A.T. Aukerman, T.M. Hong, “Commissioning and Validation of the ATLAS Level-1 Topological Trigger”, in Proc. 16th Int. Conf. on Accelerator and Large Experimental Control Systems (ICALEPCS'17), Barcelona, Spain, Oct. 2017, paper TUPHA070, pp. 566-570, ISBN: 978-3-95450-193-9, https://doi.org/10.18429/JACoW-ICALEPCS2017-TUPHA070, 2018.
|
"""
PyStratum
"""
from cleo import Command, Input, Output
from pystratum.style.PyStratumStyle import PyStratumStyle
class PyStratumCommand(Command):
    """
    Loads stored routines and generates a wrapper class

    stratum
    {config_file : The stratum configuration file}
    {file_names?* : Sources with stored routines}
    """
    # NOTE: the class docstring above doubles as the cleo command signature
    # (command name and arguments); do not edit it casually.

    # ------------------------------------------------------------------------------------------------------------------
    def execute(self, input_object: Input, output_object: Output) -> int:
        """
        Executes this command.
        """
        # Keep the I/O objects so handle() can forward them to sub-commands.
        self.input = input_object
        self.output = output_object

        return self.handle()

    # ------------------------------------------------------------------------------------------------------------------
    def handle(self) -> int:
        """
        Executes the actual Stratum program.
        """
        # Wrap the raw output in the PyStratum styling helper.
        self.output = PyStratumStyle(self.input, self.output)

        # Run the three sub-commands in order, stopping at the first failure
        # (non-zero return code).
        command = self.get_application().find('constants')
        ret = command.execute(self.input, self.output)
        if ret:
            return ret

        command = self.get_application().find('loader')
        ret = command.execute(self.input, self.output)
        if ret:
            return ret

        command = self.get_application().find('wrapper')
        ret = command.execute(self.input, self.output)

        self.output.writeln('')

        return ret
# ----------------------------------------------------------------------------------------------------------------------
|
Yes, johngirton.me does HTML/CSS websites too. As a matter of fact we’ve done more HTML/CSS websites than WP websites. Here’s a case in point. Perneal and Associates in Chicago, Illinois is a business consultant and needed a static website to inform the public of exactly what their specialty is. Of course you can see that we’ve branded the site with a custom logo and colors and also designed and produced the consultant’s stationery. He gets so much business that he can’t even keep up with his contact emails from the site. That’s a great problem to have too.
|
import os
import deploy as deploy_conf
from fabric.api import env, task, roles, run, execute, sudo
from fabric import colors
from fabric.utils import abort
import inspect
################################################################################
# Tasks for managing Deploy Targets
################################################################################
@task(alias='t')
def target(target_name):
    """Select the deploy target.
    """
    if target_name not in deploy_conf.TARGETS:
        abort('Deploy target "%s" not found.' % target_name)
    # Instantiate the configured target and expose its role definitions.
    selected = deploy_conf.TARGETS[target_name]()
    env['deploy_target'] = selected
    env.roledefs.update(selected.get_roles())
    print (colors.green("Selected deploy target ")
        + colors.green(target_name, bold=True))
@task
def list_targets():
    """List all the available targets
    """
    # Single-argument parenthesized prints behave identically under the
    # Python 2 print statement used elsewhere in this fabfile.
    print('Available targets:')
    print('\n'.join(deploy_conf.TARGETS.keys()))
################################################################################
# Auxiliary tasks
################################################################################
@task()
@roles('app', 'db', 'static')
def git_pull():
    """Pull changes to the repository of all remote hosts.
    """
    # Delegates to the target object selected via the `target` task.
    env.deploy_target.git_pull()
@task
@roles('app', 'db', 'static')
def setup_repository(force=False):
    """Clone the remote repository, creating the SSH keys if necessary.

    :param force: re-create the clone even if one already exists.
    """
    env.deploy_target.setup_repository(force)
@task
@roles('app', 'db', 'static')
def setup_virtualenv(force=False):
    """Create the virtualenv and install the packages from the requirements
    file.

    :param force: re-create the virtualenv even if one already exists.
    """
    env.deploy_target.setup_virtualenv(force)
    # Fresh environment: plain install rather than an upgrade pass.
    env.deploy_target.install_virtualenv(update=False)
@task
@roles('app', 'db', 'static')
def update_virtualenv():
    """Update the virtualenv according to the requirements file.
    """
    # Same installer as setup_virtualenv, but upgrading existing packages.
    env.deploy_target.install_virtualenv(update=True)
################################################################################
# Main Tasks
################################################################################
@task
@roles('app')
def restart_app():
    """Restart the application server.
    """
    env.deploy_target.restart_app()
@task()
def deploy():
    """Deploy the application to the selected deploy target.

    Pushes local commits, pulls them on every remote role, then restarts
    the application servers.  Select a target first with the `target` task.
    """
    # Push local changes to central repository
    env.deploy_target.git_push()
    # Pull changes on remote repositories
    execute(git_pull)
    # Restart application server
    execute(restart_app)
@task
@roles('db')
def migrate(syncdb=False, fake=False):
    """Execute syncdb and migrate in the database hosts.

    :param syncdb: also run Django's syncdb before migrating.
    :param fake: mark migrations as applied without running them.
    """
    env.deploy_target.db_migrate(syncdb, fake)
@task
@roles('static')
def collectstatic():
    """Execute collectstatic on static file hosts.
    """
    # NOTE(review): the target method is named db_collectstatic even though
    # this runs on static-file hosts -- confirm the name against deploy.py.
    env.deploy_target.db_collectstatic()
@task()
def setup():
    """Initial setup of the remote hosts.

    Runs the full provisioning pipeline: repository clone, virtualenv,
    database sync/migrate, static files and an application restart.
    """
    # Set up git repository
    execute(setup_repository)
    # Set up virtualenv
    execute(setup_virtualenv)
    # Sync and Migrate database (positional args: syncdb=True, fake=True)
    execute(migrate, True, True)
    # Collect static files
    execute(collectstatic)
    # Restart application servers
    execute(restart_app)
################################################################################
# Tasks for manually executing manage.py commands
################################################################################
@task
@roles('app')
def app_manage(arguments):
    """Execute the given manage.py command in Aplication hosts.

    :param arguments: argument string passed verbatim to manage.py.
    """
    env.deploy_target.run_django_manage(arguments)
@task
@roles('db')
def db_manage(arguments):
    """Execute the given manage.py command in Database hosts.

    :param arguments: argument string passed verbatim to manage.py.
    """
    env.deploy_target.run_django_manage(arguments)
@task
@roles('static')
def static_manage(arguments):
    """Execute the given manage.py command in Static File hosts.

    :param arguments: argument string passed verbatim to manage.py.
    """
    env.deploy_target.run_django_manage(arguments)
################################################################################
# Auxiliary tasks for helping with SSH public key authentication
################################################################################
def _read_key_file(key_file):
"""Helper function that returns your SSH public from the given filename.
"""
key_file = os.path.expanduser(key_file)
if not key_file.endswith('pub'):
raise RuntimeWarning('Trying to push non-public part of key pair')
with open(key_file) as f:
return f.read().strip()
@task
def push_key(keyfile='~/.ssh/id_rsa.pub'):
    """Adds your public key to the list of authorized keys to log into the
    remote account.
    """
    # _read_key_file refuses non-.pub files, so only the public half is sent.
    key = _read_key_file(keyfile)
    run('mkdir -p ~/.ssh && chmod 0700 ~/.ssh')
    run("echo '" + key + "' >> ~/.ssh/authorized_keys")
@task
def push_key_sudo(user,keyfile='~/.ssh/id_rsa.pub'):
    """Adds your public key to the list of authorized keys for another
    account on the remote host, via sudo.

    :param user: remote account whose authorized_keys is extended.
    :param keyfile: local path to the public key (must end in 'pub').
    """
    key = _read_key_file(keyfile)
    sudo('mkdir -p ~%(user)s/.ssh && chmod 0700 ~%(user)s/.ssh'%{'user': user},
        user=user)
    sudo("echo '" + key + "' >> ~%(user)s/.ssh/authorized_keys"%{'user': user},
        user=user)
|
The challenge with possibility is that it gets confused with goals, predictions, and optimism. Possibility is not about what we plan to happen, or what we think will happen, or whether things will get better. Goals, predictions, and optimism don’t create anything; they just might make things a little better and cheer us up in the process. Nor is possibility simply a dream. Dreaming leaves us bystanders or observers of our lives. Possibility creates something new. It is a declaration of a future that has the quality of being and aliveness that we choose to live into. It is framed as a declaration of the world that I want to inhabit. It is a statement of who I am that transcends our history, our story, our usual demographics. The power is in the act of declaring…The future is created through a declaration of what is the possibility we stand for.
Walters then lists some of the questions Block is asking that made him think. They’re pretty good.
I’m thinking about possible futures and projects as I look to rekindle my theatrical and artistic ventures, and I find this call to the possible helpful and timely. It speaks to me of the Kingdom plot of ground, and the “making” function of putting our talents and gifts to work to create the better world of the Kingdom of God. As you know, this is the core of my theology, the theology of making and creation and the possible. How odd that even as I know that in my bones, as I look from this new perspective of 50 years, the last couple deep in full-time ministry, there’s a temptation to see the possibilities as smaller than before.
As my study of Proverbs is showing me, God tests the heart. It’s not always clear what is a test and what is a temptation, but I’m pretty sure the old “more-than-we-can-imagine” clause still holds true.
What possibility will we stand for today?
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.