text
stringlengths 29
850k
|
|---|
## This file is part of Scapy
## See http://www.secdev.org/projects/scapy for more informations
## Copyright (C) Philippe Biondi <phil@secdev.org>
## This program is published under a GPLv2 license
"""
Common customizations for all Unix-like operating systems other than Linux
"""
import sys,os,struct,socket,time
from fcntl import ioctl
from scapy.error import warning
import scapy.config
import scapy.utils
import scapy.utils6
import scapy.arch
scapy.config.conf.use_pcap = 1
scapy.config.conf.use_dnet = 1
from pcapdnet import *
##################
## Routes stuff ##
##################
def read_routes():
    """Parse `netstat -rn` output into Scapy's IPv4 routing table.

    Returns a list of (dest, netmask, gateway, interface, ifaddr)
    tuples with dest/netmask as numbers (see scapy.utils.atol).
    Handles the netstat dialects of Solaris, FreeBSD and the other
    BSD/Darwin systems. Python 2 code (0L literals).
    """
    if scapy.arch.SOLARIS:
        f=os.popen("netstat -rvn") # -f inet
    elif scapy.arch.FREEBSD:
        f=os.popen("netstat -rnW") # -W to handle long interface names
    else:
        f=os.popen("netstat -rn") # -f inet
    ok = 0                 # becomes 1 once the column-header line was seen
    mtu_present = False    # an extra "Mtu" column shifts the Netif field
    prio_present = False   # same for "Prio" (OpenBSD)
    routes = []
    pending_if = []        # routes printed without an output interface (Solaris)
    for l in f.readlines():
        if not l:
            break
        l = l.strip()
        if l.find("----") >= 0: # a separation line
            continue
        if not ok:
            if l.find("Destination") >= 0:
                ok = 1
                mtu_present = l.find("Mtu") >= 0
                prio_present = l.find("Prio") >= 0
            continue
        if not l:
            break
        if scapy.arch.SOLARIS:
            lspl = l.split()
            if len(lspl) == 10:
                dest,mask,gw,netif,mxfrg,rtt,ref,flg = lspl[:8]
            else: # missing interface
                dest,mask,gw,mxfrg,rtt,ref,flg = lspl[:7]
                netif=None
        else:
            rt = l.split()
            dest,gw,flg = rt[:3]
            # optional Mtu/Prio columns shift the Netif column to the right
            netif = rt[5+mtu_present+prio_present]
        if flg.find("Lc") >= 0:
            continue
        if dest == "default":
            dest = 0L
            netmask = 0L
        else:
            if scapy.arch.SOLARIS:
                netmask = scapy.utils.atol(mask)
            elif "/" in dest:
                dest,netmask = dest.split("/")
                netmask = scapy.utils.itom(int(netmask))
            else:
                # bare "a.b.c" style destination: derive a classful netmask
                # and pad the address with trailing .0 components
                netmask = scapy.utils.itom((dest.count(".") + 1) * 8)
                dest += ".0"*(3-dest.count("."))
            dest = scapy.utils.atol(dest)
        if not "G" in flg:
            # not a gateway route: directly connected network
            gw = '0.0.0.0'
        if netif is not None:
            ifaddr = scapy.arch.get_if_addr(netif)
            routes.append((dest,netmask,gw,netif,ifaddr))
        else:
            pending_if.append((dest,netmask,gw))
    f.close()

    # On Solaris, netstat does not provide output interfaces for some routes
    # We need to parse completely the routing table to route their gw and
    # know their output interface
    for dest,netmask,gw in pending_if:
        gw_l = scapy.utils.atol(gw)
        max_rtmask,gw_if,gw_if_addr, = 0,None,None
        for rtdst,rtmask,_,rtif,rtaddr in routes[:]:
            if gw_l & rtmask == rtdst:
                # keep the most specific (longest netmask) matching route
                if rtmask >= max_rtmask:
                    max_rtmask = rtmask
                    gw_if = rtif
                    gw_if_addr = rtaddr
        if gw_if:
            routes.append((dest,netmask,gw,gw_if,gw_if_addr))
        else:
            warning("Did not find output interface to reach gateway %s" % gw)
    return routes
############
### IPv6 ###
############
def in6_getifaddr():
    """
    Returns a list of 3-tuples of the form (addr, scope, iface) where
    'addr' is the address of scope 'scope' associated to the interface
    'iface'.
    This is the list of all addresses of all interfaces available on
    the system.
    """
    ret = []
    # Iterate over dnet's interface table. The loop variable used to be
    # named `int`, shadowing the builtin; renamed for clarity.
    for intf in dnet.intf():
        ifname = intf['name']
        v6 = []
        # dict.has_key() was removed in Python 3; the `in` test behaves
        # identically on Python 2.
        if 'alias_addrs' in intf:
            v6 = intf['alias_addrs']
        for a in v6:
            if a.type != dnet.ADDR_TYPE_IP6:
                continue
            # strip the prefix length; keep the printable address
            xx = str(a).split('/')[0]
            addr = scapy.utils6.in6_ptop(xx)
            scope = scapy.utils6.in6_getscope(addr)
            ret.append((xx, scope, ifname))
    return ret
def read_routes6():
    """Parse `netstat -rn -f inet6` into Scapy's IPv6 routing table.

    Returns a list of (dest, plen, nexthop, iface, cset) tuples, where
    cset is the list of candidate source addresses for the route.
    """
    f = os.popen("netstat -rn -f inet6")
    ok = False
    mtu_present = False   # extra "Mtu" column shifts the interface field
    prio_present = False  # same for "Prio"
    routes = []
    lifaddr = in6_getifaddr()  # (addr, scope, iface) of all local addresses
    for l in f.readlines():
        if not l:
            break
        l = l.strip()
        if not ok:
            if l.find("Destination") >= 0:
                ok = 1
                mtu_present = l.find("Mtu") >= 0
                prio_present = l.find("Prio") >= 0
            continue
        # gv 12/12/06: under debugging
        if scapy.arch.NETBSD or scapy.arch.OPENBSD:
            lspl = l.split()
            d,nh,fl = lspl[:3]
            dev = lspl[5+mtu_present+prio_present]
        else: # FREEBSD or DARWIN
            d,nh,fl,dev = l.split()[:4]
        # skip routes on interfaces we hold no address on.
        # NOTE(review): on Python 3 filter() returns an iterator which never
        # equals []; this file is Python 2 code (cf. the 0L literals above).
        if filter(lambda x: x[2] == dev, lifaddr) == []:
            continue
        if 'L' in fl: # drop MAC addresses
            continue
        if 'link' in nh:
            # on-link route: no gateway needed
            nh = '::'
        cset = [] # candidate set (possible source addresses)
        dp = 128  # prefix length defaults to a host route
        if d == 'default':
            d = '::'
            dp = 0
        if '/' in d:
            d,dp = d.split("/")
            dp = int(dp)
        # strip scope-id suffixes ("fe80::1%en0")
        if '%' in d:
            d,dev = d.split('%')
        if '%' in nh:
            nh,dev = nh.split('%')
        if scapy.arch.LOOPBACK_NAME in dev:
            cset = ['::1']
            nh = '::'
        else:
            devaddrs = filter(lambda x: x[2] == dev, lifaddr)
            cset = scapy.utils6.construct_source_candidate_set(d, dp, devaddrs, scapy.arch.LOOPBACK_NAME)
        if len(cset) != 0:
            routes.append((d, dp, nh, dev, cset))
    f.close()
    return routes
|
The daughter of a South Daytona police lieutenant has been arrested and charged with burning a 4-year-old boy with cigarettes and a lighter, a Volusia County Sheriff's arrest report shows.
Brandy Nichole Quartier, 23, of South Daytona was arrested by the Sheriff's Office on Wednesday and charged with aggravated child abuse, the report states.
Quartier, the daughter of South Daytona Lt. Doug Quartier, is accused of burning her boyfriend's 4-year-old son over several sections of his body. The abuse is alleged to have occurred the weekend of Feb. 24, the report states.
The youngster's mother called police on Feb. 26 after she picked up the child and his twin brother at their father's South Daytona residence. After she put both boys in her car, the mother noticed that the youngster had a blister under his lip, the report shows. She then saw another blister on the child's left hand.
At that point, the mother told police that she stopped her car and inspected her son's body, the report says. She found two more blisters on the bottom right side of the boy's back, another blister on the left side of his chest and a U-shaped burn on his left buttock.
When the 25-year-old mother asked the boy how he had obtained the injuries, she said he became "very terrified," the report says. She then took the child to Halifax Health Medical Center.
The Daytona Beach News-Journal is not identifying the boy and his parents because the child is a victim of child abuse.
Neither Quartier nor her boyfriend -- the father of the boy -- could be reached for comment Thursday. The child's mother did not return a telephone call Thursday.
South Daytona policeman Samuel Woodson went to the hospital to speak with the boy and his mother. In his incident report, Woodson wrote that the child was "extremely terrified" and "afraid to speak to me."
When the boy finally spoke, he told Woodson that he was asleep in a bedroom at his father's house that weekend with Quartier. He said while he slept, "it felt like someone was sawing at my body." He told Woodson that "it hurt really bad," the report says.
However, when Woodson asked the child who burned him, he said the boy looked at him with a "terrified face and became frozen."
Woodson asked if the boy's dad had hurt him, and the child said no. When asked if Quartier had done it, the youngster remained silent, but nodded his head up and down, the report states. The boy also said he did not want to get anyone in trouble.
South Daytona officials asked that the Sheriff's Office take over the investigation to avoid an apparent conflict of interest.
Sheriff's Investigator Justin Sawicki interviewed the youngster's mother and father separately. The boy's father said at first he thought the injuries were mosquito bites, the report shows.
Informed that a doctor who examined the boy said the burns occurred during the time the boy was with his father and Quartier, the father said, "I don't know, I don't understand this whole thing," the report states.
Sawicki attempted to interview Quartier on March 14, but she refused to speak with him, saying: "My dad told me not to talk to you guys," the report shows.
Quartier was arrested Wednesday and taken to the Volusia County Branch Jail. She was released on $5,000 bail.
|
import ecdsa
import ecdsa.der
import ecdsa.util
import hashlib
import os
import re
import struct
import requests
import json
import math
import time
from bitcoin import *
try:
import cPickle as pickle
except:
import pickle
# Bitcoin Base58 alphabet (excludes 0, O, I and l to avoid confusion)
b58 = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
# service master wallet address (also bound to `m` at the bottom of the file)
master_address='1GgwA7c2ovgWDBoVYsHT5VYXw2QBey1EdF'
# bytes of entropy drawn from os.urandom for every subkey
subkey_complexity=32
# flat BTC fee attached to every transaction
standard_fee=0.0001
minincrement=0.001 #min BTC per address (smallest addresses)
# address denominations are minincrement * increment_base**k
increment_base=2
def base58encode(n, alphabet='123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'):
    """Encode a non-negative integer in base-58 (Bitcoin alphabet).

    Returns '' for n == 0; leading zero bytes are handled by the caller
    (base58CheckEncode). The alphabet defaults to the module-level b58
    string, but may be overridden, which also generalizes the radix
    (it is taken from len(alphabet)).

    Fix: the original used `n /= 58`, which is floor division on
    Python 2 but true (float) division on Python 3, breaking the digit
    indexing; divmod keeps n integral on both.
    """
    base = len(alphabet)
    result = ''
    while n > 0:
        n, digit = divmod(n, base)
        result = alphabet[digit] + result
    return result
def base256decode(s):
    """Interpret the byte string *s* as a big-endian base-256 integer."""
    value = 0
    for ch in s:
        value = (value << 8) | ord(ch)
    return value
def countLeadingChars(s, ch):
    """Return how many consecutive copies of *ch* start the string *s*."""
    for idx, c in enumerate(s):
        if c != ch:
            return idx
    return len(s)
# https://en.bitcoin.it/wiki/Base58Check_encoding
def base58CheckEncode(version, payload):
    """Base58Check-encode *payload* with the given version byte.

    Python 2 only: hashes byte strings (str). Each leading NUL byte of
    version+payload+checksum is rendered as a leading '1'.
    """
    s = chr(version) + payload
    # checksum = first four bytes of double SHA-256
    checksum = hashlib.sha256(hashlib.sha256(s).digest()).digest()[0:4]
    result = s + checksum
    leadingZeros = countLeadingChars(result, '\0')
    return '1' * leadingZeros + base58encode(base256decode(result))
def privateKeyToWif(key_hex):
    """Convert a hex private key to Wallet Import Format (version 0x80).

    Python 2 only: relies on str.decode('hex').
    """
    return base58CheckEncode(0x80, key_hex.decode('hex'))
def privateKeyToPublicKey(s):
    """Derive the uncompressed SECP256k1 public key (hex, '\\x04'-prefixed)
    from the hex-encoded private key *s*. Python 2 hex codec."""
    sk = ecdsa.SigningKey.from_string(s.decode('hex'), curve=ecdsa.SECP256k1)
    vk = sk.verifying_key  # NOTE(review): unused; to_string() is taken below
    return ('\04' + sk.verifying_key.to_string()).encode('hex')
def pubKeyToAddr(s):
    """Hash the hex public key *s* (SHA-256 then RIPEMD-160) and encode
    the digest as a version-0 Base58Check Bitcoin address."""
    ripemd160 = hashlib.new('ripemd160')
    ripemd160.update(hashlib.sha256(s.decode('hex')).digest())
    return base58CheckEncode(0, ripemd160.digest())
def keyToAddr(s):
    """Return the version-0 Bitcoin address for the hex private key *s*."""
    return pubKeyToAddr(privateKeyToPublicKey(s))
# Generate a pair of random subkeys (together they form one private key)
def generate_subkeys():
    """Return a list of two fresh random hex subkeys.

    Python 2 only: uses str.encode('hex').
    """
    a=[]
    a.append(os.urandom(subkey_complexity).encode('hex')) #subkey1
    a.append(os.urandom(subkey_complexity).encode('hex')) #subkey2
    return a
def generate_privatekey(subkey1,subkey2):
    """Return the WIF private key for a subkey pair.

    The secret exponent is SHA-256(subkey1 + subkey2), so both halves
    are required to spend from the derived address.
    """
    keysum=subkey1+subkey2
    secret_exponent=hashlib.sha256(keysum).hexdigest()
    privkey=privateKeyToWif(secret_exponent)
    return privkey
def generate_publicaddress(subkey1,subkey2):
    """Return the Bitcoin address for a subkey pair.

    Must stay consistent with generate_privatekey(): the address commits
    to SHA-256(subkey1 + subkey2).
    """
    keysum=subkey1+subkey2
    secret_exponent=hashlib.sha256(keysum).hexdigest()
    address=keyToAddr(secret_exponent)
    return address
def check_address(public_address):
    """Return the balance of *public_address* in satoshi, as the raw body
    of blockchain.info's addressbalance query, or -1 on any non-200
    response."""
    url = 'https://blockchain.info/q/addressbalance/' + public_address
    reply = requests.get(url)
    if reply.status_code != 200:
        return -1
    return reply.content
def check_address_subkeys(subkey1,subkey2):
    """Balance lookup (see check_address) for the address derived from
    the given subkey pair.

    Fix: the original declared `global h` but never assigned it here —
    a dead declaration removed.
    """
    address=generate_publicaddress(subkey1,subkey2)
    return check_address(address)
def generate_receiving_address(destination_address):
    """Ask blockchain.info's receive-payments API for a forwarding address.

    Returns the forwarding ("input") address as a str, or "ERROR" on any
    non-200 response. Side effect: leaves the response and parsed JSON
    in globals r and g (debug aid).
    """
    global g,r
    a='https://blockchain.info/api/receive?method=create&address='
    a=a+destination_address
    r=requests.get(a)
    receiving_address=''
    if r.status_code==200:
        g=json.loads(str(r.content))
        receiving_address=g['input_address']
        return str(receiving_address)
    else:
        return "ERROR"
#'$receiving_address&callback=$callback_url
class subkeypair:
    """One split-address: its two subkeys, derived address and state.

    NOTE(review): the class-level defaults below are shared class
    attributes; __init__ rebinds the subkeys/address per instance, but
    balance/myuser/received only become per-instance once assigned.
    """
    subkey1='' #user
    subkey2='' #swiftcoin
    referenceid=''
    publicaddress=''
    balance=0
    myuser=''
    received=False
    def __init__(self):
        # fresh random subkeys; the public address commits to both
        self.subkey1=os.urandom(subkey_complexity).encode('hex')
        self.subkey2=os.urandom(subkey_complexity).encode('hex')
        self.referenceid=os.urandom(subkey_complexity).encode('hex')
        self.publicaddress=generate_publicaddress(self.subkey1,self.subkey2)
        #return self.publicaddress
    def private_key(self):
        """WIF private key for this pair (requires both subkeys)."""
        return generate_privatekey(self.subkey1,self.subkey2)
def roundfloat(s, decimals):
    """Truncate *s* to *decimals* decimal places.

    Note: this truncates toward zero (via int()), it does not round to
    nearest.
    """
    scale = math.pow(10, decimals)
    return float(int(s * scale) / scale)
def split_logarithmically(amt,base, min):
    """Split *amt* into counts of denominations min*base**g.

    Returns a list a where a[g] says how many pieces of size
    min*base**g are needed. Works internally in integer units of *min*.
    Python 2 only: print statements, and the slack term min/100 is 0
    under integer division (min has been rebound to 1).
    """
    global r,s
    s=amt
    r=int(math.log(amt/min,base))  # index of the largest denomination
    a=[0]*(r+1)
    g=0
    v=0  # total number of pieces allocated (debug counter)
    s=int(s/min)  # work in integer units of `min`
    min=1
    h=s%min  # NOTE(review): always 0 once min == 1
    s=s-h
    while s>0.00000000:
        print s
        g=0
        # greedily take one piece of each denomination from the smallest
        # upward while the remainder still covers it
        while g<r+1 and s+min/100>=math.pow(base,g)*min:
            a[g]=a[g]+1
            v=v+1
            s=s-math.pow(base,g)*min
            g=g+1
        if s<1 and s>0:
            s=-1  # stop on a sub-unit residue
    #print v
    return a
def split_n(amt,base,min):
    """Return how many pieces a logarithmic split of *amt* would use.

    Mirrors split_logarithmically() but only counts the pieces
    (returns v, not the per-denomination array). Python 2 print
    statements.
    """
    r=int(math.log(amt/min,base))
    a=[0]*(r+1)
    g=0
    v=0
    s=amt
    s=s/min  # work in units of `min`
    min=1
    while s>0.000000001:
        g=0
        print s
        # one piece of every denomination per pass.
        # NOTE(review): unlike split_logarithmically the size check is
        # commented out, so s can be driven negative before the loop ends
        while g<r+1:# and s+min/100>=float(math.pow(base,g)*min):
            a[g]=a[g]+1
            v=v+1
            s=s-float(int(math.pow(base,g)))*min
            g=g+1
        if s<1 and s>0:
            s=-1
    return v
def assemble_logarithmically(amt,base,min, storedset):
    """Choose denominations from *storedset* to cover *amt*.

    storedset[g] is the number of available pieces of size min*base**g;
    returns a list of how many of each to use, filled greedily from the
    largest denomination down. Python 2 print statement in the loop.
    """
    s=amt
    s=s/min  # work in units of `min`
    min=1
    a=[0]*len(storedset)
    c=[]
    # local copy of the available counts so storedset is not mutated
    for x in storedset:
        c.append(x)
    g=len(storedset)-1
    while g>-1:
        if c[g]>0 and s>=math.pow(base,g):
            # take as many of this denomination as needed and available
            n=int(s/math.pow(base,g))
            if n>c[g]:
                n=c[g]
            c[g]=c[g]-n
            a[g]=a[g]+n
            print s
            s=s-math.pow(base,g)*n
        g=g-1
    return a
a=split_logarithmically(100,2,1)
def convert_to_base(x, base):
    """Render the non-negative number *x* in the given base.

    Each digit is written with str(), so this is only meaningful for
    bases <= 10 (larger digits would span several characters).
    Returns '' for x == 0.
    """
    rendered = ''
    emitting = False
    for power in range(30, -1, -1):
        place = math.pow(base, power)
        digit = int(x / place)
        if digit > 0:
            emitting = True
        if emitting:
            rendered += str(digit)
            x -= digit * place
    return rendered
class user:
    """One service user.

    Tracks a funding ("input") address, an output address for
    withdrawals, and the subkeypair addresses the balance is split into.
    Python 2 code (hex codec; print() used with a single argument so it
    behaves identically as a Python 2 print statement).

    Fixes vs. the original:
    * subkeypairs/subkeys were class attributes, so every user instance
      shared the same two lists; they are now created per instance.
    * send_to_output referenced the undefined name `standardfee`
      (NameError at runtime); it now uses standard_fee.
    * the bare except in check_and_split is narrowed to IndexError.
    """
    name=''
    totalbalance=0
    inputaddress=''
    inputsecretexponent='' #passphrase not yet hashed
    outputaddress=''
    def __init__(self):
        self.inputsecretexponent=os.urandom(subkey_complexity).encode('hex')
        self.inputaddress=generate_publicaddress(self.inputsecretexponent,'')
        self.outputaddress=m #TEMPORARY
        # per-instance lists (were shared class attributes — see class doc)
        self.subkeypairs=[]
        self.subkeys=[] #for memory purposes
    def generate_subaddresses(self, amt): #this takes way too long
        """Create enough subkeypairs to hold a logarithmic split of amt."""
        a=0
        n=split_n(amt,increment_base,minincrement)
        while a<n:
            k=subkeypair()
            h1=k.subkey1
            h2=k.subkey2
            self.subkeys.append([h1,h2])
            #UPLOAD SUBKEY2 TO OUR DATABASE AND BACK UP
            save()
            self.subkeypairs.append(k)
            a=a+1
    def checkinputaddress(self):
        """Balance of the funding address in satoshi (str), or -1."""
        return check_address(self.inputaddress)
    def check_and_split(self): #splits input address BTC into new subkeypairs, subkeypairs must already exist
        """Split whatever arrived on the input address across subkeypairs."""
        global dests, outs
        newsum=float(self.checkinputaddress())/100000000
        # reserve the per-output fee before computing the split
        newsum=newsum/(1+split_n(newsum,increment_base,minincrement)*standard_fee)
        print("detected sum: "+str(newsum))
        if newsum>0:
            splitsums=split_logarithmically(newsum,increment_base,minincrement)
            self.totalbalance=self.totalbalance+newsum
        else:
            splitsums=[]
        a=0
        outs=[]
        dests=[]
        s=0
        while a<len(splitsums): #for each digit in splitsums
            amt=minincrement*math.pow(increment_base,a) #dont include standard fee in send_many
            print(str(amt))
            #construct arrays for destinations, outputs
            h=0
            while h<splitsums[a]:
                outputvalue=amt
                outs.append(outputvalue)
                try:
                    dest=self.subkeypairs[s].publicaddress
                    self.subkeypairs[s].balance=amt
                    self.subkeypairs[s].received=True
                    dests.append(dest)
                except IndexError:
                    print("insufficient subkeypairs")
                s=s+1
                h=h+1
            a=a+1
        outs[0]=outs[0]+standard_fee
        send_many(self.inputaddress,outs,dests,standard_fee,0,0,self.inputsecretexponent)
    def redeem(self): #redeem received subkeypairs to outputwallet
        """Sweep every received subkeypair address to the output address."""
        global fromaddrs, subkey1s, subkey2s
        fromaddrs=[]
        dest=self.outputaddress
        fee=standard_fee
        subkey1s=[]
        subkey2s=[]
        for x in self.subkeypairs:
            if x.received==True:
                fromaddrs.append(x.publicaddress)
                subkey1s.append(x.subkey1)
                subkey2s.append(x.subkey2)
        send_from_many(fromaddrs,dest,fee,subkey1s,subkey2s)
    def send_to_output(self,amt):
        """Send amt BTC to the output address, draining subkeypairs in order."""
        sent=0
        ok=True
        h=0
        while ok:
            if sent>=amt:
                ok=False
            else:
                if self.subkeypairs[h].balance>0:
                    fromaddr=self.subkeypairs[h].publicaddress
                    # was the undefined `standardfee` in the original
                    if self.subkeypairs[h].balance>amt-sent+standard_fee:
                        fromthisoneamt=amt-sent
                    else:
                        fromthisoneamt=self.subkeypairs[h].balance
                    subkey1=self.subkeypairs[h].subkey1
                    subkey2=self.subkeypairs[h].subkey2
                    send(fromaddr,fromthisoneamt,self.outputaddress,standard_fee,subkey1,subkey2)
                    self.subkeypairs[h].balance=self.subkeypairs[h].balance-fromthisoneamt-standard_fee
                    sent=sent+fromthisoneamt
                h=h+1
def isinside(small, big):
    """Case-insensitive scan for *small* inside *big*.

    Lower-cases each window of *big* before comparing, so *small* should
    be passed in lower case already. Returns True on the first match.

    Fix: the original looped `while f < b-a`, which never examined the
    final alignment, so suffix matches (e.g. isinside('ab', 'xab')) and
    equal-length matches were missed; the bound is now inclusive.
    """
    a = len(small)
    b = len(big)
    f = 0
    found = False
    while f <= b - a:
        g = ''
        for x in big[f:f+a]:
            g = g + str(x.lower())
        if g == small:
            found = True
            break
        f = f + 1
    return found
def find_vanity(vanity,n):
    """Brute-force search for an address containing *vanity* (lower case).

    Tries up to 26**n random keys, printing a progress value and, on
    success, the winning secret exponent and address. Python 2 only
    (print statements, hex codec).
    """
    k=math.pow(26,n)
    a=0
    while a<k:
        print math.log(a+1,36)  # progress indicator
        d=os.urandom(subkey_complexity).encode('hex')
        b=generate_publicaddress(d,'')
        if isinside(vanity,b):
            a=k  # found: force the loop to terminate
            print "secret exponent: "+str(d)
            print "public address: "+str(b)
        a=a+1
def send_transaction(fromaddress,amount,destination, fee, privatekey):
    """Build, sign and broadcast a payment of *amount* BTC plus *fee*.

    Gathers unspent outputs of *fromaddress* until amount+fee is
    covered, pays *destination* and returns change to *fromaddress*.
    Uses unspent/mktx/sign/pushtx from the `bitcoin` package; leaves
    intermediates in globals as a debug aid. Python 2 print statement.
    NOTE(review): only input 0 is signed (sign(tx,0,...)); a transaction
    that needs more than one input would be invalid — send_many signs
    all inputs.
    """
    #try:
    global ins, outs,h, tx, tx2
    fee=int(fee*100000000)        # BTC -> satoshi
    amount=int(amount*100000000)  # BTC -> satoshi
    h=unspent(fromaddress)
    ins=[]
    ok=False
    outs=[]
    totalfound=0
    for x in h:
        if not ok:
            ins.append(x)
            if x['value']>=fee+amount-totalfound:
                # this input completes the amount: pay dest, then change
                outs.append({'value':amount,'address':destination})
                if x['value']>fee+amount-totalfound:
                    outs.append({'value':x['value']-amount-fee,'address':fromaddress})
                ok=True
                totalfound=fee+amount
            else:
                # NOTE(review): partial inputs are paid straight to the
                # destination here, on top of the full amount paid above —
                # verify this overpayment is intended
                outs.append({'value':x['value'],'address':destination})
                totalfound=totalfound+x['value']
    tx=mktx(ins,outs)
    tx2=sign(tx,0,privatekey)
    #tx3=sign(tx2,1,privatekey)
    pushtx(tx2)
    print "Sending "+str(amount)+" from "+str(fromaddress)+" to "+str(destination)+" with fee= "+str(fee)+" and secret exponent= "+str(privatekey)
    #a='https://blockchain.info/pushtx/'
    #b=requests.get(a+tx3)
    #if b.response_code==200:
    # print b.content
    #except:
    # print "failed"
def send_many(fromaddr,outputs,destinations,fee, subkey1,subkey2, secretexponent):
    """Send several amounts from one address in a single transaction.

    outputs[i] BTC goes to destinations[i]; anything left above *fee*
    is returned to *fromaddr* as change. The signing key is
    SHA-256(secretexponent) when secretexponent is truthy/positive,
    otherwise SHA-256(subkey1+subkey2). Every input is signed. Leaves
    intermediates in globals as a debug aid.
    """
    global outs,inp, tx, tx2,totalin,b,amounts, totalout
    amounts=[]
    outs=[]
    ins=[]
    totalout=0
    fee=int(fee*100000000)  # BTC -> satoshi
    #feeouts=[]
    for x in outputs:
        amounts.append(int(x*100000000))
        totalout=totalout+int(x*100000000)
    #x in fees:
    #feeouts.append(int(x*100000000))
    inp=unspent(fromaddr)
    totalin=0
    for x in inp:
        totalin=totalin+x['value']
    ins=inp
    a=0
    b=0  # satoshi actually assigned to the outputs
    while a<len(amounts):
        amt=amounts[a]#+feeouts[a] #in satoshi
        dest=destinations[a]
        b=b+amt
        outs.append({'value':amt,'address':dest})
        a=a+1
    unspentbtc=totalin-b-fee
    if unspentbtc>0:
        # change output back to the sender
        outs.append({'value':unspentbtc,'address':fromaddr})
    # NOTE(review): secretexponent is compared numerically even though
    # callers pass a hex string; under Python 2 a string compares > 0,
    # selecting the else branch — confirm this is the intended dispatch
    if secretexponent<=0:
        priv=hashlib.sha256(subkey1+subkey2).hexdigest()
    else:
        priv=hashlib.sha256(secretexponent).hexdigest()
    tx=mktx(ins,outs)
    p=0
    tx2=tx
    # sign every input of the transaction
    for x in inp:
        tx2=sign(tx2,p,priv)
        p=p+1
    #tx2=sign(tx,0,priv)
    pushtx(tx2)
def send_from_many(fromaddrs,destination,fee, subkey1,subkey2): #always sends ALL BTC in ALL SOURCE ADDRESSES
    """Sweep every unspent output of all *fromaddrs* into *destination*.

    fromaddrs, subkey1 and subkey2 are parallel arrays; the inputs of
    address i are signed with SHA-256(subkey1[i]+subkey2[i]). The whole
    pooled balance minus *fee* goes to *destination*.
    """
    #fromaddrs and subkey1 and subkey2 need to be arrays of addresses and subkeys
    global inps, tx, tx2, outs,r
    #make inputs
    privorder=[]
    inps=[]
    totalin=0
    for x in fromaddrs:
        r=unspent(x)
        privorder.append(len(r)) # number of inputs from each input address
        inps=inps+r
        for y in r:
            totalin=totalin+y['value']
    #make output
    sfee=int(fee*100000000)  # BTC -> satoshi
    outs=[]
    amt=totalin-sfee
    outs.append({'value':amt,'address':destination})
    #send tx
    tx=mktx(inps,outs)
    tx2=tx
    g=0  # index over source addresses
    j=0  # running input index across the whole transaction
    while g<len(subkey1):
        for t in range(0,privorder[g]):
            sk1=subkey1[g]
            sk2=subkey2[g]
            priv=hashlib.sha256(sk1+sk2).hexdigest()
            tx2=sign(tx2,j,priv)
            j=j+1
        g=g+1
    pushtx(tx2)
def send(fromaddr, amt, destination, fee, subkey1, subkey2):
    """Send *amt* BTC from a subkey-pair address.

    The private key is SHA-256(subkey1+subkey2), matching
    generate_privatekey()/generate_publicaddress().
    """
    pk=hashlib.sha256(subkey1+subkey2).hexdigest()
    send_transaction(fromaddr,amt,destination,fee,pk)
users=[]
def add_user():
    """Create a new user, print its funding address and return its index
    in the global `users` list. Python 2 print statement."""
    global users
    a=user()
    print a.inputaddress
    k=len(users)
    users.append(a)
    return k
def load_user_db():
    """Load the users database from 'users.data' into the global `users`.

    File layout (written by save()): per user one line each for the
    input address and input secret exponent, then the subkeypair count
    followed by six lines per subkeypair (subkey1, subkey2, reference
    id, public address, balance, received flag); a line reading 'END'
    terminates the file.

    Fixes vs. the original:
    * the received flag was compared against True although save() writes
      the strings 'received'/'not received', so it was always False;
    * balance was kept as the raw string; it is converted back to float;
    * a failed count parse left nsubkeypairs undefined (NameError);
    * the file handle is now closed.
    """
    global users
    filename='users.data'
    users=[]
    f=open(filename)
    try:
        while True:
            inputaddress=f.readline().strip()
            if inputaddress=='END':
                break
            inputsecretexponent=f.readline().strip()
            try:
                nsubkeypairs=int(f.readline().strip())
            except ValueError:
                print("failed reading file")
                nsubkeypairs=0
            r=user()
            r.inputaddress=inputaddress
            r.inputsecretexponent=inputsecretexponent
            for i in range(nsubkeypairs):
                g=subkeypair()
                g.subkey1=f.readline().strip()
                g.subkey2=f.readline().strip()
                g.referenceid=f.readline().strip()
                g.publicaddress=f.readline().strip()
                # save() serialized the balance with str(); restore a number
                try:
                    g.balance=float(f.readline().strip())
                except ValueError:
                    g.balance=0
                # save() writes 'received' / 'not received'
                g.received=(f.readline().strip()=='received')
                r.subkeypairs.append(g)
            users.append(r)
    finally:
        f.close()
def save():
    """Persist the global `users` list to 'users.data'.

    Writes the line-oriented format read back by load_user_db() and
    terminates the file with an 'END' sentinel. Python 2 code (writes
    str to a binary-mode handle, as the original did).

    Fix: the original never closed the file handle; `with` guarantees
    it is flushed and closed even on error.
    """
    filename='users.data'
    #pickle.dump(users,open('users.data','wb'))
    with open(filename,'wb') as f:
        for x in users:
            f.write(x.inputaddress)
            f.write('\r\n')
            f.write(x.inputsecretexponent)
            f.write('\r\n')
            f.write(str(len(x.subkeypairs)))
            f.write('\r\n')
            if len(x.subkeypairs)>0:
                for y in x.subkeypairs:
                    f.write(str(y.subkey1))
                    f.write('\r\n')
                    f.write(str(y.subkey2))
                    f.write('\r\n')
                    f.write(str(y.referenceid))
                    f.write('\r\n')
                    f.write(str(y.publicaddress))
                    f.write('\r\n')
                    f.write(str(y.balance))
                    f.write('\r\n')
                    if y.received==True:
                        f.write('received')
                    else:
                        f.write('not received')
                    f.write('\r\n')
        f.write('END')
m='1GgwA7c2ovgWDBoVYsHT5VYXw2QBey1EdF'
load_user_db()
|
This season it's all about fancy accessories like oversized necklaces, which do a lot to lift a whole look from casual to dressy. Take a look at what I checked out during a really thrilling exhibition visit in London that hosted young designers.
< Previous White shoes dare to wear . . . hard to spare ?
|
#!/usr/bin/env python
# coding=utf-8
"""Exceptions for pyiosxr, a module to interact with Cisco devices running IOS-XR."""
# Copyright 2015 Netflix. All rights reserved.
# Copyright 2016 BigWaveIT. All rights reserved.
#
# The contents of this file are licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
class IOSXRException(Exception):
    """Base exception for pyiosxr; releases the device's XML agent lock."""

    def __init__(self, msg=None, dev=None):
        super(IOSXRException, self).__init__(msg)
        if dev:
            self._xr = dev
            # make sure the XML agent is released so subsequent calls do
            # not block on a stale lock
            locker = self._xr._xml_agent_locker
            if locker.locked():
                locker.release()
class ConnectError(IOSXRException):
    """Exception while opening the connection."""

    def __init__(self, msg=None, dev=None):
        super(ConnectError, self).__init__(msg=msg, dev=dev)
        if dev:
            # the session could not be established: remember the device
            # and mark its XML agent as dead so callers reconnect
            self._xr = dev
            dev._xml_agent_alive = False
class CommitError(IOSXRException):
    """Raised when the device cannot commit the candidate configuration.

    Mostly due to ERROR 0x41866c00.
    """
    pass
class LockError(IOSXRException):
    """Raised when the configuration DB cannot be locked."""
    pass
class UnlockError(IOSXRException):
    """Raised when the configuration DB cannot be unlocked."""
    pass
class CompareConfigError(IOSXRException):
    """Raised when the running and candidate configs cannot be compared."""
    pass
class UnknownError(IOSXRException):
    """Raised for device errors that fit no more specific category."""
    pass
class InvalidInputError(IOSXRException):
    """Raised when the device rejects a command as invalid input."""
    pass
class XMLCLIError(IOSXRException):
    """Raised on errors reported by the XML CLI agent."""
    pass
class InvalidXMLResponse(IOSXRException):
    """Raised when unable to process properly the XML reply from the device."""
    pass
class TimeoutError(IOSXRException):
    """Raised when a request to the device times out.

    NOTE(review): shadows the builtin TimeoutError (Python 3.3+); callers
    must reference this class explicitly to catch it.
    """
    def __init__(self, msg=None, dev=None):
        super(TimeoutError, self).__init__(msg=msg, dev=dev)
        if dev:
            # after a timeout the XML agent can no longer be trusted
            self._xr = dev
            self._xr._xml_agent_alive = False
class EOFError(IOSXRException):
    """Raised when the transport hits end-of-file unexpectedly.

    NOTE(review): shadows the builtin EOFError; callers must reference
    this class explicitly to catch it.
    """
    pass
class IteratorIDError(IOSXRException):
    """Raised on problems with an XML agent iterator ID."""
    pass
|
Sluciak Contracting is a customer-focused contracting company offering professional services based out of the Canonsburg, PA area. Go with Sluciak to finish your barn demolition project, as we complete jobs from start to finish in a timely manner. Contact our friendly staff today to learn more about our full range of barn demolition capabilities.
|
from __future__ import division
from __future__ import print_function
#!/usr/bin/python
import datetime
import logging
from mpop.satellites import GeostationaryFactory
from mpop.projector import get_area_def
from mpop.utils import debug_on
from pyresample import plot
import numpy as np
from pydecorate import DecoratorAGG
import aggdraw
from trollimage.colormap import rainbow, RainRate
from trollimage.image import Image as trollimage
from PIL import ImageFont, ImageDraw
from pycoast import ContourWriterAGG
from datetime import timedelta
import sys
from os.path import dirname, exists
from os import makedirs
LOG = logging.getLogger(__name__)
delay=0  # minutes to go back from the most recent SEVIRI slot (0 = newest)
if len(sys.argv) > 1:
    if len(sys.argv) < 6:
        # a partial date/time was given: refuse to guess the rest
        print("*** ")
        print("*** Warning, please specify date and time completely, e.g.")
        print("*** python plot_odyssey.py 2014 07 23 16 10 ")
        print("*** ")
        quit() # quit at this point
    else:
        # observation time fully specified on the command line
        year = int(sys.argv[1])
        month = int(sys.argv[2])
        day = int(sys.argv[3])
        hour = int(sys.argv[4])
        minute = int(sys.argv[5])
else:
    if True: # automatic choice of the last 5 min slot
        from my_msg_module import get_last_SEVIRI_date
        datetime1 = get_last_SEVIRI_date(False)
        if delay != 0:
            datetime1 -= timedelta(minutes=delay)
        year = datetime1.year
        month = datetime1.month
        day = datetime1.day
        hour = datetime1.hour
        minute = datetime1.minute
    else: # fixed date for test reasons
        year = 2015
        month = 12
        day = 16
        hour = 13
        minute = 30
prop_str='DBZH'
#prop_str='RATE'
#if len(sys.argv) > 1:
# prop_str = sys.argv[1]
yearS = str(year)
#yearS = yearS[2:]
monthS = "%02d" % month
dayS = "%02d" % day
hourS = "%02d" % hour
minS = "%02d" % minute
dateS=yearS+'-'+monthS+'-'+dayS
timeS=hourS+':'+minS+'UTC'
print(dateS, timeS)
#import sys, string, os
#sys.path.insert(0, "/opt/users/mbc/pytroll/install/lib/python2.6/site-packages")
debug_on()
time_slot = datetime.datetime(year, month, day, hour, minute)
global_data = GeostationaryFactory.create_scene("odyssey", "", "radar", time_slot)
global_data.load([prop_str])
print(global_data)
color_mode='RainRate'
#print "global_data[prop_str].product_name=",global_data[prop_str].product_name
#area='odyssey'
#area='odysseyS25'
area='EuroMercator' # should be the same as blitzortung
reproject=True
if reproject:
print('-------------------')
print("start projection")
# PROJECT data to new area
data = global_data.project(area, precompute=True)
#data[prop_str].product_name = global_data[prop_str].product_name
#data[prop_str].units = global_data[prop_str].units
global_data = data
#outputDir = "/data/cinesat/out/"
outputDir = time_slot.strftime('/data/COALITION2/PicturesSatellite/%Y-%m-%d/%Y-%m-%d_ODY_RATE_'+area+'/')
if not exists(outputDir):
makedirs(outputDir)
outputFile = outputDir+'ODY_'+prop_str+'-'+area+'_'+yearS[2:]+monthS+dayS+hourS+minS +'.png'
# define area
print('-------------------')
obj_area = get_area_def(area)
print('obj_area ', obj_area)
proj4_string = obj_area.proj4_string
# e.g. proj4_string = '+proj=geos +lon_0=0.0 +a=6378169.00 +b=6356583.80 +h=35785831.0'
print('proj4_string ',proj4_string)
area_extent = obj_area.area_extent
# e.g. area_extent = (-5570248.4773392612, -5567248.074173444, 5567248.074173444, 5570248.4773392612)
area_def = (proj4_string, area_extent)
print('-------------------')
print('area_def ', area_def)
prop=global_data[prop_str].data
fill_value=None # transparent background
#fill_value=(1,1,1) # white background
min_data = 0.0
max_data = 150
colormap = RainRate
# instantaneous rain rate in mm/h
if prop_str == 'RATE':
# prop = np.log10(prop)
# min_data = prop.min()
# #max_data = prop.max()
# #min_data = -0.25
# #max_data = 1.9
# min_data = -0.2 # log(0.63)
# max_data = 2.41 # log(260)
# units='log(RR)'
# tick_marks = 1 # default
# minor_tick_marks = 0.1 # default
lower_value=0.15
# instantaneous rain rate in mm/h
if prop_str == 'DBZH':
min_data = -20
max_data = 70
colormap = rainbow
lower_value=13
if prop_str == 'ACRR':
min_data = 0
max_data = 250
lower_value=0.15
if lower_value > -1000:
prop [prop < lower_value ] = np.ma.masked
LOG.debug("min_data/max_data: "+str(min_data)+" / "+str(max_data))
colormap.set_range(min_data, max_data)
# prop.mask[:,:]=True
img = trollimage(prop, mode="L", fill_value=fill_value)
img.colorize(colormap)
PIL_image=img.pil_image()
dc = DecoratorAGG(PIL_image)
add_logos=True
add_colorscale=True
add_title=True
add_map=True
find_maxima=True
verbose=True
layer=' 2nd layer'
add_borders=True
resolution='l'
if add_borders:
cw = ContourWriterAGG('/data/OWARNA/hau/pytroll/shapes/')
cw.add_coastlines(PIL_image, area_def, outline='white', resolution=resolution, outline_opacity=127, width=1, level=2) #, outline_opacity=0
#outline = (255, 0, 0)
outline = 'red'
#outline = 'white'
cw.add_coastlines(PIL_image, area_def, outline=outline, resolution=resolution, width=2) #, outline_opacity=0
cw.add_borders(PIL_image, area_def, outline=outline, resolution=resolution, width=2) #, outline_opacity=0
ticks=20
tick_marks=20 # default
minor_tick_marks=10 # default
title_color='white'
units=global_data[prop_str].info["units"]
#global_data[prop_str].units
if add_logos:
if verbose:
print('... add logos')
dc.align_right()
if add_colorscale:
dc.write_vertically()
#dc.add_logo("../logos/meteoSwiss3.jpg",height=60.0)
#dc.add_logo("../logos/pytroll3.jpg",height=60.0)
dc.add_logo("/opt/users/common/logos/meteoSwiss.png",height=40.0)
#font_scale = aggdraw.Font("black","/usr/share/fonts/truetype/ttf-dejavu/DejaVuSerif-Bold.ttf",size=16)
fontsize=18
#font = ImageFont.truetype("/usr/openv/java/jre/lib/fonts/LucidaTypewriterBold.ttf", fontsize)
font = ImageFont.truetype("/usr/openv/java/jre/lib/fonts/LucidaSansRegular.ttf", fontsize)
if add_colorscale:
print('... add colorscale ranging from min_data (',min_data,') to max_data (',max_data,')')
dc.align_right()
dc.write_vertically()
#font_scale = ImageFont.truetype("/usr/openv/java/jre/lib/fonts/LucidaTypewriterBold.ttf", fontsize)
colormap_r = colormap.reverse()
#rainbow_r.set_range(min_data, max_data)
dc.add_scale(colormap_r, extend=True, ticks=ticks, tick_marks=tick_marks, minor_tick_marks=minor_tick_marks, line_opacity=100, unit=units) #, font=font
indicate_range=True
if indicate_range:
mask = global_data[prop_str+'-MASK'].data
img = trollimage(mask, mode="L", fill_value=None) #fill_value,[1,1,1], None
from trollimage.colormap import greys
img.colorize(greys)
img.putalpha(mask*0+0.4)
PIL_mask = img.pil_image()
from PIL import Image as PILimage
PIL_image = PILimage.alpha_composite(PIL_mask, PIL_image)
if add_title:
draw = ImageDraw.Draw(PIL_image)
if layer.find('2nd') != -1:
y_pos_title=20
elif layer.find('3rd') != -1:
y_pos_title=40
else:
y_pos_title=5
layer = dateS+' '+timeS
if len(layer) > 0:
layer=layer+':'
#title = layer+' radar, '+prop_str+' ['+global_data[prop_str].units+']'
title = layer+' ODYSSEY, '+'precipitation rate'+' ['+global_data[prop_str].info["units"]+']'
draw.text((0, y_pos_title),title, title_color, font=font)
PIL_image.save(outputFile)
print('... save image as ', outputFile)
# Austria works with
# https://opencv-python-tutroals.readthedocs.io/en/latest/py_tutorials/py_imgproc/py_watershed/py_watershed.html
if find_maxima:
    # Locate local maxima in the 2-D field stored under `prop_str`,
    # then either plot them over the field or build a boolean mask.
    import numpy as np
    import scipy
    import scipy.ndimage as ndimage
    import scipy.ndimage.filters as filters
    import matplotlib.pyplot as plt
    # NOTE(review): assumes global_data[prop_str].data.data is a 2-D
    # numpy array (payload of a masked array) -- confirm against caller.
    data = global_data[prop_str].data.data
    #data = filters.gaussian_filter(global_data[prop_str].data,1) ### filter eliminates too many data points...
    noise_removal = False
    if noise_removal:
        # ... need to install openCV2
        import cv2
        # Otsu threshold + morphological opening to remove speckle, then
        # dilation to obtain a "sure background" mask (watershed prep).
        kernel = np.ones((3,3),np.uint8)
        ret, thresh = cv2.threshold(data, 0, 75, cv2.THRESH_BINARY_INV+cv2.THRESH_OTSU)
        opening = cv2.morphologyEx(thresh, cv2.MORPH_OPEN, kernel, iterations = 1)
        sure_bg = cv2.dilate(opening,kernel,iterations=3)
    # arbitrary settings
    neighborhood_size = 6   # window size (pixels) of the min/max filters
    threshold = 6           # minimum local contrast (max - min) to keep a maximum
    ref_min=43              # absolute floor: discard maxima weaker than this
    data_max = filters.maximum_filter(data, neighborhood_size)
    print(data_max.max())
    # A pixel is a candidate maximum where it equals its local maximum.
    maxima = (data == data_max)
    data_min = filters.minimum_filter(data, neighborhood_size)
    print(data_min.max())
    diff = ((data_max - data_min) > threshold)
    #print "diff: ", diff
    # Drop flat regions (no contrast) and maxima below the absolute floor.
    maxima[diff == False] = 0
    maxima[data_max < ref_min] = 0
    # Label connected maxima and take the centre of each labelled slice.
    labeled, num_objects = ndimage.label(maxima)
    slices = ndimage.find_objects(labeled)
    x, y = [], []
    for dy,dx in slices:
        # NOTE(review): "/2" yields a float under Python 3; the integer
        # indexing in the `else` branch below (prop[i,j]) would then fail.
        # Confirm this runs under Python 2 or use integer division.
        x_center = (dx.start + dx.stop - 1)/2
        x.append(x_center)
        y_center = (dy.start + dy.stop - 1)/2
        y.append(y_center)
    plot_plt=True
    if plot_plt:
        # Render the field and overlay detected maxima as red dots; two
        # PNGs are written: the raw field (odd_*) and with markers (odm_*).
        plt.imshow(data, vmin=0, vmax=0.9*data_max.max())
        #plt.imshow(data, vmin=0, vmax=50)
        #plt.imshow(data)
        plt.autoscale(False)
        outputFile = outputDir+'odd_'+prop_str+'-'+area+'_'+yearS[2:]+monthS+dayS+hourS+minS +'.png'
        plt.savefig(outputFile, bbox_inches = 'tight')
        print("display "+outputFile+" &")
        plt.autoscale(False)
        plt.plot(x,y, 'ro', markersize=2.5)
        outputFile = outputDir+'odm_'+prop_str+'-'+area+'_'+yearS[2:]+monthS+dayS+hourS+minS +'.png'
        plt.savefig(outputFile, bbox_inches = 'tight')
        print("display "+outputFile+" &")
    else:
        # Boolean mask with True at each detected maximum.
        prop = np.full(data.shape, False, dtype=bool)
        for i,j in zip(x,y):
            prop[i,j]=True
    from mpop.satin.swisslightning import unfold_lightning
    # NOTE(review): `prop` is only defined in the else branch above; with
    # plot_plt=True this line raises NameError -- code marked unfinished.
    img = trollimage(prop, mode="L", fill_value=fill_value)
    # ... not yet finished ...
|
Is Rage Against The Machine reuniting and/or once again raging against the machine (possibly in time for the machine’s general election)? The beloved rock group, which last played together in 2011, has launched a new website that hints at… well, something, at least.
On Tuesday (May 17), Rage tweeted a link to prophetsofrage.com; the site includes the hashtag #TakeThePowerBack, an email address signup and a countdown to Tuesday, May 31. “Prophets of Rage” is the name of a classic Public Enemy song, and Chuck D seems to be in on the tease, posting a classic Rage performance on Twitter on Tuesday.
Could Zack de la Rocha and co. be returning in time to battle Donald Trump (and maybe Hillary Clinton, too)? The band has been on hiatus since 2012, and hasn’t released a non-covers album since 1999’s The Battle of Los Angeles.
|
# -*- coding: utf-8 -*-
"""
Current25 Plugin
Copyright (C) 2011-2012 Olaf Lüke <olaf@tinkerforge.com>
Copyright (C) 2014-2016 Matthias Bolte <matthias@tinkerforge.com>
current25.py: Current25 Plugin Implementation
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public
License along with this program; if not, write to the
Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
"""
from PyQt5.QtCore import pyqtSignal, Qt
from PyQt5.QtWidgets import QVBoxLayout, QLabel, QPushButton, QFrame
from brickv.plugin_system.plugin_base import PluginBase
from brickv.bindings import ip_connection
from brickv.bindings.bricklet_current25 import BrickletCurrent25
from brickv.plot_widget import PlotWidget, CurveValueWrapper
from brickv.callback_emulator import CallbackEmulator
from brickv.utils import format_current
class Current25(PluginBase):
    """Brick Viewer plugin for the Current25 Bricklet.

    Plots the measured current, flags over-current events and offers a
    zero-point calibration button.
    """
    qtcb_over = pyqtSignal()

    def __init__(self, *args):
        super().__init__(BrickletCurrent25, *args)

        self.cur = self.device

        # Poll get_current() periodically; read errors bump the counter.
        self.cbe_current = CallbackEmulator(self,
                                            self.cur.get_current,
                                            None,
                                            self.cb_current,
                                            self.increase_error_count)

        # Route the device's over-current callback through a Qt signal so
        # the handler runs on the GUI thread.
        self.qtcb_over.connect(self.cb_over)
        self.cur.register_callback(self.cur.CALLBACK_OVER_CURRENT,
                                   self.qtcb_over.emit)

        self.over_label = QLabel('Over Current: No')

        self.calibrate_button = QPushButton('Calibrate Zero')
        self.calibrate_button.clicked.connect(self.calibrate_clicked)

        self.current_current = CurveValueWrapper() # float, A

        curves = [('Current', Qt.red, self.current_current, format_current)]
        self.plot_widget = PlotWidget('Current [A]', curves, extra_key_widgets=[self.over_label], y_resolution=0.001)

        separator = QFrame()
        separator.setObjectName("line")
        separator.setFrameShape(QFrame.HLine)
        separator.setFrameShadow(QFrame.Sunken)

        vbox = QVBoxLayout(self)
        vbox.addWidget(self.plot_widget)
        vbox.addWidget(separator)
        vbox.addWidget(self.calibrate_button)

    def start(self):
        """Begin polling and resume the plot."""
        self.cbe_current.set_period(100)
        self.plot_widget.stop = False

    def stop(self):
        """Stop polling and freeze the plot."""
        self.cbe_current.set_period(0)
        self.plot_widget.stop = True

    def destroy(self):
        pass

    @staticmethod
    def has_device_identifier(device_identifier):
        """Return True if this plugin handles the given device identifier."""
        return device_identifier == BrickletCurrent25.DEVICE_IDENTIFIER

    def cb_current(self, current):
        # Device reports mA; the plot shows A (see "# float, A" above).
        self.current_current.value = current / 1000.0

    def cb_over(self):
        self.over_label.setText('Over Current: Yes')

    def calibrate_clicked(self):
        """Trigger zero-point calibration; ignore connection errors."""
        try:
            self.cur.calibrate()
        except ip_connection.Error:
            return
|
Viking Ship 16 oz. pint glass.
Great looking pint glass to enjoy your favorite beverage from while showing your Viking heritage! Skal!
|
import json
import os
import mutagen
import mutagen.mp3
import mutagen.oggvorbis
import mutagen.flac
from mutagen.easyid3 import EasyID3
import itertools
class db_structure(object):
    """One music-library entry: a file path plus its basic tags.

    Attributes mirror `data_fields`; `tracknumber` is normalised to an
    int (or None) so entries can be sorted numerically.
    """

    data_fields = ['artist', 'title', 'album', 'tracknumber']

    def __init__(self,
                 path,
                 artist,
                 title,
                 album,
                 tracknumber):
        self.path = path
        self.artist = artist
        self.title = title
        self.album = album
        if tracknumber:
            # Tags may deliver the track number as a string such as
            # "3/12" or "3.1"; keep only the leading integer part.
            # Fix: duck-type via `split` instead of the old
            # `type(t) is str or type(t) is unicode` identity check,
            # which missed str subclasses and raises NameError on
            # Python 3 (no `unicode` builtin).
            if hasattr(tracknumber, 'split'):
                s = tracknumber.split('/')[0]
                s = s.split('.')[0]
                self.tracknumber = int(s)
            else:
                self.tracknumber = tracknumber
        else:
            self.tracknumber = None
        # Lookup table so entries can be indexed by field name.
        self.name_dict = dict(zip(self.data_fields, [self.artist,
                                                     self.title,
                                                     self.album,
                                                     self.tracknumber]))

    def __iter__(self):
        """Iterate as (path, artist, title, album, tracknumber)."""
        return iter([self.path,
                     self.artist,
                     self.title,
                     self.album,
                     self.tracknumber])

    def __getitem__(self, k):
        """Index by field name; 'path' is handled specially."""
        if k == 'path':
            return self.path
        return self.name_dict[k]

    def __repr__(self):
        return repr((self.path,
                     self.artist,
                     self.title,
                     self.album,
                     self.tracknumber))

    def to_list(self):
        """Return the entry as a (path, artist, title, album, tracknumber) tuple."""
        return (self.path,
                self.artist,
                self.title,
                self.album,
                self.tracknumber)
class MusicDB(object):
"""
Functions to use from outside:
scan_library,
get_albums_from_artist,
get_artists_from_album,
get_title, num_songs
"""
def __init__(self, filename='music.db'):
self.music_db = list() # main db
self.artist_db = set() # set of all artists
self.title_db = set()
self.path_db = set()
self.playlist_db = list()
self.filename = filename
self.initialized = False
self.file_types = frozenset(('mp3', 'flac', 'ogg', 'oga'))
def num_songs(self):
return len(self.music_db)
def save_db(self, db):
with open(self.filename, 'w') as f:
l = map(db_structure.to_list, db)
f.write(json.dumps(l))
def load_db(self):
try:
with open(self.filename, 'r') as f:
db = json.loads(*f.readlines())
print len(db)
# directories, db = db
db = map(lambda x: db_structure(*x), db)
self._update_db(db)
except Exception, e:
print e
def scan_library(self, directories=None):
"""
Scans directories for mp3 files and stops time
"""
import time
t = time.time()
try:
self._create_db(directories)
except Exception, e:
print e
raise Exception( "Couldn't create DB")
print 'db created in ', time.time() - t, ' seconds'
def _parse_dirs(self, directories):
"""
Parses directories and returns mp3 files
"""
l = []
for dirs in directories:
try:
d = os.listdir(dirs)
except os.error, e:
continue
# ignore hidden files
d = filter(lambda x: not x.startswith('.'), d)
d = map(lambda x: dirs + '/' + x, d)
for f in d:
try:
if not os.path.isdir(f):
ending = f.split('.')[-1].lower()
if ending in self.file_types:
l.append(f)
else:
print f
print ending, "not supported"
else:
# parse subdirectories
p = self._parse_dirs([f])
for x in p:
l.append(x)
except Exception, e:
print e
if not l:
raise Exception('Parsing failed for {}'.format(directories))
return l
def _create_db(self, directories):
"""
Creates db from directories
"""
try:
d = self._parse_dirs(directories)
except Exception, e:
print e
if not d:
raise Exception('No music in', directories)
def get_tags(f):
l = [f]
offset = len(l)
try:
t = mutagen.File(f, easy=True)
for tag in db_structure.data_fields:
if tag == 'artist':
# first try albumartist and others
if 'albumartist' in t:
i = t.get('albumartist')
elif 'albumartistsort' in t:
i = t.get('albumartistsort')
else:
i = t.get(tag)
else:
i = t.get(tag)
if i:
l.append(i[0])
else:
l.append(None)
except Exception, e:
print 'get tags'
print f, e
print db_structure, dir(db_structure)
for i in xrange(len(l),
len(db_structure.data_fields) + offset):
l.append(None)
return l
d = map(lambda x: db_structure(*get_tags(x)), d)
try:
self._update_db(d)
except Exception, e:
raise e
self.save_db(d)
def _find(self, db, wanted, t):
"""
Finds wanted e.g. 'artist'
for args e.g. 'album'
in db
"""
def crawl_db(key):
"""
Return set of all DB entries of key
"""
s = set()
for e in db:
s.add(e[key])
return s
d = dict()
for n in crawl_db(t):
w = set()
for a in self._filter_by(db, t, n):
name = a[wanted]
if name:
w.add(a[wanted])
else:
w.add("unknown")
d[n] = w
return d
def _update_db(self, db):
"""
Updates DBS with album and artist entries
"""
self.artist_db = self._find(db, 'album', 'artist')
self.music_db = db
self.initialized = True
def _get(self, key, db, name):
return self._filter_by(db, key, name)
def get_album(self, name):
return self._sort_by(self._get('album', self.music_db, name),
'tracknumber')
def get_title(self, db, name):
return self._get('title', db, name)
def get_albums_from_artist(self, name):
a = list()
if name not in self.artist_db:
return None
return self.artist_db[name]
def _sort_by(self, db, t):
return sorted(db, key=lambda db_structure: db_structure.name_dict[t])
def _filter_by(self, db, t, name):
l = []
return filter(lambda x: name == x.name_dict[t], db)
|
MJS Commodities can provide its clients with a broad range of services in support of international trade. We offer inventory management and monitoring of products to investors, traders, producers and banks for commodities stored at warehouses, where the goods are usually pledged to banks and clients.
Our company can assist in the issuance of warehouse receipts and can monitor stored cargoes for commodity banks; such receipts are accepted by the major international banks.
Depending on the client’s requirements, we can carry out warehouse surveys to assess the suitability of storage facilities and can station dedicated staff at the warehouse, if requested.
Alternatively, we can physically visit the warehouse on a regular basis to inspect the warehouse and goods stored. Reports will be provided, inclusive of photographs if requested. Cargo releases can be monitored by our staff, prior to arrival at the warehouse and after release from the warehouse, if client requests for such service.
|
"""This module contains fields that depend on importing `bson`. `bson` is
a part of the pymongo distribution.
"""
from __future__ import unicode_literals, absolute_import
import bson
from ..common import * # pylint: disable=redefined-builtin
from ..types import BaseType
from ..exceptions import ConversionError
class ObjectIdType(BaseType):
    """A field wrapper around MongoDB ObjectIds.

    Strictly speaking these are bson fields, but bson is rarely used
    outside MongoDB.

    `auto_fill` is disabled by default for ObjectIdType's as they are
    typically obtained after a successful save to Mongo.
    """

    MESSAGES = {
        'convert': "Couldn't interpret value as an ObjectId.",
    }

    def __init__(self, auto_fill=False, **kwargs):
        self.auto_fill = auto_fill
        super(ObjectIdType, self).__init__(**kwargs)

    def to_native(self, value, context=None):
        """Coerce `value` to an ObjectId, raising ConversionError on failure."""
        if isinstance(value, bson.objectid.ObjectId):
            return value
        try:
            return bson.objectid.ObjectId(str(value))
        except bson.objectid.InvalidId:
            raise ConversionError(self.messages['convert'])

    def to_primitive(self, value, context=None):
        """Serialize the ObjectId to its string form."""
        return str(value)
|
La Marina, between Guardamar del Segura and Santa Pola. Attractive offer - a beautiful and spacious house in La Marina, a popular resort on the Costa Blanca. The house has a surface of 118 m2 and comprises 3 bedrooms, 2 bathrooms, a kitchen, a living room and an observation deck. The plot of approx. 95 m2 includes space for a car. Excellent location, just 800 meters from the sea. Access to a communal pool.
|
from django.db import models
from django.utils.html import format_html
# Create your models here.
class Search(models.Model):
    """
    A stored craigslist search definition; example of the URL it models:

    http://austin.craigslist.org/search/bia?sort=date&hasPic=1&minAsk=10&maxAsk=250&query=fixed
    """
    server = models.CharField(max_length=200)  # craigslist host, e.g. "austin.craigslist.org"
    category = models.CharField(max_length=50)  # search category code, e.g. "bia"
    has_pic = models.BooleanField(default=True)  # maps to hasPic=1 in the URL
    min_ask = models.PositiveIntegerField(default=0)  # minAsk price filter
    max_ask = models.PositiveIntegerField(default=1000)  # maxAsk price filter
    query = models.CharField(max_length=300, default='')  # free-text query
    tag = models.CharField(max_length=20)  # short human-readable label
    custom_search_args = models.CharField(max_length=300, default='')  # extra raw query-string args
    last_update = models.DateTimeField(auto_now=True)  # touched on every save
    def __str__(self):
        return self.tag
class Item(models.Model):
    """A single craigslist listing retrieved for a Search."""
    # NOTE(review): Django >= 2.0 requires an explicit on_delete argument
    # here; fine on the old Django this was written for.
    search = models.ForeignKey(Search)
    id = models.CharField(primary_key=True, max_length=200)  # craigslist posting id
    link = models.URLField()
    post_date = models.DateTimeField()
    pnr = models.CharField(max_length=200)
    price = models.PositiveIntegerField()
    title = models.CharField(max_length=200)
    retrieved = models.DateTimeField(auto_now_add=True)  # when this row was scraped
    def __str__(self):
        return self.title
    def external_link(self):
        """Clickable link for use in the admin list display."""
        return format_html('<a href="{0}">{0}</a>', self.link)
    external_link.allow_tags = True
|
Florida’s Atlantic coast is known for salty waves, pristine sand, and the bright lights of South Beach. Miami is a vibrant location for partying and star-sighting, but for those who crave a more laid-back retreat that still has a big-city feel, Jacksonville is ideal.
Those chill vibes are what former Jacksonville Jaguars head coach Gus Bradley and his wife, Michaela, love about the northern coast of Florida.
Just 18 miles southeast of downtown Jacksonville sits Ponte Vedra Beach, a beautiful expanse of crystal-blue Atlantic beach. This is where the couple made their home while Gus Bradley coached the Jags from 2013 until 2016, when he was fired.
For $12,950 a month, you can make their home your home. Now that Gus Bradley is working as the defensive coordinator of the L.A. Chargers, he and his wife are renting out their incredible Florida estate.
However, this is no Airbnb winter-break crash pad. A home of this caliber is definitely more suited for a long-term rental—and let’s be honest—the HOA has requirements about vacation rentals.
A stellar location, a host of amenities (golf, anyone?), and easy access to the water make this home worthy of the five-digit rental price.
“This property is the total package,” says listing agent Jane Chefan.
The kitchen/dining and living area have recently gone through extensive renovations, and the result is gorgeous.
“It is a wide-open space perfect for family activity or entertaining,” says Michaela Bradley.
The 6,376-square-foot home also offers an abundance of space. Each of its five bedrooms has an ensuite bath. An additional three half-bathrooms make for easy facility access for guests.
There are also two bonus rooms, an office, and a stellar outdoor space.
“The outdoor living is second to none. Covered porches overlook expansive water views and a large screened-in pool and jacuzzi. A recently built gazebo offers a sit-down bar with big screen TV, grill, refrigerator, and ice maker. A 60-foot dock comes with the lease for anyone looking to have their boat conveniently located for ease of use,” says Chefan.
Endless summer abounds in Jacksonville, and this house offers the amenities to enjoy it all. All it’ll take is a monthly rental payment to a coaching couple who aren’t yet ready to leave Florida for good.
The post Rent Fired Jacksonville Jaguars Coach Gus Bradley’s Waterfront Florida Home appeared first on Real Estate News & Insights | realtor.com®.
|
# Copyright 2016-2017 Canonical Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Encoding of MIME multipart data."""
__all__ = ["encode_multipart_data"]
from collections import Iterable, Mapping
from email.generator import BytesGenerator
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart
from io import BytesIO, IOBase
from itertools import chain
import mimetypes
def get_content_type(*names):
    """Return the MIME content type for the file with the given name.

    The first name (skipping Nones) whose type can be guessed wins;
    otherwise fall back to the generic binary type.
    """
    for candidate in names:
        if candidate is None:
            continue
        mimetype, _encoding = mimetypes.guess_type(candidate)
        if mimetype is None:
            continue
        if isinstance(mimetype, bytes):
            mimetype = mimetype.decode("ascii")
        return mimetype
    return "application/octet-stream"
def make_bytes_payload(name, content):
    """Wrap raw bytes as a form-data MIME part named `name`."""
    part = MIMEApplication(content)
    part.add_header("Content-Disposition", "form-data", name=name)
    return part
def make_string_payload(name, content):
    """Wrap a text string as a UTF-8 text/plain form-data part."""
    part = MIMEApplication(content.encode("utf-8"), charset="utf-8")
    part.add_header("Content-Disposition", "form-data", name=name)
    part.set_type("text/plain")
    return part
def make_file_payload(name, content):
    """Wrap a file-like object's contents as a form-data part.

    The content type is guessed from the part name and, when present,
    the file object's own `name` attribute.
    """
    part = MIMEApplication(content.read())
    part.add_header("Content-Disposition", "form-data", name=name, filename=name)
    hints = name, getattr(content, "name", None)
    part.set_type(get_content_type(*hints))
    return part
def make_payloads(name, content):
    """Constructs payload(s) for the given `name` and `content`.

    Dispatch by content type:

    * ``None``   -> empty bytes payload
    * ``bool``   -> ``b"true"`` / ``b"false"``
    * ``int``    -> decimal byte string
    * ``bytes``  -> bytes payload (`make_bytes_payload`)
    * ``str``    -> UTF-8 text payload (`make_string_payload`)
    * file-like (inherits `IOBase`) -> file payload (`make_file_payload`)
    * callable   -> called with no arguments, the result used as a
      context manager; useful when it returns an open file, since the
      context protocol closes it after use
    * any other iterable -> recurse per item under the same name

    This raises `AssertionError` if it encounters anything else.
    """
    if content is None:
        yield make_bytes_payload(name, b"")
    elif isinstance(content, bool):
        yield make_bytes_payload(name, b"true" if content else b"false")
    elif isinstance(content, int):
        yield make_bytes_payload(name, b"%d" % content)
    elif isinstance(content, bytes):
        yield make_bytes_payload(name, content)
    elif isinstance(content, str):
        yield make_string_payload(name, content)
    elif isinstance(content, IOBase):
        yield make_file_payload(name, content)
    elif callable(content):
        with content() as inner:
            yield from make_payloads(name, inner)
    elif isinstance(content, Iterable):
        for item in content:
            yield from make_payloads(name, item)
    else:
        raise AssertionError("%r is unrecognised: %r" % (name, content))
def build_multipart_message(data):
    """Assemble a multipart/form-data message from (name, content) pairs."""
    message = MIMEMultipart("form-data")
    for name, content in data:
        for part in make_payloads(name, content):
            message.attach(part)
    return message
def encode_multipart_message(message):
    """Flatten a multipart message into (headers, body).

    Returns the message's header items (with Content-Length added) and
    the CRLF-delimited body as bytes, ready for an HTTP request.
    """
    # The message must be multipart.
    assert message.is_multipart()
    # The body length cannot yet be known.
    assert "Content-Length" not in message
    # So line-endings can be fixed-up later on, component payloads must have
    # no Content-Length and their Content-Transfer-Encoding must be base64
    # (and not quoted-printable, which Django doesn't appear to understand).
    for part in message.get_payload():
        assert "Content-Length" not in part
        assert part["Content-Transfer-Encoding"] == "base64"
    # Flatten the message without headers.
    buf = BytesIO()
    generator = BytesGenerator(buf, False) # Don't mangle "^From".
    # Assigned on the instance, so the generator's internal call
    # self._write_headers(msg) passes the message as the lambda's single
    # argument -- effectively suppressing top-level header output.
    generator._write_headers = lambda self: None # Ignore.
    generator.flatten(message)
    # Ensure the body has CRLF-delimited lines. See
    # http://bugs.python.org/issue1349106.
    body = b"\r\n".join(buf.getvalue().splitlines())
    # Only now is it safe to set the content length.
    message.add_header("Content-Length", "%d" % len(body))
    return message.items(), body
def encode_multipart_data(data=(), files=()):
    """Create a MIME multipart payload from L{data} and L{files}.

    **Note** that this function is deprecated. Use `build_multipart_message`
    and `encode_multipart_message` instead.

    @param data: A mapping of names (ASCII strings) to data (byte string).
    @param files: A mapping of names (ASCII strings) to file objects ready to
        be read.
    @return: A 2-tuple of C{(body, headers)}, where C{body} is a byte string
        and C{headers} is a dict of headers to add to the enclosing request in
        which this payload will travel.
    """
    def as_pairs(source):
        # Mappings are flattened to (name, content) pairs; anything else
        # is assumed to already be such an iterable.
        return source.items() if isinstance(source, Mapping) else source

    message = build_multipart_message(chain(as_pairs(data), as_pairs(files)))
    headers, body = encode_multipart_message(message)
    return body, dict(headers)
|
As from 2016, Thorne and Brager are focusing their collaboration more on co-writing than co-directing. Below you can read about the various plays and other texts written by the founders of Imploding Fictions.
This play merges new writing with a taut crime story. Performance rights are available.
|
"""Optimization method
Supported method:
+ Stochastic gradient descent
"""
from collections import OrderedDict;
import theano.tensor as T;
def gd_updates(cost,
               params,
               updates=None,
               max_norm=5.0,
               learning_rate=0.1,
               eps=1e-6,
               rho=0.95,
               method="sgd"):
    """Gradient Descent based optimization

    Note: should be a class to make flexible call

    Parameters
    ----------
    cost : scalar
        total cost of the cost function.
    params : list
        parameter list
    method : string
        optimization method: "sgd", "adagrad", "adadelta"
        (only "sgd" currently adds entries; other values leave
        `updates` untouched)

    Returns
    -------
    updates : OrderedDict
        dictionary of updates
    """
    if updates is None:
        updates = OrderedDict()

    gradients = T.grad(cost, params)
    for gradient, param in zip(gradients, params):
        if method == "sgd":
            updates[param] = param - learning_rate * gradient

    return updates
# Shared random stream used by the dropout helpers below; the fixed seed
# makes mask generation reproducible across runs.
theano_rng=T.shared_randomstreams.RandomStreams(1234);
def dropout(shape, prob=0.):
    """generate dropout mask

    Parameters
    ----------
    shape : tuple
        shape of the dropout mask
    prob : double
        probability of each sample being dropped

    Returns
    -------
    mask : tensor
        dropout mask (cast to float32; 1 keeps a unit, 0 drops it)
    """
    # Fix: RandomStreams has no "binominal" method -- the correct name is
    # "binomial"; the old spelling raised AttributeError at runtime.
    mask = theano_rng.binomial(n=1, p=1 - prob, size=shape)
    return T.cast(x=mask, dtype="float32")
def multi_dropout(shapes, prob=0.):
    """generate a list of dropout masks

    Parameters
    ----------
    shapes : tuple of tuples
        list of shapes of dropout masks
    prob : double
        probability of each sample being dropped

    Returns
    -------
    masks : list of tensors
        list of dropout masks, one per shape
    """
    # Fix: the probability argument used to be the `dropout` function
    # itself instead of `prob`, which made every mask generation fail.
    return [dropout(shape, prob) for shape in shapes]
def apply_dropout(X, mask=None):
    """apply dropout operation

    Parameters
    ----------
    X : tensor
        data to be masked
    mask : dropout mask, or None to leave X unchanged

    Returns
    -------
    masked_X : tensor
        dropout masked data
    """
    if mask is None:
        return X
    return X * mask
def corrupt_input(X, corruption_level=0.):
    """Add noise on data

    Parameters
    ----------
    X : tensor
        data to be corrupted
    corruption_level : double
        probability of corrupting (zeroing) each sample

    Returns
    -------
    corrupted_out : tensor
        corrupted output
    """
    noise_mask = dropout(X.shape, corruption_level)
    return apply_dropout(X, noise_mask)
|
Also great in marinades.SpecificationsGreat tastingExcellent for restaurantsHigh qualityBrand nameUs..
It's hard to believe a snack food that tastes so good can actually be good for you! With suggestions..
Give your pancakes what they deserve and top them with the classic taste of Aunt JemimaOriginal Syru..
98% fat freeHigh in fiberSecret family recipe8/16.5 oz. cansOriginalSeasoned with bacon & brown ..
When you serve Chef Boyardee to your family, you give them more than great taste-you give them a who..
Precisely trimmed and individually frozen within hours of harvest, Daily Chef Steamable Broccoli Flo..
Whether you're looking for a decadent ending to a special meal or an indulgent anytime treat, our Da..
The pantry pack stores and dispenses individually wrapped 1 lb. packages.Specifications6 - 1lb. Pack..
Daily Chef™ Mixed Vegetables is a delicious mix of carrots, super sweet corn, green beans and peas.T..
About this item Keep your pantry prepared and stock up with this six pack of spaghetti pasta. ..
Freshen up your soups, salads, pasta and rice with Daily Chef™ Organic Super Sweet Corn. Drizzle mel..
Extra fine cane sugar.SpecificationsUse in bakeries, cafeterias, restaurants, caterers, and confecti..
Improved Enfamil Premium with a unique, dual prebiotic blend, now promotes the growth of good bacter..
Flour for every purposeGuaranteed premium qualityConvenient bag size..
Tastes great on: wings, pizza, eggs, burgers, tacos, chili, chicken, vegetablesSince 19202/23 ounce ..
You could probably feed your kids our Original French Toast Sticks every morning and not hear a sing..
These Goya Black Beans, offer a nutritious addition to a variety of meals. Use them as a side dish o..
Bright, plump and tenderFor classic beans and rice, soups, salads and chiliKosher..
To make an interesting and flavorful side dishReady in only 25 minutes..
Goya Sazón is where you'll find the secret to creating the authentic flavors of Latino cuisine. Goya..
Spanish-style Goya Tomato Sauce boasts premium quality and superior taste. Try this Spanish tomato s..
For over 100 years, Heinz has made quality, great-tasting relish. Now, Heinz offers your family a gr..
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-12-11 13:57
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds the MarkdownArtikel model (an
    # article whose body is stored as Markdown text).
    # NOTE: do not edit applied migrations by hand -- create a follow-up
    # migration for any schema change.
    dependencies = [
        ('Scholien', '0006_artikel_prioritaet'),
    ]
    operations = [
        migrations.CreateModel(
            name='MarkdownArtikel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('bezeichnung', models.CharField(max_length=200)),
                ('slug', models.SlugField(blank=True, max_length=100, unique=True)),
                ('zeit_erstellt', models.DateTimeField(auto_now_add=True)),
                ('text', models.TextField()),
                ('prioritaet', models.PositiveSmallIntegerField(default=0)),
            ],
            options={
                'verbose_name': 'Markdown Artikel',
                'verbose_name_plural': 'Markdown Artikel',
                'ordering': ['-zeit_erstellt'],
            },
        ),
    ]
|
Creating a national patient identification system has long been a goal for many healthcare system CIOs and other health IT leaders.
In a release, CHIME said it is launching a $1 million challenge to raise money on HeroX, the incentivized prize competition platform for tech innovation, to spur progress on national patient identifier technology.
Also signing onto the CHIME initiative were the American Health Information Management Association, Cerner Corp., the Health IT Now Coalition, the National Patient Safety Foundation, and the Healthcare Financial Management Association.
CHIME is also expected to advocate for a national patient identifier at the Health Information and Management Systems Society (HIMSS) 2015 Annual Conference and Exhibition in Chicago next month.
Despite an impressive array of supporters, movement toward a true national patient identification system has been blocked by conservative political activists and healthcare advocates who believe the concept runs counter to individual rights.
And while the original HIPAA legislation in 1996 contained language calling for a national patient identifier, Congress in 1998 rejected funding for HHS to implement the system, effectively killing the idea.
With the advent of the Obama administration six years ago, it looked like chances for a national patient identifier were pretty good. But even strong advocates such as Farzad Mostashari, M.D., former national health IT coordinator, weren’t able to push it through.
Let’s see if this latest coalition, led by CHIME and its members of diverse political persuasions, can succeed when others failed.
|
#! /usr/bin/python
###### SUM STATES #######
# Python script for summing and ploting the data from the Density Of States
# files obtained from projwfc.x. It can sum also k-solved dos, and make a plot
# with mathplotlib (if not available, gnuplot, if not avaible, print to file)
# if there is not X11 forwarding, plots in terminal.
# It does something very similar to sumpdos.f90, but with
# some extra features (use "-h" option).
#
# it takes two different inputs, the first one is the pw.x output
# ("-o" option), which is used for parsing the Fermi energy for fitting
# the PDOS curve to the right energy. The other files are the pDOS files
# ("-s" option), that can be given with shell syntax, i.e.
# pdos_atm*Fe*wfc*d* for summing all the d orbitals of Fe.
# It can also handle k solved dos files.
#
# One of the most useful feature, compared to the sumpdos.x, is the
# fact that it also builds the picture directly, so it can be directly
# visualized and exported for inclusion in a document.
# It uses mathplotlib for plotting, but if no mathplotlib is found in
# the $PYTHONPATH, it tries to use gnuplot, if no gnuplot available,
# dumps the output data to a file.
# In the that no X11 forwarding is available (i.e. ssh to the cluster),
# it shows a rough graph in the terminal, so we get an idea of the shape
# of the results.
#
# Example of usage:
# cd ....../espresso-5.0/PP/examples/example02/results/
# ../../../src/sum_states.py -o ni.dos.out -s
# ni.pdos_atm#1\(Ni\)_wfc#2\(d\) -t "Example PP/02" -xr -6 2
#
#
# The procedure for obtaining the DOS files is explained
# i.e. in (espresso-dir)/PP/examples/example02/
#
# Author: Dr. Julen Larrucea
# University of Bremen,
# Bremen Centre for Computational Materials Science, HMI Group
# julenl [at] gmail.com or larrucea [at] hmi.uni-bremen.de
#
# This file is distributed under the terms of the GNU General Public
# License. See the file `License'
# in the root directory of the present distribution,
# or http://www.gnu.org/copyleft/gpl.txt .
#######################
import sys
import os
import fnmatch
import linecache
# Some default variables
version=0.2
pwout=""                        # pw.x output file (source of the Fermi energy)
selat="*"                       # shell pattern selecting the pDOS files
graphtitle=""
min_x,max_x=-10,3               # default energy window of the plot
min_y,max_y="",""               # empty string means autoscale
output_file_name="sum_dos.out"
prt="no"                        # whether to dump the summed DOS to a file
print " #### sum_states.py version "+str(version)+" #### "
# Check if X11, mathplotlib and gnuplot are available
try:
 os.popen("gnuplot -V").read()
 prog_gnuplot="yes" # gnuplot is installed
except:
 prog_gnuplot="no"
# Parse command line options
if len(sys.argv)>1:
for i in sys.argv:
if i.startswith('-'):
option=i.split('-')[1]
if option=="o":
pwout= sys.argv[sys.argv.index('-o')+1]
if option=="s":
selat= sys.argv[sys.argv.index('-s')+1]
if option=="p":
prt="yes"
if len(sys.argv) > sys.argv.index('-p')+1: # if there is a name after "-p" take it as an output name
if sys.argv[sys.argv.index('-p')+1] != "-": # otherwise default name sum_dos.out
dos_out_name=sys.argv[sys.argv.index('-p')+1]
if option=="t":
graphtitle= sys.argv[sys.argv.index('-t')+1]
if option=="xr":
min_x,max_x= float(sys.argv[sys.argv.index('-xr')+1]),float(sys.argv[sys.argv.index('-xr')+2])
if option=="yr":
min_y,max_y= float(sys.argv[sys.argv.index('-yr')+1]),float(sys.argv[sys.argv.index('-yr')+2])
if option=="v":
print "sum_dos.py version: "+version
sys.exit()
if option=="h":
print '''
-o QE output file name (for grepping Fermi E)
-s Selection of atoms for summing the DOSes. "*" for all, *1*Fe*d* for first Fe atom " (def. "*")
-p Print output to a file and aditionaly provide an output name (def. no output and "sum_dos.out")
-t set title in the head of the graph
-xr set min and max x value for the axes in the graph
-yr set min and max y value for the axes in the graph
-h print this help
-v print version
Example: sum_states.py --s sys.pdos_atm#4\(Fe2\)_wfc#2\(d\) -t "Wustite LDA+U single Fe" -xr -9 4
'''
sys.exit()
# Check for mathplotlib/gnuplot and import mpl if possible
# A usable X display is assumed when $DISPLAY is non-empty.
if len(os.popen('echo $DISPLAY').read()) > 1:
    graphic_plot="yes"
    try:
        from pylab import *
        mplplot="yes"
        print "pylab imported"
    except:
        print "There is no mathplotlib installed. Using gnuplot."
        mplplot="no"
        prt="yes"  # gnuplot reads the data back from a file, so force writing it
else:
    print "No X11. Trying to plot on terminal"
    graphic_plot="no"
    if prog_gnuplot=="no":
        prt="yes"
# if not specified, try to find the espresso output, in order to parse the Fermi energy
if pwout == "":
    for filen in filter(os.path.isfile, os.listdir('.')):
        # pw.x output files carry "Program PWSCF" on their second line
        if "Program PWSCF" in linecache.getline(filen, 2):
            print "Using " + filen + " as pw.x output. You can specify another one with the -o option."
            pwout=filen
# Parse Fermi energy from the pw.x output
if pwout!="":
    try:
        os.popen("grep -a 'the Fermi energy is' "+pwout ).read()
        # the energy value is the 5th whitespace-separated field of the line
        fermi=float(os.popen("grep -a 'the Fermi energy is' "+pwout ).read().split()[4])
        print "Fermi energy = ", fermi, "a.u."
    except:
        print "WARNING: No Fermi energy found. Using 0 e.V. instead"
        fermi=0
else:
    print "WARNING: No pw.x output found. Using E Fermi = 0 e.V."
    fermi=0
# List of all DOS files to add
dosfiles=[]
for dfile in os.listdir('.'):
    # keep the files matching the -s selection pattern (fnmatch glob)
    if fnmatch.fnmatch(dfile, selat):
        dosfiles.append(dfile)
if len(dosfiles)==0:
    print "ERROR: Provide a (list of) valid DOS file(s)"
    sys.exit()
print "dosfiles list: ",
for dosfile in dosfiles:
    print dosfile,
print ""
# Check wetter we have k-solved DOS
# The second header token is "E" for plain DOS and "ik" for k-resolved DOS.
# NOTE(review): if the header matches neither, ksolved stays undefined and
# the summing loop below raises NameError -- verify the input format.
if open(dosfiles[0],'r').readline().split()[1]=="E":
    ksolved="no"
    print "no ksolved"
elif open(dosfiles[0],'r').readline().split()[1]=="ik":
    ksolved="yes"
    print "ksolved"
# Sum over all k-points and files
mat=[]  # matrix with total sum of ldos over all files
for i in range(len(dosfiles)):
    mati=[] # temporal matrix for each DOS file "i"
    k=0
    for line in open(dosfiles[i],'r'):
        # data lines are longer than 10 chars and do not start with "#"
        if len(line) > 10 and line.split()[0] != "#":
            if ksolved=="no":
                # columns: energy, ldos(up), ldos(down)
                mati.append([float(line.split()[0]),float(line.split()[1]),float(line.split()[2])])
            if ksolved=="yes":
                ik = int(line.split()[0])
                if ik > k: #if it is a different k block
                    k=int(line.split()[0])
                    oldmat=[] # temporal matrix for each k-point
                if ik == 1:
                    # first k-point initializes the per-file matrix
                    mati.append([float(line.split()[1]),float(line.split()[2]),float(line.split()[3])]) # append: energy, ldosup, ldosdw
                elif ik == k and k > 1:
                    oldmat.append([float(line.split()[1]),float(line.split()[2]),float(line.split()[3])])
                elif len(line) < 5 and k > 1: #if blank line, sum k-frame to the total
                    for j in range(len(oldmat)):
                        mati[j]=[mati[j][0],mati[j][1]+oldmat[j][1],mati[j][2]+oldmat[j][2]]
    if mat == []: # if it is the first dos file, copy total matrix (mat) = the first dos files's data
        mat=mati[:]
    else:
        for j in range(len(mati)): # if it is not the first file, sum values
            mat[j]=[mat[j][0],mat[j][1]+mati[j][1],mat[j][2]+mati[j][2]]
print "...ploting..."
if prt=="yes":
out=open(output_file_name,"w")
x,y1,y2=[],[],[]
for i in mat:
x.append(i[0]-fermi)
y1.append(i[1])
y2.append(-i[2])
if prt=="yes": # print to a file
print>>out, i[0]-fermi, i[1], i[2]
if prt=="yes":
out.close()
if graphic_plot=="yes":
# if there is matplotlib, generate a plot with it
if mplplot=="yes":
plot(x,y1,linewidth=1.0)
plot(x,y2,linewidth=1.0)
print min(y2),max(y1)
plt.title(graphtitle)
plt.xlabel('E (eV)')
plt.ylabel('States')
plt.grid(True)
plt.rcParams.update({'font.size': 22})
plt.fill(x,y1,color='0.8')
plt.fill(x,y2,color='0.9')
if min_x and max_x:
fromx,tox=min_x,max_x
plt.axis([fromx, tox, min(y2), max(y1)])
show()
elif mplplot=="no" and prog_gnuplot=="yes": # If no mathplotlib available, use gnuplot
os.system("echo \"plot '"+ output_file_name + "' using ($1-"+str(fermi)+"):2 w l, '' u ($1"+str(fermi)+"):3 w l\" | gnuplot -persist")
elif graphic_plot=="no": # If no X forwarding available, show graph in terminal
if prog_gnuplot=="yes":
os.system("echo \"set terminal dumb; plot '"+ output_file_name + "' using ($1-"+str(fermi)+"):2 w l, '' u ($1-"+str(fermi)+"):3 w l\" | gnuplot -persist")
|
Connect with the best care companions in Humboldt—today!
I am a Licensed Practical Nurse. I received my degree in December 2017. I have had pet-sitting and nanny experience since I was 10 years old. I love to cook and the outdoors.
Well, I started taking care of people at the age of 16. I enjoy it because you really get to know people, and it’s just an amazing experience. I love helping people — it’s a great feeling!
I love animals and helping care for other people’s pets. I have been in the caregiving field for 12 years. I would love to help with the type of care you’re looking for.
|
#!/usr/bin/env python
import argparse
import os
from getpass import getpass
from loomengine import verify_has_connection_settings, \
verify_server_is_running, get_server_url, \
save_token, delete_token, get_token
from loomengine_utils.connection import Connection
from requests.exceptions import HTTPError
class AuthClient(object):
    """Command-line client that manages the loom API token.

    Supports three commands: login, logout and print-token.
    """
    def __init__(self, args=None, silent=False):
        # Parse arguments
        if args is None:
            args = _get_args()
        # Fail fast if the client is unconfigured or the server is down.
        verify_has_connection_settings()
        server_url = get_server_url()
        verify_server_is_running(url=server_url)
        self.args = args
        self.silent = silent
        self._set_run_function()
        # token=None: login/logout manage the saved token themselves.
        self.connection = Connection(server_url, token=None)
    def _print(self, text):
        # Suppress status messages when constructed with silent=True.
        if not self.silent:
            print text
    def _set_run_function(self):
        # Map user input command to method
        commands = {
            'login': self.login,
            'logout': self.logout,
            'print-token': self.print_token,
        }
        self.run = commands[self.args.command]
    def login(self):
        """Create and save an API token for the given username.

        Prompts for the password when it was not passed on the command
        line; exits with an error if the server rejects the credentials.
        """
        username = self.args.username
        password = self.args.password
        if password is None:
            password = getpass("Password: ")
        try:
            token = self.connection.create_token(
                username=username, password=password)
        except HTTPError:
            raise SystemExit("ERROR! Login failed")
        save_token(token)
        self._print("Login was successful. Token saved.")
    def logout(self):
        """Delete the saved token, if any."""
        token = get_token()
        if token is None:
            self._print("No token found. You are logged out.")
        else:
            delete_token()
            self._print("Token deleted.")
    def print_token(self):
        """Print the saved token to stdout (always, even when silent)."""
        print get_token()
def get_parser(parser=None):
    """Build the argument parser for the auth commands.

    Registers three subcommands under ``command``: ``login`` (positional
    USERNAME plus optional ``--password``/``-p``), ``logout`` and
    ``print-token``.

    @param parser: an existing ArgumentParser to extend, or None to
        create a fresh one.
    @return: the configured parser.
    """
    if parser is None:
        parser = argparse.ArgumentParser(__file__)
    commands = parser.add_subparsers(dest='command')

    login = commands.add_parser('login')
    login.add_argument('username', metavar='USERNAME')
    login.add_argument(
        '--password', '-p', metavar='PASSWORD',
        default=None,
        help='Optional. Wait for the prompt to avoid displaying '
        'password and writing it in your terminal history'
    )

    # Commands that take no further arguments.
    for name in ('logout', 'print-token'):
        commands.add_parser(name)
    return parser
def _get_args():
    """Parse the process command line with the module's parser."""
    return get_parser().parse_args()
if __name__ == '__main__':
    # Run the subcommand chosen on the command line (login/logout/print-token).
    AuthClient().run()
|
The week before Christmas I travelled to Tallinn for Christmas shopping, and to see Anton, who I met last time I was in Tallinn. Together we visited the Kadriorg Palace a few kilometers outside the city.
Two months after I was there on my summer holiday, I returned to Tallinn for a weekend in October. The city is even more beautiful in the fall, when the trees are bursting with colors.
|
# pygsear
# Copyright (C) 2003 Lee Harr
#
#
# This file is part of pygsear.
#
# pygsear is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# pygsear is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pygsear; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Graphical input devices"""
import time, random, math, os, sys, types
import colorsys
from code import InteractiveConsole
from code import compile_command
import pygame
import pygame.draw
from pygame.locals import K_RETURN, K_ESCAPE, K_BACKSPACE, K_F1, K_UP, K_DOWN
from pygame.locals import K_PAGEUP, K_PAGEDOWN, K_LEFT, K_RIGHT, K_DELETE
from pygame.locals import QUIT, MOUSEBUTTONUP
import conf
import Drawable
from Drawable import Rectangle
import Path
import Event
import Util
from locals import TRANSPARENT, BLACK, WHITE, LGREEN, LGRAY, GRAY, BLUE, RED
class Widget:
    """Base class for graphical input widgets.

    Holds a callback and a private group of pygsear events.
    NOTE(review): the group parameter is accepted but not used here;
    subclasses pass groups along explicitly.
    """
    def __init__(self, callback=None, group=()):
        self.set_callback(callback)
        self.events = Event.EventGroup()
    def set_callback(self, callback):
        """Set the function called by the widget; None installs a no-op."""
        if callback is None:
            callback = self.nop
        self.callback = callback
    def nop(self, arg=None):
        """Default callback: do nothing."""
        pass
    def activate(self):
        """Mark the widget as accepting input."""
        self.active = 1
    def deactivate(self):
        """Mark the widget as not accepting input."""
        self.active = 0
    def _stop(self, pygame_event=None):
        # Flag polled by modal() loops to end the modal session.
        self.stop = 1
    def _quit(self, pygame_event=None):
        # Post a QUIT event for the application, then stop the modal loop.
        ev = pygame.event.Event(QUIT)
        pygame.event.post(ev)
        self._stop()
    def modal(self):
        """Minimal modal loop: check events until self.stop is set.

        NOTE(review): self.stop is not initialized on Widget itself --
        subclasses such as Button set it; confirm before calling here.
        """
        stop = Event.KEYUP_Event(key=K_ESCAPE, callback=self._stop)
        while not self.stop:
            self.events.check()
class Score(Widget, Drawable.Drawable):
    """Keep and display a score or value."""
    def __init__(self,
                    w=None,
                    position=(100, 100),
                    text="Score:",
                    digits=6,
                    fontSize=40,
                    color=WHITE,
                    bgcolor=TRANSPARENT):
        """Initialize the score display.

        @param w: Layer on which the score sprite lives.
        @param position: Where to draw the score.
        @param text: Label rendered in front of the number.
        @param digits: Field width used when formatting the number.
        @param fontSize: Font size.
        @param color: Text color.
        @param bgcolor: Background color (TRANSPARENT for none).
        """
        Drawable.Drawable.__init__(self, w)
        self.score_position = position
        self.text = text
        self.digits = digits
        self.color = color
        self.bgcolor = bgcolor
        self.font = pygame.font.Font(None, fontSize)
        self.points = 0
        self.updateScore()
        self.set_position(position)
        self.set_crect(self.rect)
    def addPoints(self, n):
        """Add points to the score."""
        self.points += n
    def subtractPoints(self, n):
        """Subtract points from the score."""
        self.points -= n
    def set_points(self, p):
        """Set the score to a particular value."""
        self.points = p
    def updateScore(self):
        """Render the text for showing the score."""
        if hasattr(self, 'image'):
            self.uclear()
        # pad the number to a fixed width so the sprite size stays stable
        line = '%s %*d' % (self.text, self.digits, self.points)
        self.image = self.font.render(line, 1, self.color, self.bgcolor)
        self.rect = self.image.get_rect()
        self.set_position(self.score_position)
        if self.bgcolor == TRANSPARENT:
            self.image.set_colorkey(TRANSPARENT)
class ProgressBar(Widget, Rectangle):
    """Percentage bar graph."""
    def __init__(self,
                    w=None,
                    steps=100,
                    position=None,
                    color=BLACK,
                    width=None,
                    height=10,
                    fill=1,
                    border=0,
                    borderColor=WHITE):
        """Initialize the bar.

        @param w: Layer on which the bar lives.
        @param steps: Number of steps from empty to full.
        @param position: Position; None centers the bar near the bottom.
        @param color: Bar color.
        @param width: Bar width in pixels; None fits the window width.
        @param height: Bar height in pixels.
        @param fill: If true, the bar starts full and step() drains it.
        @param border: NOTE(review): accepted but unused in this class.
        @param borderColor: NOTE(review): accepted but unused in this class.
        """
        if width is None:
            width = conf.WINWIDTH-60
        self.colorOriginal = color
        self.set_color(color)
        self.width = width
        self.height = height
        Rectangle.__init__(self, w, width, height, color=color)
        self.image.set_colorkey(TRANSPARENT)
        if position is None:
            self.center(y=-30)
        else:
            self.set_position(position)
        self.fill = fill
        self.set_steps(steps)
        self.set_crect(self.image.get_rect())
    def set_steps(self, steps):
        """Set the number of steps and reset the bar accordingly.
        """
        self.steps = steps
        self.perStep = float(self.width)/steps
        if self.fill:
            self.stepsLeft = steps
        else:
            self.stepsLeft = 0
        self.show()
    def step(self):
        """Advance one step: drain when in fill mode, grow otherwise.
        """
        if self.fill:
            self.stepsLeft -= 1
            if self.stepsLeft < 1:
                self.stepsLeft = 0
        else:
            self.stepsLeft += 1
            if self.stepsLeft > self.steps:
                self.stepsLeft = self.steps
        self.show()
    def unstep(self):
        """Reverse of step(): grow when in fill mode, drain otherwise.
        """
        if not self.fill:
            self.stepsLeft -= 1
            if self.stepsLeft < 1:
                self.stepsLeft = 0
        else:
            self.stepsLeft += 1
            if self.stepsLeft > self.steps:
                self.stepsLeft = self.steps
        self.show()
    def reset(self):
        """Return the bar to its initial state and original color."""
        self.stepsLeft = self.steps
        self.set_color(self.colorOriginal)
        self.show()
    def set_color(self, color):
        """set the color of the bar"""
        self.color = color
    def show(self):
        """Redraw the bar to reflect the current stepsLeft.
        """
        width = int(self.stepsLeft * self.perStep)
        height = self.height
        bar = pygame.Surface((width, height))
        bar.fill(self.color)
        self.image.fill(TRANSPARENT)
        self.image.blit(bar, (0, 0))
class VProgressBar(ProgressBar):
    """Vertical variant of ProgressBar: the bar grows upward."""
    def __init__(self,
                    w=None,
                    steps=100,
                    position=None,
                    color=BLACK,
                    width=10,
                    height=None,
                    fill=1):
        """Initialize the vertical bar; see ProgressBar for parameters.
        position=None centers the bar near the left edge."""
        if height is None:
            height = conf.WINHEIGHT-60
        self.colorOriginal = color
        self.set_color(color)
        self.width = width
        self.height = height
        Rectangle.__init__(self, w, width, height, color=color)
        self.image.set_colorkey(TRANSPARENT)
        if position is None:
            self.center(x=30)
        else:
            self.set_position(position)
        self.fill = fill
        self.set_steps(steps)
        self.set_crect(self.image.get_rect())
    def set_steps(self, steps):
        """Set the number of steps; step size is based on height here.
        """
        self.steps = steps
        self.perStep = float(self.height)/steps
        if self.fill:
            self.stepsLeft = steps
        else:
            self.stepsLeft = 0
        self.show()
    def show(self):
        """Redraw the bar, anchored to the bottom of the sprite.
        """
        width = self.width
        height = int(self.stepsLeft * self.perStep)
        bar = pygame.Surface((width, height))
        bar.fill(self.color)
        self.image.fill(TRANSPARENT)
        self.image.blit(bar, (0, self.height-height))
class Button(Widget):
    """Clickable area: arms on mouse-down, fires its callback on mouse-up.

    The callback fires only when both press and release land inside
    the button's rect.
    """
    def __init__(self, callback=None, group=None):
        Widget.__init__(self)
        self.set_callback(callback)
        self.armed = 0
        self.events.add(Event.MOUSEBUTTONDOWN_Event(callback=self.clicked))
        self.events.add(Event.MOUSEBUTTONUP_Event(callback=self.released))
        if group is not None:
            group.add(self.events)
        self.stop = 0
    def arm(self):
        """Remember that the press started inside this button."""
        self.armed = 1
    def fire(self, pygameEvent):
        """Disarm and invoke the callback."""
        self.armed = 0
        self.callback(pygameEvent)
    def clicked(self, pygameEvent):
        """Mouse-down handler; arms the button on a hit. Returns 1 on hit."""
        pos = pygameEvent.pos
        try:
            # translate to window-local coordinates when we live in a layer
            offset = self.window.rect[0:2]
        except AttributeError:
            offset = (0, 0)
        if self.rect.move(offset[0], offset[1]).collidepoint(pos):
            self.arm()
            return 1
        else:
            return 0
    def released(self, pygameEvent):
        """Mouse-up handler; fires only when still over an armed button."""
        pos = pygameEvent.pos
        try:
            offset = self.window.rect[0:2]
        except AttributeError:
            offset = (0, 0)
        if self.rect.move(offset[0], offset[1]).collidepoint(pos) and self.armed:
            self.fire(pygameEvent)
        else:
            self.armed = 0
class SpriteButton(Button, Drawable.Drawable):
    """Clickable button which is also a sprite."""
    def __init__(self, sprite, callback=None, group=None):
        """Initialize the button.
        @param sprite: Clickable sprite.
        @param callback: Function to call when sprite is clicked.
        @param group: Other C{EventGroup} to put this widget's events
            in to also.
        """
        # NOTE(review): pos is captured but never used here.
        pos = sprite.get_position()
        Button.__init__(self, callback, group)
        Drawable.Drawable.__init__(self, w=sprite.window)
        self.image = sprite.image
        self.rect = sprite.rect
        self.set_path(sprite.path)
    def modal(self):
        """Run a modal loop (redrawing only this sprite) until ESC or QUIT."""
        quit = Event.QUIT_Event(callback=self._stop)
        self.events.add(quit)
        stop = Event.KEYUP_Event(key=K_ESCAPE, callback=self._stop)
        self.events.add(stop)
        while not self.stop:
            self.clear()
            self.events.check()
            self.udraw()
        quit.kill()
        stop.kill()
        self.uclear()
class ImageButton(SpriteButton):
    """SpriteButton whose sprite is loaded from an image file."""
    filename = None  # subclasses may set a default image file
    def __init__(self, filename=None, callback=None, group=None):
        # fall back to the class-level default filename
        if filename is None:
            filename = self.filename
        sprite = Drawable.Image(filename=filename)
        SpriteButton.__init__(self, sprite=sprite, callback=callback, group=group)
class StationaryButton(Drawable.Stationary, SpriteButton):
    """Clickable button which is a sprite but does not need to move."""
    def __init__(self,
                    window=None,
                    sprite=None,
                    callback=None,
                    group=None):
        """Wrap the sprite as a stationary drawable and make it clickable."""
        pos = sprite.get_position()
        Drawable.Stationary.__init__(self, w=window, sprite=sprite)
        SpriteButton.__init__(self, sprite, callback, group)
        self.image = sprite.image
        self.rect = sprite.rect
        self.set_position(pos)
class CloseButton(StationaryButton):
    """White square button with a black X."""
    def __init__(self, window=None, callback=None, group=None):
        # draw an X across a small white square
        b = Drawable.Square(w=window, side=15, color=WHITE)
        w, h = b.image.get_size()
        pygame.draw.line(b.image, BLACK, (0, 0), (w, h))
        pygame.draw.line(b.image, BLACK, (w, 0), (0, h))
        # place it near the top-right corner of the window
        b.center(-5, 5)
        StationaryButton.__init__(self, window, b, callback, group)
        self.draw()
class TextButton:
    """Clickable button with text printed on it."""
    def __init__(self,
                    window=None,
                    text='click',
                    length=None,
                    callback=None,
                    size=22,
                    color=WHITE,
                    border=2,
                    borderColor=LGREEN,
                    padding=5,
                    bgColor=BLACK):
        """Store rendering parameters and build the button image.

        @param window: Layer on which the button lives.
        @param text: Text printed on the button.
        @param length: Fixed pixel width for the text area, or None to fit.
        @param callback: NOTE(review): accepted but unused here; subclasses
            pass the callback to their Button base class instead.
        @param size: Font size.
        @param color: Text color.
        @param border: Border thickness (0 for no border).
        @param borderColor: Border color.
        @param padding: Space between text and border.
        @param bgColor: Background color.
        """
        self.window = window
        self.text = text
        self.length = length
        self.size = size
        self.color = color
        self.border = border
        self.borderColor = borderColor
        self.padding = padding
        self.bgColor = bgColor
        self.makeButton()
    def makeButton(self):
        """Render the text into a bordered box sprite stored as self.box."""
        window = self.window
        text = self.text
        length = self.length
        size = self.size
        color = self.color
        border = self.border
        borderColor = self.borderColor
        padding = self.padding
        bgColor = self.bgColor
        t = Drawable.String(message=text, fontSize=size, color=color,
                            bgcolor=bgColor)
        # use inverse text at cursor position if cursor_pos is set
        if hasattr(self, 'cursor_pos'):
            c = self.cursor_pos
            before = Drawable.String(message=text[:c], fontSize=size, color=color,
                            bgcolor=bgColor)
            bw, bh = before.image.get_size()
            cursor = Drawable.String(message=text[c:c+1], fontSize=size, color=bgColor,
                            bgcolor=color)
            cw, ch = cursor.image.get_size()
            # overlay the inverted character at the cursor position
            t.image.blit(cursor.image, (bw, 0))
        w, h = t.image.get_size()
        if length is not None:
            # pad/clip the rendered text to the requested fixed length
            # NOTE(review): surface s is built but never blitted back into
            # the button -- the fixed-length surface appears unused; verify.
            s = pygame.Surface((length, h))
            s.fill(bgColor)
            s.blit(t.image, (0, 0))
            w = length
            self.length = w
        bw = w + 2*padding + 2*border
        bh = h + 2*padding + 2*border
        if border:
            box = Drawable.Rectangle(w=window, width=bw, height=bh,
                                color=borderColor)
            iw = w + 2*padding
            ih = h + 2*padding
            # carve out the interior so only a border frame remains
            pygame.draw.rect(box.image, bgColor,
                                ((border, border), (iw, ih)))
        else:
            box = Drawable.Rectangle(w=window, width=bw, height=bh, color=bgColor)
        box.image.blit(t.image, (border+padding, border+padding))
        if bgColor == TRANSPARENT:
            box.image.set_colorkey(TRANSPARENT)
        self.box = box
class SpriteTextButton(TextButton, SpriteButton):
    """Clickable button which is also a sprite with text printed on it."""
    def __init__(self,
                    window=None,
                    text='',
                    length=None,
                    callback=None,
                    size=22,
                    color=WHITE,
                    border=2,
                    borderColor=LGREEN,
                    padding=5,
                    bgColor=BLACK,
                    group=None):
        """Build the text box, then wrap it as a clickable sprite."""
        TextButton.__init__(self, window, text, length, callback, size,
                            color, border, borderColor, padding, bgColor)
        SpriteButton.__init__(self, self.box, callback, group)
class StationaryTextButton(TextButton, StationaryButton):
    """Clickable button which is also a sprite with text printed on it
    and does not need to move."""
    def __init__(self,
                    window=None,
                    text="",
                    length=None,
                    callback=None,
                    size=22,
                    color=WHITE,
                    border=1,
                    borderColor=LGREEN,
                    padding=5,
                    bgColor=BLACK,
                    group=None):
        """Build the text box, then wrap it as a stationary clickable sprite."""
        TextButton.__init__(self, window, text, length, callback, size,
                            color, border, borderColor, padding, bgColor)
        StationaryButton.__init__(self, sprite=self.box, callback=callback,
                            group=group)
class TextInput(SpriteTextButton):
    """Used to gather text input from the user."""
    def __init__(self,
                    window=None,
                    text='',
                    prompt='',
                    maxLength=10,
                    length=150,
                    callback=None,
                    size=22,
                    color=WHITE,
                    border=1,
                    borderColor=LGREEN,
                    padding=5,
                    bgColor=BLACK,
                    inactiveColor=LGRAY,
                    inactiveBgColor=GRAY,
                    group=None):
        """
        Initialize the TextInput widget.
        @param window: Layer on which sprite lives.
        @param text: Initial text in the window.
        @param prompt: Text rendered in front of the editable area.
        @param maxLength: Maximum number of characters in input.
        @param length: Width of the text window in pixels.
        @param callback: Function to call when RETURN is pressed.
        @param size: Font size.
        @param color: Text color.
        @param border: Thickness of text window border (0 for no border)
        @param borderColor: Color of window border (if any)
        @param padding: Space between text and edge of window or border.
        @param bgColor: Background color of text window.
        @param inactiveColor: Text color when widget is inactive.
        @param inactiveBgColor: Background color when widget is inactive.
        @param group: Additional group/ groups that should watch for
            this widget's events.
        """
        self.maxLength = maxLength
        self.text = text
        self.prompt = prompt
        # text_content holds the real text; self.text is padded for display
        self.text_content = text
        t = prompt + text + " " * (maxLength - len(text))
        self.active = 0
        SpriteTextButton.__init__(self, window, t, length, callback, size, color,
                            border, borderColor, padding, bgColor, group)
        # repeater delivers held-key repeats to addLetter
        repeater = Event.Repeat_KEY_Event(on_hold=self.addLetter, group=group)
        self.events.add(repeater)
        self.events.add(repeater.contains.events())
        self.repeater = repeater
        self.events.add(Event.KEYUP_Event(key=K_RETURN, callback=self.done, group=group))
        self.activeColor = color
        self.activeBgColor = bgColor
        self.inactiveColor = inactiveColor
        self.inactiveBgColor = inactiveBgColor
    def done(self, pygame_event=None):
        """return the text_content.
        If this is triggered from one of the widget's own events (ie K_RETURN),
        it only returns the contents if the widget is active. Otherwise, if it
        was called from outside (pygame_event is None) it returns the content
        no matter what it's state was (active or inactive). This allows another
        button to call in to the TextInput and force it to trigger its callback.
        @param pygame_event: C{pygame.Event} triggering the call. If this is
            None, C{done()} must have been called from outside the widget, and
            so it should just go ahead and callback with its text.
        """
        if self.active or pygame_event is None:
            text = self.text_content
            self.callback(text)
            # reset the field for the next input session
            self.text = ""
            self.text_content = ""
            if hasattr(self, 'cursor_pos'):
                del(self.cursor_pos)
            self.updateButton()
        else:
            return
    def addLetter(self, pygameEvent):
        """Process the next keypress.
        @param pygameEvent: L{pygame.event.Event}. Usually passed in from
            the pygsear Event handler.
        """
        if not self.active:
            return
        k = pygameEvent.key
        text = self.text_content
        new_text = text
        if k in (K_RETURN, K_ESCAPE):
            return
        elif k == K_LEFT:
            self.cursor_left()
            return
        elif k == K_RIGHT:
            self.cursor_right()
            return
        letter = pygameEvent.unicode
        if letter:
            if hasattr(self, 'cursor_pos'):
                # editing in the middle of the text
                c = self.cursor_pos
                t = list(text)
                if k == K_BACKSPACE:
                    t.pop(c-1)
                    self.cursor_pos -= 1
                elif k == K_DELETE:
                    t.pop(c)
                    if self.cursor_pos > len(t) - 1:
                        del(self.cursor_pos)
                elif len(t) >= self.maxLength:
                    Util.beep()
                else:
                    t.insert(c, letter)
                    self.cursor_pos += 1
                new_text = ''.join(t)
            else:
                # appending at the end of the text
                if k == K_BACKSPACE:
                    if text:
                        new_text = text[0:-1]
                    else:
                        Util.beep()
                elif k == K_DELETE:
                    Util.beep()
                elif len(text) >= self.maxLength:
                    Util.beep()
                else:
                    new_text = text + letter
        if new_text != text:
            self.set_text(new_text)
    def set_text(self, text):
        """Save a copy of the content of the text field and update.
        Since the actual field is padded with spaces when it is rendered,
        it is necessary to save a copy of the actual contents before going
        to render.
        """
        self.text_content = text
        self.updateButton()
    def cursor_left(self):
        """Move the edit cursor one position left (beeps at the edge)."""
        text = self.text_content
        if text:
            if not hasattr(self, 'cursor_pos'):
                pos = len(text) - 1
            else:
                pos = self.cursor_pos - 1
                if pos < 0:
                    pos = 0
                    Util.beep()
            self.cursor_pos = pos
            self.updateButton()
        else:
            pass
            Util.beep()
    def cursor_right(self):
        """Move the edit cursor right; at the end, return to append mode."""
        if not hasattr(self, 'cursor_pos'):
            Util.beep()
        else:
            pos = self.cursor_pos + 1
            if pos == len(self.text_content):
                del(self.cursor_pos)
            else:
                self.cursor_pos = pos
            self.updateButton()
    def updateButton(self):
        """Re-render the field: prompt + content padded to maxLength."""
        pos = self.get_position()
        text = self.text_content
        self.text = self.prompt + text + " " * (self.maxLength - len(text))
        self.makeButton()
        self.image = self.box.image
        self.set_position(pos)
        self.udraw()
    def makeButton(self):
        """Build the box; temporarily shift cursor_pos past the prompt."""
        if self.prompt:
            if hasattr(self, 'cursor_pos'):
                promptlen = len(self.prompt)
                self.cursor_pos += promptlen
                TextButton.makeButton(self)
                self.cursor_pos -= promptlen
            else:
                TextButton.makeButton(self)
        else:
            TextButton.makeButton(self)
    def activate(self):
        """Switch to the active color scheme and accept input."""
        Widget.activate(self)
        self.color = self.activeColor
        self.bgColor = self.activeBgColor
        self.updateButton()
    def deactivate(self):
        """Switch to the inactive color scheme and stop accepting input."""
        Widget.deactivate(self)
        self.color = self.inactiveColor
        self.bgColor = self.inactiveBgColor
        self.updateButton()
    def fire(self, pygameEvent):
        """Disarm and invoke the callback."""
        self.armed = 0
        self.callback(pygameEvent)
    def clicked(self, pygameEvent):
        """Activate on a click inside the field, deactivate otherwise."""
        pos = pygameEvent.pos
        try:
            offset = self.window.rect[0:2]
        except AttributeError:
            offset = (0, 0)
        if self.rect.move(offset[0], offset[1]).collidepoint(pos):
            self.activate()
            return 1
        else:
            self.deactivate()
            return 0
    def released(self, pygameEvent):
        """Mouse-up is ignored; activation happens on mouse-down."""
        pass
    def modal(self):
        """Run a modal input loop until ESC/QUIT or deactivation."""
        quit = Event.QUIT_Event(callback=self._stop)
        self.events.add(quit)
        stop = Event.KEYUP_Event(key=K_ESCAPE, callback=self._stop)
        self.events.add(stop)
        self.stop = 0
        self.activate()
        while not self.stop and self.active:
            try:
                conf.ticks = min(20, conf.game.clock.tick(conf.MAX_FPS))
            except AttributeError:
                conf.ticks = 20
            self.clear()
            self.events.check()
            self.udraw()
            # BUGFIX: this read self.line.repeater, but TextInput has no
            # .line attribute (that belongs to Dialog_LineInput); use the
            # widget's own repeater saved in __init__.
            if not self.repeater.key_held and not self.stop:
                # idle: block until the next event instead of spinning
                ev = pygame.event.wait()
                pygame.event.post(ev)
        self.deactivate()
        quit.kill()
        stop.kill()
        self.uclear()
class Dialog(Drawable.Layer, Widget):
    """Base class for pop-up dialogs: a centered, bordered layer."""
    def __init__(self, window=None, size=None, callback=None):
        """Create the dialog layer.

        @param window: Layer in which to draw the dialog box.
        @param size: (width, height); None uses half the window width
            and 30% of its height.
        @param callback: Function passed on to the Widget base.
        """
        Widget.__init__(self, callback)
        if size is None:
            w, h = conf.WINSIZE
            w = int(0.5 * w)
            h = int(0.3 * h)
        else:
            w, h = size
        Drawable.Layer.__init__(self, w=window, size=(w, h))
        self.center()
        self.events.add(Event.KEYUP_Event(key=K_ESCAPE, callback=self.cancel))
        self.set_background(color=BLACK)
        self.border(width=3, color=RED)
    def cancel(self, pygame_event=None):
        """Close the dialog without a result (bound to ESC)."""
        self.teardown()
    def teardown(self):
        """Stop the modal loop and remove the dialog and its events."""
        self._stop()
        self.uclear()
        self.kill()
        self.events.kill()
    def modal(self):
        """Show the dialog until ESC or QUIT."""
        quit_ev = Event.QUIT_Event(callback=self._quit)
        self.events.add(quit_ev)
        stop_ev = Event.KEYUP_Event(key=K_ESCAPE, callback=self._stop)
        self.events.add(stop_ev)
        self.stop = 0
        while not self.stop:
            self.clear()
            self.events.check()
            self.udraw()
        quit_ev.kill()
        stop_ev.kill()
        self.uclear()
class Dialog_OK(Dialog):
    """Pop up a window to get some input."""
    message = None  # subclasses may set a default message
    def __init__(self,
                    window=None,
                    size=None,
                    message=None,
                    centertext=1,
                    callback=None):
        """Initialize dialog
        @param window: Layer in which to draw the dialog box.
        @param size: Tuple of C{(width, height)} for dialog box.
        @param message: String message to be displayed. Text will be wrapped
            automatically to fit inside the box, but an error will be raised
            if the text will not fit.
        @param centertext: Center justify the message by default.
        @param callback: Function to call when the OK button is clicked
            or the enter key is pressed.
        """
        Dialog.__init__(self, window, size, callback)
        if message is None:
            if self.message is None:
                message = 'OK ?'
            else:
                message = self.message
        self.events.add(Event.KEYUP_Event(key=K_RETURN, callback=self.ok))
        w, h = self.get_size()
        # leave room for the OK button below the message
        rect_w = int(0.9 * w)
        rect_h = int(h - 70)
        rect = pygame.Rect(0, 0, rect_w, rect_h)
        textrect = Util.render_textrect(message, rect, fontSize=24, justification=centertext)
        s = Drawable.Image(w=self, image=textrect)
        s.center(y=15)
        s = Drawable.Stationary(w=self, sprite=s)
        s.draw()
        ok = SpriteTextButton(self, ' OK ', callback=self.ok,
                                group=self.events)
        ok.center(y=-30)
        ok = Drawable.Stationary(w=self, sprite=ok)
        ok.draw()
        self.return_ok = None
    def ok(self, pygame_event=None):
        """Close the dialog, invoke the callback, and record success."""
        self.teardown()
        self.callback(pygame_event)
        self.return_ok = 1
    def modal(self):
        """Show the dialog; return 1 when OK was chosen, else None."""
        Dialog.modal(self)
        if self.return_ok == 1:
            return 1
class Dialog_LineInput(Dialog_OK):
    """Used to get a single line of input"""
    def __init__(self, window=None, size=None, message='', default='', callback=None, group=None):
        """Initialize the line input dialog.
        @param window: Layer in which to draw the dialog box.
        @param message: Text message to print above the input box.
        @param default: Initial contents of the input field.
        @param callback: Function to call when input is finished. returns
            the input text to the callback function.
        @param group: Other event group in which to include this widget's
            events, in addition to its own event group.
        """
        Dialog_OK.__init__(self, window, size, message, callback=callback)
        w, h = self.get_size()
        self.line = TextInput(self, callback=self.finished, text=default, maxLength=50, length=w-50, group=self.events)
        self.events.add(self.line.events.events())
        self.line.center()
        self.line.activate()
        self.has_finished = 0
        if group is not None:
            group.add(self.events.events())
            group.add(self.line.events.events())
        self.return_text = ''
    def ok(self, pygame_event):
        """Called when OK button is clicked. Also called when widget is active
        and ENTER key is pressed.
        @param pygame_event: The pygame event that triggered the callback.
        """
        if pygame_event.type == MOUSEBUTTONUP or self.line.active:
            self.line.done()
        if self.has_finished:
            self.teardown()
    def finished(self, text):
        """Default callback when text input is complete."""
        if not self.has_finished:
            self.return_text = text
            self.callback(text)
            self.line.deactivate()
            self.has_finished = 1
    def modal(self):
        """Run the modal loop; return the entered text (if any)."""
        quit_ev = Event.QUIT_Event(callback=self._quit)
        self.events.add(quit_ev)
        stop_ev = Event.KEYUP_Event(key=K_ESCAPE, callback=self._stop)
        self.events.add(stop_ev)
        self.line.activate()
        self.stop = 0
        while not self.stop:
            try:
                conf.ticks = min(20, conf.game.clock.tick(conf.MAX_FPS))
            except AttributeError:
                conf.ticks = 20
            self.clear()
            self.events.check()
            self.line.udraw()
            self.udraw()
            # idle: block until the next event instead of spinning
            if not self.line.repeater.key_held and not self.stop:
                ev = pygame.event.wait()
                pygame.event.post(ev)
        quit_ev.kill()
        stop_ev.kill()
        self.uclear()
        if self.return_text:
            return self.return_text
class Dialog_ColorSelector(Dialog_OK):
"""Used to choose a color interactively"""
def __init__(self, window=None):
"""Initialize the color selector"""
Dialog_OK.__init__(self, window=window, size=(400, 380))
self.color_square = Drawable.Square(w=self, side=256)
self.color_square.set_position((10, 10))
self.color_square_array = pygame.surfarray.array2d(self.color_square.image)
#self.R = 0
self.hue = 0
self.set_color_square()
self.color_rect = Drawable.Rectangle(w=self, width=20, height=360)
self.color_rect.set_position((370, 10))
self.set_color_rect()
self.show_square = Drawable.Square(w=self, side=50)
self.show_square.set_position((300, 10))
self.color_chosen = WHITE
self.set_color_chosen(self.color_chosen)
self.mousebuttonup(None)
def set_color_square(self):
"""Paint a square with possible colors.
This uses the ColorSelector C{hue} property for the hue of the
color, then ranges over all possible saturations and values to make
a square.
This is way too slow.
"""
image = self.color_square.image
h = self.hue
r, g, b = colorsys.hsv_to_rgb(h, 1, 1)
rmax = r * 255
gmax = g * 255
bmax = b * 255
dr = (255 - rmax) / 255.0
dg = (255 - gmax) / 255.0
db = (255 - bmax) / 255.0
for y in range(256):
r = g = b = 0
xdr = rmax / 255.0
xdg = gmax / 255.0
xdb = bmax / 255.0
for x in range(256):
image.set_at((x, y), (r, g, b))
r += xdr
g += xdg
b += xdb
rmax += dr
gmax += dg
bmax += db
self.color_square.udraw()
# image = self.color_square.image
#
# h = self.hue
# r, g, b = colorsys.hsv_to_rgb(h, 1, 1)
#
# x,y = N.indices((256,256), N.Float)
# y /= 256.0
# row_mul = 1-y
#
# y *= x
# x *= row_mul
#
# rgb = N.zeros((256,256,3), N.Float)
# rgb[...,0] = x * r + y
# rgb[...,1] = x * g + y
# rgb[...,2] = x * b + y
#
# a = pygame.surfarray.pixels3d(image)
# a[...] = rgb.astype(N.UnsignedInt8)
#
# self.color_square.udraw()
def set_color_rect(self):
"""Set up the chooser for the red value of the color."""
image = self.color_rect.image
# for R in range(256):
# pygame.draw.line(image, (R, 0, 0), (0, R), (19, R))
for hue in range(360):
h = hue / 360.0
s = v = 1.0
r, g, b = colorsys.hsv_to_rgb(h, s, v)
R, G, B = 255 * r, 255* g, 255 * b
pygame.draw.line(image, (R, G, B), (0, hue), (19, hue))
self.color_rect.udraw()
def set_color_chosen(self, color):
"""Set the chosen color, and update the display of the chosen color."""
self.color_chosen = color
self.show_square.set_color(color)
self.show_square.udraw()
def mousebuttondown(self, ev):
"""Set a flag indicating the mouse button is held down."""
self.button_pressed = 1
def mousebuttonup(self, ev):
"""Reset the mouse button held down flag."""
self.button_pressed = 0
def mousebutton_action(self):
"""Actions to perform any time the mouse button is held down.
Checks to see if the mouse is inside either of the C{color_square}
or the C{color_rect} and either sets the chosen color, or sets the
red value for possible colors and updates the C{color_square}.
"""
try:
offset = self.rect[0:2]
except AttributeError:
offset = (0, 0)
lx, ly = offset
x, y = pygame.mouse.get_pos()
pos = x-lx, y-ly
if self.color_square.rect.collidepoint(pos):
try:
pos = x-lx-10, y-ly-10
color = self.color_square.image.get_at(pos)
except IndexError:
pass
else:
self.set_color_chosen(color[0:3])
elif self.color_rect.rect.collidepoint(pos):
try:
pos = x-lx-370, y-ly-10
color = self.color_rect.image.get_at(pos)
except IndexError:
pass
else:
R, G, B = color[0:3]
r, g, b = R / 256.0, G / 256.0, B / 256.0
h, s, v = colorsys.rgb_to_hsv(r, g, b)
self.hue = h
self.set_color_square()
    def modal(self):
        """Run the color chooser as a modal loop.

        Blocks until the user quits or presses ESC; returns the chosen
        color if C{self.return_ok} is set, otherwise None.
        """
        quit_ev = Event.QUIT_Event(callback=self._quit)
        self.events.add(quit_ev)
        stop_ev = Event.KEYUP_Event(key=K_ESCAPE, callback=self._stop)
        self.events.add(stop_ev)
        down = Event.MOUSEBUTTONDOWN_Event(callback=self.mousebuttondown)
        self.events.add(down)
        up = Event.MOUSEBUTTONUP_Event(callback=self.mousebuttonup)
        self.events.add(up)
        self.stop = 0
        while not self.stop:
            self.clear()
            self.events.check()
            # Continuous picking while the button is held down.
            if self.button_pressed:
                self.mousebutton_action()
            self.udraw()
        # NOTE(review): only quit_ev and stop_ev are killed here; the
        # down/up events are left registered -- confirm that is intended.
        quit_ev.kill()
        stop_ev.kill()
        self.uclear()
        if self.return_ok:
            return self.color_chosen
class Console(Widget):
    """Interactive Python console rendered on a pygame layer.

    Wraps a code.InteractiveConsole.  While a command executes,
    sys.stdout and sys.stderr are redirected to this widget so that
    printed output (and tracebacks) appear inside the console window.
    """

    def __init__(self, locals=None, size=(600, 200)):
        """Create the console widget.

        @param locals: namespace dict for the embedded interpreter; a fresh
            dict is used when omitted.  (The previous mutable default
            argument C{{}} was shared between every Console instance.)
        @param size: (width, height) of the console layer in pixels.
        """
        if locals is None:
            locals = {}
        self.locals = locals
        Widget.__init__(self)
        self.size = size
        self.history = []              # previously submitted input lines
        self.history_curr_index = 0    # cursor into self.history
        self.buffer = []               # pending lines of a multi-line command
        self.paged_up = 0              # how far the output is scrolled back
        self.save_stdout = sys.stdout  # restored after each command runs
        self.save_stderr = sys.stderr
        self.make_widget()

    def make_widget(self):
        """(Re)build the layers, input line and key bindings for self.size."""
        self.events.kill()
        size = self.size
        w, h = size
        chars = int(w / 8.0)  # rough character capacity of the input line
        self.layer = Drawable.Layer(size=size, color=BLACK)
        self.layer.center(x=10, y=-10)
        self.terp = InteractiveConsole(self.locals)
        self.line = TextInput(self.layer, callback=self.run_command, text='', prompt='>>> ',
                                maxLength=chars, length=w, border=0, group=self.events)
        self.events.add(self.line.events.events())
        self.line.center(x=10, y=-10)
        # NOTE(review): the input line's events are registered twice (the
        # individual events above and the group here) -- confirm intended.
        self.events.add(self.line.events)
        self.events.add(Event.KEYUP_Event(key=K_UP, callback=self.history_prev))
        self.events.add(Event.KEYUP_Event(key=K_DOWN, callback=self.history_next))
        self.events.add(Event.KEYUP_Event(key=K_PAGEUP, callback=self.handle_pageup))
        self.events.add(Event.KEYUP_Event(key=K_PAGEDOWN, callback=self.handle_pagedown))
        self.lines_width = int(0.95 * w)
        self.lines_height = 5000  # fixed scrollback surface height
        self.lines_per_screen = int(0.8 * (h-45))  # scroll step (pixels)
        self.lines = Drawable.Layer(w=self.layer, size=(self.lines_width, self.lines_height), color=BLACK)
        self.lines.center(x=10, y=15)
        self.lines_position = h - 52  # y where the next output line is drawn

    def resize(self, size):
        """Resize the console and rebuild its widgets."""
        self.size = size
        self.make_widget()
        self.layer.udraw()

    def activate(self):
        """Show the console and give the input line focus."""
        Widget.activate(self)
        self.line.activate()
        self.layer.udraw()

    def deactivate(self):
        """Hide the console and drop input focus."""
        Widget.deactivate(self)
        self.line.deactivate()
        self.layer.uclear()

    def new_line(self, text, prompt=''):
        """Append one line of text (with optional prompt) to the scrollback."""
        save_text = prompt + text
        s = Drawable.String(w=self.lines, message=save_text, fontSize=22)
        w, h = s.get_size()
        # deal with output longer than one line
        if w > self.lines_width:
            try:
                t = Util.render_textrect(save_text, pygame.Rect(0, 0, self.lines_width, 1500), fontSize=22, trim=1)
                s = Drawable.Image(image=t)
            except Exception:
                # Could not wrap the text; show a placeholder instead.
                s = Drawable.String(w=self.lines, message='Output too long for this window...', fontSize=22)
            w, h = s.get_size()
        s.set_position((5, self.lines_position))
        self.lines_position += h
        if self.lines_position > self.lines_height - 50:
            # starting to run out of room in the lines surface...
            # i am not sure how large to make this or if i should
            # bother trying to extend it if it starts to get full.
            Util.beep()
        s = Drawable.Stationary(w=self.lines, sprite=s)
        s.draw()
        # Scroll up so the newest line stays visible.
        self.lines.clear()
        self.lines.nudge(dy=-h)
        self.lines.udraw()

    def write(self, text):
        """File-like hook so the console can stand in for stdout/stderr."""
        self.handle_print(text)

    def handle_print(self, text):
        """Render printed text, one scrollback line per newline."""
        text = text.strip()
        lines = str(text).split('\n')
        for line in lines:
            self.new_line(line, prompt='')

    def handle_pageup(self, pygame_event=None):
        """Scroll the output back one screenful."""
        self.paged_up += self.lines_per_screen
        self.lines.clear()
        self.lines.nudge(dy=self.lines_per_screen)
        self.lines.udraw()

    def handle_pagedown(self, pygame_event=None):
        """Scroll the output forward one screenful, stopping at the bottom."""
        self.paged_up -= self.lines_per_screen
        if self.paged_up >= 0:
            self.lines.clear()
            self.lines.nudge(dy=-self.lines_per_screen)
            self.lines.udraw()
        else:
            self.paged_up = 0
            Util.beep()
        if self.paged_up == 0:
            self.line.udraw()

    def handle_exception(self, e):
        """Show an exception in the scrollback and reset the prompt."""
        for line in str(e).split('\n'):
            self.new_line(line, prompt='')
        self.line.prompt = '>>> '

    def run_command(self, text):
        """Process the next line of input.
        This is called when the user presses ENTER in the console widget.
        If this new text completes a command, the command is executed,
        otherwise this text is added to a buffer awaiting the next line.
        @param text: The next line of input. Does not include the newline.
        """
        if text:
            self.history.append(text)
            self.history_curr_index = len(self.history)
        self.new_line(text, self.line.prompt)
        self.buffer.append(text)
        command = '\n'.join(self.buffer)
        # Capture interpreter output while the command runs.
        sys.stdout = self
        sys.stderr = self
        code = None
        try:
            code = compile_command(command)
        except Exception as e:
            # Syntax error in the buffered source -- report and start over.
            self.handle_exception(e)
            self.buffer = []
        else:
            if code is not None:
                # Complete command: run it with the input line released.
                self.deactivate()
                try:
                    self.terp.runcode(code)
                except SyntaxError as e:
                    self.handle_exception(e)
                except Exception as e:
                    self.handle_exception(e)
                self.buffer = []
                self.line.prompt = '>>> '
                self.activate()
            else:
                # Incomplete command: prompt for a continuation line.
                self.line.prompt = '... '
        sys.stdout = self.save_stdout
        sys.stderr = self.save_stderr

    def toggle_visible(self, pygame_event):
        """Show or hide the console."""
        if self.active:
            self.deactivate()
        else:
            self.activate()

    def history_prev(self, pygame_event):
        """Replace the input line with the previous history entry."""
        if self.history_curr_index == len(self.history):
            # Save what the user was typing so history_next can restore it.
            self.partial_line = self.line.text_content
        self.history_curr_index -= 1
        if self.history_curr_index < 0:
            Util.beep()
            self.history_curr_index = 0
        if self.history:
            text = self.history[self.history_curr_index]
            self.line.set_text(text)

    def history_next(self, pygame_event):
        """Replace the input line with the next history entry."""
        self.history_curr_index += 1
        if self.history_curr_index > len(self.history)-1:
            if self.line.text_content == self.partial_line:
                Util.beep()
            self.history_curr_index = len(self.history)
            self.line.set_text(self.partial_line)
        elif self.history:
            text = self.history[self.history_curr_index]
            self.line.set_text(text)
        else:
            Util.beep()

    def set_modal_events(self):
        """Register the QUIT/ESC handlers used by modal()."""
        self.quit_ev = Event.QUIT_Event(callback=self._quit)
        self.events.add(self.quit_ev)
        self.stop_ev = Event.KEYUP_Event(key=K_ESCAPE, callback=self._stop)
        self.events.add(self.stop_ev)

    def modal(self):
        """Run the console as a modal loop until ESC or QUIT."""
        self.activate()
        self.set_modal_events()
        self.new_line('Press ESC to close console')
        self.line.udraw()
        self.stop = 0
        while not self.stop:
            try:
                conf.ticks = min(20, conf.game.clock.tick(conf.MAX_FPS))
            except AttributeError:
                # No running game clock -- use a fixed tick.
                conf.ticks = 20
            self.layer.clear()
            self.events.check()
            if self.active:
                self.line.udraw()
                self.layer.udraw()
            if not self.line.repeater.key_held and not self.stop:
                # Idle: block until the next event instead of busy-looping.
                ev = pygame.event.wait()
                pygame.event.post(ev)
        self.quit_ev.kill()
        self.stop_ev.kill()
        self.layer.uclear()
        self.deactivate()
class EscCatcher(Widget):
    """Invokes a callback when ESC is pressed (default: quit the program)."""

    def __init__(self, callback=None):
        chosen = self._quit if callback is None else callback
        self.set_callback(chosen)
        # Register a global KEYDOWN handler; filtering happens in escape().
        Event.KEYDOWN_Event(self.escape)

    def escape(self, pygameEvent):
        """Fire the callback if the pressed key was ESC."""
        if pygameEvent.key != K_ESCAPE:
            return
        self.callback(self)

    def _quit(self, pygame_event=None):
        """Default callback: terminate the program."""
        import sys
        sys.exit()
|
If you need a little bit of inspiration it's worth making the most of the various photographic exhibitions not too far from home. I have chosen a few that may interest you in London, but firstly one in Kent, at Hever Castle this Sunday for Father's Day.
Stella Scordellis Automotive Photography at Hever Castle on Father's Day Sunday 21st June, a family day out and to see the cars and bikes racing legend John Surtees once drove and to meet him too. On display are a selection of my Limited Edition framed automotive photographs and on sale for the first time. This link will tell you more.
Beneath the Surface at Somerset House until 24th August, the photographs on display are of a watery theme dating back to Victorian times, some rarely seen and from the V & A collection.
Fresh Faced + Wide Eyed at the Photographers Gallery is dedicated to recognising and nurturing new talents on until 5th July.
Ernst Haas: Reconstructing London at the Atlas Gallery showing images by this iconic photographer of London in the late 1940's some of it still recognisable, on until Saturday 4th July.
Audrey Hepburn - Portraits of An Icon at the National Portrait Gallery from 2nd July, a selection of more than seventy images taken by Terry O'Neill, Richard Avedon, Cecil Beaton, Norman Parkinson and Irving Penn; this is a must if you love portraiture.
A History of Photography Series and Sequences Victoria and Albert Museum until 1st November. V&A’s internationally renowned collection, which chronicles the history of photography from the 1840s to the present day. In 1852, the V&A became the first museum in the UK to collect photographs and in 1858, the first to hold a photography exhibition.
Would You Like to Understand Your Camera Better? If this applies to you please get in touch. Twice a week I save the time to teach, on a one to one basis, how to understand your camera better and how to achieve professional results. Often this is bought as a gift voucher to present to someone special (like your dad for Father's Day).
Thanks for reading, I'll be back in two weeks with more to share.
Enjoy the sunshine! Bye for now.
|
# Copyright 2018 - Vitrage team
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from vitrage.common.constants import EntityCategory
from vitrage.common.constants import VertexProperties as VProps
from vitrage.datasources.resource_transformer_base import \
ResourceTransformerBase
from vitrage.datasources.sample import SAMPLE_DATASOURCE
from vitrage.datasources.sample import SampleFields
from vitrage.datasources import transformer_base
import vitrage.graph.utils as graph_utils
# Module-level logger for this datasource.
LOG = logging.getLogger(__name__)
class SampleTransformer(ResourceTransformerBase):
    """Transformer skeleton for the sample datasource.

    Snapshot and update events are handled identically: each event becomes
    a single resource vertex with no neighbors.
    """

    def __init__(self, transformers):
        super(SampleTransformer, self).__init__(transformers)

    def _create_snapshot_entity_vertex(self, entity_event):
        return self._create_vertex(entity_event)

    def _create_update_entity_vertex(self, entity_event):
        return self._create_vertex(entity_event)

    def _create_snapshot_neighbors(self, entity_event):
        return self._create_sample_neighbors(entity_event)

    def _create_update_neighbors(self, entity_event):
        return self._create_sample_neighbors(entity_event)

    def _create_entity_key(self, entity_event):
        """Build the unique graph key for this entity."""
        key_fields = self._key_values(entity_event[SampleFields.TYPE],
                                      entity_event[VProps.ID])
        return transformer_base.build_key(key_fields)

    @staticmethod
    def get_vitrage_type():
        return SAMPLE_DATASOURCE

    def _create_vertex(self, entity_event):
        # Placeholder vertex: the FIXME fields must be filled in by a real
        # datasource implementation.
        return graph_utils.create_vertex(
            self._create_entity_key(entity_event),
            vitrage_category=EntityCategory.RESOURCE,
            vitrage_type=None,  # FIXME
            vitrage_sample_timestamp=None,  # FIXME
            entity_id=None,  # FIXME
            update_timestamp=None,  # FIXME
            entity_state=None,  # FIXME
            metadata=None)  # FIXME

    def _create_sample_neighbors(self, entity_event):
        return []
|
Olumide Emmanuel has released a new song titled “Ogo NI”. Ogo ni “It is Glory” is a song of Gratitude. Be eternally grateful to God!
Ogo ni drives you into depths with God until we all, with open face beholding as in a glass the glory of the Lord.
|
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from flask import render_template
from flask_pluginengine import plugin_context
from wtforms.fields import TextAreaField, SubmitField
from indico.core.db import db
from indico.modules.events.requests.models.requests import RequestState
from indico.modules.events.requests.notifications import (notify_new_modified_request, notify_withdrawn_request,
notify_accepted_request, notify_rejected_request)
from indico.util.date_time import now_utc
from indico.util.i18n import _
from indico.web.flask.templating import get_overridable_template_name, get_template_module
from indico.web.forms.base import FormDefaults, IndicoForm
class RequestFormBase(IndicoForm):
    """Base form for service requests.

    Strips the ``event`` and ``request`` keyword arguments off before
    delegating to IndicoForm, keeping them available as attributes.
    """

    def __init__(self, *args, **kwargs):
        self.request = kwargs.pop('request')
        self.event = kwargs.pop('event')
        super(RequestFormBase, self).__init__(*args, **kwargs)
class RequestManagerForm(IndicoForm):
    """Management form for saving/accepting/rejecting a service request."""
    # Names of the submit buttons, used to dispatch on which one was clicked.
    action_buttons = {'action_save', 'action_accept', 'action_reject'}
    comment = TextAreaField(_('Comment'),
                            description=_('The comment will be shown only if the request is accepted or rejected.'))
    action_save = SubmitField(_('Save'))
    action_accept = SubmitField(_('Accept'))
    action_reject = SubmitField(_('Reject'))
class RequestDefinitionBase(object):
    """Defines a service request which can be sent by event managers."""
    #: the plugin containing this request definition - assigned automatically
    plugin = None
    #: the unique internal name of the request type
    name = None
    #: the title of the request type as shown to users
    title = None
    #: the :class:`IndicoForm` to use for the request form
    form = None
    #: the :class:`IndicoForm` to use for the request manager form
    manager_form = RequestManagerForm
    #: default values to use if there's no existing request
    form_defaults = {}
    @classmethod
    def render_form(cls, **kwargs):
        """Renders the request form
        :param kwargs: arguments passed to the template
        """
        # Plugins may override the core template with their own.
        tpl = get_overridable_template_name('event_request_details.html', cls.plugin, 'events/requests/')
        return render_template(tpl, **kwargs)
    @classmethod
    def create_form(cls, event, existing_request=None):
        """Creates the request form
        :param event: the event the request is for
        :param existing_request: the :class:`Request` if there's an existing request of this type
        :return: an instance of an :class:`IndicoForm` subclass
        """
        # Pre-fill with the stored data when editing an existing request.
        defaults = FormDefaults(existing_request.data if existing_request else cls.form_defaults)
        with plugin_context(cls.plugin):
            return cls.form(prefix='request-', obj=defaults, event=event, request=existing_request)
    @classmethod
    def create_manager_form(cls, req):
        """Creates the request management form
        :param req: the :class:`Request` of the request
        :return: an instance of an :class:`IndicoForm` subclass
        """
        defaults = FormDefaults(req, **req.data)
        with plugin_context(cls.plugin):
            return cls.manager_form(prefix='request-manage-', obj=defaults)
    @classmethod
    def get_notification_template(cls, name, **context):
        """Gets the template module for a notification email
        :param name: the template name
        :param context: data passed to the template
        """
        tpl = get_overridable_template_name(name, cls.plugin, 'events/requests/emails/', 'emails/')
        return get_template_module(tpl, **context)
    @classmethod
    def can_be_managed(cls, user):
        """Checks whether the user is allowed to manage this request type
        :param user: a :class:`.User`
        """
        raise NotImplementedError
    @classmethod
    def get_manager_notification_emails(cls):
        """Returns the email addresses of users who manage requests of this type
        The email addresses are used only for notifications.
        It usually makes sense to return the email addresses of the users who
        pass the :method:`can_be_managed` check.
        :return: set of email addresses
        """
        return set()
    @classmethod
    def send(cls, req, data):
        """Sends a new/modified request
        :param req: the :class:`Request` of the request
        :param data: the form data from the request form
        """
        # Merge the new form data over any previously stored data.
        req.data = dict(req.data or {}, **data)
        is_new = req.id is None
        if is_new:
            db.session.add(req)
            db.session.flush()  # we need the creation dt for the notification
        notify_new_modified_request(req, is_new)
    @classmethod
    def withdraw(cls, req, notify_event_managers=True):
        """Withdraws the request
        :param req: the :class:`Request` of the request
        :param notify_event_managers: if event managers should be notified
        """
        req.state = RequestState.withdrawn
        notify_withdrawn_request(req, notify_event_managers)
    @classmethod
    def accept(cls, req, data, user):
        """Accepts the request
        To ensure that additional data is saved, this method should
        call :method:`manager_save`.
        :param req: the :class:`Request` of the request
        :param data: the form data from the management form
        :param user: the user processing the request
        """
        cls.manager_save(req, data)
        req.state = RequestState.accepted
        req.processed_by_user = user
        req.processed_dt = now_utc()
        notify_accepted_request(req)
    @classmethod
    def reject(cls, req, data, user):
        """Rejects the request
        To ensure that additional data is saved, this method should
        call :method:`manager_save`.
        :param req: the :class:`Request` of the request
        :param data: the form data from the management form
        :param user: the user processing the request
        """
        cls.manager_save(req, data)
        req.state = RequestState.rejected
        req.processed_by_user = user
        req.processed_dt = now_utc()
        notify_rejected_request(req)
    @classmethod
    def manager_save(cls, req, data):
        """Saves management-specific data
        This method is called when the management form is submitted without
        accepting/rejecting the request (which is guaranteed to be already
        accepted or rejected).
        :param req: the :class:`Request` of the request
        :param data: the form data from the management form
        """
        req.comment = data['comment']
|
Added more content, and fixed some bugs.
Want to DIY learn Turkish Recipes, and want to get help with expert's advice, as well as with daily tips? This is App for you.
November 19, 2012 Price decrease: $2.99 -> FREE!
November 23, 2012 Price decrease: $2.99 -> FREE!
November 27, 2012 Price decrease: $2.99 -> FREE!
December 01, 2012 Price decrease: $2.99 -> FREE!
December 05, 2012 Price decrease: $2.99 -> FREE!
December 09, 2012 Price decrease: $2.99 -> FREE!
December 13, 2012 Price decrease: $2.99 -> FREE!
December 21, 2012 Price decrease: $2.99 -> FREE!
December 25, 2012 Price decrease: $2.99 -> FREE!
December 29, 2012 Price decrease: $2.99 -> FREE!
|
#!/usr/bin/env python3
"""
Return answers implementation.
"""
__author__ = "Mark Birger"
__date__ = "20 Jan 2015"
import multiprocessing
class Returns:
    """Collects answers produced asynchronously by routines.

    Each routine owns a queue into which it pushes indexes of answers;
    separate routine processes can also push response values that are
    matched against registered cases.
    """

    def __init__(self):
        self.routines = []   # simple answer-index routines
        self.processes = {}  # named routine processes, keyed by name

    def new_return(self, answers):
        """Register *answers* with a fresh queue; return the queue."""
        entry = {
            "answers": answers,
            "queue": multiprocessing.Queue(maxsize=0),
        }
        self.routines.append(entry)
        return entry["queue"]

    def get_returns(self):
        """Drain every queue and return all pending "return" answers."""
        collected = []
        # Simple routines push indexes into their answer list.
        for entry in self.routines:
            pending = entry["queue"]
            while not pending.empty():
                collected.append(entry["answers"][pending.get()])
        # Process-backed routines push response values that are matched
        # against their registered cases.
        finished = []
        for name, proc in self.processes.items():
            responses = proc["responses_queue"]
            while not responses.empty():
                response = responses.get()
                for idx, case in enumerate(proc["cases"][0]):
                    if case == response:
                        collected.append(proc["cases"][1][idx])
            if not proc["process"].is_alive():
                # TODO: check how safe this is with respect to child state.
                finished.append(name)
        for name in finished:
            del self.processes[name]
        return collected

    def new_routine(self, process, name, requests_queue, responses):
        """Register a routine process with its request/response queues."""
        self.processes[name] = {
            "process": process,
            # TODO: remove, unused -- realised with the Scope module.
            "requests_queue": requests_queue,
            "cases": responses[0],
            "responses_queue": responses[1],
        }
|
Realist News (Jsnip4) - POLICE STATE NOW: Obama Signs Anti-Protest Bill HR 347- Suppresses Free Speech!
Realist News (Jsnip4) > General Discussion > Survivalist | Conspiracies | Gold | Silver | News > POLICE STATE NOW: Obama Signs Anti-Protest Bill HR 347- Suppresses Free Speech!
Full Version: POLICE STATE NOW: Obama Signs Anti-Protest Bill HR 347- Suppresses Free Speech!
On Thursday, President Obama signed H.R. 347, a bill that could drastically limit the ability of Americans to assemble and protest. It is now a federal offense to assemble at many political events, essentially criminalizing protesting.
Under the new law, anyone who enters Secret Service protected property and "engages in disorderly or disruptive conduct" or "impede[s] or disrupt[s] the orderly conduct of Government business or official functions" can be arrested.
Before you jump to the conclusion that anyone who impedes the Secret Service could be endangering Obama, and thus should be arrested, keep in mind that the Secret Service not only protects the President, but is also called in to monitor national events, sporting events, state funerals, inaugural addresses, NATO meetings, G-8 Summits, and a host of other events.
The law President Obama signed makes a bad law worse. Previously, a person could only be arrested for "knowingly" and "willfully" impeding the Secret Service, but now a protester could be arrested for unknowingly impeding the Secret Service, even if they have no idea that the *Secret* Service is monitoring the event.
|
#!/usr/bin/python
#
# FPGA Prog, transfers a BitStream to a device
# Copyright (C) 2015-2019 INTI
# Copyright (C) 2015-2019 Rodrigo A. Melo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import sys
import database as db
import common
def main():
    """Transfer a bitstream to a device based on command-line options."""
    options = common.get_options(__file__)
    # Processing the options
    # A bitstream file is mandatory except for detect/unlock operations and
    # the Libero tool.
    if not os.path.exists(options.bit) and options.device not in ['detect','unlock'] and options.tool not in ['libero']:
        sys.exit('fpga_prog (ERROR): bitstream needed but not found.')
    if options.board and options.board not in db._boards:
        sys.exit("fpga_prog (ERROR): unsupported board")
    if options.board is not None and options.device not in ['detect','unlock']:
        # The board database must know the target device for this board.
        if options.device + '_name' not in db._boards[options.board]:
            sys.exit(
                "fpga_prog (ERROR): the device <%s> is not supported in the board <%s>." %
                (options.device, options.board)
            )
        else:
            # Fill in board-specific defaults (JTAG position, memory name/width).
            options.position = db._boards[options.board]['fpga_pos']
            if options.device != 'fpga':
                options.memname = db._boards[options.board][options.device + '_name']
                options.width = db._boards[options.board][options.device + '_width']
    if not options.debug:
        # Preparing files
        # Generate options.tcl only if the user did not provide their own;
        # temp stays None in that case and the file is left untouched.
        temp = None;
        if not os.path.exists('options.tcl'):
            temp = open('options.tcl','w')
            if 'memname' in options:
                for dev in ['fpga', 'spi', 'bpi', 'xcf']:
                    temp.write("set %s_name %s\n" % (dev, options.memname))
            if 'position' in options:
                for dev in ['fpga', 'xcf']:
                    temp.write("set %s_pos %s\n" % (dev, options.position))
            if 'width' in options:
                for dev in ['spi', 'bpi', 'xcf']:
                    temp.write("set %s_width %s\n" % (dev, options.width))
            temp.flush()
        # Executing
        text = common.get_makefile_content(
            tool=options.tool, task=None, dev=options.device,
            path=(common.get_script_path(__file__) + "/tcl")
        )
        common.execute_make(__file__, text)
        # Clean up the generated file only if we created it ourselves.
        if temp is not None:
            temp.close()
            os.remove('options.tcl')
    else:
        # Debug mode: just show the resolved options.
        print(options)
if __name__ == "__main__":
    main()
|
SR Suntour ATS rear-wheel drive system impresses cross riders with a torque of up to 80Nm and 500W power motor that can go up to 28mph in speed.
An upgrade from the 9-speed predecessor. It permits a more gradual gear adjustment and more efficient force transmission.
This is a semi-integrated headset with an inner diameter of 1 1/8″ at the top and 1 ½″ at the bottom. This ensures best-riding stability and excellent control head stiffness. The bigger shaft results in a stiffer fork and frame as riding forces are spread more evenly thanks to a thicker head tube.
|
import logging
import os

import psycopg2
# Module-level logger shared by every helper in this file.
_logger = logging.getLogger(__name__)
def query(conn, sql, results='namedtuple'):
    """Issue a SQL query that returns a result set (namedtuple rows by default)."""
    return execute(conn, sql, results=results)
def execute(conn, sql, data=None, results=False):
    """Issue a general SQL statement, optionally fetching results.

    :param conn: an open psycopg2 connection; the statement runs inside a
        transaction (committed on success, rolled back on error).
    :param data: optional parameters bound to the statement's placeholders.
    :param results: falsy to run without fetching; one of 'pgdict', 'dict',
        'logging' or 'namedtuple' to pick a cursor type and return all rows.
        Any other truthy value falls back to namedtuple rows.
    """
    with conn:
        if results:
            from psycopg2 import extras
            if results == 'pgdict':
                cur_type = extras.DictCursor
            elif results == 'dict':
                cur_type = extras.RealDictCursor
            elif results == 'logging':
                cur_type = extras.LoggingCursor
            else:
                # BUG FIX: previously any other truthy value (e.g.
                # results=True, which exec_psql passes) left cur_type
                # unbound and raised NameError; default to namedtuple rows.
                cur_type = extras.NamedTupleCursor
            with conn.cursor(cursor_factory=cur_type) as cursor:
                cursor.execute(query=sql, vars=data)
                _logger.info('fetching results: {0}'.format(sql))
                return cursor.fetchall()
        else:
            with conn.cursor() as cursor:
                cursor.execute(query=sql, vars=data)
                _logger.info('executing statement: {0}'.format(sql))
def exec_psql(conn, sql_path, results=True, **kwargs):
    """Execute a parameterized .psql file and return its result set.

    The file content is treated as a str.format template filled in with
    **kwargs before execution.
    """
    with open(sql_path, 'r') as sql_file:
        sql_template = sql_file.read()
    sql = sql_template.format(**kwargs)
    _logger.info('executing psql file: {0}'.format(sql_path))
    # IMPROVED: the fetched rows were previously discarded despite
    # results=True; return them to the caller.
    return execute(conn, sql, results=results)
def multi_insert(conn, data, table, column_list, schema='public', submit=True):
    """Issue a multi-row insert.

    :param data: sequence of row tuples; the placeholder list is sized from
        the first row.
    :param column_list: column names as they should appear in the SQL --
        NOTE(review): a Python list would be interpolated with brackets;
        confirm callers pass a pre-joined string.
    :param submit: unused, kept only for backward compatibility.
    """
    # http://stackoverflow.com/questions/8134602/psycopg2-insert-multiple-rows-with-one-query
    values = ",".join(["%s"] * len(data[0]))
    sql = '''insert into {schema}.{table} ({columns}) values ({values})
    '''.format(table=table, schema=schema, columns=column_list, values=values)
    # BUG FIX: execute() has no 'submit' parameter; passing submit=submit
    # raised TypeError on every call.
    execute(conn, sql, data=data)
def copy_from(conn, file_obj, table, columns, sep="\t"):
    """Bulk-load *file_obj* into *table* using PostgreSQL COPY."""
    with conn, conn.cursor() as cursor:
        cursor.copy_from(file=file_obj, table=table, columns=columns, sep=sep)
        _logger.info('psql copy to table: {0}'.format(table))
def drop_table(conn, table, schema='public'):
    """Drop schema.table if it exists."""
    statement = "drop table if exists {0}.{1};".format(schema, table)
    execute(conn, statement)
    _logger.info('dropped table: {0}'.format(table))
def drop_schema(conn, schema):
    """Drop the schema (with cascade) if it exists."""
    statement = "drop schema if exists {0} cascade;".format(schema)
    execute(conn, statement)
    _logger.info('dropped schema: {0}'.format(schema))
def vacuum(conn, table, schema='public'):
    """Vacuum & analyze table."""
    # NOTE(review): execute() runs statements inside `with conn` (a
    # transaction), but VACUUM cannot run inside a transaction block --
    # confirm the connection is in autocommit mode before relying on this.
    execute(conn, sql="vacuum analyze {schema}.{table};".format(schema=schema, table=table))
|
DC-8 Jetliner Series 10 through 70 - No flaps indicator?
I just reinstalled the DC8 50 - 70.
However, I now noticed that there is no needle in the flaps indicator?
Same problem here. Running FSX-SE under Windows 10.
I use P3D4.1 and W10 and I have a needle. So it's maybe something with the FSX installer.
|
import time
import pygame
from time import sleep
from cannybots.radio import BLE
from cannybots.clients.joypad import SimpleJoypadClient
pygame.init()
def main():
    """Poll the first joystick and stream its X/Y axes to the nearest bot."""
    xAxis=0
    yAxis=0
    lastUpdateTime = time.time()
    joysticks = []
    clock = pygame.time.Clock()
    keepPlaying = True
    # Connect to the nearest Cannybot over BLE.
    ble = BLE()
    myBot = ble.findNearest()
    joypadClient = SimpleJoypadClient(myBot)
    # for al the connected joysticks
    for i in range(0, pygame.joystick.get_count()):
        # create an Joystick object in our list
        joysticks.append(pygame.joystick.Joystick(i))
        # initialize them all (-1 means loop forever)
        joysticks[-1].init()
        # print a statement telling what the name of the controller is
        print "Detected joystick '",joysticks[-1].get_name(),"'"
    while keepPlaying:
        # Throttle BLE updates to at most one every 50 ms (~20 Hz).
        if (time.time() - lastUpdateTime) > 0.05:
            joypadClient.updateJoypadWithZ(int(xAxis*255), int(yAxis*255), 0,0)
            lastUpdateTime=time.time()
        clock.tick(60)
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                print "Received event 'Quit', exiting."
                keepPlaying = False
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                print "Escape key pressed, exiting."
                keepPlaying = False
            elif event.type == pygame.JOYAXISMOTION:
                #print "Joystick '",joysticks[event.joy].get_name(),"' axis",event.axis,"motion."
                # Axis 0/1 = X/Y; values are read from the last joystick --
                # presumably only one controller is expected; TODO confirm.
                if event.axis==0:
                    xAxis=joysticks[-1].get_axis(0)
                if event.axis==1:
                    yAxis=joysticks[-1].get_axis(1)
            elif event.type == pygame.JOYBUTTONDOWN:
                print "Joystick '",joysticks[event.joy].get_name(),"' button",event.button,"down."
            elif event.type == pygame.JOYBUTTONUP:
                print "Joystick '",joysticks[event.joy].get_name(),"' button",event.button,"up."
            elif event.type == pygame.JOYHATMOTION:
                print "Joystick '",joysticks[event.joy].get_name(),"' hat",event.hat," moved."
main()
pygame.quit()
|
1950 American Standard double basin porcelain over cast iron sink.
The sink's dimensions are 42"L x 25"D. The shallow basin is 6" deep. The deep basin reaches a depth of 12 inches.
The sink is in good condition with minor wear.
|
import numpy as np
import scipy.linalg as ln
from sklearn.decomposition import TruncatedSVD
from splearn.decomposition import SparkTruncatedSVD
from splearn.decomposition.truncated_svd import svd, svd_em
from splearn.utils.testing import (SplearnTestCase, assert_array_almost_equal,
assert_array_equal, assert_true)
from splearn.utils.validation import check_rdd_dtype
def match_sign(a, b):
    """Return *b*, negated if its sign pattern is the exact opposite of *a*'s.

    Raises AssertionError when the sign patterns neither match nor are
    exact opposites.
    """
    sign_a = np.sign(a)
    sign_b = np.sign(b)
    if np.array_equal(sign_a, sign_b):
        return b
    if np.array_equal(sign_a, -sign_b):
        return -b
    raise AssertionError("inconsistent matching of sign")
class TestSVDFunctions(SplearnTestCase):
    # Tests for the low-level svd / svd_em helpers against scipy's exact SVD.
    def test_svd(self):
        # Exact distributed SVD must match scipy's result up to sign.
        X, X_rdd = self.make_dense_rdd()
        u, s, v = svd(X_rdd, 1)
        # Collect the distributed left singular vector into a local array.
        u = np.squeeze(np.concatenate(np.array(u.collect()))).T
        u_true, s_true, v_true = ln.svd(X)
        assert_array_almost_equal(v[0], match_sign(v[0], v_true[0, :]))
        assert_array_almost_equal(s[0], s_true[0])
        assert_array_almost_equal(u, match_sign(u, u_true[:, 0]))
    def test_svd_em(self):
        # EM-based SVD is iterative; compare with a loose tolerance and
        # accept either sign of the singular vectors.
        X, X_rdd = self.make_dense_rdd((1000, 4))
        u, s, v = svd_em(X_rdd, 1, seed=42, maxiter=50)
        u = np.squeeze(np.concatenate(np.array(u.collect()))).T
        u_true, s_true, v_true = ln.svd(X)
        tol = 1e-1
        assert(np.allclose(s[0], s_true[0], atol=tol))
        assert(np.allclose(+v, v_true[0, :], atol=tol) |
               np.allclose(-v, v_true[0, :], atol=tol))
        assert(np.allclose(+u, u_true[:, 0], atol=tol) |
               np.allclose(-u, u_true[:, 0], atol=tol))
    def test_svd_em_sparse(self):
        # Same as test_svd_em but with a sparse input RDD.
        X, X_rdd = self.make_sparse_rdd((1000, 4))
        u, s, v = svd_em(X_rdd, 1, seed=42, maxiter=50)
        u = np.squeeze(np.concatenate(np.array(u.collect()))).T
        u_true, s_true, v_true = ln.svd(X.toarray())
        tol = 1e-1
        assert(np.allclose(s[0], s_true[0], atol=tol))
        assert(np.allclose(+v, v_true[0, :], atol=tol) |
               np.allclose(-v, v_true[0, :], atol=tol))
        assert(np.allclose(+u, u_true[:, 0], atol=tol) |
               np.allclose(-u, u_true[:, 0], atol=tol))
class TestTruncatedSVD(SplearnTestCase):
    # Compares SparkTruncatedSVD against scikit-learn's TruncatedSVD.
    def test_same_components(self):
        X, X_rdd = self.make_dense_rdd((1000, 10))
        n_components = 2
        random_state = 42
        tol = 1e-7
        local = TruncatedSVD(n_components, n_iter=5, tol=tol,
                             random_state=random_state)
        dist = SparkTruncatedSVD(n_components, n_iter=50, tol=tol,
                                 random_state=random_state)
        local.fit(X)
        dist.fit(X_rdd)
        v_true = local.components_
        v = dist.components_
        # Loose tolerance: the distributed solver is iterative, and the
        # sign of each component is arbitrary.
        tol = 1e-1
        assert(np.allclose(+v[0], v_true[0, :], atol=tol) |
               np.allclose(-v[0], v_true[0, :], atol=tol))
    def test_same_fit_transforms(self):
        X, X_rdd = self.make_dense_rdd((1000, 12))
        n_components = 4
        random_state = 42
        tol = 1e-7
        local = TruncatedSVD(n_components, n_iter=5, tol=tol,
                             random_state=random_state)
        dist = SparkTruncatedSVD(n_components, n_iter=50, tol=tol,
                                 random_state=random_state)
        Z_local = local.fit_transform(X)
        Z_dist = dist.fit_transform(X_rdd)
        Z_collected = Z_dist.toarray()
        assert_true(check_rdd_dtype(Z_dist, (np.ndarray,)))
        tol = 1e-1
        assert_array_equal(Z_local.shape, Z_collected.shape)
        # Only the first transformed column is compared, up to sign.
        assert(np.allclose(+Z_collected[:, 0], Z_local[:, 0], atol=tol) |
               np.allclose(-Z_collected[:, 0], Z_local[:, 0], atol=tol))
|
February 20, 2018 07:00 ET | Source: Aunt Fannie's, Inc.
PORTLAND, Ore., Feb. 20, 2018 (GLOBE NEWSWIRE) -- Aunt Fannie’s, nurturing healthy homes, protecting the biome we live in, supporting our immunity and strengthening our families with natural, healthy household cleaning products and pest solutions, today announced the addition of five new pest control products to its award-winning line-up of healthy housekeeping solutions. The five new products are: Ant Remedy, Perimeter Pest Powder, Roach Remedy, Insect Repellent Cards and Mosquito Wipes body care products. The new pest products will be available next month at auntfannies.com and in 1,600 retailers nationwide, including Target stores beginning June 2018. Aunt Fannie’s products, rated best-in-class, are the healthiest and safest options for our homes, bodies, families and pets.
Aunt Fannie's leads a revolution of the pest control market with solutions that eliminate pests without the use of chemical pesticides, dyes, phenols, formaldehyde, neonicotinoids, pyrethrins or petroleum propellants. Aunt Fannie’s pest products are designed for routine use to prevent bugs, pests and critters from entering the home. In addition to being safe for the environment, the entire suite of products is safe for use around loved ones, including kids and pets.
Since its launch in 2013, Aunt Fannie's has emerged as a clear industry leader in natural, healthy cleaning and pest control products. Its entire lineup of healthful, microbiomic cleaning solutions secured more "A" ratings than any competing brand in a scientific evaluation across three cleaning product categories by the Environmental Working Group (EWG), meaning its products are of the lowest concern, with few or no known or suspected hazards to health or the environment, and offer comprehensive ingredient disclosure.
Aunt Fannie's Roach Remedy gets rid of roaches and other creepy crawlies without carpet-bombing your home in conventional chemical pesticides – simply spray around windows, doors, cabinets and appliances, and directly on roaches as necessary. MSRP: $7.99.
Aunt Fannie’s full product line-up consists of: Cleaning Vinegar Wipes (MSRP: $6.99), Cleaning Vinegar Sprays (MSRP: $5.99), Floor Cleaner Vinegar Wash (MSRP: $7.99), Glass & Window Vinegar Wash (MSRP: $5.99). Aunt Fannie’s household cleaning products come in four natural fragrances: eucalyptus, lime mint, lavender and sweet mandarin.
Aunt Fannie’s is the pioneer in microbiomic, safe, non-toxic and effective cleaning and pest solutions for households, food service establishments and food manufacturers. Where other conventional and “natural” cleaning products over-clean and destroy healthy microbes our bodies and environments need, Aunt Fannie’s family of products promote a healthy balance with our bodies and indoor environments. Born out of a years-long journey to understand and resolve his family’s mysterious and life-threatening medical conditions, Mat Franken founded Aunt Fannie’s in 2013 with a mission to educate and enlighten us to what our ancestors understood. That a chemical-filled, hyper-sanitized home destroys the beneficial, human-friendly microorganisms our homes and families need for health and balance. Mat and his team have developed a family of microbiomic cleaning and pest solutions that received the highest rating by the Environmental Working Group (EWG), out-ranking competing cleaning brands, based on the safety of the ingredients used and comprehensive ingredient disclosure. Aunt Fannie’s can be found in more than 1,600 retailers nationwide, including Whole Foods, Bed Bath & Beyond, Safeway, Albertson’s, H-E-B Grocery Stores and Hy-Vee. To find a retailer near you or to shop online, please visit www.auntfannies.com – and join our family by following us on Twitter, Instagram, & Facebook.
|
from django.core.validators import MinLengthValidator
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from jsonfield import JSONField
from treeherder.model.models import (MachinePlatform,
OptionCollection,
Repository)
# Length of signature_hash values: 40 hex characters (presumably a SHA-1
# digest -- TODO confirm against the code that generates the hashes).
SIGNATURE_HASH_LENGTH = 40L
@python_2_unicode_compatible
class PerformanceFramework(models.Model):
    # A performance-testing framework/harness, identified by a unique slug.
    name = models.SlugField(max_length=255L, unique=True)
    class Meta:
        db_table = 'performance_framework'
    def __str__(self):
        return self.name
@python_2_unicode_compatible
class PerformanceSignature(models.Model):
signature_hash = models.CharField(max_length=SIGNATURE_HASH_LENGTH,
validators=[
MinLengthValidator(SIGNATURE_HASH_LENGTH)
],
db_index=True)
repository = models.ForeignKey(Repository, null=True) # null=True only temporary, until we update old entries
framework = models.ForeignKey(PerformanceFramework)
platform = models.ForeignKey(MachinePlatform)
option_collection = models.ForeignKey(OptionCollection)
suite = models.CharField(max_length=80L)
test = models.CharField(max_length=80L, blank=True)
lower_is_better = models.BooleanField(default=True)
last_updated = models.DateTimeField(db_index=True, null=True) # null=True only temporary, until we update old entries
# extra properties to distinguish the test (that don't fit into
# option collection for whatever reason)
extra_properties = JSONField(max_length=1024)
class Meta:
db_table = 'performance_signature'
# make sure there is only one signature per repository with a
# particular set of properties
unique_together = ('repository', 'framework', 'platform',
'option_collection', 'suite', 'test',
'last_updated')
# make sure there is only one signature of any hash per
# repository (same hash in different repositories is allowed)
unique_together = ('repository', 'signature_hash')
def __str__(self):
return self.signature_hash
@python_2_unicode_compatible
class PerformanceDatum(models.Model):
    """
    A single performance measurement: one value recorded for one signature,
    produced by one job against one result set.
    """
    repository = models.ForeignKey(Repository)
    job_id = models.PositiveIntegerField(db_index=True)
    result_set_id = models.PositiveIntegerField(db_index=True)
    signature = models.ForeignKey(PerformanceSignature)
    value = models.FloatField()
    push_timestamp = models.DateTimeField(db_index=True)
    class Meta:
        db_table = 'performance_datum'
        index_together = [('repository', 'signature', 'push_timestamp'),
                          ('repository', 'job_id'),
                          ('repository', 'result_set_id')]
        unique_together = ('repository', 'job_id', 'result_set_id',
                           'signature', 'push_timestamp')
    def save(self, *args, **kwargs):
        super(PerformanceDatum, self).save(*args, **kwargs)  # Call the "real" save() method.
        # Keep the signature's last_updated high-water mark in sync with the
        # newest datum that references it.
        if not self.signature.last_updated or (self.signature.last_updated <
                                               self.push_timestamp):
            self.signature.last_updated = self.push_timestamp
            self.signature.save()
    def __str__(self):
        return "{} {}".format(self.value, self.push_timestamp)
@python_2_unicode_compatible
class PerformanceAlertSummary(models.Model):
    '''
    A summarization of performance alerts

    A summary of "alerts" that the performance numbers for a specific
    repository have changed at a particular time.

    See also the :ref:`PerformanceAlert` class below.
    '''
    id = models.AutoField(primary_key=True)
    repository = models.ForeignKey(Repository)
    # the result set interval over which the change was detected
    prev_result_set_id = models.PositiveIntegerField()
    result_set_id = models.PositiveIntegerField()
    last_updated = models.DateTimeField(db_index=True)
    class Meta:
        db_table = "performance_alert_summary"
        unique_together = ('repository', 'prev_result_set_id', 'result_set_id')
    def __str__(self):
        return "{} {}".format(self.repository, self.result_set_id)
@python_2_unicode_compatible
class PerformanceAlert(models.Model):
    '''
    A single performance alert

    An individual "alert" that the numbers in a specific performance
    series have consistently changed level at a specific time.

    An alert is always a member of an alert summary, which groups all
    the alerts associated with a particular result set and repository
    together. In many cases at Mozilla, the original alert summary is not
    correct, so we allow reassigning it to a different (revised) summary.
    '''
    id = models.AutoField(primary_key=True)
    summary = models.ForeignKey(PerformanceAlertSummary,
                                related_name='alerts')
    # optional corrected summary (see class docstring)
    revised_summary = models.ForeignKey(PerformanceAlertSummary,
                                        related_name='revised_alerts',
                                        null=True)
    series_signature = models.ForeignKey(PerformanceSignature)
    is_regression = models.BooleanField()
    amount_pct = models.FloatField(
        help_text="Amount in percentage that series has changed")
    amount_abs = models.FloatField(
        help_text="Absolute amount that series has changed")
    prev_value = models.FloatField(
        help_text="Previous value of series before change")
    new_value = models.FloatField(
        help_text="New value of series after change")
    t_value = models.FloatField(
        help_text="t value out of analysis indicating confidence "
                  "that change is 'real'")
    class Meta:
        db_table = "performance_alert"
        # at most one alert per series within a given summary
        unique_together = ('summary', 'series_signature')
    def __str__(self):
        return "{} {} {}%".format(self.summary, self.series_signature,
                                  self.amount_pct)
|
Home Godhead What does God want from us?
God created us for fellowship with Him: “I have loved thee with an everlasting love: therefore with loving kindness have I drawn thee” (Jeremiah 31:3). Parents bring children to life for this same reason. God is our Heavenly Father and yearns to have children to love (2 Thessalonians 2:16). Life is a chance given to us humans to develop that love-relationship with our Creator. But God created us with the freedom of choice to accept His love or reject it. Sadly, our first parents chose to believe the devil and distrust God. As a result, sin, death, and suffering became facts of life (Romans 5:12).
But God in His great mercy offered His Son to save us from death so that all who accept His offer of salvation can be eternally saved (John 1:12). “For God so loved the world, that he gave his only begotten Son, that whosoever believeth in him should not perish, but have everlasting life” (John 3:16). There is no greater love than this that someone may die for those whom he loves (John 15:13).
The Lord cares for us individually (Psalm 139:13) and has a purpose for each of us: “For we are his workmanship, created in Christ Jesus for good works, which God prepared beforehand, that we should walk in them” (Ephesians 2:10). God’s plans for us are good plans of prosperity and hope (Jeremiah 29:11; Ephesians 2:10) and we can find purpose for our lives when we abide in Him (John 15:1-17). Abiding in God means daily connecting with Him through study of His Word (Acts 17:11) and prayer (1 Thessalonians 5:17).
God has given each of us special gifts that are listed in 1 Corinthians 12:4-11. In this chapter, the apostle Paul presents the Church as the body of Christ, with each member having a different purpose, yet all working together in unison (1 Corinthians 12:12-31). We can therefore be successful, fulfilled and happy in fellowship with one another and with the Lord, who wants us to reciprocate the love He has poured out on us.
|
from openpyxl import load_workbook
from openpyxl import Workbook
from openpyxl.utils import column_index_from_string
from copy import copy
import re
# Maps row labels used in the output spreadsheet template (keys) to the
# corresponding line-item names in the scraped source data (values;
# the naming suggests Indian filings, e.g. Moneycontrol exports -- TODO
# confirm). Values containing ' + ' or ' - ' are arithmetic expressions
# over several source line items, evaluated by
# SDxlsMixin.calculate_datarow().
key_map = {
        'Cash and Equivalents':'Cash And Cash Equivalents',
        'Accounts Receivable, Net':'Trade Receivables',
        'Inventory':'Inventories',
        'Total Current Assets':'Total Current Assets',
        'Net PP&E':'Tangible Assets',
        'Intangible Assets': 'Intangible Assets',
        'Total Assets': 'Total Assets',
        'Accounts Payable':'Trade Payables',
        'Taxes Payable':'Deferred Tax Liabilities [Net]',
        'Total Current Liabilities':'Total Current Liabilities',
        'Long-term Debt':'Long Term Borrowings',
        "Total Stockholder's Equity":'Total Shareholders Funds',
        'Total Liabilities and Equity':'Total Capital And Liabilities',
        'Sales':'Total Operating Revenues',
        'Depreciation and Amortization':'Depreciation And Amortisation Expenses',
        'Interest Expense':'Finance Costs',
        'Other Gains and Losses':'Exceptional Items',
        'Pretax Income': 'Profit/Loss Before Tax',
        'Income Tax Expense':'Total Tax Expenses',
        'Net Income':'Profit/Loss For The Period',
        'Net Cash from Operations':'Net CashFlow From Operating Activities',
        'Net Cash from Investing Activities':'Net Cash Used In Investing Activities',
        'Net Cash from Financing Activities':'Net Cash Used From Financing Activities',
        'Change in cash':'Net Inc/Dec In Cash And Cash Equivalents',
        'Earnings per share': 'Diluted EPS (Rs.)',
        'Dividends per share': 'Dividend / Share(Rs.)',
        'BookValue per share': 'Book Value [InclRevalReserve]/Share (Rs.)',
        'Other Current Assets':'Total Current Assets - Inventories - Trade Receivables - Cash And Cash Equivalents',
        'Other Current Liabilities':'Total Current Liabilities - Trade Payables',
        'Other Liabilities': 'Total Non-Current Liabilities - Long Term Borrowings - Deferred Tax Liabilities [Net]',
        'Total Liabilities': 'Total Current Liabilities + Total Non-Current Liabilities',
        'Cost of Goods Sold':'Cost Of Materials Consumed + Purchase Of Stock-In Trade + Changes In Inventories Of FG,WIP And Stock-In Trade',
        'Gross Profit':'Total Operating Revenues - Cost Of Materials Consumed - Purchase Of Stock-In Trade - Changes In Inventories Of FG,WIP And Stock-In Trade',
        'Operating Income before Depr':'Total Operating Revenues - Cost Of Materials Consumed - Purchase Of Stock-In Trade - Changes In Inventories Of FG,WIP And Stock-In Trade - Employee Benefit Expenses - Other Expenses',
        'Operating Profit':'Total Operating Revenues - Cost Of Materials Consumed - Purchase Of Stock-In Trade - Changes In Inventories Of FG,WIP And Stock-In Trade - Employee Benefit Expenses - Other Expenses - Depreciation And Amortisation Expenses',
        'Selling, General, and Admin Exp':'Employee Benefit Expenses + Other Expenses'
        }
class SDxlsMixin():
    """Mixin that writes scraped financial data (``self.data``, a list of
    rows where each row starts with a label cell) into Excel workbooks.

    NOTE(review): written against an older openpyxl API in which
    ``cell.column`` is a column *letter* (hence ``column_index_from_string``)
    and ``get_sheet_by_name`` exists; modern openpyxl changed both --
    confirm the pinned openpyxl version.
    """
    def copy_fulldata(self, dest):
        # Dump self.data verbatim into a fresh workbook saved at *dest*.
        wb = Workbook()
        ws = wb.active
        for i,row in enumerate(self.data):
            for j,col in enumerate(row):
                ws.cell(row=i+1, column=j+1, value=col)
        wb.save(dest)
    def oper_list_of_list(self, data, name, oper):
        # Element-wise combination of parallel rows: sum when *oper* is
        # truthy, otherwise first-row minus second-row. Positions where any
        # row holds a string (e.g. the label column) are dropped, then
        # *name* is prepended as the new row label.
        if oper :
            tempdata = [sum(i) for i in zip(*data) if not str in [type(e) for e in i]]
        else:
            tempdata = [i[0] - i[1] for i in zip(*data) if not str in [type(e) for e in i]]
        tempdata.insert(0,name)
        return tempdata
    def calculate_datarow(self,key,name):
        # Resolve *key* -- either a plain row label, or several labels
        # joined by ' + ' or ' - ' -- against self.data, returning the
        # (possibly computed) row labelled *name*.
        datarow = []
        if len(re.split(' \+ | - ', key)) < 2 :
            # simple case: key is a single label, look it up directly
            for drow in self.data:
                if drow[0] == key:
                    datarow = drow
        else:
            tempdata = []
            # an expression is assumed to use a single operator kind;
            # ' + ' wins if both appear (no mixed keys exist in key_map)
            delimt = ' - '
            if ' + ' in key : delimt = ' + '
            keys = key.split(delimt)
            for k in keys:
                for drow in self.data:
                    if drow[0] == k:
                        tempdata.append(drow)
            if delimt == ' + ':
                tempdata[0] = self.oper_list_of_list(tempdata, name, True)
            if delimt == ' - ':
                # A - B - C is computed as A - (B + C)
                tempdata[1] = self.oper_list_of_list(tempdata[1:], name, True)
                tempdata = tempdata[:2]
                tempdata[0] = self.oper_list_of_list(tempdata, name, False)
            datarow = tempdata[0]
        return datarow
    def copy_cellformat(self,incell, outcell):
        # Clone all style attributes from *incell* onto *outcell*.
        if incell.has_style:
            outcell.font = copy(incell.font)
            outcell.border = copy(incell.border)
            outcell.fill = copy(incell.fill)
            outcell.number_format = copy(incell.number_format)
            outcell.protection = copy(incell.protection)
            outcell.alignment = copy(incell.alignment)
    def update_mysheet(self,wb):
        # Fill the template's active sheet: rows whose first cell matches a
        # key_map entry get their data cells replaced with values computed
        # from self.data.
        ws = wb.active
        for row in ws.rows:
            if not isinstance(row[0].value,str):continue
            key = key_map.get(row[0].value.strip())
            if not key: continue
            datarow = self.calculate_datarow(key, row[0].value)
            for idx, datacol in enumerate(datarow):
                if not idx: continue
                # values land one column to the right of their datarow
                # position -- presumably the template keeps a spare column
                # after the label; TODO confirm against the template layout
                cell = row[idx+1]
                col = column_index_from_string(cell.column)
                if type(datacol) != float:
                    # scraped numbers arrive as strings with thousands separators
                    newcell = ws.cell(row=cell.row,column=col, value=float(datacol.replace(',','')))
                else :
                    newcell = ws.cell(row=cell.row,column=col, value=float(datacol))
                self.copy_cellformat(cell, newcell)
    def zap_mysheet(self, ws):
        # Reset every float cell to 0.0 while keeping its formatting.
        for row in ws.rows:
            for cell in row:
                if isinstance(cell.value,float):
                    dcell = ws.cell(row=cell.row, column=column_index_from_string(cell.column), value=0.0)
                    self.copy_cellformat(cell, dcell)
    def copy_mysheet(self,src, dest, sheetname):
        # Copy *sheetname* (values + formats) out of workbook *src* into a
        # new workbook, zero out its numbers, refill them from self.data,
        # and save the result to *dest*.
        dwb = Workbook()
        dws = dwb.active
        swb = load_workbook(filename = src, keep_vba=True)
        sws = swb.get_sheet_by_name(sheetname)
        dws.title = sws.title
        dws.sheet_view.showGridLines = False
        for row in sws.rows:
            for cell in row:
                dcell = dws.cell(row=cell.row, column=column_index_from_string(cell.column), value=cell.value)
                self.copy_cellformat(cell, dcell)
        self.zap_mysheet(dws)
        self.update_mysheet(dwb)
        dwb.save(dest)
|
Garmin Echo 150 has an average rating of 3 out of 5 based on 5 user reviews.
While Garmin Echo 150 qualifies as one of the budget models, appealing to the cost-sensible fisherman, it has numerous features and qualities which make it feel like a top of the line system. For starters, there are its excellent dual beam capabilities. Making the most of its 200 watt sonar power, the beams go down 1,300 feet in fresh water, relaying back superb detail. And, there is a viewing angle of up to 120 degrees, so you can see far and wide. The Echo 150 also boasts Garmin HD-ID technology, which provides more detail and definition on your compact but bright 4 inch screen.
The best way to experience the benefits of this model is to just set it up and try it out for yourself; luckily, the mount and go capabilities make that a piece of cake to accomplish. As is standard with many fishfinders, it has a quick release mount; and, can be easily set up on any boat, tiny to large, due to its relatively small size. And of course, you do not have to reach too deeply into your pockets to make the purchase possible, as the Echo 150 is (somewhat surprisingly) an economy model fishfinder.
You can get a great deal on Garmin Echo 150 here.
Garmin Echo 150 is an okay product but I’m having a little problem with the auto range. For some reason it drops to lower depth range too quickly and uses half the screen because of that. I turn off auto range and I manually change the depth range so that I can see the full screen but that requires constant monitoring. Overall it’s a solid fishfinder, can’t expect much for $100 these days.
This fishfinder is not very expensive but remarkably accurate when it comes to marking fish. Each fish is depicted with an arch and the precise depth of the fish. Echo 150 also has different size icons to represent the size of fish and the audio alarm when fish is detected. I fish in shallow water so I was able to lower the power output to 30 to get rid of the background sonar noise. It’s remarkable what an $80 piece of electronics can do these days.
|
"Read GenomeSIMLA formatted chromosome templates"
import pydigree
from pydigree.io.smartopen import smartopen
def read_gs_chromosome_template(templatef):
    """
    Reads a genomeSIMLA format chromosome template file

    :param templatef: The filename of the template file
    :type templatef: string

    :rtype: A ChromosomeTemplate object corresponding to the file
    """
    with smartopen(templatef) as f:
        # First line holds the chromosome label; the second holds the
        # marker count, which we don't need (markers are added one by one).
        chrom_label = f.readline().strip()
        f.readline()
        template = pydigree.ChromosomeTemplate(label=chrom_label)
        # genomeSIMLA chromosome files store marginal recombination
        # probabilities between adjacent markers instead of absolute map
        # positions, so accumulate them into a running centimorgan total.
        position_cm = 0
        for line in f:
            if line == '\n':
                continue
            marker_label, _, minor_freq, gap_cm, bp = line.strip().split()
            position_cm += float(gap_cm)
            template.add_genotype(float(minor_freq), position_cm,
                                  label=marker_label, bp=int(bp))
    return template
|
I'm always excited about eating cookies around the holidays! My personal favorite desserts have both chocolate and mint in them and I couldn't find a cookie recipe I really liked. So, I made this recipe (and tested it about 30 times :D). I prefer a thick cakey cookie (almost like a brownie). If you like a thinner cookie, don't melt the butter and use regular flour instead of cake flour. You can also substitute more butter for the shortening if you can't find it or like a thinner cookie.
This is my version of a popular scouting cookie. Instead of thin and crispy, these are thick and chewy!
|
# Copyright © 2019 The vt-py authors. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import asyncio
import os
import sys
import vt
async def get_files_to_upload(queue, path):
"""Finds which files will be uploaded to VirusTotal."""
if os.path.isfile(path):
await queue.put(path)
return
with os.scandir(path) as it:
for entry in it:
if not entry.name.startswith('.') and entry.is_file():
await queue.put(entry.path)
async def upload_hashes(queue, apikey):
  """Worker coroutine: drain *queue*, uploading each file to VirusTotal.

  Opens one shared API client for the worker's lifetime and exits once
  the queue is observed empty. (Despite the name, it uploads files, not
  hashes.)
  """
  async with vt.Client(apikey) as client:
    while True:
      if queue.empty():
        break
      path = await queue.get()
      await client.scan_file_async(file=path)
      print(f'File {path} uploaded.')
      queue.task_done()
def main():
  """Command-line entry point: parse arguments and upload files."""
  parser = argparse.ArgumentParser(description='Upload files to VirusTotal.')
  parser.add_argument('--apikey', required=True, help='your VirusTotal API key')
  parser.add_argument('--path', required=True,
                      help='path to the file/directory to upload.')
  parser.add_argument('--workers', type=int, required=False, default=4,
                      help='number of concurrent workers')
  args = parser.parse_args()

  if not os.path.exists(args.path):
    print(f'ERROR: file {args.path} not found.')
    sys.exit(1)

  async def run_uploads():
    # BUG FIX: asyncio.Queue(loop=...) was removed in Python 3.10, and the
    # explicit get_event_loop()/run_until_complete() dance is deprecated.
    # Creating the queue inside the coroutine binds it to the running loop.
    queue = asyncio.Queue()
    # Fill the queue completely *before* starting the workers, so a worker
    # cannot observe an empty queue and exit before scanning begins.
    await get_files_to_upload(queue, args.path)
    workers = [asyncio.ensure_future(upload_hashes(queue, args.apikey))
               for _ in range(args.workers)]
    # Wait until all worker tasks have completed.
    await asyncio.gather(*workers)

  asyncio.run(run_uploads())


if __name__ == '__main__':
  main()
|
Parking regulations are a critical part of our transport network and assist in maintaining accessible, efficient, safe and sustainable transport systems in any community. Parking regulation and signage differs across the Town of Victoria Park and we encourage you to be mindful of the signage within the area where you choose to park.
If you do not understand a sign, please call the Parking Assistance line on 9311 8115.
Below is some further information on some commonly misunderstood parking signs.
|
# -*- coding: utf-8 -*-
import calendar
from datetime import datetime
import logging
import os.path
import re
import webbrowser
from utils import mongo, fuzzy_dates, unicode_csv
class ScrapeMeetings:
    """Scrapes ministerial meeting data out of previously fetched files.

    Reads file metadata from the ``meetings_fetch`` mongo collection,
    parses each CSV into per-meeting dicts and stores the results in
    ``meetings_scrape``. (PDF parsing is not implemented yet.)
    """
    def __init__(self, **kwargs):
        # fetch the logger
        self._logger = logging.getLogger("spud")
        # database stuff
        self.db = mongo.MongoInterface()
        self.PREFIX = "meetings"
        if kwargs["refreshdb"]:
            self.db.drop("%s_scrape" % self.PREFIX)
        # get the current path
        self.current_path = os.path.dirname(os.path.abspath(__file__))
        self.STORE_DIR = "store"
    def find_header_rows(self, meetings):
        """Return ``(row_index, column_mappings)`` for each row that looks
        like a table header; ``column_mappings`` maps our canonical field
        names to column indexes."""
        found_headers = []
        headers_re = [
            ("date", re.compile(r"(?:date|month)", re.IGNORECASE)),
            ("organisation", re.compile(r"(?:organisation|individuals|senior executive)", re.IGNORECASE)),
            ("name", re.compile(r"(?:name|minister|officials|spad)", re.IGNORECASE)),
            ("purpose", re.compile(r"(?:purpose|nature|issues)", re.IGNORECASE)),
        ]
        for row_idx, row in enumerate(meetings):
            column_mappings = {}
            # create a copy, since matched headers are removed as we go
            current_headers = list(headers_re)
            for column_idx, cell in enumerate(row):
                for idx, header in enumerate(current_headers):
                    header_id, header_re = header
                    if header_re.search(cell):
                        # remove from the possible headers
                        column_mappings[header_id] = column_idx
                        current_headers.pop(idx)
                        break
            found_header = column_mappings.keys()
            # a row only counts as a header if it names at least a date
            # column and an organisation column
            if "date" in found_header and "organisation" in found_header:
                if "name" not in found_header and 0 not in column_mappings.values():
                    # take a guess that the first column is the name
                    column_mappings["name"] = 0
                found_headers.append((row_idx, column_mappings))
        return found_headers
    def read_csv(self, filename):
        """Read a stored CSV file into a list of rows of stripped cells."""
        full_path = os.path.join(self.current_path, self.STORE_DIR, filename)
        with open(full_path, "rU") as csv_file:
            csv = unicode_csv.UnicodeReader(csv_file, encoding="latin1", strict=True)
            # read in the whole csv
            return [[cell.strip() for cell in row] for row in csv]
    # strip empty columns; standardize row length
    def normalise_csv(self, meetings):
        row_length = max([len(row) for row in meetings])
        not_empty = {}
        for row in meetings:
            if len(not_empty) == row_length:
                break
            for idx, cell in enumerate(row):
                if idx in not_empty:
                    continue
                if cell != "":
                    not_empty[idx] = None
        # BUG FIX: the indexes were previously taken in dict-key order
        # (i.e. the order in which non-empty cells were first seen), which
        # is not necessarily column order. Sort so columns keep their
        # original left-to-right order -- find_header_rows' "first column
        # is the name" guess relies on it.
        not_empty = sorted(not_empty.keys())
        return [[m[idx] if idx < len(m) else "" for idx in not_empty] for m in meetings]
    # often, a cell is left blank to mean its value is
    # the same as the value of the cell above. This function populates
    # these blank cells.
    def populate_empty_cells(self, meetings, header_mappings):
        if len(meetings) <= 1:
            return meetings
        pop_meetings = [meetings[0]]
        for idx, row in enumerate(meetings[1:]):
            # fall back to the previous (already populated) row's value
            pop_meeting = {k: row.get(k) if row.get(k) is not None else pop_meetings[idx].get(k, "") for k in header_mappings.keys()}
            pop_meetings.append(pop_meeting)
        return pop_meetings
    def csv_to_dicts(self, meeting_rows, header_mappings):
        """Convert raw rows to dicts keyed by canonical field names,
        dropping empty cells and entirely blank rows."""
        meeting_dicts = []
        for meeting_row in meeting_rows:
            meeting = {}
            for k, v in header_mappings.items():
                val = meeting_row[v]
                if val == "":
                    continue
                meeting[k] = val
            # we avoid adding blank rows
            if meeting != {}:
                meeting_dicts.append(meeting)
        return meeting_dicts
    def parse_meetings(self, meetings, meta):
        """Normalise each meeting's date in place, using the date range
        implied by the file title to disambiguate fuzzy dates."""
        date_format = None
        date_range = fuzzy_dates.extract_date_range(meta["title"])
        for meeting in meetings:
            if "date" not in meeting:
                # BUG FIX: the row was previously passed as an extra
                # positional logging argument without a %s placeholder,
                # which makes logging raise a string-formatting error.
                self._logger.warning("Date missing from the following row: %s", meeting)
                continue
            meeting_date = fuzzy_dates.parse_date(meeting["date"], date_format=date_format, date_range=date_range)
            if meeting_date:
                meeting["date"] = str(meeting_date.date)
                # reuse the detected format for subsequent rows of the file
                date_format = meeting_date.date_format
            else:
                self._logger.warning("Couldn't find '%s' in range %s" % (meeting["date"], date_range))
        return meetings
    def scrape_csv(self, meta):
        """Extract meeting dicts from a single fetched CSV file."""
        self._logger.info("... %s" % meta["filename"])
        meetings = self.read_csv(meta["filename"])
        meetings = self.normalise_csv(meetings)
        # find index(es) of header rows
        header_rows = self.find_header_rows(meetings)
        if header_rows == []:
            # doesn't look like this file contains meeting data
            return []
        meetings_dicts = []
        # sometimes a file contains multiple groups of meetings
        for idx, header_row in enumerate(header_rows):
            if idx == len(header_rows) - 1:
                meetings_block = meetings[header_row[0]+1:]
            else:
                # NOTE(review): the -1 also drops the row immediately
                # before the next header (often a title row); confirm this
                # is intentional for all source files
                meetings_block = meetings[header_row[0]+1:header_rows[idx + 1][0]-1]
            block_dicts = self.csv_to_dicts(meetings_block, header_row[1])
            block_dicts = self.populate_empty_cells(block_dicts, header_row[1])
            meetings_dicts += block_dicts
        return meetings_dicts
    def run(self):
        """Scrape every fetched file and save the results to mongo."""
        self._logger.info("Scraping Meetings")
        _all_meetings = self.db.fetch_all("%s_fetch" % self.PREFIX, paged=False)
        for meta in _all_meetings:
            meetings = []
            meta["published_at"] = str(datetime.strptime(meta["published_at"], "%d %B %Y").date())
            if meta["file_type"] == "CSV":
                meetings = self.scrape_csv(meta)
                meetings = self.parse_meetings(meetings, meta)
            elif meta["file_type"] == "PDF":
                # TODO: Parse PDF
                pass
            for meeting in meetings:
                # carry file-level metadata onto each individual meeting
                for k in ["published_at", "department", "title", "source"]:
                    meeting[k] = meta[k]
                self.db.save("%s_scrape" % self.PREFIX, meeting)
def scrape(**kwargs):
    # Module-level entry point: build a ScrapeMeetings instance and run it.
    ScrapeMeetings(**kwargs).run()
|
That is why we help state schools and colleges to build alumni communities. Former students can transform a young person’s confidence, motivation and skills.
Sign up to your school or college alumni network!
MP goes back to school to inspire pupils to aim high.
Shawbrook Bank and Future First work together to inspire young people in state schools within reach of Shawbrook’s offices. This included supporting Future First to pilot working in Scotland for the first time. Shawbrook also encourage their employees to sign up to their old school or college alumni network.
Future First has received two rounds of funding from the government’s Careers and Enterprise Company (CEC) to boost social mobility by helping prepare young people for the world of work.
In 2016, we received funding from the CEC to expand our work in the South West. The one-year programme supported 40 schools and colleges across Cornwall, Devon and Somerset to build a thriving network of former students. Across the year, we supported participating schools to sign up 6,000 of their former students and to enable 30,000 encounters between their students and alumni.
Following this success, in 2017 we received a second round of funding to help students see a world beyond their own in the government’s first six opportunity areas. We are working with 27 schools in Oldham, Blackpool, Scarborough, Norwich, Derby and West Somerset to build alumni communities which will help address identified gaps in careers and enterprise activity.
UBS have funded Future First's In-House programme in the Bridge Academy, Hackney. The programme is designed to provide the school with hands-on support from Future First staff to embed an alumni programme across the school. The Bridge Academy has their own dedicated Future First Alumni Officer, who spends part of their time 'in house' at the school, helping staff to build and manage their network of former students and tracking the destination of students as they leave each year.
KPMG is one of Future First’s flagship supporters. By supporting, Future First's core schools programme, KPMG is helping to connect thousands of alumni with young people in their old schools and colleges. Through KPMG’s support, Future First reached 82,850 students in 2015/16.
The Wellcome Trust and Future First partner to deliver insight days for young people at state schools in Camden. Staff from across the Trust participate, including staff from communications, research, grants and investments. The volunteers coached and supported students in creative activities designed to build students' confidence and to help them identify their skills and strengths.
Taylor Wessing and Future First partner to deliver career insight days for young people from our network of schools. The days aim to build students' confidence and increase their knowledge of a career inside a big City firm. Representatives from across the firm volunteer: from lawyers to facilities and HR to marketing.
|
from unittest.mock import Mock
from django.db.models import NOT_PROVIDED
from django.test import TestCase
from model_utils.fields import UrlsafeTokenField
class UrlsaftTokenFieldTests(TestCase):
    """Tests for model_utils.fields.UrlsafeTokenField.

    NOTE(review): the class name misspells "Urlsafe" as "Urlsaft"; left
    unchanged here because renaming could break external test selection.
    """
    def test_editable_default(self):
        # tokens are auto-generated, so the field is non-editable by default
        field = UrlsafeTokenField()
        self.assertFalse(field.editable)
    def test_editable(self):
        field = UrlsafeTokenField(editable=True)
        self.assertTrue(field.editable)
    def test_max_length_default(self):
        field = UrlsafeTokenField()
        self.assertEqual(field.max_length, 128)
    def test_max_length(self):
        field = UrlsafeTokenField(max_length=256)
        self.assertEqual(field.max_length, 256)
    def test_factory_default(self):
        # no custom token factory unless one is passed in
        field = UrlsafeTokenField()
        self.assertIsNone(field._factory)
    def test_factory_not_callable(self):
        with self.assertRaises(TypeError):
            UrlsafeTokenField(factory='INVALID')
    def test_get_default(self):
        # generated tokens fill the whole max_length
        field = UrlsafeTokenField()
        value = field.get_default()
        self.assertEqual(len(value), field.max_length)
    def test_get_default_with_non_default_max_length(self):
        field = UrlsafeTokenField(max_length=64)
        value = field.get_default()
        self.assertEqual(len(value), 64)
    def test_get_default_with_factory(self):
        # a user-supplied factory is called with max_length and its return
        # value is used verbatim
        token = 'SAMPLE_TOKEN'
        factory = Mock(return_value=token)
        field = UrlsafeTokenField(factory=factory)
        value = field.get_default()
        self.assertEqual(value, token)
        factory.assert_called_once_with(field.max_length)
    def test_no_default_param(self):
        # an explicit default is ignored: the field always generates tokens
        field = UrlsafeTokenField(default='DEFAULT')
        self.assertIs(field.default, NOT_PROVIDED)
    def test_deconstruct(self):
        # the factory must survive a deconstruct/reconstruct round trip
        # (required for Django migrations)
        def test_factory():
            pass
        instance = UrlsafeTokenField(factory=test_factory)
        name, path, args, kwargs = instance.deconstruct()
        new_instance = UrlsafeTokenField(*args, **kwargs)
        self.assertIs(instance._factory, new_instance._factory)
        self.assertIs(test_factory, new_instance._factory)
|
New Massey University research highlighted the importance of communication and technology, personal and agency support, orientation and mobility, health, rebuilding independence, rehabilitation and coping and resilience, for people with visual impairment following a disaster.
Recent earthquakes in the lower North and upper South Islands have been a stark reminder of the challenges residents confronted during the Canterbury earthquakes of 2010/2011.
Visually impaired residents faced further challenges that have now been documented by Massey University’s Dr Gretchen Good and Dr Suzanne Phibbs of the College of Health in research which explores the experiences of 12 visually impaired residents who lived through more than 12,000 aftershocks.
The study, Disorientated and Immobile: The Experience of People with Visual Impairments During and After the Christchurch, New Zealand 2010 and 2011 Earthquakes, was recently published in the Journal of Visual Impairment & Blindness. It involved 12 face-to-face interviews conducted after the September 2010 quake, but prior to the fatal 2011 February quake. Then, in February 2012, seven of the original participants were re-interviewed about how more than a year of earthquakes had affected their lives. Three staff members from the Blind Foundation were also interviewed in April 2011. The paper was co-authored by Dr Good and Dr Phibbs, along with Kerry Williamson, a research assistant from the Ministry of Justice.
Dr Phibbs says the research, a first of its kind in the world, highlighted the importance of communication and technology, personal and agency support, orientation and mobility, health, rebuilding independence, rehabilitation, coping and resilience.
“Participants demonstrated creative problem-solving abilities, resilience and community spirit. However our findings indicate that older visually impaired people are among the most vulnerable in disasters, and more work needs to be done to prepare them,” she says.
“I could hear crockery falling and breaking in the living room and in the kitchen and I thought ‘I don’t know what to do.’ I’ve been told many, many times during an earthquake go and stand under a door jamb but I couldn’t even get there.” – Anonymous research participant.
“This is the first time visually impaired people’s experiences of sequential earthquake activity has been tracked both before and after a catastrophic disaster anywhere in the world, and the results of our study provide a rare insight into the impact of disasters on those who are older and living with impaired vision,” Dr Phibbs says.
Dr Good, who spent nine years visually impaired before regaining her sight after 23 operations, says it is crucial people with impaired vision keep transistor radios handy, with a good supply of batteries. “However, participants told us the quality of the information they received from radio broadcasts was poor. Misinformation and the challenge of finding the best radio station that could be relied on to have the most up-to-date facts was difficult.
“They felt accessing information that helped them, rather than frightened them was hard to come by in the aftermaths of the quakes. There was also a frustration at a lack of information about the conditions of local walkways and disruption to bus routes for many months following the quakes,” Dr Good says.
Dr Gretchen Good with her assistance dog Caz, and Dr Suzanne Phibbs.
“Oh the dog, the poor dog … he was shivering, he shook until about 10 o’clock the next morning, he just shook. I gave him his breakfast and he couldn’t eat all his breakfast, what he did eat, he brought up, so he was really in a bad way” – Bonnie, January 2011.
Guide dog users discovered that their companions had to be comforted, re-trained and assessed for their abilities to cope as working dogs after the quakes. “Altered bus routes, the disappearance of familiar landmarks, liquefaction and detours all created a greater level of stress for the people and their guide dogs,” Dr Good says.
Seven people who were re-interviewed following the February 2011 quakes had learned about emergency preparedness through trial and error and they ultimately managed to cope and maintain their independence through four major earthquakes.
“They spoke about their resilience and having to survive what felt to them like a war zone. It was a matter of doing the best they could at the time with the resources they had. They all spoke of the need to be with others – the need to flee or escape their home to be in the company of friends and family was a regular theme,” Dr Good says.
All of the researchers concluded that more work needs to be done to prepare communities, agencies, families and individuals for potential disasters.
“Every participant told us that personal contact with someone in the week following the quakes was essential, but little agency support was offered,” Dr Phibbs says.
“People with minimal social supports reported feeling isolated and panic-stricken, while those with larger social networks reported a quicker recovery from trauma. It really emphasises the importance of neighbourhood and community support,” she says.
|
"""
/***************************************************************************
Name : Auhtorization Service
Description : Checks whether the logged in user has permissions to
access the particular content item
Date : 2/June/2013
copyright : (C) 2013 by John Gitau
email : gkahiu@gmail.com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from roleprovider import RoleProvider
from exception import SecurityException
from stdm.data import Content, STDMDb, Base
from stdm.utils import *
from sqlalchemy import Table
from sqlalchemy.orm import relationship, mapper, clear_mappers
from sqlalchemy.exc import *
class RoleMapper(object):
    """Empty placeholder class.

    NOTE(review): appears intended as a mapping target for SQLAlchemy's
    ``mapper()`` (imported above) against a role table -- confirm usage.
    """
    pass
class Authorizer(object):
    '''
    This class has the responsibility of asserting whether an account with
    the given user name has permissions to access a particular content item.
    '''
    def __init__(self, username):
        self.username = username
        self.userRoles = []
        self._getUserRoles()

    def _getUserRoles(self):
        '''
        Get the roles that the user belongs to and cache them in
        self.userRoles.
        '''
        roleProvider = RoleProvider()
        self.userRoles = roleProvider.GetRolesForUser(self.username)
        # 'postgres' is a login role rather than a group role in PostgreSQL,
        # but content registration morphs it into a role, so it has to be
        # treated as one of the user's roles here as well.
        pg_account = 'postgres'
        if self.username == pg_account:
            self.userRoles.append(pg_account)

    def CheckAccess(self, contentCode):
        '''
        Assert whether the given user has permissions to access a content
        item with the given code.

        Returns True if any of the user's roles is attached to the content
        item, False otherwise. Database errors (e.g. the current user cannot
        read the content tables) propagate to the caller.
        '''
        # Fixes over the original: removed a duplicated query statement that
        # was left inside a stray triple-quoted string, replaced '!= None'
        # with 'is not None', and dropped a no-op 'except Exception: raise'
        # wrapper (it only re-raised, so behavior is unchanged).
        hasPermission = False
        cnt = Content()
        qo = cnt.queryObject()
        cntRef = qo.filter(Content.code == contentCode).first()
        if cntRef is not None:
            for rl in cntRef.roles:
                if getIndex(self.userRoles, rl.name) != -1:
                    hasPermission = True
                    break
        return hasPermission
|
Learn Redis The Hard Way (in Production) — How a popular hotel comparison site uses Redis heavily in production, including where things didn’t work as expected.
Scuba: Diving into Data at Facebook [PDF] — A paper describing a commonly used system at Facebook for doing live, real-time analysis on terabytes of data. Related Hacker News discussion.
[eBook] The Essential Guide to Queuing Theory — Queueing theory rules everything around you - Whether you’re an entrepreneur, engineer, or manager, queueing theory is one of the best ways to boost performance. This 30-page ebook demystifies the subject without requiring pages full of equations.
The Probability of Data Loss in Large Clusters — Many distributed storage systems (e.g. Cassandra, Riak, HDFS, MongoDB,) can use replication to make data durable, but how does this affect the odds of data loss?
1.1 Billion Taxi Rides on kdb+/q & 4 Xeon Phi CPUs — Q is a programming language with a built-in, column-oriented, in-memory and on-disk database called kdb+ that includes and extends SQL. Here’s a practical look at using it.
The Internals of PostgreSQL — Suzuki Hironobu covers the internals of PostgreSQL for database administrators and system developers in this helpful translation of his 2012 Japanese-language book.
MySQL and Snap Packaging — A look at using ‘snaps’ (a new way of packaging apps for distribution on Linux) to distribute MySQL.
JanusGraph Picks Up Where TitanDB Left Off — JanusGraph is a new Linux Foundation project formed to continue development of the TitanDB graph database.
InfluxDB 1.2 Released — ..with subqueries and 50% better write throughput on larger hardware.
Providing Least Privileged Data Access in MongoDB — Read-only views (ROV) are a similar concept to table views found in relational databases.
Secure MongoDB and Protect Yourself from the Ransom Hack — Advice on how to protect yourself from MongoDB ransomware.
Running Production PostgreSQL Systems on ARM Architecture — A whitepaper exploring if PostgreSQL on ARM is a viable option for your next project.
Designing the UFC Moneyball using multiple databases — Gigi Sayfan from VRVIU shows us how analytics for sports can be done using Cassandra/Scylla, MySQL, and Redis.
HikariCP: A Solid High-performance JDBC Connection Pool — Can support 10,000 simultaneous users.
|
import olefile
import zipfile
__version__ = "4.11.0"
def OfficeFile(file):
    '''Return an office file object based on the format of given file.

    Args:
        file (:obj:`_io.BufferedReader`): Input file.

    Returns:
        BaseOfficeFile object.

    Examples:
        >>> with open("tests/inputs/example_password.docx", "rb") as f:
        ...     officefile = OfficeFile(f)
        ...     officefile.keyTypes
        ('password', 'private_key', 'secret_key')

    Given file handle will not be closed, the file position will most certainly
    change.
    '''
    file.seek(0)  # required by isOleFile
    if not olefile.isOleFile(file):
        if zipfile.is_zipfile(file):  # Heuristic
            from .format.ooxml import OOXMLFile
            return OOXMLFile(file)
        raise Exception("Unsupported file format")

    ole = olefile.OleFileIO(file)

    # TODO: Make format specifiable by option in case of obstruction
    # Try this first; see https://github.com/nolze/msoffcrypto-tool/issues/17
    if ole.exists('EncryptionInfo'):
        from .format.ooxml import OOXMLFile
        return OOXMLFile(file)
    # MS-DOC: The WordDocument stream MUST be present in the file.
    # https://msdn.microsoft.com/en-us/library/dd926131(v=office.12).aspx
    if ole.exists('wordDocument'):
        from .format.doc97 import Doc97File
        return Doc97File(file)
    # MS-XLS: A file MUST contain exactly one Workbook Stream, ...
    # https://msdn.microsoft.com/en-us/library/dd911009(v=office.12).aspx
    if ole.exists('Workbook'):
        from .format.xls97 import Xls97File
        return Xls97File(file)
    # MS-PPT: A required stream whose name MUST be "PowerPoint Document".
    # https://docs.microsoft.com/en-us/openspecs/office_file_formats/ms-ppt/1fc22d56-28f9-4818-bd45-67c2bf721ccf
    if ole.exists('PowerPoint Document'):
        from .format.ppt97 import Ppt97File
        return Ppt97File(file)
    raise Exception("Unrecognized file format")
|
Tiny solid Sterling Silver flower stud post earrings.
Sterling sheet cut and filed into flower shapes, sterling beads soldered to the center of the flower, and then sterling wire cut to form the posts and soldered to the back.
Earrings have been tumbled for durability. Earrings oxidized to enhance details. Plastic stoppers included.
Face of earrings measure approx. 9mm diameter.
|
from NodeDefender.db.sql import SQL, GroupModel, NodeModel, LocationModel, UserModel
import NodeDefender
from geopy.geocoders import Nominatim
def get_sql(name):
    """Return the NodeModel with the given name, or None if it does not exist."""
    return NodeModel.query.filter_by(name = name).first()
def update_sql(original_name, **kwargs):
    """Update attributes of the node named ``original_name``.

    Only keys that are actual model columns are applied; everything else
    is silently ignored. Returns the updated node, or False if no node
    with that name exists.
    """
    node = get_sql(original_name)
    if node is None:
        return False
    for column, new_value in kwargs.items():
        if column in node.columns():
            setattr(node, column, new_value)
    SQL.session.add(node)
    SQL.session.commit()
    return node
def create_sql(name):
    """Create a NodeModel with the given name and persist it.

    Idempotent: if a node with that name already exists it is returned
    unchanged. Fix over the original: the existence check called
    get_sql() twice (two identical queries); the first result is now
    reused.
    """
    node = get_sql(name)
    if node is not None:
        return node
    node = NodeModel(name)
    SQL.session.add(node)
    SQL.session.commit()
    return node
def save_sql(node):
    """Stage an (already modified) node and commit the session.

    Returns the result of SQL.session.commit().
    """
    SQL.session.add(node)
    return SQL.session.commit()
def delete_sql(name):
    """Delete the node with the given name.

    Returns True on success, False if no such node exists. Fix over the
    original: the same lookup query was issued twice (existence check and
    delete); the node is now fetched once and reused.
    """
    node = get_sql(name)
    if node is None:
        return False
    SQL.session.delete(node)
    SQL.session.commit()
    return True
def get(name):
    """Public accessor: return the node with the given name (see get_sql)."""
    return get_sql(name)
def list(*groups):
    """Return all nodes that belong to any of the named groups.

    NOTE(review): shadows the builtin ``list``; the name is kept for API
    compatibility with existing callers.
    """
    query = SQL.session.query(NodeModel).join(NodeModel.groups)
    return query.filter(GroupModel.name.in_(groups)).all()
def unassigned():
    """Return all nodes that are not assigned to any group."""
    query = SQL.session.query(NodeModel)
    # '== None' (not 'is None') is required: SQLAlchemy translates it to IS NULL.
    return query.filter(NodeModel.groups == None).all()
def create(name):
    """Create (or fetch) the named node and broadcast a creation message."""
    new_node = create_sql(name)
    NodeDefender.db.message.node_created(new_node)
    return new_node
def update(original_name, **kwargs):
    """Public accessor: update the named node's columns (see update_sql)."""
    return update_sql(original_name, **kwargs)
def location(name, street, city, latitude = None, longitude = None):
    """Attach a LocationModel (street, city, lat, lon) to the named node.

    If no coordinates are supplied they are resolved by geocoding
    "<street> <city>" via Nominatim; a failed lookup falls back to
    (0.0, 0.0). Returns the node, or False if it does not exist.
    """
    node = get_sql(name)
    if node is None:
        return False
    # NOTE(review): geocoding only happens when BOTH coordinates are falsy;
    # a single supplied coordinate leaves the other as None in the
    # LocationModel -- confirm whether 'or' was intended here.
    if not latitude and not longitude:
        # NOTE(review): newer geopy releases require a user_agent argument
        # for Nominatim -- verify against the pinned geopy version.
        geo = Nominatim()
        coord = geo.geocode(street + ' ' + city, timeout = 10)
        if coord:
            latitude = coord.latitude
            longitude = coord.longitude
        else:
            # Geocoding failed: fall back to the null-island coordinates.
            latitude = 0.0
            longitude = 0.0
    node.location = LocationModel(street, city, latitude, longitude)
    SQL.session.add(node)
    SQL.session.commit()
    return node
def delete(name):
    """Public accessor: delete the named node (see delete_sql)."""
    return delete_sql(name)
def add_icpe(nodeName, icpeMac):
    """Link the iCPE with the given MAC address to the named node.

    Returns the node on success, False if either side does not exist.
    """
    node = get_sql(nodeName)
    icpe = NodeDefender.db.icpe.get_sql(icpeMac)
    if node is None or icpe is None:
        return False
    node.icpe = icpe
    SQL.session.add(node)
    SQL.session.commit()
    return node
def remove_icpe(nodeName, icpeMac):
    """Unlink the iCPE from the named node.

    Returns the node on success, False if either the node or the iCPE
    does not exist.

    Bug fix: the iCPE lookup used the misspelled variable ``icpeMAc``,
    which raised NameError whenever this function was called.
    """
    node = get_sql(nodeName)
    icpe = NodeDefender.db.icpe.get(icpeMac)
    if icpe is None or node is None:
        return False
    node.icpe = None
    SQL.session.add(node)
    SQL.session.commit()
    return node
|
Will Luminella Lighten And Brighten Your Skin?
Everyone wants beautiful, luminescent skin. Even if it’s just for a moment. But, you end up taking your fresh-faced, smooth skin for granted. Before you know it, you realize that soft, elastic skin is a gift. One that slowly breaks down as you get older. And you start wanting it back. Wrinkles mar your skin and bring you face to face with the fact that your beauty is fading. But you don’t have to let this be the last of your youthful beauty! At least, not when you have Luminella Skin Cream. This facial cream could help you to reverse the effects of aging so that you can retain your youthful skin longer. But could our number one skin cream work even better to keep your youth intact? Click on the image below to find out for yourself. Otherwise, keep reading our Luminella Review to see more information.
Luminella Skin Care aims to work to keep your skin soft and glowing for as long as possible. Using a special formula meant to reduce the appearance of wrinkles, it could do exactly that! But does the Luminella formula work as well as it promises? Or could our number one skin cream get you even better results? In this review, we’ll go over ingredients, the price, and how this formula works so you can make the best decision for yourself and your skin. If you want to skip all of the extra reading and see our number one skin cream, click on the link below. Due to popular demand, there are only so many products available, so be sure you check out this product while there is still time!
With benefits like this, why not try Luminella Skin Care? After all, one study states that a topical cream could help you achieve short and long-term anti-wrinkle results. But, we still think our number one skin cream could work even better for your skin needs. To see why, click on the button above to compare these products for yourself!
Use Sunscreen – This is of utmost importance. If you aren’t using sunscreen, UV rays will burn your skin and make you age faster.
Stop Smoking and Drinking – Or at the very least, tone it down a bit. Doing both of these reduces your skin hydration and can dull the appearance.
Eat Healthier – Getting the nutrients you need for your skin begins with getting the nutrients you need for your body. Try reducing the unhealthy stuff and see what happens.
What Are The Luminella Ingredients?
The Luminella Ingredients contain peptides according to the product website. If you don’t already know, peptides are a chain of amino acids that could help you amplify collagen levels. As you get older, your collagen levels fall which can reduce elasticity and firmness in your skin. However, with the help of peptides, they could tell your skin to produce more collagen. As a result, you could improve your skin so that you can see more youthful results.
What Is The Luminella Skin Cream Price?
The Luminella Skin Cream Price is $89.92. But, if you get lucky, you could happen upon a trial offer for this skin cream. If that’s the case, you would only have to pay $4.97 for shipping and handling. From the moment of your purchase, you have two weeks to decide whether or not you like the product. If you don’t cancel your subscription, you will be charged the full amount for every month thereafter for your skin cream. However, if you want to try an even better product than Luminella Cream that could be better worth the price, click any button on this page to try our number one skin cream.
You likely need a skin cream. But you need one that genuinely works. If you are dead set on getting the formula in this review, you can find it on the Luminella Official Website. But if you want to try a skin cream that could get you even better results than the Luminella formula, our number one skin cream could do the trick. Click any image on this page to see for yourself how this skin cream could benefit you. Supplies are limited on these products, so be sure you click now to see what kind of offers are available while the formula lasts!
Is Luminella Skin Care #1?
|
# DWC Network Server Emulator
# Copyright (C) 2014 polaris-
# Copyright (C) 2014 ToadKing
# Copyright (C) 2014 AdmiralCurtiss
# Copyright (C) 2014 msoucy
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from gamespy_player_search_server import GameSpyPlayerSearchServer
from gamespy_profile_server import GameSpyProfileServer
from gamespy_backend_server import GameSpyBackendServer
from gamespy_natneg_server import GameSpyNatNegServer
from gamespy_qr_server import GameSpyQRServer
from gamespy_server_browser_server import GameSpyServerBrowserServer
from gamespy_gamestats_server import GameSpyGamestatsServer
from nas_server import NasServer
from internal_stats_server import InternalStatsServer
from admin_page_server import AdminPageServer
from storage_server import StorageServer
from gamestats_server_http import GameStatsServer
import gamespy.gs_database as gs_database
import threading
if __name__ == "__main__":
    # Initialize the database before starting any servers, so that two
    # servers never both find an uninitialized database and race to
    # initialize it.
    database = gs_database.GamespyDatabase()
    database.initialize_database()
    database.close()

    server_classes = (
        GameSpyBackendServer,
        GameSpyQRServer,
        GameSpyProfileServer,
        GameSpyPlayerSearchServer,
        GameSpyGamestatsServer,
        #GameSpyServerBrowserServer,
        GameSpyNatNegServer,
        NasServer,
        InternalStatsServer,
        AdminPageServer,
        StorageServer,
        GameStatsServer,
    )

    # Run each server's blocking start() loop on its own thread.
    for server_class in server_classes:
        threading.Thread(target=server_class().start).start()
|
‘Heldere Taal’ is Dutch for ‘Lucid Copy’. That’s exactly what we do. As copywriters and translators we have the ability to produce convincing and attractive copy for all your marketing and communication activities on the Dutch market. No matter how complicated the subject of your business. Besides copywriting, we also help our clients with their general communication questions.
Heldere Taal works for a wide range of clients. Amongst them you’ll find multinationals based in Holland and global players from other countries. One of our special skills is developing and translating financial copy for the asset management industry, in which we have very extensive knowledge. As our interest is wide, we work however in all kind of sectors.
We work in an international environment and speak our languages. We are used to receiving input in English, to be translated and copywritten by us. Do you want to give us your input in French? Usually we translate and copywrite it ourselves. In other cases we work together with experienced translators. We are also familiar with the German language.
Besides top quality copy and valuable advisory about your communications, important values you’ll find with us are high service and strict confidentiality.
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009, 2010 Hermann Meyer, James Warden, Andreas Degert
# Copyright (C) 2011 Pete Shorthose
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#----------------------------------------------------------------
#
# lots of hints and some numerical values taken from
# Virtual Air Guitar (Matti Karjalainen, Teemu Maki-Patola, Aki Kanerva, Antti Huovilainen)
#
import sys
from pylab import *
from scipy.optimize import newton
#
# o V+
# |
# |
# |
# +-+
# | |
# Rp | |
# | | Co
# +-+
# | ||
# +-----++---o Vo
# Vp | ||
# |
# Ci --+--
# Ri /--+--
# || Vi +-----+ Vg / 12AX7
# o---++--+-------| |-----+- - - -
# || | +-----+ \
# | \/----
# | /-----
# | /
# +-+ Vk |
# | | +------+
# | | | |
# | | | |
# +-+ +-+ |
# | | | -+-
# | Rk | | -+- Ck
# | | | |
# ----- +-+ |
# --- | |
# o | |
# | |
# ----- -----
# --- ---
# o o
#
#
# mu Amplification factor.
# kx Exponent:
# kg1 Inversely proportional to overall plate current
# kp Affects opration in region of large plate voltage and large negative grid voltage
# kvb Knee volts
#
# Koren-model parameter names and the unit scale factors applied to the raw
# values in 'tubes' below (capacitances are given in pF, rgi in kOhm).
names = ("mu", "kx", "kg1", "kg2", "kp", "kvb", "ccg", "cpg", "ccp", "rgi")
factor = ( 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1e-12, 1e-12, 1e-12, 1e3)
# Per-tube parameter rows, in the order given by 'names'. 'None' marks a
# parameter that does not apply to that tube (e.g. kg2 for triodes).
tubes = {
#TUBE MU EX KG1 KG2 KP KVB CCG* CPG* CCP* RGI**
"6DJ8" : ( 28.0, 1.3, 330.0, None, 320.0, 300.0, 2.3, 2.1, 0.7, 2.0),
"6L6CG": ( 8.7, 1.35, 1460.0, 4500.0, 48.0, 12.0, 14.0, 0.85, 12.0, 1.0),
"12AX7": (100.0, 1.4, 1060.0, None, 600.0, 300.0, 2.3, 2.4, 0.9, 2.0),
"12AT7": ( 60.0, 1.35, 460.0, None, 300.0, 300.0, 2.7, 2.2, 1.0, 2.0),
"12AU7": ( 21.5, 1.3, 1180.0, None, 84.0, 300.0, 2.3, 2.2, 1.0, 2.0),
"6550" : ( 7.9, 1.35, 890.0, 4800.0, 60.0, 24.0, 14.0, 0.85, 12.0, 1.0),
"KT88" : ( 8.8, 1.35, 730.0, 4200.0, 32.0, 16.0, 14.0, 0.85, 12.0, 1.0),
#"EL34" : ( 60.0, 4.8, 2000.0, None, 800.0, 50.0, None, None, None, None),
"EL34" : ( 11.0, 1.35, 650.0, 4200.0, 60.0, 24.0, 15.0, 1.0, 8.0, 1.0),
"2A3" : ( 4.2, 1.4, 1500.0, None, 60.0, 300.0, 8.0, 17.0, 6.0, 1.0),
"300B" : ( 3.95, 1.4, 1550.0, None, 65.0, 300.0, 10.0, 16.0, 5.0, 1.0),
"6C33C": ( 3.1, 1.4, 163.0, None, 15.0, 300.0, 31.0, 31.0, 11.0, 0.5),
"6C16": ( 42.2, 2.21, 393.0, None, 629.0, 446.0, 9.0, 1.8, 0.48, 2.0),
# * : 10^-12 (pF)
# **: 10^3 (kOhm)
}
class Circuit(object):
    """Model of a single tube gain stage using the Koren equations.

    Solves the stage's operating point with Newton iteration and emits C
    source lookup tables of plate voltage versus input voltage, one table
    per grid resistor value in Ri_values.
    """
    # Parameters for circuit / approximation
    #table_size = 2001
    table_size = 1001              # number of samples per generated table
    Uin_range = (-5.0, 5.0)        # input voltage span covered by the table [V]
    Vp = 250                       # supply voltage [V]
    Rp = 100e3                     # plate resistor [Ohm]
    Ri_values = (68e3, 250e3)      # grid resistor variants; one table each
    # class data
    used_names = ("mu", "kx", "kg1", "kp", "kvb")
    ipk_tab = { "triode": "Ipk_triode", "pentode": "Ipk_triode_pentode" }
    Vi = linspace(Uin_range[0],Uin_range[1],table_size)
    @classmethod
    def help(self):
        # NOTE(review): 'self' is actually the class here (classmethod
        # convention would name it 'cls').
        return ("tube: %s\nplate current functions: %s" % (
            ", ".join(sorted(tubes.keys())),
            ", ".join(sorted(self.ipk_tab.keys()))))
    def __init__(self, tube, ipk_func):
        """Look up the tube's parameters and select its plate current model.

        tube      name of an entry in the module-level 'tubes' table
        ipk_func  'triode' or 'pentode' (keys of ipk_tab)

        Invalid arguments print an error and fall through to usage(),
        which exits the program.
        """
        self.tube = tube
        self.ipk_func = ipk_func
        error = False
        if tube not in tubes:
            print "tube '%s' not found" % tube
            error = True
        if ipk_func not in self.ipk_tab:
            print "plate current function '%s' not found" % ipk_func
            error = True
        if error:
            print
            usage()
        # Copy the scaled tube parameters (mu, kx, ...) onto the instance,
        # skipping entries that are None for this tube.
        for n, f, v in zip(names, factor, tubes[tube]):
            if v is not None:
                setattr(self, n, f*v)
        self.Ipk = getattr(self, self.ipk_tab[ipk_func])
        # Vectorized transfer function for evaluating whole Vi arrays.
        self.FtubeV = vectorize(self.Ftube)
    def Igk_Vgk(self, Vgk):
        """gate current as function of gate-kathode voltage"""
        # Empirical exponential fit (numerical values per the reference in
        # the file header).
        return exp(7.75*Vgk-10.3)
    def Ipk_triode_pentode(self, Vgk, Vpk):
        """Koren model of pentode connected as class A triode
        (screen connected to plate):
        plate current as function of gate-kathode voltage
        and plate-kathode voltage
        """
        E1 = Vpk/self.kp*log(1+exp(self.kp*(1/self.mu+Vgk/Vpk)))
        # (E1>0.0) masks the cutoff region where E1 goes non-positive.
        return 2*E1**self.kx/self.kg1*(E1>0.0)*arctan(Vpk/self.kvb)
    def Ipk_triode(self, Vgk, Vpk):
        """
        Koren model of triode:
        plate current as function of gate-kathode voltage
        and plate-kathode voltage
        """
        E1 = Vpk/self.kp*log(1+exp(self.kp*(1/self.mu+Vgk/sqrt(self.kvb+Vpk*Vpk))))
        # (E1>0.0) masks the cutoff region where E1 goes non-positive.
        return 2*E1**self.kx/self.kg1*(E1>0.0)
    def Ftube(self, Vi, Ri):
        """calculate output voltage of a tube circuit as function of input voltage
        Vi input voltage
        Ri value of resistor Ri

        Two nested Newton solves: first the gate-kathode voltage from the
        input loop, then the plate-kathode voltage from the output loop.
        """
        def fi(Vgk, Vi, Ri):
            return Vi - Vgk - Ri * self.Igk_Vgk(Vgk) # sum of voltages -> zero
        Vgk = newton(fi, self.Igk_Vgk(0), args=(Vi, Ri)) # Vgk(Vi)
        def fp(Vpk, Vgk, Ipk):
            return Vpk + self.Rp * Ipk(Vgk, Vpk) - self.Vp
        return newton(fp, self.Vp/2, args=(Vgk,self.Ipk)) # Vpk(Vgk)
    def Vk0(self, Ri, Rk):
        """Solve for the quiescent kathode voltage of a self-biased stage
        with kathode resistor Rk (input grounded)."""
        v0 = 0
        def f(v):
            return (self.Ftube(-v, Ri)-self.Vp)*(Rk/self.Rp) + v
        return newton(f, v0)
    def write_ftube_table(self, Ri, Vi, Vp):
        """write C source"""
        # Table header fields: min Vi, max Vi, index mapping factor, size.
        sys.stdout.write("\t{ // Ri = %dk\n" % (Ri/1e3))
        sys.stdout.write('\t%g,%g,%g,%d, {' % (Vi[0], Vi[-1], (len(Vi)-1)/(Vi[-1]-Vi[0]), self.table_size))
        s = ""
        for i, v in enumerate(Vi):
            # Break the output into lines of 5 values.
            if i % 5 == 0:
                sys.stdout.write(s+"\n\t")
                s = ""
            sys.stdout.write(s+str(Vp[i]))
            s = ","
        sys.stdout.write("\n\t}}")
    def write_tables(self, prgname):
        """Write a C array of one table1d_imp lookup table per Ri value,
        preceded by a provenance comment block."""
        sys.stdout.write("// generated by %s\n" % prgname)
        sys.stdout.write("// tube: %s\n" % self.tube)
        sys.stdout.write("// plate current function: %s\n" % self.ipk_func)
        for n in self.used_names:
            sys.stdout.write("// %s: %g\n" % (n, getattr(self, n)))
        sys.stdout.write("\n")
        sys.stdout.write("table1d_imp<%d> tubetable_%s[%d] = {\n"
            % (self.table_size, self.tube, len(self.Ri_values)))
        s = ""
        for Ri in self.Ri_values:
            sys.stdout.write(s)
            s = ",\n"
            Vp = self.FtubeV(self.Vi, Ri)
            self.write_ftube_table(Ri, self.Vi, Vp)
        sys.stdout.write("\n};\n")
    def write_tt_ftube_table(self, Ri, Vi, Vp):
        """write C source"""
        # Variant of write_ftube_table that wraps each value in TTFLOAT().
        sys.stdout.write("\t{ // Ri = %dk\n" % (Ri/1e3))
        sys.stdout.write('\t%g,%g,%g,%d, {' % (Vi[0], Vi[-1], (len(Vi)-1)/(Vi[-1]-Vi[0]), self.table_size))
        s = ""
        for i, v in enumerate(Vi):
            # Break the output into lines of 3 values.
            if i % 3 == 0:
                sys.stdout.write(s+"\n\t")
                s = ""
            sys.stdout.write(s+"TTFLOAT("+str(Vp[i])+")")
            s = ","
        sys.stdout.write("\n\t}};\n")
    def write_tt_tables(self, prgname):
        """Write one 'struct tubetable' definition per Ri value, preceded
        by a provenance comment block and the struct layout as comments."""
        sys.stdout.write("// generated by %s\n" % prgname)
        sys.stdout.write("// tube: %s\n" % self.tube)
        sys.stdout.write("// plate current function: %s\n" % self.ipk_func)
        for n in self.used_names:
            sys.stdout.write("// %s: %g\n" % (n, getattr(self, n)))
        sys.stdout.write("//\n")
        sys.stdout.write("// struct tubetable {\n")
        sys.stdout.write("// unsigned int min_Vi;\n")
        sys.stdout.write("// unsigned int max_Vi;\n")
        sys.stdout.write("// unsigned int mapping;\n")
        sys.stdout.write("// unsigned int table_size;\n")
        sys.stdout.write("// ttspl_t table[1001];\n")
        sys.stdout.write("// }\n")
        for Ri in self.Ri_values:
            sys.stdout.write("\nstruct tubetable tubetable_%s_Ri%s = \n"
                % (self.tube, self.R_name(Ri).upper()))
            Vp = self.FtubeV(self.Vi, Ri)
            self.write_tt_ftube_table(Ri, self.Vi, Vp)
        sys.stdout.write("\n")
    def R_name(self, r):
        """Format a resistor value with an M/k suffix, with '.' replaced
        by '_' so the result is usable inside a C identifier."""
        for f, n in (1e6,"M"),(1e3,"k"),(1,""):
            if r >= f:
                return ("%g%s" % (r/f, n)).replace(".","_")
    def show_vk0(self, args):
        """Print Vk0 for (Ri, Rk) taken from 'args', or prompt the user
        interactively when no arguments were supplied."""
        if args:
            Ri = float(args[0])
            Rk = float(args[1])
        else:
            try:
                while True:
                    vl = ["%d: %s" % (i, self.R_name(r)) for i, r in enumerate(self.Ri_values)]
                    i = raw_input("Ri [%s]: " % ", ".join(vl))
                    try:
                        i = int(i)
                    except ValueError:
                        pass
                    else:
                        if 0 <= i < len(self.Ri_values):
                            Ri = self.Ri_values[i]
                            break
                    print "error: illegal input"
                while True:
                    try:
                        Rk = float(raw_input("Rk: "))
                        break
                    except ValueError:
                        print "error: please enter float value"
            except KeyboardInterrupt:
                # Ctrl-C aborts the prompt loop cleanly.
                print
                return
        print "%f" % self.Vk0(Ri,Rk)
    def check_table_accuracy(self, Ri):
        """maximal relative table error at half interval"""
        # Compare the midpoint of adjacent table entries (linear
        # interpolation) with the exact value at the midpoint.
        def ip(x):
            return (x[:-1]+x[1:])/2
        Vp = self.FtubeV(self.Vi, Ri)
        VpM = self.FtubeV(ip(self.Vi), Ri)
        VpD = (ip(Vp) - VpM) / VpM
        return max(VpD)
    def display_accuracy(self):
        """Print the interpolation error estimate for every Ri value."""
        for Ri in self.Ri_values:
            print "Ri=%dk: %g" % (Ri/1e3, self.check_table_accuracy(Ri))
    def plot_Ftube(self):
        """Plot the transfer function Vp(Vi) for each Ri value."""
        title(self.tube)
        for Ri in self.Ri_values:
            Vp = self.FtubeV(self.Vi, Ri)
            plot(self.Vi, Vp, label="Ri=%dk" % (Ri/1e3))
        xlabel("Vik")
        ylabel("Vp")
        legend()
        axis  # NOTE(review): bare name, never called -- has no effect; leftover?
        show()
def usage():
    """Print the command synopsis plus the available tubes and plate
    current functions, then exit with status 1."""
    print "usage: %s plot|accuracy|table|tt-table|vk0 tube-name plate-func" % sys.argv[0]
    print Circuit.help()
    raise SystemExit, 1
def main():
    """Entry point: build a Circuit from argv and dispatch the sub-command."""
    if len(sys.argv) < 4:
        usage()
    cmd = sys.argv[1]
    circuit = Circuit(sys.argv[2], sys.argv[3])
    # Sub-command dispatch table; each entry closes over 'circuit'.
    commands = {
        "plot": lambda: circuit.plot_Ftube(),
        "accuracy": lambda: circuit.display_accuracy(),
        "table": lambda: circuit.write_tables(sys.argv[0]),
        "tt-table": lambda: circuit.write_tt_tables(sys.argv[0]),
        "vk0": lambda: circuit.show_vk0(sys.argv[4:]),
    }
    action = commands.get(cmd)
    if action is None:
        usage()
    action()
if __name__ == "__main__":
main()
|
Here is one line taken from the closing prayer in the Liturgy of the Hours for St. Stanislaus – “Keep us strong and loyal in our faith until death.” As all of our prayers are, this is directed to God. Now what do you think the reaction of God is to those entrusted with the deposit of faith who, through their actions and inactions surrounding the clergy sexual abuse scandal, have not kept the faithful strong and loyal to the faith? It really is not a stretch to suppose that those entrusted with the deposit of faith are not strong or loyal in their own faith.
Pope Francis has put forth a reaffirmation of the decree for a swift and decisive response to clergy sexual abuse of minors. What does that mean? What swift, decisive response has happened, especially from the bishops? When the Roger Cardinal Mahonys are in our deposit of faith pool, what decisively happens? Lies, stonewalling, more lies, protection of clergy abusers – that has been the decisive action. And he continues to make a mockery of Christ, who is truth, by insisting his actions were harmless. Decisive action needs to be taken on do-nothing bishops too.
|
#!/usr/bin/python3
import os
import sys
__dir__ = os.path.dirname(__file__)
sys.path.insert(0, os.path.join(__dir__, ".."))
import sphinx.cmd.build
import ipfshttpclient
# Ensure working directory is script directory
os.chdir(__dir__)
def main(argv=sys.argv[1:], program=sys.argv[0]):
    """Build the Sphinx HTML documentation, add it to IPFS and, when an
    IPNS key name is given as the single argument, publish it to IPNS."""
    if len(argv) != 1:
        print("Usage: {0} [IPNS-key]".format(os.path.basename(program)))
        print()
        print("!! Continuing without publishing to IPNS !!")
        print()

    # Invoke Sphinx like the Makefile does
    status = sphinx.cmd.build.build_main(
        ["-b", "html", "-d", "build/doctrees", ".", "build/html"])
    if status != 0:
        return status

    print()
    print("Exporting files to IPFS…")
    client = ipfshttpclient.connect()
    docs_hash = client.add("build/html", recursive=True, raw_leaves=True, pin=False)[-1]["Hash"]
    root_hash = client.object.new("unixfs-dir")["Hash"]
    root_hash = client.object.patch.add_link(root_hash, "docs", docs_hash)["Hash"]
    client.pin.add(root_hash)
    print("Final IPFS path: /ipfs/{0}".format(root_hash))

    if len(argv) == 1:
        ipns_key = argv[0]
        print()
        print("Exporting files to IPNS…")
        ipns_name = client.name.publish(root_hash, key=ipns_key)["Name"]
        print("Final IPNS path: /ipns/{0}".format(ipns_name))
        print()
        print("Run the following commandline on all systems that mirror this documentation:")
        print(" ipfs pin add {0} && ipfs name publish -k {1} /ipfs/{0}".format(root_hash, ipns_name))

    return 0
# Script entry point: propagate main()'s return code as the exit status.
if __name__ == "__main__":
    sys.exit(main())
|
You'll Find These People In Every Nigerian Office | Zikoko!
Whether you work in a small office or a big corporation, you’re definitely going to meet all sorts of people. Here are 10 types of people you probably have in your office right now!
You know, those people that are always the first in and last out. The ones that’ll have you wondering if there is someone chasing them from their houses. You’re making the rest of us look bad, uncle!
Loool those ones that can’t be on time to work unless by a divine act of God. They are usually the most creative people because they have to come up with a new lie every day.
These ones are always ‘offline’. Small quiet like this, or if the boss steps out, they’re already making their way to dreamland. It’s an amazing something.
The ones that if you mistakenly even look their way, they will start talking about any and everything. Just shoot me!
The way these ones are always gathering and spilling tea, you just have to wonder how they get anything done. You’ll even start to fear them because their powers are too great for you to comprehend.
Then we have the salespeople. The ones who have great entrepreneurial spirits and are always selling one thing or the other. They’ll try to sell every useless trinket, and try to pressure or guilt you into buying. No, bro.
These are the guys that once given a little responsibility, let it get to their head and they start feeling like the boss. If I slap you ehn, your swollen head will reduce.
Now, these ones are the people that if you see them in your office, you wonder if there’s a new employee, even though they’ve been working there for 3 years. They’re so quiet and reserved, they’re pretty much invisible. Ghosts.
QUIZ: Can You Guess The Nigerian Church?
Just follow these instructions and you should be fine.
|
#!/usr/bin/env python
#
# Author: Vincenzo Maffione <v.maffione@gmail.com>
#
import multiprocessing
import subprocess
import statistics
import argparse
import time
import re
import os
def has_outliers(tuples):
    """Return True if, in any column of the sample tuples, the standard
    deviation exceeds 5% of the mean, i.e. measurements are too noisy."""
    # Fewer than two samples: statistics.stdev raises StatisticsError,
    # and a single sample cannot be an outlier anyway.
    if len(tuples) < 2:
        return False
    for t in range(len(tuples[0])):
        avg = statistics.mean([x[t] for x in tuples])
        stdev = statistics.stdev([x[t] for x in tuples])
        if stdev > avg*0.05:
            return True
    return False
def to_avg_stdev(vlist, nsamples):
    """Collapse, in place, the last *nsamples* tuples of vlist into a single
    tuple (avg0, stdev0, avg1, stdev1, ...) computed per column.

    The samples are sorted by their second column (kpps or ktps) and the
    smallest ones are discarded one at a time until the remaining window
    is outlier-free.
    """
    # Sort by kpps or ktts
    tuples = sorted(vlist[-nsamples:], key=lambda x: x[1])
    vals = []
    for left in range(len(tuples)):
        window = tuples[left:]
        if len(window) < 2:
            # A single surviving sample: use it as-is with zero deviation.
            # (statistics.stdev needs at least two data points; the original
            # code crashed in this case.)
            for col in window[0]:
                vals.append(col)
                vals.append(0.0)
            break
        if not has_outliers(window):
            for t in range(len(window[0])):
                column = [x[t] for x in window]
                vals.append(statistics.mean(column))
                vals.append(statistics.stdev(column))
            break
    del vlist[-nsamples:]
    vlist.append(tuple(vals))
# Command-line interface for the rinaperf sweep driver.
description = "Python script to perform automated tests based on rinaperf"
epilog = "2017 Vincenzo Maffione <v.maffione@gmail.com>"

argparser = argparse.ArgumentParser(description = description,
                                    epilog = epilog)
argparser.add_argument('--size-min', type = int, default = 2,
                       help = "Minimum size for the test")
argparser.add_argument('--size-max', type = int, default = 1400,
                       help = "Maximum size for the test")
argparser.add_argument('--size-step', type = int, default = 10,
                       help = "Packet size increment")
argparser.add_argument('--trials', type = int, default = 3,
                       help = "Number of trials for each combination "
                              "of parameters")
argparser.add_argument('-D', '--duration', type = int, default = 10,
                       help = "Duration of each test (in seconds)")
argparser.add_argument('-g', '--max-sdu-gap', type = int, default = -1,
                       help = "Max SDU gap")
argparser.add_argument('-t', '--test-type', type = str, default = "perf",
                       help = "Test type", choices = ["perf", "rr"])
argparser.add_argument('-d', '--dif', type = str,
                       help = "DIF to use for the tests")
argparser.add_argument('-o', '--output', type = str, help = "Output file for gnuplot data",
                       default = 'output.txt')
argparser.add_argument('--sleep', type = int, default = 2,
                       help = "How many seconds to sleep between two consecutive test runs")

args = argparser.parse_args()

# Collected measurement tuples; one entry per test run, later collapsed
# into (avg, stdev) pairs by to_avg_stdev().
stats = []

# Column headers for the gnuplot output file; they depend on the test type.
plotcols = ['size']
if args.test_type == 'perf':
    plotcols += ['snd_kpps', 'rcv_kpps', 'snd_mbps', 'rcv_mbps']
elif args.test_type == 'rr':
    plotcols += ['ktps', 'snd_mbps', 'snd_latency']

# build QoS
qosarg = ""
if args.max_sdu_gap >= 0:
    qosarg += " -g %s" % args.max_sdu_gap

difarg = ""
if args.dif:
    difarg = " -d %s" % args.dif
# Sweep over packet sizes, running args.trials rinaperf runs per size and
# parsing the tool's textual output.  Ctrl-C stops the sweep gracefully.
try:
    for sz in range(args.size_min, args.size_max+1, args.size_step):
        cmd = ("rinaperf -s %s -t %s -D %s %s %s"
               % (sz, args.test_type, args.duration, qosarg, difarg))
        print("Running: %s" % cmd)

        t = 1
        while t <= args.trials:
            # NOTE(review): a failed or unparsable run retries forever
            # ("continue" without incrementing t) — confirm this is intended.
            try:
                out = subprocess.check_output(cmd.split())
            except subprocess.CalledProcessError:
                print("Test run #%d failed" % t)
                continue
            out = out.decode('ascii')
            outl = out.split('\n')

            if args.test_type == 'perf':
                if len(outl) < 4:
                    print(out)
                    continue
                # "Sender" summary line: packets, Kpps, Mbps.
                m = re.match(r'^Sender\s+(\d+)\s+(\d+\.?\d*)\s+(\d+\.?\d*)', outl[2])
                if m is None:
                    print(out)
                    continue
                tpackets = int(m.group(1))
                tkpps = float(m.group(2))
                tmbps = float(m.group(3))
                # Matching "Receiver" summary line.
                m = re.match(r'^Receiver\s+(\d+)\s+(\d+\.?\d*)\s+(\d+\.?\d*)', outl[3])
                if m is None:
                    print(out)
                    continue
                rpackets = int(m.group(1))
                rkpps = float(m.group(2))
                rmbps = float(m.group(3))
                prtuple = (tpackets, rpackets, tkpps, rkpps, tmbps, rmbps)
                stats.append((sz, tkpps, rkpps, tmbps, rmbps))
                print("%d/%d pkts %.3f/%.3f Kpps %.3f/%.3f Mbps" % prtuple)

            elif args.test_type == 'rr':
                if len(outl) < 3:
                    print(out)
                    continue
                # Request/response summary: transactions, Ktps, Mbps, latency (ns).
                m = re.match(r'^Sender\s+(\d+)\s+(\d+\.?\d*)\s+(\d+\.?\d*)\s+(\d+)', outl[2])
                if m is None:
                    print(out)
                    continue
                transactions = int(m.group(1))
                ktps = float(m.group(2))
                mbps = float(m.group(3))
                latency = int(m.group(4))
                prtuple = (transactions, ktps, mbps, latency)
                stats.append((sz, ktps, mbps, latency))
                print("%d transactions %.3f Ktps %.3f Mbps %d ns" % prtuple)

            else:
                # Unreachable: argparse restricts --test-type to perf|rr.
                assert(False)

            t += 1
            time.sleep(args.sleep)

        # Transform the last args.trials element of the 'stats' vectors into
        # a (avg, stddev) tuple.
        to_avg_stdev(stats, args.trials)

except KeyboardInterrupt:
    pass
# Dump statistics for gnuplot: a '#'-prefixed header row of column names,
# then one row of fixed-width floats per collapsed measurement.
# Use a context manager so the output file is always flushed and closed
# (the original leaked the file handle).
with open(args.output, 'w') as fout:
    header = '#'
    for k in plotcols:
        header += '%19s ' % k
    fout.write("%s\n" % header)
    for row in stats:  # one line per sample
        line = ' '
        for val in row:
            line += '%9.1f ' % val
        fout.write("%s\n" % line)
|
Opened in February 2017 with a giant concert performed by the rock band Metallica, following a construction period of around three years and a planning period before this of around two years, the Royal Arena in Copenhagen is a distinguished venue for large national and international events. Extending over an area of 35,000 m², the approx. €135m arena can accommodate both concerts and sporting events. An audience of 15,000 can be catered for at music events, while the ground floor of the arena is reserved for the athletes at sporting events. Ice hockey tournaments can thus be followed by an audience of 10,000, while seating for up to 12,500 can be provided for handball tournaments.
The spacious foyer beneath the main hall is used for markets/trade fairs and exhibitions, with the result that the arena also serves as the city’s cultural hub. The Royal Arena is located in the south of Copenhagen in the Ørestad Syd district of the city within the urban development zone and is surrounded by promenade-like green space.
Because of the different sporting events such as table tennis and ice hockey that are held in the arena in addition to concerts, the building engineering is correspondingly sophisticated – though still not visible to the naked eye. High aesthetic standards were set by the Danish architecture firm 3XN, with the engineering offices of ME engineers from Great Britain and Rambøll from Denmark often having to overcome the difficulties presented by the demanding structure in the most confined of spaces.
Key considerations when it came to implementation included high visitor comfort, excellent acoustics and functionality for the artists and athletes and above all the guests. The oval, self-supporting and slightly curved cantilever roof is architecturally striking. Because there are no supporting columns in the central event hall, this construction ensures an unobstructed view from all seats.
The entire centralised cooling is accommodated directly below the roof construction and posed special challenges for the machine room planners, contractor A-Comfort ApS. A cooling capacity of 2.3 MW is required for normal events, while this figure rises to 3.3 MW for events on ice.
The contractor A-Comfort ApS had decided to build the system including containers, chillers, PHE units and dry cooler in their workshop before the final on-site installation. This was only possible thanks to a detailed 3D representation of the complete system.
Four 1,000 kW screw compressors are responsible for the centralised cooling, which is transferred to glycol cycles that are used to supply 135 coolers. The flow and return temperature in the glycol cycle is 6 °C and 12 °C at normal events, while temperatures at ice events drop to 2.5 °C and 8.5 °C respectively.
The original architect design provided for the compressors being accommodated within the walled machine room in an enclosed container. This container was to be erected on 50 cm high stilts, with the dry coolers in turn being accommodated on the roof of the container. Cooling for the machine room was to be provided in the form of an open space at the side. The clearance of the confined space for the dry coolers was further limited by a steel girder projecting across the roof space.
This spatial arrangement did not allow air space for the fans however for the originally envisaged dry coolers. Even though smaller Güntner dry coolers from the V-SHAPE Compact GFW series or Güntner dry coolers from the FLAT Vario GFH series would have had sufficient set-up space, the air volume would not have been sufficient to ensure the required re-cooling power. The flow and return temperature of the dry cooler is 42 °C and 47 °C respectively at an external temperature of 31 °C, with the result that a standard solution would not have been possible in the spatial set-up.
The technical solution involved on one hand relinquishing the 50 cm stilts for the container in the machine room and on the other hand a combination of 4 x 2 differently sized special models of the Güntner dry cooler from the V-SHAPE Vario GFD series in order to take account of the steel girders in the roof construction.
The entire prefabricated construction was so precisely planned and constructed owing to the confined space conditions that the maximum deviation at the flange connections was just 5 mm. The four Güntner dry coolers from the V-SHAPE Vario GFD series were interconnected in terms of control technology so as to be considered one unit by the control equipment. What’s more, the dry coolers are equipped with energy efficient ebm AxiTops to reduce the sound level. The installed HydroSpray system secures the capacity at very high ambient temperature peaks.
Even today it is clear that Copenhagen attracts top-class events to the city with the new arena: the venue will host the Ice Hockey World Championships in May 2018, one year later it becomes one of the venues of the Men's Handball World Championships.
|
# Type keys and specify shift key up/down
import subprocess
import sys
import argparse
import time
import os
class TypeKeys:
    """Send keystrokes to an X11 window via xdotool, driving the Shift
    modifier explicitly so shifted characters are typed correctly."""

    def __init__(self, *args, **kwargs):
        self.shift = False          # True while the Shift key is held down
        self.name = 'Tally.ERP 9'   # window title used when no window id is given
        # Default inter-key delay for xdotool; the original never initialized
        # this, so TypeKeys crashed with AttributeError whenever the caller
        # did not assign .delay before typing.
        self.delay = 1
        self.window = 0
        # WINDOWID takes precedence over WID when both are set (checked last).
        if 'WID' in os.environ:
            self.window = os.environ['WID']
        if 'WINDOWID' in os.environ:
            self.window = os.environ['WINDOWID']
        # Map each typeable character to whether it requires Shift.
        self.chars = {}
        for x in range(ord('A'), ord('Z')+1):
            self.chars[chr(x)] = True
        for x in range(ord('a'), ord('z')+1):
            self.chars[chr(x)] = False
        for x in [' ', ',', '.', '/', ';', "'", '[', ']', '`', '1', '2', '3', '4', '5', '6', '7', '8', '9', '0', '-', '=', '\\']:
            self.chars[x] = False
        for x in ['<', '>', '?', ':', '"', '{', '}', '~', '!', '@', '#', '$', '%', '^', '&', '*', '(', ')', '_', '+', '|']:
            self.chars[x] = True
        # Named keys that are passed straight through to "xdotool key".
        self.keys = ["BackSpace", "Escape", "Return", "Down", "Up", "Left", "Right"]

    def init(self):
        # Resolve the window id by title if none was provided, and make sure
        # Shift starts in the released state.
        if not self.window:
            self.window = self.runxdo(["xdotool", "search", "--name", "%s" % (self.name)])
        self.stop_shift()

    def runxdo(self, cmd):
        # Run an xdotool command line and return its captured stdout.
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        return out

    def start_shift(self):
        # Press Shift (idempotent).
        if self.shift == True:
            return
        self.runxdo(["xdotool", "keydown", "--window", "%s" % (self.window), "Shift"])
        self.shift = True

    def stop_shift(self):
        # Release Shift (idempotent).
        if self.shift == False:
            return
        self.runxdo(["xdotool", "keyup", "--window", "%s" % (self.window), "Shift"])
        self.shift = False

    def type(self, str):
        # Type a named key (e.g. "Return") or a literal string one character
        # at a time, toggling Shift as needed.
        # NOTE: the parameter shadows the builtin "str"; kept for interface
        # compatibility with existing callers.
        if str in self.keys:
            self.runxdo(["xdotool", "key", "--delay", "%s" % (self.delay), "--window", "%s" % (self.window), "%s" % (str)])
            return
        for x in list(str):
            if self.chars[x]:
                self.start_shift()
            else:
                self.stop_shift()
            self.runxdo(["xdotool", "type", "--delay", "%s" % (self.delay), "--window", "%s" % (self.window), "%s" % (x)])
        self.stop_shift()
# Command-line entry point: type a string (and optionally a trailing named
# key) into the target window.
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("string", help="string to type")
    parser.add_argument("--ender", help="Key to press at the end", default=None)
    parser.add_argument("--delay", help="delay between characters", default=1)
    parser.add_argument("--window", help="window id")
    parser.add_argument("--sleep", type=float, help="sleep time after commands", default=0.1)
    args = parser.parse_args()

    tk = TypeKeys()
    # NOTE(review): "--delay 0" is falsy and would be silently ignored here.
    if args.delay:
        tk.delay = args.delay
    if args.window:
        tk.window = args.window
    tk.init()
    tk.type(args.string)
    # Optionally press a final named key (e.g. Return).
    if(args.ender):
        tk.type(args.ender)
    time.sleep(args.sleep)
|
Hilary and Patrick’s waterfront St. Pete wedding was inspired by their love for the ocean. The blue hues featured throughout their wedding day brought together the organic and ethereal design inspiration that they had always dreamed of.
“We live in Ocala and chose to have the wedding in the Tampa area because our guests were coming from all over the U.S. and abroad. We wanted an area that showed off Florida’s best aspects: water, beaches, downtown night life and St.Pete delivered,” the bride shared.
Nestled amidst the mangroves and under the driftwood arches accented by beautiful florals, the bride and groom were effortlessly photographed by Tampa Bay wedding photographer Lifelong Photography Studio. From the ombre blue bridesmaids dresses flowing in the wind, to the perfect Florida weather, Lifelong captured every moment and detail in between.
“This organic, and gorgeous wedding reconfirmed our love for shooting beach inspired weddings in our beautiful state of Florida!” the Lifelong team gushed.
“Lifelong did a great job as our photographer!” Hilary added.
The bride focused on all aspects of her big day, including a light romantic bridal makeup look that complemented the waterfront setting.
“There wasn’t anything that was unimportant, as it was our wedding day!” Hilary shared. “I was happy with my wedding beauty look created by LDM Beauty Group (formerly known as Lindsay Does Makeup).”
Looking back, the bride’s most cherished memories included a fun surprise by her brother.
We were visiting the Cedar Lakes Woods & Gardens in Williston, FL. We were in a gazebo when Pat proposed. Another visitor just happened to be walking by and snapped a picture to capture the moment.
We wrote our own vows, it was outside by the water, the food was fabulous and there was lots of dancing under the stars.
Someone who took the time to get to know us and what we liked.
Lifelong Photography Studio did such a great job as our photographer!
|
# Copyright 2018 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from f5.bigip.tm.sys.hardware import Hardware
class TestHardware(object):
    def test_load_refresh(self, mgmt_root):
        """Two loads of the hardware stats endpoint must agree, and a
        refresh must leave the first object consistent with the second."""
        def verify(hw):
            assert isinstance(hw, Hardware)
            assert hasattr(hw, 'entries')
            assert hw.kind == 'tm:sys:hardware:hardwarestats'
            assert 'https://localhost/mgmt/tm/sys/hardware/platform' in hw.entries.keys()

        h1 = mgmt_root.tm.sys.hardware.load()
        verify(h1)
        h2 = mgmt_root.tm.sys.hardware.load()
        verify(h2)
        h1.refresh()
        assert h1.kind == h2.kind
        assert h1.entries.keys() == h2.entries.keys()
|
The largest firefighting force ever assembled in the country -- an army of at least 17,000 people recruited from coast to coast -- is battling stubborn wildfires that have scorched at least 1.2 million acres in 14 Western states and Canada. The firefighters -- including New Englanders, Tennesseans, and Eskimos and Indians from Alaska -- have been assembled in the shortest period of time ever, says Scott Brayton, a spokesman for the Interagency Fire Center in Boise, Idaho.
Fire officials say they are pinning their hopes for controlling the fires on man and machines, not on the weather, which remains hot and dry across most of the West.
The enormity of the fire problem is ``unprecedented this early in the season at this intensity,'' says Bill Bishop, another spokesman for the nation's firefighting nerve center.
But fire officials battling a 13,800-acre fire near Los Gatos, about 60 miles south of San Francisco, said they were glad to see a heavy dew on the ground Thursday morning. Scattered showers Wednesday afternoon brought cheers from firefighters, but weather experts say it will take more than a sprinkle to reduce the fire threat.
Firefighter Tony Acosta says crews have sacrificed brush to save buildings -- although at least 11 homes have been lost in the Los Gatos fire.
Mr. Acosta spoke as he was taking his first bath since Sunday, using water from a broken sprinkler. He says he began working the fire Sunday at 6:30 p.m., got a four-hour break from 3 a.m. to 7 a.m. Monday, and has been working more or less steadily since.
|
# This program computes the final value of an invested
# amount using the compound interest formula:
#     amount = principle * (1 + rate / num) ** (num * term)
# Variables:
#     amount ........... Final value of investment
#     principle ........ Amount invested
#     rate ............. Rate of interest as a decimal
#     num .............. Number of times per year compounded
#     term ............. Investment term in years
#     percentagerate ... Rate of interest as a percentage
#
# NOTE: the original stored the *decimal* rate in "percentagerate" and the
# percentage in "rate", inverting the documented meaning of both names.
# The names are swapped back here; the computed output is unchanged.

import locale
locale.setlocale( locale.LC_ALL, '' )

# welcome message
print ("Welcome to the Investing Program \n")

# Assign values of input data
principle = 4500
rate = .096        # decimal rate (9.6%)
term = 2
num = 4

# Compute final value of investment
percentagerate = rate * 100
amount = principle * (1 + rate / num) ** (num * term)

# Display results
print ("Amount of money invested ....", principle, "dollars")
print ("Rate of interest ............", percentagerate, "percent")
print ("Frequency of compounding ....", num, "times per year")
print ("Period of investment ........", term, "years")
print ()
print ("Final value of investment ...", locale.currency(amount), "dollars")

input("\nPress enter to exit")
|
The Derby Basket Weave Commode Chair is for those people who want the commode to blend naturally in as a piece of the furniture.
It is a traditional wooden frame commode with woven fibre upholstery in white and gold. The high chair sides provide support when rising.
It has a concealed toilet seat and pan, which is covered by a padded removable seat cushion.
Seat Height 18.5", seat width 17.25"
|
#!/usr/bin/env python3
# Download Census data files for the 2000 Decadal Summary File 1:
# https://www2.census.gov/census_2000/datasets/Summary_File_1/STATENAME
# If you look for documentation, you'll see pointers to the 730-page
# PDF sf1.pdf. Don't bother: it's completely wrong and must be for
# some earlier dataset.
# Instead, the documentation is in the files inside:
# http://www.census.gov/support/2000/SF1/SF1SAS.zip
# inside which, SF101.Sas describes the fields in st00001.uf1
# where st is the state abbreviation.
import os, sys
import re
import argparse
import zipfile
from collections import OrderedDict
# While testing:
from pprint import pprint
# A dictionary: { fileno: dic } where fileno is an int from 1 to 39 or 'geo'
# and dic is another dictionary of 'censuscode': "long description"
# where censuscode is a 7-char string like P000001 or H016H018.
CensusCodes = {}
# Fields in the sf1geo file
GeoFields = {}
def codesFromZipFile(zipfilename):
    """Populate the global CensusCodes dict from the .sas files in SF1SAS.zip.

    Each sfNNN.sas file yields an OrderedDict mapping census codes
    (e.g. P000001) to long descriptions, stored under the integer NNN.
    The special sf1geo.sas file is delegated to parse_geo_sas_lines().
    """
    pat = re.compile(b" *([A-Z][0-9]{3}[0-9A-Z]{3,4})=' *(.*)'")
    # Context manager so the archive is closed (the original leaked it).
    with zipfile.ZipFile(zipfilename, 'r') as zf:
        for name in zf.namelist():
            if not name.lower().endswith('.sas'):
                continue

            # The sf1geo file is special, so parse it separately
            if name == 'sf1geo.sas':
                parse_geo_sas_lines(zf.read(name).split(b'\n'))
                continue

            # Escape the dot (was 'sf([0-9]{3}).sas', which would also
            # match e.g. "sf101xsas").
            filematch = re.match(r'sf([0-9]{3})\.sas', name.lower())
            if not filematch:
                continue

            code_dict = OrderedDict()
            fileno = int(filematch.group(1))

            # Every file starts with these five, which don't have p-numbers
            code_dict['FILEID'] = 'File Identification'
            code_dict['STUSAB'] = 'State/U.S.-Abbreviation (USPS)'
            code_dict['CHARITER'] = 'Characteristic Iteration'
            code_dict['CIFSN'] = 'Characteristic Iteration File Sequence Number'
            code_dict['LOGRECNO'] = 'Logical Record Number'

            for line in zf.read(name).split(b'\n'):
                m = re.match(pat, line)
                if m:
                    pcode, desc = [s.decode() for s in m.groups()]
                    code_dict[pcode] = desc

            CensusCodes[fileno] = code_dict
def parse_geo_sas_lines(lines):
    """lines are read from the sf1geo.sas file.
    Populate the global GeoFields dictionary:
        { 'CODE': { 'name': 'long name', 'start': int, 'end': int } }
    where start/end are 0-based slice bounds into a geo record line.
    """
    # Raw byte patterns: the originals used invalid escape sequences
    # (\=, \', \-) that Python 3 warns about; = ' and - need no escaping.
    labelpat = re.compile(rb"(LABEL )?([A-Z0-9]*)='(.*)'")
    fieldspat = re.compile(rb"([A-Z0-9]+) \$ ([0-9]+)-([0-9]+)")
    for line in lines:
        line = line.strip()
        m = re.match(labelpat, line)
        if m:
            # Assume labels all come before field positions, so this code
            # does not already exist inside GeoFields.
            code = m.group(2).decode()
            GeoFields[code] = { 'name': m.group(3).decode() }
            continue
        m = re.match(fieldspat, line)
        if m:
            # A position line for a code whose label was already seen;
            # convert the 1-based inclusive columns to Python slice bounds.
            code = m.group(1).decode()
            GeoFields[code]['start'] = int(m.group(2)) - 1
            GeoFields[code]['end'] = int(m.group(3))
            continue
def file_for_code(code):
    """Return the file number whose code table contains *code*, or None."""
    return next(
        (fileno for fileno, table in CensusCodes.items() if code in table),
        None)
def codes_for_description(desc):
    """Return (code, description) pairs whose description contains *desc*,
    matched case-insensitively, across all census files."""
    needle = desc.lower()
    return [(pcode, longdesc)
            for table in CensusCodes.values()
            for pcode, longdesc in table.items()
            if needle in longdesc.lower()]
# Distinct county codes collected while scanning a geo file (sorted ints).
counties = []

def parse_geo_file(filename):
    # Read a <st>geo.uf1 file and accumulate the distinct county codes
    # into the module-level "counties" list.
    with open(filename) as fp:
        for line in fp:
            geo = parse_geo_line(line)
            c = geo['COUNTY'].strip()
            if c:
                c = int(c)
                if c not in counties:
                    counties.append(c)
    counties.sort()
    print("Counties:", counties)
def parse_geo_line(line):
    """Parse the <st>geo.uf1 file according to the GeoFields.

    Returns a dict mapping each field code to the fixed-width substring
    of *line* at that field's start/end columns.
    """
    d = {}
    for code in GeoFields:
        try:
            d[code] = line[GeoFields[code]['start']:GeoFields[code]['end']]
        except KeyError:
            # Field had a label but no column range in the .sas file.
            print("Key error, GeoFields[%s] =" % code, GeoFields[code])
            break
    # print("Line:", line)
    # for field in d:
    #     print(field, ":", d[field], ":", GeoFields[field]['name'])
    # print()
    # print(d['COUNTY'], d['TRACT'], d['BLKGRP'], d['BLOCK'])
    return d
# Command-line driver: always load the code tables from SF1SAS.zip, then
# dispatch on -c / -d / -g, or dump every code table when no flag is given.
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="Parse US Decennial census data")
    parser.add_argument('-c', action="store", dest="code",
                        help='Show filenumber containing 6-digit census code')
    parser.add_argument('-d', action="store", dest="desc",
                        help='Show entries containing a long description')
    parser.add_argument('-g', action="store", dest="geo",
                        help='Parse the <ST>geo.uf1 file')
    parser.add_argument('zipfile', help="location of SF1SAS.zip file")
    args = parser.parse_args(sys.argv[1:])
    # print(args)

    # Pass in the path to SF1SAS.zip
    codesFromZipFile(args.zipfile)

    if args.code:
        print("Files with code %s:" % args.code, file_for_code(args.code))
    elif args.desc:
        codes = codes_for_description(args.desc)
        print('Codes containing description "%s":' % args.desc)
        for pair in codes:
            print("%s: %s" % pair)
    elif args.geo:
        parse_geo_file(args.geo)
    else:
        # No action requested: dump every code table.
        for fileno in CensusCodes:
            print("\n==== File", fileno)
            for pcode in CensusCodes[fileno]:
                print("%7s: %s" % (pcode, CensusCodes[fileno][pcode]))
|
ARE YOU LEVERAGING THE WOMEN EFFECT?
Do you have career clarity? Are you poised to increase your leadership confidence, power and impact?
Looking to increase innovation, competitiveness and revenue?
What is the WOMEN EFFECT?
Multiple studies confirm that women leaders are instrumental in leading companies to greater profits; and now we know why. It’s called the Women Effect. It arises from a distinct gender strength called CORE intelligence combined with female thinking versatility. Now is the time to learn these skills and master the new tools that will change your work life and positively impact your organization.
Will Marré, SMART POWER Academy founder, knows how to enable women’s unique abilities to transform businesses and drive high-value innovation. Will is on a mission to increase women’s influence, strategic contribution and leadership power. He has developed this revolutionary training based on neuro-science & leadership research to bring women more career satisfaction as they power their enterprises with new competitive advantages.
Change your future in a one day workshop you will never forget.
Join like-minded women leaders for a 2 ½ day science-based immersive and collaborative training. The summit format has proven to rapidly accelerate women’s business influence and career satisfaction. Participants are better enabled to direct their careers and more impactfully lead organizations.
©2014-2017 Will Marré | All rights reserved.
|
#!/usr/bin/python
# coding=UTF-8
import sys
import re
import os
import math
# Time formats
# mm:ss with mm > 59, mm > 99
# hh:mm:ss with mm <= 59, ss <= 59
# Arguments format
# 01 52nd Street Theme 00:11 02 A Night in Tunisia 05:03
# 03 All The Things You Are 08:07 04 Embraceable You 15:21
# 00:00:00 01 Serenade Grotesque 00:03:20 02 Menuet Antique
# 00:09:31 03 Pavane Pour Une infante defunte 00:15:55 04 Jeux D'eau
# The song names don't have to be between quotes and can be before or after the timestamps (the regex don't care)
# mp3splt format
# mp3splt foo.mp3 0.30 1.00 2.00 EOF -o @n
# 1.mp3 0:30 - 1:00
# 2.mp3 1:00 - 2:00
# 3.mp3 3:00 - EOF
def toMinSec(time):
    """Convert a timestamp string to mp3splt's "mm.ss" format.

    "mm:ss" passes through as "mm.ss"; "hh:mm:ss" folds the hours into
    minutes (e.g. "1:02:05" -> "62.05").  Any other shape returns None.
    """
    splited = re.split(":", time)
    # Use == rather than "is": identity comparison of ints only works by
    # accident of CPython's small-integer caching.
    if (len(splited) == 2):
        # No need to convert
        return (splited[0]+"."+splited[1])
    elif (len(splited) == 3):
        minutes = int(splited[0])*60 + int(splited[1])
        minutes = str(minutes)
        return(minutes+"."+splited[2])
    else:
        return None
# TODO if the argument has ' quotes must be closed
# even when the script doesn't use them.
# This happens before the script runs
#inputfile = sys.argv[1]

# Re-join everything after the program name into one string; the regexes
# below pull the timestamps and track names back out of it.
argv = ' '.join(sys.argv[1:len(sys.argv)])
# Removes name of the program argv[0] and input file and converts it # to srt separated by ' '

# \d+:\d{2} -> mm:ss
# \d+:\d{2}:\d{2} -> hh:mm:ss
# The hh:mm:ss alternative must come first: otherwise the mm:ss branch
# would match just the first two components of an hh:mm:ss timestamp.
time_regex = r'\d+:\d{2}:\d{2}|\d+:\d{2}'
arg_time = re.findall(time_regex, argv)
num_time = len(arg_time)

# Whatever lies between timestamps: the input file name first, then names.
arg_name = re.split(time_regex, argv)
inputfile = arg_name[0]
del arg_name[0]

# arg_name has some empty strings entries we need to remove
try:
    # Only eliminates one '' each time
    for i in range(0, len(arg_name)):
        arg_name.remove('') # If it doesn't find it throws an error
except ValueError:
    pass
num_name = len(arg_name)

# There's always a space at the end of arg_name[0] and the rest have
# spaces both at the end and the beginning
temp = arg_name[0][0:len(arg_name[0])-1]
arg_name[0] = temp
for i in range(1, num_name):
    temp = arg_name[i][1:len(arg_name[i])-1]
    arg_name[i] = temp

# TODO check that num_name = num_time + 1
# Initial timestamp may be implicit and num_name = num_time + 2
if (num_name == num_time):
    # Build "file t1 t2 ... EOF -o @n" so mp3splt numbers the output files.
    mp3args = inputfile+" "
    for i in range(0, num_time):
        mp3args += toMinSec(arg_time[i])+" "
    mp3args += "EOF -o @n"
else:
    sys.exit("The number of names and timestamps doesn't match")

os.system("mp3splt "+mp3args)

# Zero-padding width needed to cover num_name track numbers.
pad = math.floor(math.log10(num_name))+1
# The mp3splt name files will be str(i).zfill(pad)
for i in range(1, num_name+1):
    print (str(i).zfill(pad)+".mp3")
    seqname = str(i).zfill(pad)
    # Quote the target name: track titles may contain spaces.
    filename = '"' +seqname+" - "+arg_name[i-1]+".mp3"+ '"'
    os.system("mv "+seqname+".mp3"+" "+filename)
|
Online property management software for landlords in Australia | RealRenta One month FREE usage of RealRenta for the best property manager nightmare story!
We have all heard them and some of us have been unlucky enough to be in them.
Nightmare property managers are everywhere.
Sure they are nice enough to your face and have all the right platitudes when needed, but do they really do what they say they do and more importantly, do they even care?
One of our new landlords found out that a tenant had been running a child care centre from his property for 5 years!
The property manager "forgot” to tell him and the damage to his property is enormous, not to mention all the insurance implications when this type of thing happens.
Send all your nightmarish stories to jason@realrenta.com and we will feature the best one in an upcoming blog.
If for some strange reason you are not yet a RealRenta Landlord, you can trial RealRenta for Free for up to 2 months.
|
from influxdb import client as influxdb
import json
from stompy.stomp import Stomp
import threading
import time
def listener():
    """Consume stats messages from ActiveMQ and write them to InfluxDB.

    Subscribes to the /queue/stats queue, decodes each JSON frame and
    posts one point per subject to the "motech" InfluxDB database,
    acknowledging the frame only after processing.  Runs forever.
    """
    # Connect to activemq and subscribe to the stats queue
    stomp = Stomp("localhost")
    stomp.connect()
    stomp.subscribe({'destination': '/queue/stats', 'ack': 'client'})
    # Connect to influxdb
    db = influxdb.InfluxDBClient(database="motech")
    while True:
        frame = stomp.receive_frame()
        print(frame.headers['message-id'])
        # Post to influxDB
        msg = json.loads(frame.body)
        if len(msg["subjects"]):
            data = [
                {
                    "name": "activemq_queue_depth_" + subject["subject"],
                    "columns": ["timestamp", "value"],
                    "points": [[msg["timestamp"], subject["value"]]],
                }
                for subject in msg["subjects"]
            ]
            print(data)
            try:
                db.write_points(data)
            except Exception:
                # Was a bare "except:", which also swallowed
                # KeyboardInterrupt/SystemExit.  The connection may have
                # gone stale: reconnect once and retry; a second failure
                # propagates instead of being hidden.
                db = influxdb.InfluxDBClient(database="motech")
                db.write_points(data)
        stomp.ack(frame)
# [
# {
# "name": "activemq_queue_depth",
# "columns": ["time", "subject", "value"],
# "points": [
# [1400425947368, "", ]
# ]
# }
# ]
if __name__ == "__main__":
    # Producer side: a second STOMP connection posts test messages while
    # the listener thread consumes the replies from /queue/stats.
    stomp = Stomp("localhost")
    stomp.connect()
    # Daemon thread so the process exits when the main loop is interrupted.
    t = threading.Thread(target=listener)
    t.daemon = True
    t.start()
    while True:
        time.sleep(1)
        # Send message to activemq
        stomp.send({'destination': '/queue/foo.bar',
                    'body': 'Testing',
                    'reply-to': '/queue/stats'})
|
You can count on Home Remodel Guys to offer the most effective professional services regarding Remodelers in Clarkdale, AZ. Our crew of highly skilled contractors can provide the support you'll need with the most innovative technology available. We will work with first-rate products and cash saving solutions to ensure that you are given the very best support for the best price. We intend to assist you to put together decisions for your own undertaking, reply to all your questions, and arrange a meeting with our workers when you give us a call at 800-466-6240.
At Home Remodel Guys, we know that you must stay within budget and spend less money whenever it's possible to. You will still need to have professional quality work with Remodelers in Clarkdale, AZ, and you can depend on us to save you money while still giving the finest quality work. Our endeavors to save you money will not sacrifice the high quality of our services. Whenever you deal with our company, you'll get the advantage of our expertise and top quality supplies to ensure that any project can last while saving time and money. This will be feasible because we understand how to save you time and resources on materials and labor. Choose Home Remodel Guys if you want the highest quality support at the cheapest cost. You're able to reach us by dialing 800-466-6240 to start.
You have to be well informed with regards to Remodelers in Clarkdale, AZ. We will not encourage you to come up with imprudent choices, since we know just what we are working at, and we ensure that you understand what to anticipate from the project. You won't need to face any unexpected surprises when you do business with Home Remodel Guys. Begin by contacting 800-466-6240 to talk about your work. Within this phone call, you will get your questions addressed, and we're going to arrange a time to initiate work. We are going to work together with you through the whole process, and our crew is going to arrive promptly and prepared.
If you find yourself planning a project for Remodelers in Clarkdale, AZ, you'll find great reasons to work with Home Remodel Guys. Our company is the best option when you need the most efficient cash saving options, the best equipment, and the highest level of client satisfaction. Our company is ready to help you with the greatest working experience and competence available. Call 800-466-6240 to connect with Home Remodel Guys and discuss your goals for Remodelers in Clarkdale.
|
from docutils import nodes
from docutils.parsers.rst.directives import unchanged_required, unchanged, flag
import os
import sys
import copy
import fnmatch
import re
import textwrap
import collections
from docutils.parsers import rst
from docutils.statemachine import ViewList
from sphinx.domains.cpp import DefinitionParser
from breathe.finder import FinderFactory, NoMatchesError, MultipleMatchesError
from breathe.parser import DoxygenParserFactory, CacheFactory, ParserError
from breathe.renderer.rst.doxygen import DoxygenToRstRendererFactoryCreatorConstructor, RstContentCreator
from breathe.renderer.rst.doxygen.domain import DomainHandlerFactoryCreator, NullDomainHandler
from breathe.renderer.rst.doxygen.domain import CppDomainHelper, CDomainHelper
from breathe.renderer.rst.doxygen.filter import FilterFactory, GlobFactory
from breathe.renderer.rst.doxygen.target import TargetHandlerFactory
from breathe.finder.doxygen import DoxygenItemFinderFactoryCreator, ItemMatcherFactory
import docutils.nodes
import sphinx.addnodes
import sphinx.ext.mathbase
# Somewhat outrageously, reach in and fix a Sphinx regex: the replacement
# pattern adds an optional leading "~" to identifiers, presumably so
# destructor-style names parse in the C++ domain -- confirm against the
# targeted Sphinx version before upgrading Sphinx.
import sphinx.domains.cpp
sphinx.domains.cpp._identifier_re = re.compile(r'(~?\b[a-zA-Z_][a-zA-Z0-9_]*)\b')
class BreatheException(Exception):
    """Base class for all errors raised by the breathe extension."""
    pass
class NoMatchingFunctionError(BreatheException):
    """Raised when no function matches the requested name."""
    pass
class UnableToResolveFunctionError(BreatheException):
    """Raised when several overloads match a function name but none has
    exactly the requested argument types."""
    pass
class NoDefaultProjectError(BreatheException):
    """Raised when a directive specifies no project/path and no
    breathe_default_project fallback is configured."""
    pass
class BaseDirective(rst.Directive):
    """Common base for all breathe directives.

    Holds the collaborator objects (finder, matcher, renderer and filter
    factories, etc.) that concrete directives need.  Instances are built
    through DirectiveContainer, which prepends these collaborators to the
    docutils-supplied constructor arguments.
    """

    def __init__(
            self,
            root_data_object,
            renderer_factory_creator_constructor,
            finder_factory,
            matcher_factory,
            project_info_factory,
            filter_factory,
            target_handler_factory,
            *args
            ):
        rst.Directive.__init__(self, *args)
        self.root_data_object = root_data_object
        self.renderer_factory_creator_constructor = renderer_factory_creator_constructor
        self.finder_factory = finder_factory
        self.matcher_factory = matcher_factory
        self.project_info_factory = project_info_factory
        self.filter_factory = filter_factory
        self.target_handler_factory = target_handler_factory
# Directives
# ----------
class DoxygenIndexDirective(BaseDirective):
    """Render the whole doxygen index of a project (doxygenindex)."""

    required_arguments = 0
    optional_arguments = 2
    option_spec = {
        "path": unchanged_required,
        "project": unchanged_required,
        "outline": flag,
        "no-link": flag,
    }
    has_content = False

    def run(self):
        """Build the docutils node tree for the entire doxygen index.

        Returns the rendered node list, or a warning node (plus a reporter
        warning) when the project cannot be resolved or its xml cannot be
        parsed.
        """
        try:
            project_info = self.project_info_factory.create_project_info(self.options)
        except NoDefaultProjectError as e:
            warning = 'doxygenindex: %s' % e
            return [docutils.nodes.warning("", docutils.nodes.paragraph("", "", docutils.nodes.Text(warning))),
                    self.state.document.reporter.warning(warning, line=self.lineno)]
        try:
            finder = self.finder_factory.create_finder(project_info)
        except ParserError as e:
            warning = 'doxygenindex: Unable to parse file "%s"' % e
            return [docutils.nodes.warning("", docutils.nodes.paragraph("", "", docutils.nodes.Text(warning))),
                    self.state.document.reporter.warning(warning, line=self.lineno)]

        # The index finder's root covers every compound in the project.
        data_object = finder.root()

        target_handler = self.target_handler_factory.create(self.options, project_info, self.state.document)
        filter_ = self.filter_factory.create_index_filter(self.options)

        renderer_factory_creator = self.renderer_factory_creator_constructor.create_factory_creator(
            project_info,
            self.state.document,
            self.options,
        )
        renderer_factory = renderer_factory_creator.create_factory(
            data_object,
            self.state,
            self.state.document,
            filter_,
            target_handler,
        )
        object_renderer = renderer_factory.create_renderer(self.root_data_object, data_object)
        node_list = object_renderer.render()
        return node_list
class DoxygenFunctionDirective(BaseDirective):
    """Render a single function, resolving overloads by their argument
    list (doxygenfunction)."""

    required_arguments = 1
    option_spec = {
        "path": unchanged_required,
        "project": unchanged_required,
        "outline": flag,
        "no-link": flag,
    }
    has_content = False
    final_argument_whitespace = True

    def run(self):
        """Resolve ``namespace::name(args)``, pick the matching overload
        and render it; emits warning nodes when lookup or overload
        resolution fails."""
        # Separate possible arguments (delimited by a "(") from the namespace::name
        match = re.match(r"([^(]*)(.*)", self.arguments[0])
        namespaced_function, args = match.group(1), match.group(2)

        # Split the namespace and the function name
        try:
            (namespace, function_name) = namespaced_function.rsplit("::", 1)
        except ValueError:
            (namespace, function_name) = "", namespaced_function

        try:
            project_info = self.project_info_factory.create_project_info(self.options)
        except NoDefaultProjectError as e:
            warning = 'doxygenfunction: %s' % e
            return [docutils.nodes.warning("", docutils.nodes.paragraph("", "", docutils.nodes.Text(warning))),
                    self.state.document.reporter.warning(warning, line=self.lineno)]

        finder = self.finder_factory.create_finder(project_info)

        # Extract arguments from the function name.
        args = self.parse_args(args)

        matcher_stack = self.matcher_factory.create_matcher_stack(
            {
                "compound": self.matcher_factory.create_name_matcher(namespace),
                "member": self.matcher_factory.create_name_type_matcher(function_name, "function")
            },
            "member"
        )

        results = finder.find(matcher_stack)

        try:
            data_object = self.resolve_function(results, args)
        except NoMatchingFunctionError:
            warning = ('doxygenfunction: Cannot find function "%s%s" in doxygen xml output '
                       'for project "%s" from directory: %s'
                       % (namespace, function_name, project_info.name(), project_info.path()))
            return [docutils.nodes.warning("", docutils.nodes.paragraph("", "", docutils.nodes.Text(warning))),
                    self.state.document.reporter.warning(warning, line=self.lineno)]
        except UnableToResolveFunctionError:
            warning = ('doxygenfunction: Unable to resolve multiple matches for function "%s%s" with arguments (%s) in doxygen xml output '
                       'for project "%s" from directory: %s.'
                       % (namespace, function_name, ", ".join(args), project_info.name(), project_info.path()))
            return [docutils.nodes.warning("", docutils.nodes.paragraph("", "", docutils.nodes.Text(warning))),
                    self.state.document.reporter.warning(warning, line=self.lineno)]

        target_handler = self.target_handler_factory.create(self.options, project_info, self.state.document)
        filter_ = self.filter_factory.create_outline_filter(self.options)

        renderer_factory_creator = self.renderer_factory_creator_constructor.create_factory_creator(
            project_info,
            self.state.document,
            self.options,
        )
        renderer_factory = renderer_factory_creator.create_factory(
            data_object,
            self.state,
            self.state.document,
            filter_,
            target_handler,
        )
        object_renderer = renderer_factory.create_renderer(self.root_data_object, data_object)
        node_list = object_renderer.render()
        return node_list

    def parse_args(self, function_description):
        """Extract the argument types from a function description string.

        ``(int, float)`` yields ``["int", "float"]``; a description with
        no parenthesised argument list, or an empty one such as ``()``,
        yields ``[]``.  Commas nested inside parentheses or template
        angle brackets do not split arguments.
        """
        paren_index = function_description.find('(')
        if paren_index == -1:
            return []

        # Parse the function name string, eg. f(int, float) to
        # extract the types so we can use them for matching
        args = []
        # Starts at -1 so the opening parenthesis itself brings the
        # bracket depth to zero; commas only split at depth zero.
        num_open_brackets = -1
        start = paren_index + 1
        for i in range(paren_index, len(function_description)):
            c = function_description[i]
            if c == '(' or c == '<':
                num_open_brackets += 1
            elif c == ')' or c == '>':
                num_open_brackets -= 1
            elif c == ',' and num_open_brackets == 0:
                args.append(function_description[start:i].strip())
                start = i + 1
        # Previously the trailing segment was appended unconditionally, so
        # "()" produced [""] and no-argument overloads could never be
        # resolved; only append a non-empty final argument.
        last_arg = function_description[start:-1].strip()
        if last_arg:
            args.append(last_arg)
        return args

    def resolve_function(self, matches, args):
        """Pick the single match whose parameter types equal *args*.

        Raises NoMatchingFunctionError when there are no candidates, and
        UnableToResolveFunctionError when several candidates remain but
        none matches the requested argument types exactly.
        """
        if not matches:
            raise NoMatchingFunctionError()

        if len(matches) == 1:
            return matches[0]

        data_object = None

        # Tries to match the args array against the arguments listed in the
        # doxygen data
        # TODO: We don't have any doxygen xml dom accessing code at this level
        # this might benefit from being abstracted away at some point
        for entry in matches:
            if len(args) == len(entry.param):
                equal = True
                for i in range(len(args)):
                    param_type = entry.param[i].type_.content_[0].value
                    # Plain text values arrive as unicode (Python 2); mixed
                    # markup nodes expose their text via valueOf_.
                    if not isinstance(param_type, unicode):
                        param_type = param_type.valueOf_
                    if args[i] != param_type:
                        equal = False
                        break
                if equal:
                    data_object = entry
                    break

        if not data_object:
            raise UnableToResolveFunctionError()

        return data_object
class DoxygenClassDirective(BaseDirective):
    """Render a single class and, optionally, selected members
    (doxygenclass)."""

    kind = "class"
    required_arguments = 1
    optional_arguments = 1
    option_spec = {
        "path": unchanged_required,
        "project": unchanged_required,
        "members": unchanged,
        "sections": unchanged,
        "show": unchanged_required,
        "outline": flag,
        "no-link": flag,
    }
    has_content = False

    def run(self):
        """Look the class up by name and kind and render it; emits a
        warning node when the project or the class cannot be found."""
        name = self.arguments[0]

        try:
            project_info = self.project_info_factory.create_project_info(self.options)
        except NoDefaultProjectError as e:
            warning = 'doxygen%s: %s' % (self.kind, e)
            return [docutils.nodes.warning("", docutils.nodes.paragraph("", "", docutils.nodes.Text(warning))),
                    self.state.document.reporter.warning(warning, line=self.lineno)]

        finder = self.finder_factory.create_finder(project_info)

        matcher_stack = self.matcher_factory.create_matcher_stack(
            {
                "compound": self.matcher_factory.create_name_type_matcher(name, self.kind)
            },
            "compound"
        )

        try:
            data_object = finder.find_one(matcher_stack)
        except NoMatchesError as e:
            warning = ('doxygen%s: Cannot find %s "%s" in doxygen xml output for project "%s" from directory: %s'
                       % (self.kind, self.kind, name, project_info.name(), project_info.path()))
            return [docutils.nodes.warning("", docutils.nodes.paragraph("", "", docutils.nodes.Text(warning))),
                    self.state.document.reporter.warning(warning, line=self.lineno)]

        target_handler = self.target_handler_factory.create(self.options, project_info, self.state.document)
        filter_ = self.filter_factory.create_class_filter(self.options)

        renderer_factory_creator = self.renderer_factory_creator_constructor.create_factory_creator(
            project_info,
            self.state.document,
            self.options,
        )
        renderer_factory = renderer_factory_creator.create_factory(
            data_object,
            self.state,
            self.state.document,
            filter_,
            target_handler,
        )
        object_renderer = renderer_factory.create_renderer(self.root_data_object, data_object)
        node_list = object_renderer.render()
        return node_list
class DoxygenFileDirective(BaseDirective):
    """Render everything documented in a single source file
    (doxygenfile)."""

    kind = "file"
    required_arguments = 1
    optional_arguments = 1
    option_spec = {
        "path": unchanged_required,
        "project": unchanged_required,
        "no-link": flag,
    }
    has_content = False

    def run(self):
        """Find the file compound by name and render it; emits a warning
        node when zero or more than one file matches."""
        name = self.arguments[0]

        try:
            project_info = self.project_info_factory.create_project_info(self.options)
        except NoDefaultProjectError as e:
            warning = 'doxygenfile: %s' % e
            return [docutils.nodes.warning("", docutils.nodes.paragraph("", "", docutils.nodes.Text(warning))),
                    self.state.document.reporter.warning(warning, line=self.lineno)]

        finder = self.finder_factory.create_finder(project_info)

        finder_filter = self.filter_factory.create_file_finder_filter(name)

        # filter_ appends every matching compound into the matches list.
        matches = []
        finder.filter_(finder_filter, matches)

        if len(matches) > 1:
            warning = ('doxygenfile: Found multiple matches for file "%s" in doxygen xml output for project "%s" '
                       'from directory: %s' % (name, project_info.name(), project_info.path()))
            return [docutils.nodes.warning("", docutils.nodes.paragraph("", "", docutils.nodes.Text(warning))),
                    self.state.document.reporter.warning(warning, line=self.lineno)]
        elif not matches:
            warning = ('doxygenfile: Cannot find file "%s" in doxygen xml output for project "%s" from directory: %s'
                       % (name, project_info.name(), project_info.path()))
            return [docutils.nodes.warning("", docutils.nodes.paragraph("", "", docutils.nodes.Text(warning))),
                    self.state.document.reporter.warning(warning, line=self.lineno)]

        target_handler = self.target_handler_factory.create(self.options, project_info, self.state.document)
        filter_ = self.filter_factory.create_file_filter(name, self.options)

        renderer_factory_creator = self.renderer_factory_creator_constructor.create_factory_creator(
            project_info,
            self.state.document,
            self.options,
        )
        node_list = []
        # Render each matched compound and concatenate the node lists
        # (the >1 case returns above, so this is effectively one match).
        for data_object in matches:
            renderer_factory = renderer_factory_creator.create_factory(
                data_object,
                self.state,
                self.state.document,
                filter_,
                target_handler,
            )
            object_renderer = renderer_factory.create_renderer(self.root_data_object, data_object)
            node_list.extend(object_renderer.render())
        return node_list
class DoxygenBaseDirective(BaseDirective):
    """Shared implementation for directives that look up a single named
    item; subclasses set ``kind`` and implement create_matcher_stack()."""

    required_arguments = 1
    optional_arguments = 1
    option_spec = {
        "path": unchanged_required,
        "project": unchanged_required,
        "outline": flag,
        "no-link": flag,
    }
    has_content = False

    def run(self):
        """Split the argument into namespace and name, find the single
        matching item and render it; emits warning nodes on failure."""
        # The argument may or may not carry a namespace qualifier.
        try:
            namespace, name = self.arguments[0].rsplit("::", 1)
        except ValueError:
            namespace, name = "", self.arguments[0]

        try:
            project_info = self.project_info_factory.create_project_info(self.options)
        except NoDefaultProjectError as e:
            warning = 'doxygen%s: %s' % (self.kind, e)
            return [docutils.nodes.warning("", docutils.nodes.paragraph("", "", docutils.nodes.Text(warning))),
                    self.state.document.reporter.warning(warning, line=self.lineno)]

        finder = self.finder_factory.create_finder(project_info)

        matcher_stack = self.create_matcher_stack(namespace, name)

        try:
            data_object = finder.find_one(matcher_stack)
        except NoMatchesError as e:
            display_name = "%s::%s" % (namespace, name) if namespace else name
            warning = ('doxygen%s: Cannot find %s "%s" in doxygen xml output for project "%s" from directory: %s'
                       % (self.kind, self.kind, display_name, project_info.name(), project_info.path()))
            return [docutils.nodes.warning("", docutils.nodes.paragraph("", "", docutils.nodes.Text(warning))),
                    self.state.document.reporter.warning(warning, line=self.lineno)]

        target_handler = self.target_handler_factory.create(self.options, project_info, self.state.document)
        filter_ = self.filter_factory.create_outline_filter(self.options)

        renderer_factory_creator = self.renderer_factory_creator_constructor.create_factory_creator(
            project_info,
            self.state.document,
            self.options,
        )
        renderer_factory = renderer_factory_creator.create_factory(
            data_object,
            self.state,
            self.state.document,
            filter_,
            target_handler,
        )
        object_renderer = renderer_factory.create_renderer(self.root_data_object, data_object)
        node_list = object_renderer.render()
        return node_list
class DoxygenStructDirective(DoxygenBaseDirective):
    """Render a single struct (doxygenstruct)."""

    kind = "struct"

    def create_matcher_stack(self, namespace, name):
        # Doxygen stores structs under their fully namespaced name, so the
        # compound matcher receives "ns::name" rather than a separate
        # compound/member pair.  The separator used here is the C++ one;
        # this may want to become file-type dependent.
        if namespace:
            xml_name = "::".join((namespace, name))
        else:
            xml_name = name
        compound_matcher = self.matcher_factory.create_name_type_matcher(xml_name, self.kind)
        return self.matcher_factory.create_matcher_stack(
            {"compound": compound_matcher},
            "compound"
        )
# This class was the same as the DoxygenBaseDirective above, except that it
# wrapped the output in a definition_list before passing it back. This should
# be abstracted in a far nicer way to avoid repeating so much code.
#
# Now we've removed the definition_list wrap, so we really need to refactor this!
class DoxygenBaseItemDirective(BaseDirective):
    """Shared implementation for member-level items (variable, define,
    enum, typedef); subclasses set ``kind`` and implement
    create_matcher_stack()."""

    required_arguments = 1
    optional_arguments = 1
    option_spec = {
        "path": unchanged_required,
        "project": unchanged_required,
        "outline": flag,
        "no-link": flag,
    }
    has_content = False

    def run(self):
        """Split the argument into namespace and name, find the single
        matching member and render it; emits warning nodes on failure."""
        # The argument may or may not carry a namespace qualifier.
        try:
            namespace, name = self.arguments[0].rsplit("::", 1)
        except ValueError:
            namespace, name = "", self.arguments[0]

        try:
            project_info = self.project_info_factory.create_project_info(self.options)
        except NoDefaultProjectError as e:
            warning = 'doxygen%s: %s' % (self.kind, e)
            return [docutils.nodes.warning("", docutils.nodes.paragraph("", "", docutils.nodes.Text(warning))),
                    self.state.document.reporter.warning(warning, line=self.lineno)]

        finder = self.finder_factory.create_finder(project_info)

        matcher_stack = self.create_matcher_stack(namespace, name)

        try:
            data_object = finder.find_one(matcher_stack)
        except NoMatchesError as e:
            display_name = "%s::%s" % (namespace, name) if namespace else name
            warning = ('doxygen%s: Cannot find %s "%s" in doxygen xml output for project "%s" from directory: %s'
                       % (self.kind, self.kind, display_name, project_info.name(), project_info.path()))
            return [docutils.nodes.warning("", docutils.nodes.paragraph("", "", docutils.nodes.Text(warning))),
                    self.state.document.reporter.warning(warning, line=self.lineno)]

        target_handler = self.target_handler_factory.create(self.options, project_info, self.state.document)
        filter_ = self.filter_factory.create_outline_filter(self.options)

        renderer_factory_creator = self.renderer_factory_creator_constructor.create_factory_creator(
            project_info,
            self.state.document,
            self.options,
        )
        renderer_factory = renderer_factory_creator.create_factory(
            data_object,
            self.state,
            self.state.document,
            filter_,
            target_handler,
        )
        object_renderer = renderer_factory.create_renderer(self.root_data_object, data_object)
        node_list = object_renderer.render()
        return node_list
class DoxygenVariableDirective(DoxygenBaseItemDirective):
    """Render a single variable (doxygenvariable)."""

    kind = "variable"

    def create_matcher_stack(self, namespace, name):
        # Match the enclosing compound by namespace and the member itself
        # by name and kind.
        factory = self.matcher_factory
        matchers = {
            "compound": factory.create_name_matcher(namespace),
            "member": factory.create_name_type_matcher(name, self.kind),
        }
        return factory.create_matcher_stack(matchers, "member")
class DoxygenDefineDirective(DoxygenBaseItemDirective):
    """Render a single preprocessor define (doxygendefine)."""

    kind = "define"

    def create_matcher_stack(self, namespace, name):
        # Match the enclosing compound by namespace and the member itself
        # by name and kind.
        factory = self.matcher_factory
        matchers = {
            "compound": factory.create_name_matcher(namespace),
            "member": factory.create_name_type_matcher(name, self.kind),
        }
        return factory.create_matcher_stack(matchers, "member")
class DoxygenEnumDirective(DoxygenBaseItemDirective):
    """Render a single enum (doxygenenum)."""

    kind = "enum"

    def create_matcher_stack(self, namespace, name):
        # Match the enclosing compound by namespace and the member itself
        # by name and kind.
        factory = self.matcher_factory
        matchers = {
            "compound": factory.create_name_matcher(namespace),
            "member": factory.create_name_type_matcher(name, self.kind),
        }
        return factory.create_matcher_stack(matchers, "member")
class DoxygenTypedefDirective(DoxygenBaseItemDirective):
    """Render a single typedef (doxygentypedef)."""

    kind = "typedef"

    def create_matcher_stack(self, namespace, name):
        # Match the enclosing compound by namespace and the member itself
        # by name and kind.
        factory = self.matcher_factory
        matchers = {
            "compound": factory.create_name_matcher(namespace),
            "member": factory.create_name_type_matcher(name, self.kind),
        }
        return factory.create_matcher_stack(matchers, "member")
# Setup Administration
# --------------------
class DirectiveContainer(object):
    """Callable factory that binds the breathe collaborator objects to a
    directive class.

    Sphinx treats the registered object like a directive class: it reads
    the usual class attributes from it and calls it to build an instance.
    Calling the container prepends the bound arguments to whatever
    docutils supplies.
    """

    def __init__(self, directive, *args):
        self.directive = directive
        self.args = args

        # Mirror the attributes Sphinx inspects on a directive class.
        self.required_arguments = directive.required_arguments
        self.optional_arguments = directive.optional_arguments
        self.option_spec = directive.option_spec
        self.has_content = directive.has_content
        self.final_argument_whitespace = directive.final_argument_whitespace

    def __call__(self, *args):
        # Bound collaborator arguments first, then the docutils-supplied ones.
        return self.directive(*(tuple(self.args) + args))
class ProjectInfo(object):
    """Description of one doxygen project: its name, xml path,
    cross-reference id and the rules mapping source files to Sphinx
    domains."""

    def __init__(self, name, path, reference, domain_by_extension, domain_by_file_pattern, match):
        self._name = name
        self._path = path
        self._reference = reference
        self._domain_by_extension = domain_by_extension
        self._domain_by_file_pattern = domain_by_file_pattern
        self._match = match

    def name(self):
        return self._name

    def path(self):
        return self._path

    def reference(self):
        return self._reference

    def domain_for_file(self, file_):
        """Return the Sphinx domain for *file_*.

        The extension mapping is consulted first; any matching file
        pattern then overrides it.  Returns '' when nothing matches.
        """
        extension = file_.rsplit(".", 1)[-1]
        domain = self._domain_by_extension.get(extension, "")

        for pattern, pattern_domain in self._domain_by_file_pattern.items():
            if self._match(file_, pattern):
                domain = pattern_domain

        return domain
class ProjectInfoFactory(object):
    """Creates and caches ProjectInfo objects from directive options and
    the breathe_* config values."""

    def __init__(self, match):
        # fnmatch-style predicate, passed through to each ProjectInfo.
        self.match = match

        self.projects = {}
        self.default_project = None
        self.domain_by_extension = {}
        self.domain_by_file_pattern = {}

        # Counter used to synthesise names for "path"-only projects.
        self.project_count = 0
        # Cache of ProjectInfo keyed by xml path so directives referring
        # to the same project share one instance.
        self.project_info_store = {}

    def update(self, projects, default_project, domain_by_extension, domain_by_file_pattern):
        """Refresh the factory from the Sphinx config values (called on
        builder-inited so every DirectiveContainer sees current settings)."""
        self.projects = projects
        self.default_project = default_project
        self.domain_by_extension = domain_by_extension
        self.domain_by_file_pattern = domain_by_file_pattern

    def default_path(self):
        """Return the xml path of the default project.

        Raises NoDefaultProjectError when breathe_default_project is not
        configured.
        """
        if not self.default_project:
            raise NoDefaultProjectError(
                "No breathe_default_project config setting to fall back on "
                "for directive with no 'project' or 'path' specified."
            )
        return self.projects[self.default_project]

    def create_project_info(self, options):
        """Return the (cached) ProjectInfo for a directive's options.

        Resolution order: explicit "project" option, explicit "path"
        option, then the configured default project.  Raises
        NoDefaultProjectError when the named project is unknown or no
        project/path/default is available.
        """
        name = ""

        if "project" in options:
            try:
                path = self.projects[options["project"]]
                name = options["project"]
            except KeyError:
                # Previously this only wrote to stderr and fell through,
                # which crashed later with an UnboundLocalError on "path".
                # Raise a handled error so the directive emits a warning
                # node instead.
                message = (
                    "Unable to find project '%s' in breathe_projects dictionary" % options["project"]
                )
                sys.stderr.write(message)
                raise NoDefaultProjectError(message)
        elif "path" in options:
            path = options["path"]
        else:
            path = self.default_path()

        try:
            return self.project_info_store[path]
        except KeyError:
            reference = name
            if not name:
                # Anonymous project specified only by path: synthesise a
                # name and use the path itself as the reference.
                name = "project%s" % self.project_count
                reference = path
                self.project_count += 1

            project_info = ProjectInfo(
                name,
                path,
                reference,
                self.domain_by_extension,
                self.domain_by_file_pattern,
                self.match
            )
            self.project_info_store[path] = project_info
            return project_info
class DoxygenDirectiveFactory(object):
    """Builds DirectiveContainer objects that bind the shared collaborator
    set to each breathe directive class."""

    # Maps the rst directive name to its implementing class.
    directives = {
        "doxygenindex": DoxygenIndexDirective,
        "doxygenfunction": DoxygenFunctionDirective,
        "doxygenstruct": DoxygenStructDirective,
        "doxygenclass": DoxygenClassDirective,
        "doxygenvariable": DoxygenVariableDirective,
        "doxygendefine": DoxygenDefineDirective,
        "doxygenenum": DoxygenEnumDirective,
        "doxygentypedef": DoxygenTypedefDirective,
        "doxygenfile": DoxygenFileDirective,
    }

    def __init__(
            self,
            root_data_object,
            renderer_factory_creator_constructor,
            finder_factory,
            matcher_factory,
            project_info_factory,
            filter_factory,
            target_handler_factory
            ):
        self.root_data_object = root_data_object
        self.renderer_factory_creator_constructor = renderer_factory_creator_constructor
        self.finder_factory = finder_factory
        self.matcher_factory = matcher_factory
        self.project_info_factory = project_info_factory
        self.filter_factory = filter_factory
        self.target_handler_factory = target_handler_factory

    # One convenience creator per registered directive name.
    def create_index_directive_container(self):
        return self.create_directive_container("doxygenindex")

    def create_function_directive_container(self):
        return self.create_directive_container("doxygenfunction")

    def create_struct_directive_container(self):
        return self.create_directive_container("doxygenstruct")

    def create_enum_directive_container(self):
        return self.create_directive_container("doxygenenum")

    def create_typedef_directive_container(self):
        return self.create_directive_container("doxygentypedef")

    def create_class_directive_container(self):
        return self.create_directive_container("doxygenclass")

    def create_file_directive_container(self):
        return self.create_directive_container("doxygenfile")

    def create_variable_directive_container(self):
        return self.create_directive_container("doxygenvariable")

    def create_define_directive_container(self):
        return self.create_directive_container("doxygendefine")

    def create_directive_container(self, type_):
        """Wrap the directive class registered under *type_* together
        with the bound collaborator objects."""
        return DirectiveContainer(
            self.directives[type_],
            self.root_data_object,
            self.renderer_factory_creator_constructor,
            self.finder_factory,
            self.matcher_factory,
            self.project_info_factory,
            self.filter_factory,
            self.target_handler_factory
        )

    def get_config_values(self, app):
        # All DirectiveContainers maintain references to this project info factory
        # so we can update this to update them
        self.project_info_factory.update(
            app.config.breathe_projects,
            app.config.breathe_default_project,
            app.config.breathe_domain_by_extension,
            app.config.breathe_domain_by_file_pattern,
        )
class NodeFactory(object):
    """Attribute-lookup facade over several node-providing modules.

    Attribute access returns the first matching node class found in the
    wrapped sources (e.g. docutils.nodes, sphinx.addnodes, math nodes).
    """

    def __init__(self, *args):
        self.sources = args

    def __getattr__(self, node_name):
        for source in self.sources:
            try:
                return getattr(source, node_name)
            except AttributeError:
                pass

        # Previously this raised NodeNotFoundError, a name that is not
        # defined anywhere, so a miss surfaced as a confusing NameError.
        # AttributeError is the correct contract for __getattr__.
        raise AttributeError(
            "NodeFactory: no node named '%s' found in any source" % node_name
        )
class RootDataObject(object):
    """Sentinel representing the root of the doxygen data tree."""
    # Mirrors the node_type attribute exposed by parsed doxygen nodes.
    node_type = "root"
class PathHandler(object):
    """Thin wrapper over os.path-style callables, injectable for testing."""

    def __init__(self, sep, basename, join):
        self.sep = sep
        self.basename = basename
        self.join = join

    def includes_directory(self, file_path):
        # True when the path contains at least one separator, i.e. it
        # names something below a directory.
        return self.sep in file_path
class MTimer(object):
    """Injectable wrapper around a getmtime-style callable."""

    def __init__(self, getmtime):
        self.getmtime = getmtime

    def get_mtime(self, filename):
        # Delegates to the wrapped callable (os.path.getmtime in setup()).
        return self.getmtime(filename)
class FileStateCache(object):
    """
    Stores the modified time of the various doxygen xml files against the
    reStructuredText file that they are referenced from so that we know which
    reStructuredText files to rebuild if the doxygen xml is modified.

    We store the information in the environment object so that it is pickled
    down and stored between builds as Sphinx is designed to do.
    """

    def __init__(self, mtimer, app):
        self.app = app
        self.mtimer = mtimer

    def update(self, source_file):
        """Record that the current document depends on *source_file*,
        re-stamping the entry with the file's current mtime."""
        if not hasattr(self.app.env, "breathe_file_state"):
            self.app.env.breathe_file_state = {}

        new_mtime = self.mtimer.get_mtime(source_file)
        # The previously stored mtime is deliberately discarded (was an
        # unused local): every update refreshes the stamp.
        _, docnames = self.app.env.breathe_file_state.setdefault(
            source_file, (new_mtime, set())
        )

        docnames.add(self.app.env.docname)

        self.app.env.breathe_file_state[source_file] = (new_mtime, docnames)

    def get_outdated(self, app, env, added, changed, removed):
        """Sphinx env-get-outdated hook: return the docnames whose
        referenced xml files changed since they were recorded."""
        if not hasattr(self.app.env, "breathe_file_state"):
            return []

        stale = []
        for filename, info in self.app.env.breathe_file_state.items():
            old_mtime, docnames = info
            if self.mtimer.get_mtime(filename) > old_mtime:
                stale.extend(docnames)

        return list(set(stale).difference(removed))

    def purge_doc(self, app, env, docname):
        """Sphinx env-purge-doc hook: drop *docname* from every entry and
        delete entries that no document references any more."""
        if not hasattr(self.app.env, "breathe_file_state"):
            return

        toremove = []
        for filename, info in self.app.env.breathe_file_state.items():
            _, docnames = info
            docnames.discard(docname)
            if not docnames:
                toremove.append(filename)

        for filename in toremove:
            del self.app.env.breathe_file_state[filename]
# Setup
# -----
def setup(app):
    """Sphinx extension entry point.

    Wires up the breathe object graph (parsers, finders, renderer
    factories), registers the doxygen* directives, the breathe_* config
    values, the stylesheet and the environment event hooks.
    """
    cache_factory = CacheFactory()
    cache = cache_factory.create_cache()
    path_handler = PathHandler(os.sep, os.path.basename, os.path.join)
    mtimer = MTimer(os.path.getmtime)
    file_state_cache = FileStateCache(mtimer, app)
    parser_factory = DoxygenParserFactory(cache, path_handler, file_state_cache)
    matcher_factory = ItemMatcherFactory()
    item_finder_factory_creator = DoxygenItemFinderFactoryCreator(parser_factory, matcher_factory)
    index_parser = parser_factory.create_index_parser()
    finder_factory = FinderFactory(index_parser, item_finder_factory_creator)

    # Create a math_nodes object with a displaymath member for the displaymath
    # node so that we can treat it in the same way as the nodes & addnodes
    # modules in the NodeFactory
    math_nodes = collections.namedtuple("MathNodes", ["displaymath"])
    math_nodes.displaymath = sphinx.ext.mathbase.displaymath
    node_factory = NodeFactory(docutils.nodes, sphinx.addnodes, math_nodes)

    cpp_domain_helper = CppDomainHelper(DefinitionParser, re.sub)
    c_domain_helper = CDomainHelper()
    domain_helpers = {"c": c_domain_helper, "cpp": cpp_domain_helper}
    domain_handler_factory_creator = DomainHandlerFactoryCreator(node_factory, domain_helpers)

    rst_content_creator = RstContentCreator(ViewList, textwrap.dedent)
    default_domain_handler = NullDomainHandler()
    renderer_factory_creator_constructor = DoxygenToRstRendererFactoryCreatorConstructor(
        node_factory,
        parser_factory,
        default_domain_handler,
        domain_handler_factory_creator,
        rst_content_creator
    )

    project_info_factory = ProjectInfoFactory(fnmatch.fnmatch)
    glob_factory = GlobFactory(fnmatch.fnmatch)
    filter_factory = FilterFactory(glob_factory, path_handler)
    target_handler_factory = TargetHandlerFactory(node_factory)

    root_data_object = RootDataObject()

    directive_factory = DoxygenDirectiveFactory(
        root_data_object,
        renderer_factory_creator_constructor,
        finder_factory,
        matcher_factory,
        project_info_factory,
        filter_factory,
        target_handler_factory
    )

    # Register every doxygen* directive through its container so the
    # collaborators above are bound in.
    app.add_directive(
        "doxygenindex",
        directive_factory.create_index_directive_container(),
    )
    app.add_directive(
        "doxygenfunction",
        directive_factory.create_function_directive_container(),
    )
    app.add_directive(
        "doxygenstruct",
        directive_factory.create_struct_directive_container(),
    )
    app.add_directive(
        "doxygenenum",
        directive_factory.create_enum_directive_container(),
    )
    app.add_directive(
        "doxygentypedef",
        directive_factory.create_typedef_directive_container(),
    )
    app.add_directive(
        "doxygenclass",
        directive_factory.create_class_directive_container(),
    )
    app.add_directive(
        "doxygenfile",
        directive_factory.create_file_directive_container(),
    )
    app.add_directive(
        "doxygenvariable",
        directive_factory.create_variable_directive_container(),
    )
    app.add_directive(
        "doxygendefine",
        directive_factory.create_define_directive_container(),
    )

    # Config values read back in get_config_values on builder-inited.
    app.add_config_value("breathe_projects", {}, True)
    app.add_config_value("breathe_default_project", "", True)
    app.add_config_value("breathe_domain_by_extension", {}, True)
    app.add_config_value("breathe_domain_by_file_pattern", {}, True)

    app.add_stylesheet("breathe.css")

    app.connect("builder-inited", directive_factory.get_config_values)
    app.connect("env-get-outdated", file_state_cache.get_outdated)
    app.connect("env-purge-doc", file_state_cache.purge_doc)
|
SymTensorIterator is a FieldIterator that loops over the elements of a symmetric 3×3 tensor in Voigt order. As with all OOF2 FieldIterators, SymTensorIterator is often used when wrapped in an IteratorP. All of the available member functions are described in the IteratorP wrapper or the SymTensorIndex documentation.
|
from hamcrest import *
from models import Monkey as M
from test_models import create_monkeys
def test_view_monkey_list(client, session):
    """The index page lists every monkey with its name and friend count."""
    ginger, john, melissa = create_monkeys(session)
    john.add_friend(melissa)
    session.commit()

    response = client.get('/')
    assert_that(response.status_code, equal_to(200))
    for each in (ginger, john, melissa):
        assert_that(response.data, contains_string(each.name))
        assert_that(response.data, contains_string(str(each.friends_count)))

    # An out-of-range page number redirects back to a valid page.
    response = client.get('/?page={0}'.format(100), follow_redirects=True)
    assert_that(response.status_code, equal_to(200))
    for each in (ginger, john, melissa):
        assert_that(response.data, contains_string(each.name))
        assert_that(response.data, contains_string(str(each.friends_count)))
def test_view_monkey(client, session):
    """A monkey's detail page shows its email and its best friend's name."""
    ginger, john, melissa = create_monkeys(session)
    ginger.set_best_friend(melissa)
    session.commit()

    response = client.get('/monkey/{0}'.format(ginger.id))
    assert_that(response.data, contains_string('ginger@hotmail.icann'))
    assert_that(response.data, contains_string('Melissa'))
def test_add_monkey(client, session):
    """Posting valid data creates a monkey; invalid data shows an error."""
    response = client.get('/monkey/add')
    assert_that(response.data, contains_string('Add monkey'))

    valid = dict(name='John', age=2, email='john.doe@gmail.tt')
    response = client.post('/monkey/add', data=valid, follow_redirects=True)
    assert_that(response.status_code, equal_to(200))
    created = M.query.filter(M.email == 'john.doe@gmail.tt').one()
    assert_that(created.name, equal_to('John'))
    assert_that(created.email, equal_to('john.doe@gmail.tt'))
    assert_that(created.age, equal_to(2))

    # A non-numeric age must be rejected by form validation.
    invalid = dict(name='John', age='not_an_age', email='john.doe@gmail.tt')
    response = client.post('/monkey/add', data=invalid, follow_redirects=True)
    assert_that(response.status_code, equal_to(200))
    assert_that(response.data, contains_string('Not a valid integer value'))
def test_edit_monkey(client, session):
    """The edit form pre-fills, persists changes and validates input."""
    form = dict(name='Melissa', age=19, email='granny@yahoo.club')
    melissa = M(**form)
    session.add(melissa)
    session.commit()

    response = client.get('/monkey/{0}/edit'.format(melissa.id))
    assert_that(response.data, contains_string('Edit monkey'))
    assert_that(response.data, contains_string('granny@yahoo.club'))

    # A changed age is persisted and displayed after saving.
    form['age'] = 20
    response = client.post(
        '/monkey/{0}/edit'.format(melissa.id),
        data=form, follow_redirects=True
    )
    assert_that(response.status_code, equal_to(200))
    assert_that(response.data, contains_string('Melissa'))
    assert_that(response.data, contains_string('granny@yahoo.club'))
    assert_that(response.data, contains_string('20'))

    # A malformed email address is rejected by form validation.
    form['email'] = 123
    response = client.post(
        '/monkey/{0}/edit'.format(melissa.id),
        data=form, follow_redirects=True
    )
    assert_that(response.status_code, equal_to(200))
    assert_that(response.data, contains_string('Invalid email address'))
def test_delete_monkey(client, session):
    """The delete page shows a confirmation including the monkey's email."""
    doomed = M(name='John', age=2, email='john.doe@gmail.tt')
    session.add(doomed)
    session.commit()

    response = client.get('/monkey/{0}/delete'.format(doomed.id))
    assert_that(response.data, contains_string('Monkey to be deleted:'))
    assert_that(response.data, contains_string('john.doe@gmail.tt'))
def test_delete_monkey_confirm(client, session):
    """Confirming a delete removes the monkey; unknown ids give a 404."""
    doomed = M(name='Melissa', age=19, email='granny@yahoo.club')
    session.add(doomed)
    session.commit()

    response = client.post(
        '/monkey/{0}/delete'.format(doomed.id), follow_redirects=True
    )
    assert_that(response.status_code, equal_to(200))
    # NOTE(review): 'succesfully' matches the application's flash message
    # verbatim (typo included) -- change both together if ever corrected.
    assert_that(
        response.data,
        contains_string('{0} was succesfully deleted.'.format(doomed.name))
    )

    response = client.get(
        '/monkey/{0}/delete/confirm'.format(-1), follow_redirects=True
    )
    assert_that(response.status_code, equal_to(404))
def test_view_friend_list(client, session):
    """A monkey's friend page lists each friend's name, age and email."""
    ginger, john, melissa = create_monkeys(session)
    john.add_friend(melissa)
    session.commit()

    response = client.get('/friend/{0}'.format(john.id))
    assert_that(response.status_code, equal_to(200))
    assert_that(response.data, contains_string(melissa.name))
    assert_that(response.data, contains_string(str(melissa.age)))
    assert_that(response.data, contains_string(melissa.email))

    # An out-of-range page number redirects back to a valid page.
    response = client.get(
        '/friend/{0}?page={1}'.format(john.id, 100),
        follow_redirects=True
    )
    assert_that(response.status_code, equal_to(200))
    assert_that(response.data, contains_string(melissa.name))
    assert_that(response.data, contains_string(str(melissa.age)))
    assert_that(response.data, contains_string(melissa.email))
def test_view_add_friend(client, session):
    """The add-friend page lists every candidate monkey.

    Checks both the first page and that an out-of-range page number
    redirects back to a valid page.
    """
    monkey_ginger, monkey_john, monkey_melissa = create_monkeys(session)
    request = client.get('/friend/{0}/add'.format(monkey_melissa.id))
    assert_that(request.status_code, equal_to(200))
    for monkey in (monkey_ginger, monkey_john):
        assert_that(request.data, contains_string(monkey.name))
        assert_that(request.data, contains_string(str(monkey.age)))
        assert_that(request.data, contains_string(monkey.email))
    # BUG FIX: the format string used {0} twice, so the page parameter was
    # the monkey id instead of 100 and the out-of-range page case was never
    # actually exercised (compare test_view_friend_list).
    request = client.get(
        '/friend/{0}/add?page={1}'.format(monkey_melissa.id, 100),
        follow_redirects=True
    )
    assert_that(request.status_code, equal_to(200))
    for monkey in (monkey_ginger, monkey_john):
        assert_that(request.data, contains_string(monkey.name))
        assert_that(request.data, contains_string(str(monkey.age)))
        assert_that(request.data, contains_string(monkey.email))
def test_add_friend(client, session):
    """Posting to the add-friend endpoint links the two monkeys."""
    ginger, john, melissa = create_monkeys(session)

    response = client.post(
        '/friend/{0}/add/{1}'.format(melissa.id, john.id),
        follow_redirects=True
    )
    assert_that(response.status_code, equal_to(200))
    expected = '{0} added to monkey {1} friends.'.format(
        john.name, melissa.name)
    assert_that(response.data, contains_string(expected))
def test_delete_friend(client, session):
    """Posting to the delete-friend endpoint unlinks the two monkeys."""
    ginger, john, melissa = create_monkeys(session)
    ginger.add_friend(melissa)
    session.commit()

    response = client.post(
        '/friend/{0}/delete/{1}'.format(ginger.id, melissa.id),
        follow_redirects=True
    )
    assert_that(response.status_code, equal_to(200))
    expected = '{0} deleted from monkey {1} friends.'.format(
        melissa.name, ginger.name)
    assert_that(response.data, contains_string(expected))
def test_set_best_friend(client, session):
    """Posting to the set endpoint records a best friend."""
    ginger, john, melissa = create_monkeys(session)

    response = client.post(
        '/best_friend/{0}/set/{1}'.format(melissa.id, john.id),
        follow_redirects=True
    )
    assert_that(response.status_code, equal_to(200))
    expected = 'Best friend {0} set for monkey {1}.'.format(
        john.name, melissa.name)
    assert_that(response.data, contains_string(expected))
def test_unset_best_friend(client, session):
    """Posting to the unset endpoint clears an existing best friend."""
    ginger, john, melissa = create_monkeys(session)
    melissa.set_best_friend(john)
    session.commit()

    response = client.post(
        '/best_friend/{0}/unset/{1}'.format(melissa.id, john.id),
        follow_redirects=True
    )
    assert_that(response.status_code, equal_to(200))
    expected = 'Best friend {0} unset for monkey {1}.'.format(
        john.name, melissa.name)
    assert_that(response.data, contains_string(expected))
|
Hamza Kashgari visited me several times before he wrote the ill-fated tweets that led to his arrest in February and then to solitary confinement in a Riyadh prison. We discussed social, political and philosophical issues, including some that are taboo in Saudi Arabia. I warned him that his thoughts, if expressed publicly, would lead religious hard-liners to call for his blood.
Every week, I am host to several dozen people at my home, most of them politically engaged Saudi youth. I started the salon after government and religious authorities clamped down on gatherings of liberal youth in cafes and bookstores in the wake of Hamza’s arrest, severely constricting the space for free expression in this city. The oppressive trend has accelerated as religious hard-liners have mounted a vicious campaign to cleanse society of what they deem “unbelief” and “deviant thought”: in reality, any ideology different from their own.
Rapt in admiration, I thought about how only 10 years ago I was expected to blindly obey the dictates of an Islamist organization — and how, then, I never would have dared to engage in a debate with its disciples. Those of us born in the 1970s, when extremist religious thought was at its apogee in Saudi Arabia, had a single choice if we wished to serve our communities: Join an Islamist organization.
Much has changed in Saudi society in the past decade. For a brief time, Saudi human rights activists had hoped that religious conservatives could agree with us on a general framework of human rights, including the demand for a constitutional monarchy, the release of prisoners of conscience, the fight against official corruption and civil rights for all. Many thousands of activists from across the political spectrum signed petitions for reform, most notably the 2011 statement “A State of Rights and Institutions.” Unfortunately, just because some people signed petitions does not mean that they genuinely believed in a system of human rights.
The Kashgari affair separated the religious hard-liners — those who demanded the death penalty for his alleged crime of blasphemy or apostasy — from genuine human rights activists. The religious conservatives have declared war, not simply on freedom of expression but also on freedom of belief. The hard-liners believe that they will lose their hold on the Saudi street, were the youth to embrace ideas opposed to religion. In essence, they wish to institute Orwellian practices in Saudi Arabia, by criminalizing mere thought.
Making use of social-media platforms such as Twitter and Facebook, religious hard-liners have launched an online inquisition against those who dare to think freely. In a frightening development, a judge and some clerics demanded in February that I be given the death penalty for allowing guests at my salon to speak freely. For the time being, I remain free.
But many young Saudis insist on freedom of expression and belief, and they are proud of their values of justice, tolerance and human rights. They give me hope. Our resolve is unshakable, whatever difficulties lie ahead.
And the road ahead is indeed difficult. Last month the public prosecutor’s office in Jiddah informed me that I was banned from traveling outside the country for “security reasons.” The ban came two days before I was scheduled to go to the United States to participate in a fellowship program sponsored by the State Department. A few days earlier, my wife, Samar Badawi, had returned from the United States as a proud recipient of the 2012 International Women of Courage award, bestowed upon her by Secretary of State Hillary Clinton and first lady Michelle Obama. She suspects that her award angered Saudi authorities and led to the ban on my travel.
I am unable to leave this country, but the sun of humanity shines upon me every day. I bask in its rays, gaining strength against the darkness of oppression. My voice and the voices of others like me shall reach the world, no matter how hard they try to silence us. We shall say, consistently and proudly: steadfastness.
|
#!/usr/bin/python
#
# Copyright (C) 2006 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""DocsService extends the GDataService to streamline Google Documents
operations.
DocsService: Provides methods to query feeds and manipulate items.
Extends GDataService.
DocumentQuery: Queries a Google Document list feed.
"""
__author__ = 'api.jfisher (Jeff Fisher)'
import urllib
import atom
import gdata.service
import gdata.docs
# XML Namespaces used in Google Documents entities.
DATA_KIND_SCHEME = 'http://schemas.google.com/g/2005#kind'
# Category terms that identify the kind of a Document List entry.
DOCUMENT_KIND_TERM = 'http://schemas.google.com/docs/2007#document'
SPREADSHEET_KIND_TERM = 'http://schemas.google.com/docs/2007#spreadsheet'
PRESENTATION_KIND_TERM = 'http://schemas.google.com/docs/2007#presentation'
# File extensions of documents that are permitted to be uploaded.
# Maps an upper-case file extension to the MIME content type used when
# uploading a file of that kind.
SUPPORTED_FILETYPES = {
  'CSV': 'text/csv',
  'TSV': 'text/tab-separated-values',
  'TAB': 'text/tab-separated-values',
  'DOC': 'application/msword',
  'ODS': 'application/x-vnd.oasis.opendocument.spreadsheet',
  'ODT': 'application/vnd.oasis.opendocument.text',
  'RTF': 'application/rtf',
  'SXW': 'application/vnd.sun.xml.writer',
  'TXT': 'text/plain',
  'XLS': 'application/vnd.ms-excel',
  'PPT': 'application/vnd.ms-powerpoint',
  'PPS': 'application/vnd.ms-powerpoint',
  'HTM': 'text/html',
  'HTML' : 'text/html'}
class DocsService(gdata.service.GDataService):
  """Client extension for the Google Documents service Document List feed."""

  def __init__(self, email=None, password=None, source=None,
               server='docs.google.com', additional_headers=None):
    """Constructor for the DocsService.

    Args:
      email: string (optional) The e-mail address of the account to use for
          authentication.
      password: string (optional) The password of the account to use for
          authentication.
      source: string (optional) The name of the user's application.
      server: string (optional) The server the feed is hosted on.
      additional_headers: dict (optional) Any additional HTTP headers to be
          transmitted to the service in the form of key-value pairs.

    Returns:
      A DocsService object used to communicate with the Google Documents
      service.
    """
    gdata.service.GDataService.__init__(self, email=email, password=password,
                                        service='writely', source=source,
                                        server=server,
                                        additional_headers=additional_headers)

  def Query(self, uri, converter=gdata.docs.DocumentListFeedFromString):
    """Queries the Document List feed and returns the resulting feed of
    entries.

    Args:
      uri: string The full URI to be queried. This can contain query
          parameters, a hostname, or simply the relative path to a Document
          List feed. The DocumentQuery object is useful when constructing
          query parameters.
      converter: func (optional) A function which will be executed on the
          retrieved item, generally to render it into a Python object.
          By default the DocumentListFeedFromString function is used to
          return a DocumentListFeed object. This is because most feed
          queries will result in a feed and not a single entry.

    Returns:
      The result of applying the converter to the server's response.
    """
    return self.Get(uri, converter=converter)

  def QueryDocumentListFeed(self, uri):
    """Retrieves a DocumentListFeed by retrieving a URI based off the Document
    List feed, including any query parameters. A DocumentQuery object can
    be used to construct these parameters.

    Args:
      uri: string The URI of the feed being retrieved possibly with query
          parameters.

    Returns:
      A DocumentListFeed object representing the feed returned by the server.
    """
    return self.Get(uri, converter=gdata.docs.DocumentListFeedFromString)

  def GetDocumentListEntry(self, uri):
    """Retrieves a particular DocumentListEntry by its unique URI.

    Args:
      uri: string The unique URI of an entry in a Document List feed.

    Returns:
      A DocumentListEntry object representing the retrieved entry.
    """
    return self.Get(uri, converter=gdata.docs.DocumentListEntryFromString)

  def GetDocumentListFeed(self):
    """Retrieves a feed containing all of a user's documents.

    Returns:
      A DocumentListFeed object with all documents of the current user.
    """
    # Refer to DocumentQuery directly (it is defined in this module).
    # The previous self-reference through gdata.docs.service only works
    # once the submodule attribute exists, and carried a stray semicolon.
    q = DocumentQuery()
    return self.QueryDocumentListFeed(q.ToUri())

  def UploadPresentation(self, media_source, title):
    """Uploads a presentation inside of a MediaSource object to the Document
    List feed with the given title.

    Args:
      media_source: MediaSource The MediaSource object containing a
          presentation file to be uploaded.
      title: string The title of the presentation on the server after being
          uploaded.

    Returns:
      A GDataEntry containing information about the presentation created on
      the Google Documents service.
    """
    category = atom.Category(scheme=DATA_KIND_SCHEME,
                             term=PRESENTATION_KIND_TERM)
    return self._UploadFile(media_source, title, category)

  def UploadSpreadsheet(self, media_source, title):
    """Uploads a spreadsheet inside of a MediaSource object to the Document
    List feed with the given title.

    Args:
      media_source: MediaSource The MediaSource object containing a
          spreadsheet file to be uploaded.
      title: string The title of the spreadsheet on the server after being
          uploaded.

    Returns:
      A GDataEntry containing information about the spreadsheet created on
      the Google Documents service.
    """
    category = atom.Category(scheme=DATA_KIND_SCHEME,
                             term=SPREADSHEET_KIND_TERM)
    return self._UploadFile(media_source, title, category)

  def UploadDocument(self, media_source, title):
    """Uploads a document inside of a MediaSource object to the Document List
    feed with the given title.

    Args:
      media_source: MediaSource The gdata.MediaSource object containing a
          document file to be uploaded.
      title: string The title of the document on the server after being
          uploaded.

    Returns:
      A GDataEntry containing information about the document created on the
      Google Documents service.
    """
    category = atom.Category(scheme=DATA_KIND_SCHEME,
                             term=DOCUMENT_KIND_TERM)
    return self._UploadFile(media_source, title, category)

  def _UploadFile(self, media_source, title, category):
    """Uploads a file to the Document List feed.

    Args:
      media_source: A gdata.MediaSource object containing the file to be
          uploaded.
      title: string The title of the document on the server after being
          uploaded.
      category: An atom.Category object specifying the appropriate document
          type.

    Returns:
      A GDataEntry containing information about the document created on
      the Google Documents service.
    """
    media_entry = gdata.GDataEntry()
    media_entry.title = atom.Title(text=title)
    media_entry.category.append(category)
    # The Slug header tells the server the desired file name for the upload.
    media_entry = self.Post(media_entry, '/feeds/documents/private/full',
                            media_source=media_source,
                            extra_headers={'Slug': media_source.file_name})
    return media_entry
class DocumentQuery(gdata.service.Query):
  """Object used to construct a URI to query the Google Document List feed"""

  def __init__(self, feed='/feeds/documents', visibility='private',
               projection='full', text_query=None, params=None,
               categories=None):
    """Constructor for Document List Query.

    Args:
      feed: string (optional) The path for the feed. (e.g. '/feeds/documents')
      visibility: string (optional) The visibility chosen for the current
          feed.
      projection: string (optional) The projection chosen for the current
          feed.
      text_query: string (optional) The contents of the q query parameter.
          This string is URL escaped upon conversion to a URI.
      params: dict (optional) Parameter value string pairs which become URL
          params when translated to a URI. These parameters are added to
          the query's items.
      categories: list (optional) List of category strings which should be
          included as query categories. See gdata.service.Query for
          additional documentation.

    Returns:
      A DocumentQuery object used to construct a URI based on the Document
      List feed.
    """
    self.visibility = visibility
    self.projection = projection
    gdata.service.Query.__init__(self, feed, text_query, params, categories)

  def ToUri(self):
    """Generates a URI from the query parameters set in the object.

    Returns:
      A string containing the URI used to retrieve entries from the Document
      List feed.
    """
    # Temporarily extend the feed path with visibility/projection, render
    # the URI, then restore it so the object can be reused.
    old_feed = self.feed
    self.feed = '/'.join([old_feed, self.visibility, self.projection])
    new_feed = gdata.service.Query.ToUri(self)
    self.feed = old_feed
    return new_feed

  def _NamedFolderCategory(self, email, folder_name):
    """Builds the schema-qualified category string for a named folder.

    Args:
      email: string The email of the user who owns the folder.
      folder_name: string The name of the folder.

    Returns:
      The category string, qualified by the folders schema for this user.
    """
    return ('{http://schemas.google.com/docs/2007/folders/' + email + '}'
            + folder_name)

  def AddNamedFolder(self, email, folder_name):
    """Adds a named folder category, qualified by a schema.

    This function lets you query for documents that are contained inside a
    named folder without fear of collision with other categories.

    Args:
      email: string The email of the user who owns the folder.
      folder_name: string The name of the folder.

    Returns:
      The string of the category that was added to the object.
    """
    category = self._NamedFolderCategory(email, folder_name)
    self.categories.append(category)
    return category

  def RemoveNamedFolder(self, email, folder_name):
    """Removes a named folder category, qualified by a schema.

    Args:
      email: string The email of the user who owns the folder.
      folder_name: string The name of the folder.

    Returns:
      The string of the category that was removed from the object.
    """
    category = self._NamedFolderCategory(email, folder_name)
    self.categories.remove(category)
    return category
|
An unprecedented cinematic journey ten years in the making and spanning the whole Marvel Cinematic Universe, Marvel Studios’ Avengers: Infinity War brings to the screen the ultimate, deadliest showdown of all time. Here an enigmatic band of warriors bear swords of no human metal a tribe of fierce wildlings carry guys off into madness a cruel young dragon prince barters his sister to win back his throne a youngster is lost in the twilight in between life and death and a determined woman undertakes a treacherous journey to shield all she holds dear.
As brought to life in the bestselling Summoner series, the magic of summoning is also an art, with a story of its own. The Summoner’s Handbook reveals the story of James Baker – the epic journal that inspired the series hero, Fletcher, to find out his personal summoning abilities. Starring Shailene Woodley (Fault in Our Stars, Divergent films) and Sam Claflin (Me Before You, The Hunger Games films), ADRIFT is primarily based on the inspiring accurate story of two sailors who set out to journey across the ocean from Tahiti to San Diego.
This is a war, she says, “with so many casualties that we must call it by its true name, this war with so many dead by police, by violent ex-husbands and partners and lovers, by people pursuing power and profit at the point of a gun or just shooting first and figuring out who they hit later.” To get to the root of these American crises, she contends that “to acknowledge this state of war is to admit the need for peace,” countering the despair of our age with a dose of solidarity, creativity, and hope.
When you search on Google, we use your location to support show the most relevant search outcomes. Bernard Cornwell’s epic story of the producing of England continues in this eleventh installment in the bestselling Saxon Tales series—”like Game of Thrones, but actual” (The Observer)—the basis of the hit Netflix tv series The Last Kingdom. American Horror Story is an anthological miniseries that tracks a various terrifying tale of Americana each and every season.
By way of a series of daring escapades deep within a dark and harmful criminal underworld, Han Solo befriends his mighty future copilot Chewbacca and meets the notorious gambler Lando Calrissian, in a journey that will set the course of one particular of the Star Wars saga’s most unlikely heroes. Google automatically detects your computer’s place utilizing its IP address , Place History (if it really is turned on), and current places you’ve searched for.
Staying married to him is the fight of my life. Caught in between her expanding feelings for the rebellious Yvan Guriel and the seductive power supplied by Lukas, Elloren will have to uncover a way to keep true to what she knows is right and safeguard everybody she loves…even if that signifies protecting them from herself. A dark psychological thriller with heart-pounding suspense, Just before HE LONGS is book #ten in a riveting new series—with a beloved new character—that will leave you turning pages late into the evening.
Board the Millennium Falcon and journey to a galaxy far, far away in Solo: A Star Wars Story, an epic action adventure with the most beloved scoundrel in the galaxy. This gripping, stranger-than-fiction espionage thriller brings to life the incredible accurate story of Moe Berg, the specialist baseball player who became a World War II spy. In this new chapter of the Saxon Tales series—a rousing adventure of courage, treachery, duty, devotion, majesty, enjoy and battle, as noticed by means of the eyes of a warrior straddling two worlds—Uhtred returns to fight once again for the destiny of England.
Lularoe Consultant Prices – The Summoner’s Handbook reveals the story of James Baker – the epic journal that inspired the series hero, Fletcher, to learn his own summoning abilities.
|
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# prepare for Python 3
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import subprocess
import sys
import os
import re
import benchexec.result as result
import benchexec.util as util
import benchexec.tools.template
from benchexec.model import SOFTTIMELIMIT
class Tool(benchexec.tools.template.BaseTool):
    """
    Tool info for CPAchecker.
    It has additional features such as building CPAchecker before running it
    if executed within a source checkout.
    It also supports extracting data from the statistics output of CPAchecker
    for adding it to the result tables.
    """

    # Paths (relative to the install dir) that must be copied for a run.
    REQUIRED_PATHS = [
        "lib/java/runtime",
        "lib/*.jar",
        "lib/native/x86_64-linux",
        "scripts",
        "cpachecker.jar",
        "config",
    ]

    def executable(self):
        """Locate the cpa.sh wrapper script and, when running inside a
        source checkout, build CPAchecker first."""
        executable = util.find_executable('cpa.sh', 'scripts/cpa.sh')
        executableDir = os.path.join(os.path.dirname(executable), os.path.pardir)
        if os.path.isdir(os.path.join(executableDir, 'src')):
            self._buildCPAchecker(executableDir)
        if not os.path.isfile(os.path.join(executableDir, "cpachecker.jar")):
            # Lazy %-style arguments: the message is only formatted when
            # the warning is actually emitted.
            logging.warning("Required JAR file for CPAchecker not found in %s.", executableDir)
        return executable

    def program_files(self, executable):
        """Return all files needed to run the tool, resolved relative to
        the installation directory."""
        installDir = os.path.join(os.path.dirname(executable), os.path.pardir)
        return util.flatten(util.expand_filename_pattern(path, installDir) for path in self.REQUIRED_PATHS)

    def _buildCPAchecker(self, executableDir):
        """Build CPAchecker with ant; abort the benchmark if the build fails."""
        logging.debug('Building CPAchecker in directory %s.', executableDir)
        ant = subprocess.Popen(['ant', '-lib', 'lib/java/build', '-q', 'jar'], cwd=executableDir, shell=util.is_windows())
        ant.communicate()
        if ant.returncode:
            sys.exit('Failed to build CPAchecker, please fix the build first.')

    def version(self, executable):
        """Extract the version number from the 'CPAchecker ...' banner line
        of '-help' output."""
        stdout = self._version_from_tool(executable, '-help')
        line = next(l for l in stdout.splitlines() if l.startswith('CPAchecker'))
        line = line.replace('CPAchecker', '')
        line = line.split('(')[0]
        return line.strip()

    def name(self):
        return 'CPAchecker'

    def cmdline(self, executable, options, tasks, propertyfile=None, rlimits={}):
        # NOTE: the mutable default for rlimits matches the BaseTool
        # signature and is only read, never mutated, so it is safe here.
        if SOFTTIMELIMIT in rlimits:
            if "-timelimit" in options:
                logging.warning('Time limit already specified in command-line options, not adding time limit from benchmark definition to the command line.')
            else:
                options = options + ["-timelimit", str(rlimits[SOFTTIMELIMIT]) + "s"] # benchmark-xml uses seconds as unit

        # if data.MEMLIMIT in rlimits:
        #     if "-heap" not in options:
        #         heapsize = rlimits[MEMLIMIT]*0.8 # 20% overhead for non-java-memory
        #         options = options + ["-heap", str(int(heapsize))]

        if "-stats" not in options:
            options = options + ["-stats"]

        spec = ["-spec", propertyfile] if propertyfile is not None else []
        return [executable] + options + spec + tasks

    def determine_result(self, returncode, returnsignal, output, isTimeout):
        """
        @param returncode: code returned by CPAchecker
        @param returnsignal: signal, which terminated CPAchecker
        @param output: the output of CPAchecker
        @return: status of CPAchecker after executing a run
        """

        def isOutOfNativeMemory(line):
            return ('std::bad_alloc' in line # C++ out of memory exception (MathSAT)
                 or 'Cannot allocate memory' in line
                 or 'Native memory allocation (malloc) failed to allocate' in line # JNI
                 or line.startswith('out of memory') # CuDD
                 )

        status = None

        for line in output:
            if 'java.lang.OutOfMemoryError' in line:
                status = 'OUT OF JAVA MEMORY'
            elif isOutOfNativeMemory(line):
                status = 'OUT OF NATIVE MEMORY'
            elif 'There is insufficient memory for the Java Runtime Environment to continue.' in line \
                    or 'cannot allocate memory for thread-local data: ABORT' in line:
                status = 'OUT OF MEMORY'
            elif 'SIGSEGV' in line:
                status = 'SEGMENTATION FAULT'
            elif (returncode == 0 or returncode == 1) and 'java.lang.AssertionError' in line:
                status = 'ASSERTION'
            elif ((returncode == 0 or returncode == 1)
                    and ('Exception:' in line or line.startswith('Exception in thread'))
                    and not line.startswith('cbmc')): # ignore "cbmc error output: ... Minisat::OutOfMemoryException"
                status = 'EXCEPTION'
            elif 'Could not reserve enough space for object heap' in line:
                status = 'JAVA HEAP ERROR'
            elif line.startswith('Error: ') and not status:
                status = result.RESULT_ERROR
                if 'Unsupported' in line:
                    if 'recursion' in line:
                        status += ' (recursion)'
                    elif 'threads' in line:
                        status += ' (threads)'
                elif 'Parsing failed' in line:
                    status += ' (parsing failed)'
            elif line.startswith('For your information: CPAchecker is currently hanging at') and not status and isTimeout:
                status = 'TIMEOUT'

            elif line.startswith('Verification result: '):
                line = line[21:].strip()
                if line.startswith('TRUE'):
                    newStatus = result.RESULT_TRUE_PROP
                elif line.startswith('FALSE'):
                    newStatus = result.RESULT_FALSE_REACH
                    # Raw string avoids the invalid '\(' escape sequence
                    # that recent Python versions warn about.
                    match = re.match(r'.* Property violation \(([^:]*)(:.*)?\) found by chosen configuration.*', line)
                    if match and match.group(1) in ['valid-deref', 'valid-free', 'valid-memtrack', 'no-overflow']:
                        newStatus = result.STR_FALSE + '(' + match.group(1) + ')'
                else:
                    newStatus = result.RESULT_UNKNOWN

                if not status:
                    status = newStatus
                elif newStatus != result.RESULT_UNKNOWN:
                    status = "{0} ({1})".format(status, newStatus)

        if not status:
            status = result.RESULT_ERROR
        return status

    def get_value_from_output(self, lines, identifier):
        # search for the text in output and get its value,
        # stop after the first line, that contains the searched text
        for line in lines:
            if identifier in line:
                startPosition = line.find(':') + 1
                endPosition = line.find('(', startPosition) # bracket maybe not found -> (-1)
                if endPosition == -1:
                    return line[startPosition:].strip()
                else:
                    return line[startPosition: endPosition].strip()
        return None
|
Kalanchoe tomentosa ‘Chocolate Soldier’ – Crazy Plants For Crazy Critters!
The Kalanchoes are native to Madagascar, frequently growing on granite and gneiss outcrops.
These species belongs to the Crassulaceae, or Crassula family, which includes such familiar genera as Sedum, Sempervivum, Crassula, and many others.
More than 100 varieties of Kalanchoe grow in the wilds of Africa and other parts of the Old World.
The heavily felted leaves and stems are an adaptation to drought.
They are especially effective in reducing water loss through evaporation in exposed and windy sites. In other plant species, heavily felted leaves are often an adaptation to higher elevations.
The felt may help to protect these plants from excessive exposure to ultraviolet light. In habitat, this species occurs at elevations between 3900 and 5200 feet.
‘Chocolate Soldier’ is an interesting and attractive small slow-growing succulent subshrub that grows to about 2 feet tall.
Along the upper leaf margin and the tip is a raised slightly toothed rim that is reddish on new leaves and matures to a rusty brown color, giving this plant a very unusual two-toned appearance.
In summer, yellow-green flowers with dark brown petal tips can appear, clustered atop 18-inch-tall stalks, though flowering is not that common.
Plant this succulent with a well-draining soil in full sun to part shade and water only occasionally to very little.
Most important position the Kalanchoe tomentosa indoor panda plant in medium to bright light. As with most succulents, soil should be allowed to dry between waterings. In fact, watering is a limited part of panda plant care.
When you do water, do so completely while giving the plant the occasional drink. You’ll find humidity is not an issue when learning how to grow a panda plant successfully.
The average room provides enough humidity for this easy-care, furry plant. The indoor panda plant can live for many years in these conditions. Move it outside during spring and summer, if desired, but provide protection from hot afternoon sun.
While this species will tolerate brief periods of temperatures into the lower 40’s and even into the upper 30’s, sustained cold and long periods of cold and wet conditions can kill this plant, and a frost will kill it outright.
Temperatures from the mid 60’s to the mid 50’s would probably be best for the winter dormancy of this species.
This plant should have good air circulation, especially during hot and muggy conditions where mold and mildew can become established in the felted leaves.
In fact, when you’re growing Kalanchoe panda plants, you’ll likely find more areas in the home that would benefit from one of these plants.
Propagation of the indoor panda plant is easy and an inexpensive way to get more of the plants. Root leaves of the plant in spring or summer in a sandy potting soil or a perlite mixture.
Next, new roots develop and the plant will grow new leaves, at which time it should be transferred into a new container.
Blooms are rare when growing Kalanchoe panda plants indoors. Therefore, if you wish to grow a Kalanchoe with regular indoor blossoms, look to the cultivar Kalanchoe blossfeldiana hybrids.
Over the years, a number of select cultivars have been introduced, these varying mostly in details of the shape, size, and density of the foliage, or the overall size of the plant.
“Chocolate Soldier” is the most popular of all, with broader regions of color on the leaf tips and margins. The plant is found in colors that range from a radiant rusty brown on new growth, and gradually maturing to a more chocolate-brown coloration.
Some online sources also indicate that “Chocolate Soldier” is a more reliable plant with a faster growth rate than others in the species.
Although this plant can flower within its natural habitat, it’s rare to see flowers bloom indoors, so it’s grown primarily for its foliage within homes or offices. If you’re lucky, you will see lovely small tubular-shaped flowers bloom at the tips of the branches.
The flowers of Kalanchoe tomentosa , while small and not especially showy, are interesting, in that the outer petals are also densely covered with felt. Plants which are grown exclusively as a houseplant will seldom produce flowers.
Flower production requires warm temperatures and very bright light. To increase the likelihood of flower production, this plant should be moved to outdoor in spring and summer to benefit from increased temperatures and exposure to full sun.
Use a lower nitrogen “bloom booster” for best bloom result. Both Peters and Miracle Grow has a great product line.
The lower leaves are shed as the plant grows; while this is normal, in time, this can result in a plant with long stems topped with rosettes of leaves.
Pruning will encourage branching and will result in plants with a fuller appearance. The base of this succulent becomes woodier as this plant matures.
While the majority of other members of the Crassula family have flower parts in multiples of 5, the flowers of Kalanchoes have parts in multiples of 4, with 4 sepals, 4 petals, 8 stamens, and 4 free (not united) carpels.
Old-hand gardeners know for best success indoor cactus and succulent plants require a certain amount of neglect.
|
# python imports
import datetime
import urllib
import sys
# django settings
from django.conf import settings
from django.contrib.auth import SESSION_KEY
from django.contrib.auth import BACKEND_SESSION_KEY
from django.contrib.auth import load_backend
from django.contrib.auth.models import AnonymousUser
from django.core.cache import cache
from django.core.urlresolvers import reverse
from django.http import Http404
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.utils import simplejson
from django.utils.functional import Promise
from django.utils.encoding import force_unicode
from django.utils import translation
# lfc imports
import lfc.models
class HttpJsonResponse(HttpResponse):
    """HttpResponse whose content is rendered to JSON.

    The passed content (plus any extra keyword arguments) is serialized
    via render_to_json and served as "application/json" unless another
    mimetype is explicitly supplied.
    """
    def __init__(self, content, mimetype=None, status=None, content_type=None, **kwargs):
        if mimetype is None:
            mimetype = "application/json"
        HttpResponse.__init__(
            self,
            content=render_to_json(content, **kwargs),
            mimetype=mimetype,
            status=status,
            content_type=content_type)
# TODO: Checkout Django's new message feature
class MessageHttpResponseRedirect(HttpResponseRedirect):
    """Specific HttpResponseRedirect to set a cookie with a message.

    The message is stored in a short-lived "message" cookie so it can be
    displayed exactly once after the redirect.
    """
    def __init__(self, redirect_to, message):
        HttpResponseRedirect.__init__(self, redirect_to)
        # Delegate to set_message_to_reponse so the cookie/expiry logic
        # lives in exactly one place instead of being duplicated here.
        set_message_to_reponse(self, message)
def set_message_to_reponse(response, msg):
    """Sets message cookie with passed message to passed response.

    The cookie lives for two seconds only, so the message is shown at
    most once after the next request.

    NOTE(review): the name typo ("reponse") is part of the public API
    and is kept for backward compatibility.
    """
    # We just keep the message two seconds.
    max_age = 2
    # Expiry must be formatted in the cookie date format (GMT).
    expires = datetime.datetime.strftime(
        datetime.datetime.utcnow() +
        datetime.timedelta(seconds=max_age), "%a, %d-%b-%Y %H:%M:%S GMT")
    response.set_cookie("message", lfc_quote(msg), max_age=max_age, expires=expires)
    return response
def render_to_json(html, **kwargs):
    """Renders the given html to a JSON string.

    Any additional keyword arguments are added as top-level keys of the
    resulting JSON object alongside "html".
    """
    data = { "html" : html }
    data.update(**kwargs)
    return simplejson.dumps(data, cls = LazyEncoder)
def return_as_json(html, message):
    """Returns the given html and message as a JSON-encoded HttpResponse.
    """
    return HttpResponse(get_json(html, message))
def get_json(html, message):
    """Returns html and message json encoded.

    Lazy translation strings are handled by LazyEncoder.
    """
    return simplejson.dumps({ "html" : html, "message" : message, }, cls = LazyEncoder)
class LazyEncoder(simplejson.JSONEncoder):
    """JSONEncoder which encodes django's lazy i18n strings.

    This is mainly used to return status messages along with content to ajax
    calls.
    """
    def default(self, obj):
        if isinstance(obj, Promise):
            return force_unicode(obj)
        # Defer to the base class, which raises TypeError for types it
        # cannot serialize.  Returning the object unchanged (as the old
        # code did) hands the same unserializable object straight back
        # to the encoder and causes infinite recursion.
        return simplejson.JSONEncoder.default(self, obj)
def get_content_object(request=None, *args, **kwargs):
    """Returns specific content object based on passed parameters.

    This method should be used if one wants the specific content object
    instead of the BaseContent object.

    You can consider this as the equivalent to Django's get method;
    lookup arguments are passed straight through, and
    BaseContent.DoesNotExist propagates when nothing matches.
    """
    obj = lfc.models.BaseContent.objects.get(*args, **kwargs)
    # Resolve the BaseContent row to its concrete content subclass.
    return obj.get_content_object()
def get_content_objects(request=None, *args, **kwargs):
    """Returns specific content objects based on passed parameters.

    This method should be used if one wants the specific content objects
    instead of the BaseContent objects.

    Takes permissions of the current user and start_date and end_date of
    objects into account (unless request is None or the user is a
    superuser, in which case only registration info is checked).

    You can consider this as the equivalent to Django's filter method.
    """
    objs = lfc.models.BaseContent.objects.filter(*args, **kwargs)

    # Respect the parent's configured ordering if one was passed in.
    parent = kwargs.get("parent")
    if parent and parent.order_by:
        objs = objs.order_by(parent.order_by)

    result = []
    if request is None or request.user.is_superuser:
        for obj in objs:
            obj = obj.get_content_object()
            if lfc.utils.registration.get_info(obj):
                result.append(obj)
    else:
        for obj in objs:
            obj = obj.get_content_object()
            if lfc.utils.registration.get_info(obj) and \
               obj.has_permission(request.user, "view") and \
               obj.is_active(request.user):
                # obj is already the specific content object; the former
                # second get_content_object() call here was redundant.
                result.append(obj)

    return result
def get_portal(pk=1):
    """Returns the default portal.

    The portal is cached under the middleware key prefix to avoid a DB
    hit on every call.
    """
    # CACHE
    cache_key = "%s-portal-%s" % (settings.CACHE_MIDDLEWARE_KEY_PREFIX, pk)
    portal = cache.get(cache_key)
    if portal:
        return portal

    # At the moment the default portal should always exist.
    try:
        portal = lfc.models.Portal.objects.get(pk=pk)
    except lfc.models.Portal.DoesNotExist:
        # Fall back to any existing portal.
        # NOTE(review): raises IndexError if no Portal row exists at all
        # -- presumably one is always created at install time; confirm.
        portal = lfc.models.Portal.objects.filter()[0]

    cache.set(cache_key, portal)
    return portal
def get_user_from_session_key(session_key):
    """Returns the user from the passed session_key.

    This is a workaround for SWFUpload, which is used to mass upload images
    and files.

    Returns AnonymousUser when the session or backend cannot be resolved.
    """
    try:
        # Load the configured session engine so an arbitrary session can
        # be looked up directly by its key.
        session_engine = __import__(settings.SESSION_ENGINE, {}, {}, [''])
        session_wrapper = session_engine.SessionStore(session_key)
        user_id = session_wrapper.get(SESSION_KEY)
        auth_backend = load_backend(session_wrapper.get(BACKEND_SESSION_KEY))

        if user_id and auth_backend:
            return auth_backend.get_user(user_id)
        else:
            return AnonymousUser()
    except AttributeError:
        # Missing/invalid session data ends up here.
        return AnonymousUser()
def login_form(next=None):
    """Returns a redirect to the lfc login form.

    If next is given it is passed (URL-encoded) as the "next" query
    parameter so the user comes back to it after logging in.
    """
    if next:
        # Quote the value so characters like "&", "?" or "#" in the
        # target URL cannot break or inject query parameters.
        url = "%s?next=%s" % (reverse("lfc_login"), urllib.quote(next, safe=""))
    else:
        url = reverse("lfc_login")

    return HttpResponseRedirect(url)
def traverse_object(request, path):
    """Returns the object with the given path.

    The path is a "/"-separated list of slugs; the object is resolved
    segment by segment and cached per user and language.

    Raises Http404 if any segment cannot be resolved.
    """
    language = translation.get_language()

    # CACHE
    cache_key = "%s-traverse-obj-%s-%s-%s" % (settings.CACHE_MIDDLEWARE_KEY_PREFIX,
        path, request.user.id, language)
    obj = cache.get(cache_key)
    if obj:
        return obj

    paths = path.split("/")
    # (The former duplicate translation.get_language() call here was
    # redundant and has been removed.)
    try:
        obj = lfc.utils.get_content_object(request, slug=paths[0],
            parent=None, language__in = ("0", language))
    except lfc.models.BaseContent.DoesNotExist:
        raise Http404

    # Walk down the remaining segments, staying in the object's language.
    for path in paths[1:]:
        try:
            obj = obj.children.get(slug=path, language__in = ("0", obj.language)).get_content_object()
        except lfc.models.BaseContent.DoesNotExist:
            raise Http404

    cache.set(cache_key, obj)
    return obj
def clear_cache():
    """Clears the complete cache.

    Works for both the memcached and the local-memory backend; backends
    without the touched private attributes are silently ignored.
    """
    # memcached
    try:
        cache._cache.flush_all()
    except AttributeError:
        pass
    else:
        # memcached handled everything; nothing more to do.
        return

    # Local-memory backend: clear stored values and their expiry info.
    try:
        cache._cache.clear()
    except AttributeError:
        pass
    try:
        cache._expire_info.clear()
    except AttributeError:
        pass
def import_module(module):
    """Imports and returns the module with the given dotted name.

    The module is taken from sys.modules when already loaded and only
    imported otherwise.
    """
    try:
        return sys.modules[module]
    except KeyError:
        # __import__ returns the top-level package for dotted names, so
        # fetch the actual (sub)module from sys.modules afterwards.
        __import__(module)
        return sys.modules[module]
def getLOL(objects, objects_per_row=3):
    """Returns the given objects chunked into a list of lists (rows).

    Each inner list holds at most objects_per_row objects; a final,
    shorter row is appended when the objects do not divide evenly.
    """
    items = list(objects)
    return [items[start:start + objects_per_row]
            for start in range(0, len(items), objects_per_row)]
def lfc_quote(string, encoding="utf-8"):
    """Encodes string to encoding before URL-quoting it.

    Quoting bytes (rather than unicode) avoids UnicodeEncodeError for
    non-ASCII messages.
    """
    return urllib.quote(string.encode(encoding))
# TODO: Not used at the moment - what to do?
def get_related_pages_by_tags(page, num=None):
    """Returns a dict with related products by tags.

    This is just a thin wrapper for the get_related method of the
    TaggedItem manager of the tagging product in order to provide caching.

    From the tagging product's doc string (mutatis mutantis):

    Returns a list of products which share tags with the product with passed id
    ordered by the number of shared tags in descending order.

    See there for more.

    NOTE(review): TaggedItem and Page are not imported in this module,
    so calling this function as-is raises NameError -- confirm imports
    before reusing it.
    """
    # CACHE
    cache_key = "%s-related-page-by-tags-%s" % \
        (settings.CACHE_MIDDLEWARE_KEY_PREFIX, page.id)
    related_pages = cache.get(cache_key)
    if related_pages is not None:
        return {"related_pages" : related_pages}

    # Create related pages
    related_pages = TaggedItem.objects.get_related(page, Page, num)

    # Save related pages to cache
    cache.set(cache_key, related_pages)

    return {"related_pages" : related_pages}
|
Mad Dog and I have returned from our annual kid-free cruise. We were blessed with good weather, great company and outstanding service and food. It felt good to get away, but I missed my boys. We were away from them for four nights. When we picked them up from school, it was clear that they were happy to see us. I even got a genuine hug from Full Speed. If any of you have a third grader like him that willingly gives you a public hug on school grounds, you know how special it is.
My sister-in-law and brother-in-law had volunteered to watch them for us while we were away. It worked out well because they live right down the street and my nephew goes to the boys’ school. When I picked up their bags and asked how it went, I was a little taken aback at what I heard. Apparently, they didn’t really seem to miss me, they didn’t hardly speak my name and they had a great time while we were gone. Can you imagine? How is this possible? You mean their life doesn’t stop if I’m not in it?
Well, I’m going to do the only rational thing I can do for next year’s cruise. I’m going to have to take them with me.
1. Directions in unfamiliar territory seem to be a hot-button issue in my marriage. I don’t know what it is about not knowing where you are at that brings out the worst in me and my beloved. Regardless of a few tense moments, we always managed to reach our destination. For the most part, we also managed to still love each other even if we didn’t always like each other in the process.
2. Spending time with my nieces and nephew was fantastic. If I had it my way, I would have all my nieces and nephews live on my street and have them over as much as possible.
3. No matter where you put my boys, they are in a constant competition with life and each other. They had an hour-long debate over who came in first in a go-cart race. If I thought Mad Dog and I were heated over directions, it was nothing compared to the constant chatter of winner vs. loser between our boys. And, unfortunately, they never ever agreed to disagree. Made for some long rides back to our cabin.
4. So, their competitive streak has its downside, but surprisingly, it also has an immense upside. We were at Dixie Stampede which is Dolly Parton’s dinner attraction. The boys were selected by our server to be in a chicken chasing competition. They strategized from the moment they were selected. Full Speed instructed T.Puzzle where and how to run. T.Puzzle was overwhelmed at first by the enormity of the arena and the crowd. He stood quaking at the side of the gate right before they were sent in. Once inside the big, gaping, dust-covered space, he looked at Full Speed and they both got that look of determination on their faces. When the chickens were released, theirs didn’t stand a chance. Full Speed ran so intensely after it he fell down nearly crushing the chicken. They worked as a team and ran with such fire, the other two boys on the opposing side looked as if they were moving in slow-motion. Even after they successfully crossed the finish line, T.Puzzle kept chasing his chicken. He almost ran it all the way back to where the rest of show animals were being housed.
The boys show off their victory medals with pride. Thank goodness they were on the same team!
I may get frustrated by the intensity of my boys, but I have to say, seeing them give their all in everything they do, even chasing a chicken, keeps me inspired to always do my best.
When you are a stay-at-home parent it is hard to put into words what it feels like to be away from your kids for four whole nights. Our annual cruise without kids is something I look forward to all year-long. Having to only worry about myself is liberating. Being able to walk into art-filled stores and not worry about someone breaking something is a revelation. Sitting at a leisurely meal enjoying a glass of wine (or several) and not having to cajole table manners out of my boys is amazing. It’s nice to be at a table of adults who realize knives are for butter or meat, not for stabbing your brother in the face. This time away helps me remember that I am more than somebody’s Mom. It reminds me that I was a whole and interesting person before kids and it’s comforting to know I can be that again.
Our kids are not ours to hold onto forever. The second they are given to us is the same instant we must begin to let them go. It’s up to us to keep hold of ourselves and be who we are throughout this process. Time away from them is a great way to do this. It’s also great because when you return, you realize you were missed. You realize that even boys who seem to need so little except their determination to make it in the world, do in fact still need their Mom. Even T.Puzzle, my recently turned affection-resistant kid, easily gave up a hug or two upon my return. Those hugs were some of the best hugs I’ve had in a long time.
Mad Dog surprised us with a little family getaway to Amelia Island. We packed a lot into our small trip and had an absolute blast.
As the Mom of two, gregarious boys, I realize that serene moments in travel will be few and far between. The best line of defense is a good offense. Therefore, it is all about movement. Keep them active and all will be well. If they are confined for too long they invariably turn on each other. That’s when you know it’s time to move again.
That said, we thankfully managed some calm here and there. However, there was a lot of sprinting on the hotel grounds. As we left our room yesterday the boys, in typical fashion, sprinted ahead of us. As we turned the corner to the elevators, they were nowhere to be found.
Guess who decided it would be awesome to ride the elevator to the lobby by themselves? Thankfully, a kindly couple waited and watched over the boys until Mad Dog and I caught up to them. The boys were so full of pride. At least someone was because I certainly wasn’t. Embarrassed much?
After a stern talking to and a serious warning, we hope that parent-free elevator rides won’t become the norm. Of course at some point I’m going to have to let them go by themselves. It might be awkward if I follow them around their college dorm elevators.
|
import ctypes
import numpy
import OpenGL.GL as GL
class MenuItem(object):
    """Base class for entries shown on a MenuScreen."""
    pass
class TextButtonItem(MenuItem):
    """A selectable text button positioned at (x, y) with height h."""

    def __init__(self, x, y, h, text):
        self.x = x
        self.y = y
        self.h = h
        self.text = text
        # Buttons start out unselected.
        self.selected = False

    def draw(self):
        """Drawing is not implemented yet."""
        pass
class MenuScreen(object):
    """A single menu screen, optionally with a full-screen background image.

    All GL objects (shader program, texture, VAO/VBO) are created eagerly
    in __init__, so a current OpenGL context is required at construction
    time.
    """
    def __init__(self, app, background=None, items=None):
        self._background = background
        if items is None:
            self._items = []
        else:
            self._items = items
        # Load the background image info.
        if self._background:
            # Load the shader program
            self._bg_shader = app.resources.load_shader_program("ortho.vs",
                                                                "texture.fs")
            # Set up the texture
            self._bg_tex = app.resources.load_texture(self._background)
            self._bg_texunit_uniform = self._bg_shader.uniform('texUnit')
            # Set up geometry: a triangle strip covering all of clip
            # space, one interleaved vertex = X, Y, U, V (stride 16 bytes).
            self._bg_vao = GL.glGenVertexArrays(1)
            GL.glBindVertexArray(self._bg_vao)
            self._bg_vbo = GL.glGenBuffers(1)
            GL.glBindBuffer(GL.GL_ARRAY_BUFFER, self._bg_vbo)
            verts = numpy.array(
                # X   Y  U  V
                [-1, -1, 0, 0,
                 -1,  1, 0, 1,
                  1, -1, 1, 0,
                  1,  1, 1, 1],
                dtype=numpy.float32)
            GL.glBufferData(GL.GL_ARRAY_BUFFER, verts.nbytes, verts,
                            GL.GL_STATIC_DRAW)
            # Attribute 0: position (2 floats at offset 0);
            # attribute 1: texcoord (2 floats at byte offset 8).
            GL.glEnableVertexAttribArray(0)
            GL.glEnableVertexAttribArray(1)
            GL.glVertexAttribPointer(0, 2, GL.GL_FLOAT, GL.GL_FALSE, 16, None)
            GL.glVertexAttribPointer(1, 2, GL.GL_FLOAT, GL.GL_FALSE, 16,
                                     ctypes.c_void_p(8))
            GL.glBindVertexArray(0)

    def __del__(self):
        # TODO: release VAO etc?
        pass

    def draw(self):
        """Draw the screen's background quad (if a background was set)."""
        if self._background:
            self._bg_shader.use()
            self._bg_tex.bind()
            # The sampler reads from texture unit 0.
            GL.glUniform1i(self._bg_texunit_uniform, 0)
            GL.glBindVertexArray(self._bg_vao)
            GL.glDrawArrays(GL.GL_TRIANGLE_STRIP, 0, 4)
            # Unbind to leave GL state clean for the caller.
            GL.glBindVertexArray(0)
            GL.glUseProgram(0)
class Menu(object):
    """Menu class.

    Manages a number of menu screens and coordinates moving between them.
    """

    def __init__(self, start_screen):
        """Initialize a menu starting at the given screen."""
        self._menu_stack = []
        self.reset(start_screen)

    def draw(self):
        """Draw the menu (i.e. the screen currently on top of the stack)."""
        top = self._menu_stack[-1]
        top.draw()

    def reset(self, screen):
        """Reset the menu.

        This discards the current menu stack and starts again at the given
        screen.
        """
        self._menu_stack = [screen]

    def navigate_forward(self, screen):
        """Move to a new screen.

        The current screen is kept on the stack so we can go back to it.
        """
        self._menu_stack.append(screen)

    def navigate_back(self):
        """Move to the previous screen."""
        self._menu_stack.pop()
|
Digging these lines on creativity and the art of writing from an absolute treasure of a book by Bradbury. ‘Quantitative experience’ he writes, leads to quality over time. The analogies are identifiable.
Elsewhere, Bradbury adds ‘let the world burn through you’. If nothing else, the energy in his writing makes one write.
Sharing tonight for @dragonfirecrossbow and for one’s own sake.
|
"""
Converts a model to and from json string.
"""
import google.appengine.ext.ndb as ndb
import json
import logging
from anoteweb.util.time_util import from_epoch, to_epoch
from datetime import datetime
def _json_to_model(model_class, json_obj):
    """Builds a model_class instance from a decoded JSON dict.

    A "key" entry, if present, is applied as the entity's urlsafe key.
    Computed properties are skipped since they cannot be assigned.
    """
    _result = {}
    url_safe_key = None
    for k, value in json_obj.iteritems():
        if k == 'key':
            url_safe_key = value
            continue
        prop = model_class._properties.get(k)
        if prop is None:
            # NOTE: logging.fatal logs CRITICAL but does not abort; an
            # unknown key will still fail on the prop access below.
            logging.fatal('can not decode %s, Property is not defined on %s.%s.', k,
                          model_class.__module__, model_class.__name__)
        if isinstance(prop, ndb.model.ComputedProperty):
            continue
        if prop._repeated:
            value = [_get_value_for_json_to_model(prop, val) for val in value]
        else:
            value = _get_value_for_json_to_model(prop, value)
        _result[k] = value
    # (Debug print statements removed; they polluted stdout in production.)
    m = model_class(**_result)
    if url_safe_key:
        m.key = ndb.Key(urlsafe=url_safe_key)
    return m
def _get_value_for_json_to_model(prop, v):
    """Converts a single decoded JSON value to the Python value for prop.

    NOTE(review): property types not listed below (e.g. FloatProperty,
    BooleanProperty) only hit logging.fatal, which logs but does not
    abort, so None is returned for them -- confirm this is intended.
    """
    logging.info('_get_value_for_json_to_model: %s, vaue: %s',
                 repr(prop), repr(v))
    if isinstance(prop, (ndb.DateTimeProperty, ndb.DateProperty,
                         ndb.TimeProperty)):
        # Dates/times are transported as epoch numbers.
        return from_epoch(v)
    if isinstance(prop, ndb.KeyProperty):
        # Keys are transported in urlsafe form.
        return ndb.Key(urlsafe=v)
    if isinstance(prop, (ndb.StructuredProperty, ndb.LocalStructuredProperty)):
        # Nested entities are decoded recursively.
        return _json_to_model(prop._modelclass, v)
    if isinstance(prop, (ndb.IntegerProperty, ndb.StringProperty,
                         ndb.TextProperty)):
        return v
    logging.fatal('unsupported property type: %s', prop)
def _remove_null_value_from_map(value):
    """Recursively converts value to a JSON-serializable structure.

    Models become dicts (with urlsafe "key" and numeric "key_id"
    entries), datetimes become epoch numbers, and None values or empty
    containers are dropped from dicts.
    """
    if isinstance(value, ndb.Model):
        kv_map = value.to_dict()
        kv_map['key'] = value.key.urlsafe()
        kv_map['key_id'] = value.key.id()
        return _remove_null_value_from_map(kv_map)
    if isinstance(value, list):
        return [_remove_null_value_from_map(i) for i in value]
    elif isinstance(value, datetime):
        return to_epoch(value)
    elif isinstance(value, (str, int, float, unicode)):
        # float was previously unhandled and fell through to the fatal
        # branch, serializing FloatProperty values as null.
        return value
    elif isinstance(value, dict):
        result = {}
        for k, v in value.iteritems():
            # Drop empty containers and None values to keep the JSON small.
            if isinstance(v, (list, dict)) and not v:
                continue
            if v is None:
                continue
            result[k] = _remove_null_value_from_map(v)
        return result
    else:
        # NOTE: logging.fatal logs but does not abort; unknown types
        # end up as None (-> null) in the output.
        logging.fatal('unknown type: %s %s', type(value), repr(value))
def json2model(model_class, json_str):
    """Decodes json_str and converts it to a model_class instance."""
    return _json_to_model(model_class, json.loads(json_str))
def model2json(model):
    """Serializes a model (or a list of models) to a JSON string."""
    if isinstance(model, list):
        non_empty_map = [_remove_null_value_from_map(m) for m in model]
    else:
        non_empty_map = _remove_null_value_from_map(model)
    # Keep keys sorted to make test output deterministic.
    return json.dumps(non_empty_map, ensure_ascii=False, sort_keys=True)
|
We've enjoyed the summer garden, and especially harvesting. I thought my daughter might also enjoy planting things her own way. I set up the invitation on the porch, but she didn't notice it for a couple of days...such is the life of a curious 3-year old!
She decided to add water to a cooler and played with the water for a bit.
so she put them in the bucket.
Eventually she wanted to talk about what we did to grow our garden. We talked about the getting the soil ready, making room for and planting the seeds, covering the seeds, and adding water.
She took out some dirt and added the seeds.
She added water to the soil and then poured the soil over the seeds.
She didn't want the seeds to "wash away."
bucket and we rinsed it all off.
Within 10 days we had cilantro sprouts!
Children love to be involved in everyday activities like cooking, rinsing dishes, laundry, and gardening; it offers an opportunity to connect, grow and learn together. And when we present them with an invitation that deepens their experience, it also deepens their knowledge and understanding of the world around them. And to be honest, I also hope this will somehow make these daily life tasks seem less like drudgery and more like moments of nostalgia when my daughter is an adult ...a mama can hope!
|
#!/usr/bin/python
from setuptools import setup, Extension
from cgutils.version import VERSION
# C extension wrapping Linux-specific functionality used by cgutils.
mod_linux = Extension('linux', sources=['cgutils/linux.c'])

# Trove classifiers: Python 3 only, GPLv2, Linux specific.
classifiers = [
    'Development Status :: 4 - Beta',
    'Environment :: Console',
    'Intended Audience :: System Administrators',
    'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
    'Operating System :: POSIX :: Linux',
    'Programming Language :: C',
    'Programming Language :: Python',
    'Programming Language :: Python :: 3',
    'Programming Language :: Python :: 3.4',
    'Programming Language :: Python :: 3.5',
    'Programming Language :: Python :: 3.6',
    'Programming Language :: Python :: 3.7',
    'Programming Language :: Python :: 3.8',
    'Programming Language :: Python :: 3 :: Only',
    'Topic :: System :: Operating System Kernels :: Linux',
    'Topic :: System :: Systems Administration',
    'Topic :: Utilities',
]

# PyPI long description: README followed by the change log.
long_description = open('README').read() + '\n' + open('Changelog').read()

setup(name='cgroup-utils',
      version=VERSION,
      description='Utility tools for control groups of Linux',
      long_description=long_description,
      scripts=['bin/cgutil'],
      packages=['cgutils', 'cgutils.commands'],
      ext_package='cgutils',
      ext_modules=[mod_linux],
      author='peo3',
      author_email='peo314159265@gmail.com',
      url='https://github.com/peo3/cgroup-utils',
      license='GPLv2',
      classifiers=classifiers,
      # NOTE: install_requires previously listed 'argparse', but argparse
      # has been in the standard library on every interpreter this package
      # supports (Python 3 only, per the classifiers), so the obsolete
      # backport requirement was dropped.
      tests_require=['nose', 'pep8'],
      test_suite='nose.collector',
      extras_require=dict(
          test=[
              'nose',
              'pep8',
          ]
      ),)
|
Downtown Los Angeles: Is its revitalization redefining the way people look at Los Angeles?
I think they can stay open as late as they want, but they can't legally sell you alcohol past 2.
I am past partying till 2 these days anyways, but even in my prime partying days 3-5 years ago I rarely made it past 2 AM anyways.
I've said this before elsewhere but back then, most of the times I wanted to keep drinking past 2 AM were times I should have stopped anyways!
It would be cool if they would push the last call time back a little bit, but I don't really care that much. Obviously a lot of people do.
It would be interesting to try it out on a trial basis .
I think another idea that could work would be to only allow it in certain areas .
Downtown LA and Hollywood would make the most sense since they are near transit - subways etc .
The bars could sponsor shuttle buses to the " late night district " that could bring people there from the westside or valley for example .
DTLA feels like the premier area in the city today.
Of course The Westside & Hollywood can be argued.
|
# -*- coding: utf-8 -*-
u"""simulation data operations
:copyright: Copyright (c) 2019 RadiaSoft LLC. All Rights Reserved.
:license: http://www.apache.org/licenses/LICENSE-2.0.html
"""
from __future__ import absolute_import, division, print_function
import sirepo.sim_data
class SimData(sirepo.sim_data.SimDataBase):
    """Simulation data operations for the FLASH application."""

    @classmethod
    def fixup_old_data(cls, data):
        """Upgrades older saved simulations to the current model schema."""
        dm = data.models
        cls._init_models(dm)
        if dm.simulation.flashType == 'CapLaser':
            # CapLaser gained two extra IO plot variables.
            dm.IO.update(
                plot_var_5='magz',
                plot_var_6='depo',
            )

    @classmethod
    def _compute_job_fields(cls, data, r, compute_model):
        """The report name itself is the only field affecting the job."""
        return [r]

    @classmethod
    def _lib_file_basenames(cls, data):
        """Returns the support data files required by the flashType.

        Raises AssertionError for an unknown flashType.
        (Dead commented-out return statements removed.)
        """
        t = data.models.simulation.flashType
        if t == 'RTFlame':
            return ['helm_table.dat']
        if t == 'CapLaser':
            return ['al-imx-004.cn4', 'h-imx-004.cn4']
        raise AssertionError('invalid flashType: {}'.format(t))
|
We’d like to share a video put together from a recent epic sea mount marlin trip in Costa Rica. Two boats fish (Maverick Yachts “Sea Fly” and “Geaux Fly”) for 5 days. Raised an unimaginable 185 Billfish and released 88 Blue marlin.
|
#
# svn-churn.py - determine file churn and fix count for Subversion repository.
#
# Example: python svn-churn.py |sort -n -t , +2 | tail -n 50 |sort -r -n -t , +2
#
# Runs with Python 2.7, 3.3
#
# License: MIT, see accompanying LICENSE.txt
#
# ------------------------------------------------------------------------
# Configuration:

# Repository: working copy path, or URL
# cfg_reposes = ['https://svn.webkit.org/repository/webkit/trunk']
cfg_reposes = []

# Recognise as fix: regular expressions searched in each log message.
cfg_fixed_issues = (
    '[Ii]ssue[s]? #',
    '[Ff]ix',
    '[Cc]orrect'
)

# Substitute partial path with replacement: (regex, replacement) pairs.
cfg_edited_paths = (
#    ( r'/trunk/Source/core/', '/trunk/Source/WebCore/' ),
#    ( r'/trunk/Source/'     , '' ),
)

# Subversion command:
cfg_svn = 'svn'
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
import re, subprocess, sys
class Context:
    """Bundles the configuration values that Churn needs."""

    def __init__(self, svn, fixed_issues, edited_paths):
        self.svn = svn                    # subversion executable
        self.fixed_issues = fixed_issues  # regexes recognised as fixes
        self.edited_paths = edited_paths  # (pattern, replacement) pairs
class Churn:
    """Mines 'svn log' output and accumulates per-file churn data.

    storage: { path : [ changed, fixed, [messages] ] }
    """
    def __init__( self, context ):
        self.context = context
        self.storage = dict()
        self.edits = self.create_edits( context.edited_paths )

    def __call__( self, reposes, options ):
        """Parses the log of each repository, then prints the results."""
        for repos in reposes:
            self.parse_svn_log( self.svn_log( repos, options ) )
        self.update_fixes()
        self.print_results( reposes )

    def svn_log( self, repos, options ):
        """Returns the verbose svn log of repos as one string."""
        command = [ self.context.svn, 'log', '-v' ] + options + [ repos ]
        process = subprocess.Popen( command, stdout=subprocess.PIPE, universal_newlines=True )
        out, err = process.communicate()
        return out

    def issue_pattern( self ):
        """Returns a regex group matching any configured fix marker."""
        return r'(' + '|'.join( self.context.fixed_issues ) + ')'

    def update_fixes( self ):
        """Counts, for every stored file, the messages that mention a fix.

        Call once, after all logs are parsed; calling again would
        double-count since fix counts are not reset.
        """
        # Compile the pattern once instead of once per stored file.
        pattern = re.compile( self.issue_pattern() )
        for k, v in self.storage.items():
            for m in v[2]:
                if pattern.search( m ):
                    v[1] += 1

    def print_results( self, reposes ):
        """Prints one CSV line per file: churn, fixes, churn*fixes, path."""
        print( 'Churn,Fixes,Churn*Fixes,File {reposes}'.format( reposes=reposes) )
        for k, v in self.storage.items():
            print( "{chg},{fix},{prod},{path}".format( chg=v[0], fix=v[1], prod=v[0] * v[1], path=k ) )

    def parse_svn_log( self, text ):
        """Walks the log with a state machine, storing (file, message) pairs.

        A log entry looks like:
        ----- separator / revision line / 'Changed paths:' + paths /
        blank line / message lines.
        """
        s_dash = 1
        s_revision = 2
        s_paths = 3
        s_message = 4
        state = s_dash
        for line in text.split( '\n' ):
            if state == s_dash:
                state = s_revision
            elif state == s_revision:
                msg = ''
                files = []
                state = s_paths
            elif state == s_paths:
                if line.startswith( 'Changed paths:' ):
                    continue
                elif line == '':
                    state = s_message
                else:
                    files.append( line )
            elif state == s_message:
                if line.startswith( '-----' ):
                    # Entry finished: attribute the message to each file.
                    for name in files:
                        self.store( name, msg )
                    state = s_revision
                else:
                    # Multi-line messages are joined with '|'.
                    if msg == '':
                        msg = line
                    else:
                        msg += '|' + line

    def store( self, name, msg ):
        """Records one change of file name together with its log message."""
        name = self.edit_path( name )
        if name in self.storage:
            self.storage[ name ][0] += 1
            self.storage[ name ][2].append( msg )
        else:
            self.storage[ name ] = [ 1, 0, [msg] ]

    def edit_path( self, path ):
        """Applies all configured path substitutions to path."""
        for (p,r) in self.edits:
            path = p.sub( r, path )
        return path

    def create_edits( self, edited_paths ):
        """Compiles the path edit list.

        The first, built-in entry strips the '   M /'-style action prefix
        that 'svn log -v' puts before each path.
        """
        result = [ ( re.compile( r'\s+[ADMR] /' ), '/' ) ]
        for (p,r) in edited_paths:
            result.append( ( re.compile( p ), r ) )
        return result
return result
def usage():
    """Prints the command line help text to stdout."""
    print(
"""Usage: svn-churn [options] [repos...]

Options
  -h, --help    this help screen
  --            end options section

Other options upto -- are passed on to the 'svn log' command.

svn-churn mines the log of the given Subversion repository
and presents the number of changes and fixes for each file.
Repos can be specified as a working copy path or a URL.

Examples
Use repositories configured in script:
  ./svn-churn.py
Use repositories configured in script and limit log used to latest 200 items:
  ./svn-churn.py --limit 200 --
Report 50 most changed and fixed files (sort on changes*fixes):
  ./svn-churn.py |sort -n -t , +2 | tail -n 50 |sort -r -n -t , +2

Note
Among a few other things, you can configure the SVN repository in the script.""" )
def split_arguments( arguments ):
    """Splits argv into (help-requested, svn options, repositories).

    Everything before a bare '--' is treated as options for 'svn log',
    everything after it as repository paths/URLs.  When no repositories
    are given the configured cfg_reposes are used.
    """
    options = []
    inputs = []
    opt_help = False
    in_options = True
    for arg in arguments:
        if not in_options:
            inputs.append( arg )
        elif arg == '--':
            in_options = False
        elif arg in ( '-h', '--help' ):
            opt_help = True
        else:
            options.append( arg )
    return ( opt_help, options, cfg_reposes if len(inputs) == 0 else inputs )
def help( opt_help, reposes ):
    """True when the usage text should be shown instead of running.

    NOTE: intentionally shadows the builtin help(); name kept because
    main() calls it.
    """
    return opt_help or not reposes
def main( arguments ):
    """Program entry point: parses arguments and runs the churn analysis."""
    churn = Churn( Context( cfg_svn, cfg_fixed_issues, cfg_edited_paths ) )
    ( opt_help, svn_options, svn_reposes ) = split_arguments( arguments[1:] )
    if help( opt_help, svn_reposes ):
        # No repositories or -h/--help given: show usage and exit.
        return usage()
    churn( svn_reposes, svn_options )
if __name__ == "__main__":
    try:
        main( sys.argv )
    except Exception as e:
        # Deliberately broad: report any failure as a one-line message
        # instead of a traceback.
        output = e # transform representation to message
        print( "Error: {e}".format( e=output ) )
#
# end of file
#
|
This 3 bedroom 2 bath home located minutes away from Bella Terra has upgraded kitchen with granite counter tops and new cabinets. Baths have been upgraded to tile and clear glass doors. Wood floors throughout. Nice open floor plan. Spacious living room with fireplace. Built in office off living room. Great for working at home or study area. Backyard is spacious with raised deck and jacuzzi, yard, and covered patio area. Great for entertaining. Close to neighborhood elementary school and walking distance to Bella Terra Mall. Minutes to the 405 Freeway. Two car garage with washer and dryer hookups.
Courtesy of Carey Huss, Home Property Management, Inc.
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.