repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
KylinUI/android_kernel_oppo_find5 | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py | 11088 | 3246 | # Core.py - Python extension for perf script, core functions
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
from collections import defaultdict
def autodict():
    """Return a dict that auto-creates nested sub-dicts on first access."""
    return defaultdict(autodict)

# Per-event registries of flag / symbolic field definitions.  They are filled
# in by the define_*() helpers below and consulted by flag_str()/symbol_str().
flag_fields = autodict()
symbolic_fields = autodict()
def define_flag_field(event_name, field_name, delim):
    # Record the delimiter used when joining multiple flag names for output.
    flag_fields[event_name][field_name]['delim'] = delim
def define_flag_value(event_name, field_name, value, field_str):
    # Map one bit value of a flag-type field to its symbolic name.
    flag_fields[event_name][field_name]['values'][value] = field_str
def define_symbolic_field(event_name, field_name):
    """Placeholder; symbolic fields need no per-field setup."""
    # nothing to do, really
    pass
def define_symbolic_value(event_name, field_name, value, field_str):
    # Map one exact value of a symbolic field to its symbolic name.
    symbolic_fields[event_name][field_name]['values'][value] = field_str
def flag_str(event_name, field_name, value):
    """Render a flag-type field value as a delimiter-joined string.

    Decomposes 'value' bit by bit against the names registered via
    define_flag_value().  Returns "" when no definitions exist for the
    field or no bits match.
    """
    string = ""
    if flag_fields[event_name][field_name]:
        print_delim = 0
        # sorted() instead of in-place .sort(): dict.keys() returns a view
        # without .sort() on Python 3, and sorting keeps output order
        # deterministic regardless of dict iteration order.
        keys = sorted(flag_fields[event_name][field_name]['values'].keys())
        for idx in keys:
            if not value and not idx:
                # A zero value with a registered zero key maps directly.
                string += flag_fields[event_name][field_name]['values'][idx]
                break
            if idx and (value & idx) == idx:
                if print_delim and flag_fields[event_name][field_name]['delim']:
                    string += " " + flag_fields[event_name][field_name]['delim'] + " "
                string += flag_fields[event_name][field_name]['values'][idx]
                print_delim = 1
                value &= ~idx  # clear the bit so it is not reported twice
    return string
def symbol_str(event_name, field_name, value):
    """Return the symbolic name registered for 'value', or "" if none."""
    string = ""
    if symbolic_fields[event_name][field_name]:
        # sorted() instead of in-place .sort(): dict.keys() returns a view
        # without .sort() on Python 3, and sorting keeps the scan order
        # deterministic.
        keys = sorted(symbolic_fields[event_name][field_name]['values'].keys())
        for idx in keys:
            if not value and not idx:
                # A zero value with a registered zero key maps directly.
                string = symbolic_fields[event_name][field_name]['values'][idx]
                break
            if (value == idx):
                string = symbolic_fields[event_name][field_name]['values'][idx]
                break
    return string
# Mapping of kernel trace flag bits to symbolic names (0x00 means no flags).
trace_flags = {
    0x00: "NONE",
    0x01: "IRQS_OFF",
    0x02: "IRQS_NOSUPPORT",
    0x04: "NEED_RESCHED",
    0x08: "HARDIRQ",
    0x10: "SOFTIRQ",
}

def trace_flag_str(value):
    """Return a ' | '-separated string naming each flag bit set in value."""
    string = ""
    print_delim = 0
    # Iterate in sorted key order so the output is deterministic regardless
    # of dict iteration order (Python 2 dicts had arbitrary ordering).
    for idx in sorted(trace_flags):
        if not value and not idx:
            string += "NONE"
            break
        if idx and (value & idx) == idx:
            if print_delim:
                string += " | "
            string += trace_flags[idx]
            print_delim = 1
            value &= ~idx  # clear the bit so it is not reported twice
    return string
def taskState(state):
    """Map a numeric scheduler task state to its one-letter code.

    Unknown state values yield the string "Unknown".
    """
    return {
        0: "R",
        1: "S",
        2: "D",
        64: "DEAD",
    }.get(state, "Unknown")
class EventHeaders:
    """Common header fields shared by every perf trace event."""

    def __init__(self, common_cpu, common_secs, common_nsecs,
                 common_pid, common_comm):
        # Store each common_* header field under a shorter attribute name.
        self.cpu = common_cpu
        self.secs = common_secs
        self.nsecs = common_nsecs
        self.pid = common_pid
        self.comm = common_comm

    def ts(self):
        """Event timestamp expressed in nanoseconds."""
        return self.secs * 1000000000 + self.nsecs

    def ts_format(self):
        """Event timestamp formatted as 'seconds.microseconds'."""
        return "%d.%d" % (self.secs, int(self.nsecs / 1000))
| gpl-2.0 |
gangadhar-kadam/church-erpnext | setup/doctype/email_settings/email_settings.py | 6 | 2778 | # ERPNext - web based ERP (http://erpnext.com)
# Copyright (C) 2012 Web Notes Technologies Pvt Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import webnotes
# Module-level shortcut for executing raw SQL through the active connection.
sql = webnotes.conn.sql
from webnotes.utils import cint
class DocType:
    """Controller for the Email Settings document.

    Validates SMTP (outgoing) and POP3 (incoming/support) connectivity
    before the settings are saved.  Python 2 only (old-style raise/except
    syntax).
    """

    def __init__(self, doc, doclist):
        # Standard webnotes controller wiring: main doc and its child list.
        self.doc, self.doclist = doc, doclist

    def validate(self):
        """Checks connectivity to email servers before saving"""
        self.validate_outgoing()
        self.validate_incoming()

    def validate_outgoing(self):
        """Checks outgoing (SMTP) email settings"""
        self.doc.encode()
        if self.doc.outgoing_mail_server:
            from webnotes.utils import cint
            from webnotes.utils.email_lib.smtp import SMTPServer
            smtpserver = SMTPServer(login = self.doc.mail_login,
                password = self.doc.mail_password,
                server = self.doc.outgoing_mail_server,
                port = cint(self.doc.mail_port),
                use_ssl = self.doc.use_ssl
                )
            # exceptions are handled in session connect
            sess = smtpserver.sess

    def validate_incoming(self):
        """
        Checks support ticket (incoming POP3) email settings
        """
        if self.doc.sync_support_mails and self.doc.support_host:
            from webnotes.utils.email_lib.receive import POP3Mailbox
            from webnotes.model.doc import Document
            import _socket, poplib
            # Build a transient settings doc mirroring the support mailbox.
            inc_email = Document('Incoming Email Settings')
            inc_email.encode()
            inc_email.host = self.doc.support_host
            inc_email.use_ssl = self.doc.support_use_ssl
            try:
                err_msg = 'User Name or Support Password missing. Please enter and try again.'
                if not (self.doc.support_username and self.doc.support_password):
                    raise AttributeError, err_msg
                inc_email.username = self.doc.support_username
                inc_email.password = self.doc.support_password
            except AttributeError, e:
                webnotes.msgprint(err_msg)
                raise e
            # Verify we can actually reach and log in to the POP3 server.
            pop_mb = POP3Mailbox(inc_email)
            try:
                pop_mb.connect()
            except _socket.error, e:
                # Invalid mail server -- due to refusing connection
                webnotes.msgprint('Invalid POP3 Mail Server. Please rectify and try again.')
                raise e
            except poplib.error_proto, e:
                webnotes.msgprint('Invalid User Name or Support Password. Please rectify and try again.')
                raise e
| agpl-3.0 |
0x0mar/theHarvester | theHarvester.py | 1 | 11491 | #!/usr/bin/env python
import string
import httplib, sys
from socket import *
import re
import getopt
from discovery import *
from lib import htmlExport
from lib import hostchecker
import time
# Startup banner printed on every invocation.
print "\n*************************************"
print "*TheHarvester Ver. 2.3 *"
print "*Coded by Christian Martorella *"
#print "*Edited by Kevin Haubris *"
print "*Edge-Security Research *"
print "*cmartorella@edge-security.com *"
#print "*kevin.haubris@gmail.com *"
print "*************************************\n\n"
def usage():
    """Print command-line help and usage examples to stdout."""
    print "Usage: theharvester options \n"
    print " -d: Domain to search or company name"
    print " -b: Data source (google,bing,bingapi,pgp,linkedin,google-profiles,people123,jigsaw,all)"
    print " -s: Start in result number X (default 0)"
    print " -v: Verify host name via dns resolution and search for virtual hosts"
    print " -f: Save the results into an HTML and XML file"
    print " -n: Perform a DNS reverse query on all ranges discovered"
    print " -c: Perform a DNS brute force for the domain name"
    print " -t: Perform a DNS TLD expansion discovery"
    print " -e: Use this DNS server"
    print " -l: Limit the number of results to work with(bing goes from 50 to 50 results,"
    print " -h: use SHODAN database to query discovered hosts"
    print " google 100 to 100, and pgp doesn't use this option)"
    print "\nExamples:./theharvester.py -d microsoft.com -l 500 -b google"
    print " ./theharvester.py -d microsoft.com -b pgp"
    print " ./theharvester.py -d microsoft -l 200 -b linkedin\n"
def start(argv):
    """Main driver: parse options, query the selected data sources, then run
    the optional active checks (reverse DNS, DNS brute force, TLD expansion,
    virtual hosts, SHODAN) and finally save HTML/XML reports if requested.
    """
    # Require at least "-d <target> -b <engine>" style invocation.
    if len(sys.argv) < 4:
        usage()
        sys.exit()
    try :
        opts, args = getopt.getopt(argv, "l:d:b:s:vf:nhcte:")
    except getopt.GetoptError:
        usage()
        sys.exit()
    # Defaults for every option flag.
    # NOTE(review): local 'start' shadows this function's own name.
    start=0
    host_ip=[]
    filename=""
    bingapi="yes"
    dnslookup=False
    dnsbrute=False
    dnstld=False
    shodan=False
    vhost=[]
    virtual=False
    limit = 100
    dnsserver=False
    for opt, arg in opts:
        if opt == '-l' :
            limit = int(arg)
        elif opt == '-d':
            word = arg
        elif opt == '-s':
            start = int(arg)
        elif opt == '-v':
            virtual = "basic"
        elif opt == '-f':
            filename= arg
        elif opt == '-n':
            dnslookup=True
        elif opt == '-c':
            dnsbrute=True
        elif opt == '-h':
            shodan=True
        elif opt == '-e':
            dnsserver=arg
        elif opt == '-t':
            dnstld=True
        elif opt == '-b':
            engine = arg
            if engine not in ("google", "linkedin", "pgp", "all","google-profiles","bing","bing_api","yandex","people123","jigsaw"):
                usage()
                print "Invalid search engine, try with: bing, google, linkedin, pgp, exalead, jigsaw, bing_api, people123, google-profiles"
                sys.exit()
            else:
                pass
    # Passive search phase: one branch per data source.
    if engine == "google":
        print "[-] Searching in Google:"
        search=googlesearch.search_google(word,limit,start)
        search.process()
        all_emails=search.get_emails()
        all_hosts=search.get_hostnames()
    # NOTE(review): "exalead" is rejected by the -b validation above, so this
    # branch looks unreachable from the command line -- confirm.
    if engine == "exalead":
        print "[-] Searching in Exalead:"
        search=exaleadsearch.search_exalead(word,limit,start)
        search.process()
        all_emails=search.get_emails()
        all_hosts=search.get_hostnames()
    # NOTE(review): -b accepts "bing_api" but this branch tests "bingapi",
    # so the Bing API mode appears unreachable via -b bing_api -- confirm.
    elif engine == "bing" or engine =="bingapi":
        print "[-] Searching in Bing:"
        search=bingsearch.search_bing(word,limit,start)
        if engine =="bingapi":
            bingapi="yes"
        else:
            bingapi="no"
        search.process(bingapi)
        all_emails=search.get_emails()
        all_hosts=search.get_hostnames()
    ##Yandex does NOT work i repeat it DOES not work!!!!!!!
    ##Blacklisted currently after one search
    elif engine == "yandex":# Not working yet
        print "[-] Searching in Yandex:"
        search=yandexsearch.search_yandex(word,limit,start)
        search.process()
        all_emails=search.get_emails()
        all_hosts=search.get_hostnames()
    elif engine == "pgp":
        print "[-] Searching in PGP key server.."
        search=pgpsearch.search_pgp(word)
        search.process()
        all_emails=search.get_emails()
        all_hosts=search.get_hostnames()
    elif engine == "people123":
        print "[-] Searching in 123People.."
        search = people123.search_123people(word,limit)
        search.process()
        people = search.get_people()
        print "Users from 123People:"
        print "====================="
        for user in people:
            print user
        sys.exit()
    elif engine == "jigsaw":
        print "[-] Searching in Jigsaw.."
        search = jigsaw.search_jigsaw(word,limit)
        search.process()
        people = search.get_people()
        print "Users from Jigsaw:"
        print "====================="
        for user in people:
            print user
        sys.exit()
    elif engine == "linkedin":
        print "[-] Searching in Linkedin.."
        search=linkedinsearch.search_linkedin(word,limit)
        search.process()
        people=search.get_people()
        print "Users from Linkedin:"
        print "===================="
        for user in people:
            print user
        sys.exit()
    elif engine == "google-profiles":
        print "[-] Searching in Google profiles.."
        search=googlesearch.search_google(word,limit,start)
        search.process_profiles()
        people=search.get_profiles()
        print "Users from Google profiles:"
        print "---------------------------"
        for users in people:
            print users
        sys.exit()
    elif engine == "all":
        # Aggregate every working source into all_emails / all_hosts.
        print "Full harvest.."
        all_emails=[]
        all_hosts=[]
        virtual = "basic"
        ##This one always worked.
        print "[-] Searching in Google.."
        search=googlesearch.search_google(word,limit,start)
        search.process()
        emails=search.get_emails()
        hosts=search.get_hostnames()
        all_emails.extend(emails)
        all_hosts.extend(hosts)
        ##Verified to work as well.
        print "[-] Searching in PGP Key server.."
        search=pgpsearch.search_pgp(word)
        search.process()
        emails=search.get_emails()
        hosts=search.get_hostnames()
        all_hosts.extend(hosts)
        all_emails.extend(emails)
        ##Verified to work by me.
        print "[-] Searching in Bing.."
        bingapi="no"
        search=bingsearch.search_bing(word,limit,start)
        search.process(bingapi)
        emails=search.get_emails()
        hosts=search.get_hostnames()
        all_hosts.extend(hosts)
        all_emails.extend(emails)
        #fixed by me reported bug.
        print "[-] Searching in Exalead.."
        search=exaleadsearch.search_exalead(word,limit,start)
        search.process()
        emails=search.get_emails()
        hosts=search.get_hostnames()
        all_hosts.extend(hosts)
        all_emails.extend(emails)
        #these were not included in the all search
        #This isnt working right now
        #print "[-] Searching in Linkedin.."
        #search=linkedinsearch.search_linkedin(word,limit)
        #search.process()
        #people=search.get_people()
        #print "Users from Linkedin:"
        #print "===================="
        #for user in people:
        #print user
        print "[-] Searching in Google profiles.."
        search=googlesearch.search_google(word,limit,start)
        search.process_profiles()
        people=search.get_profiles()
        print "Users from Google profiles:"
        print "---------------------------"
        for users in people:
            print users
        print "[-] Searching in Jigsaw.."
        search = jigsaw.search_jigsaw(word,limit)
        search.process()
        people = search.get_people()
        print "Users from Jigsaw:"
        print "====================="
        for user in people:
            print user
        print "[-] Searching in 123People.."
        search = people123.search_123people(word,limit)
        search.process()
        people = search.get_people()
        print "Users from 123People:"
        print "====================="
        for user in people:
            user = user.replace("+", " ")
            print user
    #print all_emails
    #Results############################################################
    print "\n[+] Emails found:"
    print "------------------"
    if all_emails ==[]:
        print "No emails found"
    else:
        for emails in all_emails:
            print emails
    print "\n[+] Hosts found in search engines:"
    print "------------------------------------"
    # NOTE(review): 'full' is only bound in this else branch; the active
    # checks below reference it and would raise NameError when no hosts
    # were found -- confirm intended behaviour.
    if all_hosts == []:
        print "No hosts found"
    else:
        # Resolve each discovered hostname to host:ip pairs.
        full_host=hostchecker.Checker(all_hosts)
        full=full_host.check()
        for host in full:
            ip=host.split(':')[0]
            print host
            if host_ip.count(ip.lower()):
                pass
            else:
                host_ip.append(ip.lower())
    #DNS reverse lookup#################################################
    dnsrev=[]
    if dnslookup==True:
        print "\n[+] Starting active queries:"
        analyzed_ranges=[]
        for x in full:
            # Reverse-resolve each /24 around a discovered IP, once per range.
            ip=x.split(":")[0]
            range=ip.split(".")
            range[3]="0/24"
            range=string.join(range,'.')
            if not analyzed_ranges.count(range):
                print "[-]Performing reverse lookup in :" + range
                a=dnssearch.dns_reverse(range,True)
                a.list()
                res=a.process()
                analyzed_ranges.append(range)
            else:
                continue
            for x in res:
                # Keep only names that contain the target word.
                if x.count(word):
                    dnsrev.append(x)
                    if x not in full:
                        full.append(x)
        print "Hosts found after reverse lookup:"
        print "---------------------------------"
        for xh in dnsrev:
            print xh
    #DNS Brute force####################################################
    dnsres=[]
    if dnsbrute==True:
        print "[-] Starting DNS brute force:"
        a=dnssearch.dns_force(word,dnsserver,verbose=True)
        res=a.process()
        print "[+] Hosts found after DNS brute force:"
        for y in res:
            print y
            dnsres.append(y)
            if y not in full:
                full.append(y)
    #DNS TLD expansion###################################################
    dnstldres=[]
    if dnstld==True:
        print "[-] Starting DNS TLD expansion:"
        a=dnssearch.dns_tld(word,dnsserver,verbose=True)
        res=a.process()
        print "\n[+] Hosts found after DNS TLD expansion:"
        print "=========================================="
        for y in res:
            print y
            dnstldres.append(y)
            if y not in full:
                full.append(y)
    #Virtual hosts search###############################################
    if virtual == "basic":
        print "[+] Virtual hosts:"
        print "=================="
        for l in host_ip:
            # Ask Bing which other hostnames share each discovered IP.
            search=bingsearch.search_bing(l,limit,start)
            search.process_vhost()
            res=search.get_allhostnames()
            for x in res:
                print l+"\t"+x
                vhost.append(l+":"+x)
                full.append(l+":"+x)
    else:
        pass
    shodanres=[]
    shodanvisited=[]
    if shodan == True:
        print "[+] Shodan Database search:"
        for x in full:
            #print x
            try:
                ip=x.split(":")[0]
                if not shodanvisited.count(ip):
                    #print "\tSearching for: " + ip
                    a=shodansearch.search_shodan(ip)
                    shodanvisited.append(ip)
                    results=a.run()
                    for res in results:
                        shodanres.append(x+"SAPO"+str(res['banner'])+"SAPO"+str(res['port']))
                time.sleep(1)
            except:
                # Best-effort: ignore per-host SHODAN failures.
                pass
        print "[+] Shodan results:"
        print "==================="
        for x in shodanres:
            print x.split("SAPO")[0] +":"+ x.split("SAPO")[1]
    else:
        pass
    ###################################################################
    #Here i need to add explosion mode.
    #Tengo que sacar los TLD para hacer esto.
    # (TODO left by upstream: recursive harvest over discovered vhosts;
    #  currently disabled by the hard-coded None below.)
    recursion= None
    if recursion:
        start=0
        for word in vhost:
            search=googlesearch.search_google(word,limit,start)
            search.process()
            emails=search.get_emails()
            hosts=search.get_hostnames()
            print emails
            print hosts
    else:
        pass
    # Report saving: try HTML first; on failure fall back to a bare XML dump.
    if filename!="":
        try:
            print "Saving file"
            html = htmlExport.htmlExport(all_emails,full,vhost,dnsres,dnsrev,filename,word,shodanres,dnstldres)
            save = html.writehtml()
            sys.exit()
        except Exception,e:
            print e
            print "Error creating the file"
        filename = filename.split(".")[0]+".xml"
        file = open(filename,'w')
        file.write('<theHarvester>')
        for x in all_emails:
            file.write('<email>'+x+'</email>')
        for x in all_hosts:
            file.write('<host>'+x+'</host>')
        for x in vhost:
            file.write('<vhost>'+x+'</vhost>')
        file.write('</theHarvester>')
        # NOTE(review): missing parentheses -- this references the close
        # method without calling it, so the file is never explicitly closed.
        file.close
if __name__ == "__main__":
    # Run the harvester; report Ctrl-C cleanly, exit silently on other errors.
    try: start(sys.argv[1:])
    except KeyboardInterrupt:
        print "Search interrupted by user.."
    except:
        sys.exit()
| gpl-2.0 |
darmaa/odoo | addons/base_report_designer/plugin/openerp_report_designer/bin/script/lib/__init__.py | 392 | 1180 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import actions
import error
import functions
import gui
import logreport
import rpc
import tiny_socket
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
openilabs/falconlab | env/lib/python2.7/site-packages/tests/test_request_body.py | 1 | 4932 | import io
import multiprocessing
from wsgiref import simple_server
import requests
import falcon
from falcon import request_helpers
import falcon.testing as testing
SIZE_1_KB = 1024
class TestRequestBody(testing.TestBase):
    """Exercises reading of falcon's Request.stream and the Body wrapper."""

    def before(self):
        # Fresh resource routed at '/' before every test.
        self.resource = testing.TestResource()
        self.api.add_route('/', self.resource)

    def test_empty_body(self):
        # An empty request body yields a zero-length stream.
        self.simulate_request('/', body='')
        stream = self.resource.req.stream
        stream.seek(0, 2)
        self.assertEqual(stream.tell(), 0)

    def test_tiny_body(self):
        expected_body = '.'
        self.simulate_request('', body=expected_body)
        stream = self.resource.req.stream
        actual_body = stream.read(1)
        self.assertEqual(actual_body, expected_body.encode('utf-8'))
        stream.seek(0, 2)
        self.assertEqual(stream.tell(), 1)

    def test_tiny_body_overflow(self):
        expected_body = '.'
        self.simulate_request('', body=expected_body)
        stream = self.resource.req.stream
        # Read too many bytes; shouldn't block
        actual_body = stream.read(len(expected_body) + 1)
        self.assertEqual(actual_body, expected_body.encode('utf-8'))

    def test_read_body(self):
        # Full read with an explicit Content-Length header.
        expected_body = testing.rand_string(SIZE_1_KB / 2, SIZE_1_KB)
        expected_len = len(expected_body)
        headers = {'Content-Length': str(expected_len)}
        self.simulate_request('', body=expected_body, headers=headers)
        content_len = self.resource.req.get_header('content-length')
        self.assertEqual(content_len, str(expected_len))
        stream = self.resource.req.stream
        actual_body = stream.read()
        self.assertEqual(actual_body, expected_body.encode('utf-8'))
        stream.seek(0, 2)
        self.assertEqual(stream.tell(), expected_len)
        self.assertEqual(stream.tell(), expected_len)

    def test_read_socket_body(self):
        # End-to-end echo through a real wsgiref server in a child process.
        expected_body = testing.rand_string(SIZE_1_KB / 2, SIZE_1_KB)

        def server():
            class Echo(object):
                def on_post(self, req, resp):
                    # wsgiref socket._fileobject blocks when len not given,
                    # but Falcon is smarter than that. :D
                    body = req.stream.read()
                    resp.body = body

                def on_put(self, req, resp):
                    # wsgiref socket._fileobject blocks when len too long,
                    # but Falcon should work around that for me.
                    body = req.stream.read(req.content_length + 1)
                    resp.body = body

            api = falcon.API()
            api.add_route('/echo', Echo())
            httpd = simple_server.make_server('127.0.0.1', 8989, api)
            httpd.serve_forever()

        process = multiprocessing.Process(target=server)
        process.daemon = True
        process.start()
        # Let it boot
        process.join(1)
        url = 'http://127.0.0.1:8989/echo'
        resp = requests.post(url, data=expected_body)
        self.assertEqual(resp.text, expected_body)
        resp = requests.put(url, data=expected_body)
        self.assertEqual(resp.text, expected_body)
        process.terminate()

    def test_body_stream_wrapper(self):
        # request_helpers.Body must mirror file-object semantics exactly.
        data = testing.rand_string(SIZE_1_KB / 2, SIZE_1_KB)
        expected_body = data.encode('utf-8')
        expected_len = len(expected_body)
        # NOTE(kgriffs): Append newline char to each line
        # to match readlines behavior
        expected_lines = [(line + '\n').encode('utf-8')
                          for line in data.split('\n')]
        # NOTE(kgriffs): Remove trailing newline to simulate
        # what readlines does
        expected_lines[-1] = expected_lines[-1][:-1]
        stream = io.BytesIO(expected_body)
        body = request_helpers.Body(stream, expected_len)
        self.assertEqual(body.read(), expected_body)
        stream = io.BytesIO(expected_body)
        body = request_helpers.Body(stream, expected_len)
        self.assertEqual(body.read(2), expected_body[0:2])
        stream = io.BytesIO(expected_body)
        body = request_helpers.Body(stream, expected_len)
        self.assertEqual(body.read(expected_len + 1), expected_body)
        stream = io.BytesIO(expected_body)
        body = request_helpers.Body(stream, expected_len)
        self.assertEqual(body.readline(), expected_lines[0])
        stream = io.BytesIO(expected_body)
        body = request_helpers.Body(stream, expected_len)
        self.assertEqual(body.readlines(), expected_lines)
        stream = io.BytesIO(expected_body)
        body = request_helpers.Body(stream, expected_len)
        self.assertEqual(next(body), expected_lines[0])
        stream = io.BytesIO(expected_body)
        body = request_helpers.Body(stream, expected_len)
        for i, line in enumerate(body):
            self.assertEqual(line, expected_lines[i])
| mit |
iCarto/siga | extScripting/scripts/jython/Lib/tokenize.py | 4 | 10206 | """Tokenization help for Python programs.
This module exports a function called 'tokenize()' that breaks a stream of
text into Python tokens. It accepts a readline-like method which is called
repeatedly to get the next line of input (or "" for EOF) and a "token-eater"
function which is called once for each token found. The latter function is
passed the token type, a string containing the token, the starting and
ending (row, column) coordinates of the token, and the original line. It is
designed to match the working of the Python tokenizer exactly, except that
it produces COMMENT tokens for comments and gives type OP for all operators."""
__author__ = 'Ka-Ping Yee <ping@lfw.org>'
__credits__ = \
    'GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro'

import string, re
from token import *
import token

# Re-export every public token constant plus the extras defined below.
__all__ = [x for x in dir(token) if x[0] != '_'] + ["COMMENT", "tokenize", "NL"]
del token

# Extra pseudo-token types produced by this tokenizer but absent from token.py.
COMMENT = N_TOKENS
tok_name[COMMENT] = 'COMMENT'
NL = N_TOKENS + 1
tok_name[NL] = 'NL'
N_TOKENS += 2
def group(*choices): return '(' + '|'.join(choices) + ')'
def any(*choices): return apply(group, choices) + '*'
def maybe(*choices): return apply(group, choices) + '?'
# Regular-expression fragments describing each lexical category; they are
# combined below into the master Token / PseudoToken patterns.
Whitespace = r'[ \f\t]*'
Comment = r'#[^\r\n]*'
Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name = r'[a-zA-Z_]\w*'

# Integer and floating point literals (trailing l/L marks Python 2 longs).
Hexnumber = r'0[xX][\da-fA-F]*[lL]?'
Octnumber = r'0[0-7]*[lL]?'
Decnumber = r'[1-9]\d*[lL]?'
Intnumber = group(Hexnumber, Octnumber, Decnumber)
Exponent = r'[eE][-+]?\d+'
Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent)
Expfloat = r'[1-9]\d*' + Exponent
Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'0[jJ]', r'[1-9]\d*[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)

# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
# Tail end of " string.
Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
# Tail end of ''' string.
Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
Triple = group("[uU]?[rR]?'''", '[uU]?[rR]?"""')
# Single-line ' or " string.
String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
               r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"')

# Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get
# recognized as two instances of =).
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=",
                 r"[+\-*/%&|^=<>]=?",
                 r"~")

Bracket = '[][(){}]'
Special = group(r'\r?\n', r'[:;.,`]')
Funny = group(Operator, Bracket, Special)

PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken

# First (or only) line of ' or " string.
ContStr = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
                group("'", r'\\\r?\n'),
                r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
                group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n', Comment, Triple)
PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)

# Pre-compiled programs; endprogs maps a string prefix to the pattern that
# finds the matching closing quote (None entries mean "look at next char").
tokenprog, pseudoprog, single3prog, double3prog = map(
    re.compile, (Token, PseudoToken, Single3, Double3))
endprogs = {"'": re.compile(Single), '"': re.compile(Double),
            "'''": single3prog, '"""': double3prog,
            "r'''": single3prog, 'r"""': double3prog,
            "u'''": single3prog, 'u"""': double3prog,
            "ur'''": single3prog, 'ur"""': double3prog,
            "R'''": single3prog, 'R"""': double3prog,
            "U'''": single3prog, 'U"""': double3prog,
            "uR'''": single3prog, 'uR"""': double3prog,
            "Ur'''": single3prog, 'Ur"""': double3prog,
            "UR'''": single3prog, 'UR"""': double3prog,
            'r': None, 'R': None, 'u': None, 'U': None}

tabsize = 8  # a tab advances the column to the next multiple of 8
class TokenError(Exception): pass
class StopTokenizing(Exception): pass
# Default tokeneater: dump each token's span, type name and text.
# (Python 2 only: uses tuple parameter unpacking and the print statement.)
def printtoken(type, token, (srow, scol), (erow, ecol), line): # for testing
    print "%d,%d-%d,%d:\t%s\t%s" % \
        (srow, scol, erow, ecol, tok_name[type], repr(token))
def tokenize(readline, tokeneater=printtoken):
    """Tokenize the stream produced by readline(), feeding each token to
    tokeneater; the callback may raise StopTokenizing to finish early."""
    try:
        tokenize_loop(readline, tokeneater)
    except StopTokenizing:
        pass
def tokenize_loop(readline, tokeneater):
    """Core tokenizer state machine.

    Reads lines via readline() until it returns "" (EOF) and calls
    tokeneater(type, token, (srow, scol), (erow, ecol), line) for every
    token.  Tracks continuation strings (contstr/contline), parenthesis
    nesting (parenlev), backslash continuations (continued) and the
    indentation stack (indents).
    """
    lnum = parenlev = continued = 0
    namechars, numchars = string.letters + '_', string.digits
    contstr, needcont = '', 0
    contline = None
    indents = [0]
    while 1:                                   # loop over lines in stream
        line = readline()
        lnum = lnum + 1
        pos, max = 0, len(line)
        if contstr:                            # continued string
            if not line:
                raise TokenError, ("EOF in multi-line string", strstart)
            endmatch = endprog.match(line)
            if endmatch:
                # Closing quote found: emit the accumulated STRING token.
                pos = end = endmatch.end(0)
                tokeneater(STRING, contstr + line[:end],
                           strstart, (lnum, end), contline + line)
                contstr, needcont = '', 0
                contline = None
            elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
                # Single-quoted string missing its backslash continuation.
                tokeneater(ERRORTOKEN, contstr + line,
                           strstart, (lnum, len(line)), contline)
                contstr = ''
                contline = None
                continue
            else:
                contstr = contstr + line
                contline = contline + line
                continue
        elif parenlev == 0 and not continued:  # new statement
            if not line: break
            column = 0
            while pos < max:                   # measure leading whitespace
                if line[pos] == ' ': column = column + 1
                elif line[pos] == '\t': column = (column/tabsize + 1)*tabsize
                elif line[pos] == '\f': column = 0
                else: break
                pos = pos + 1
            if pos == max: break
            if line[pos] in '#\r\n':           # skip comments or blank lines
                tokeneater((NL, COMMENT)[line[pos] == '#'], line[pos:],
                           (lnum, pos), (lnum, len(line)), line)
                continue
            if column > indents[-1]:           # count indents or dedents
                indents.append(column)
                tokeneater(INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
            while column < indents[-1]:
                indents = indents[:-1]
                tokeneater(DEDENT, '', (lnum, pos), (lnum, pos), line)
        else:                                  # continued statement
            if not line:
                raise TokenError, ("EOF in multi-line statement", (lnum, 0))
            continued = 0
        while pos < max:
            # Scan one token at a time with the master pseudo-pattern.
            pseudomatch = pseudoprog.match(line, pos)
            if pseudomatch:                    # scan for tokens
                start, end = pseudomatch.span(1)
                spos, epos, pos = (lnum, start), (lnum, end), end
                token, initial = line[start:end], line[start]
                if initial in numchars or \
                   (initial == '.' and token != '.'):  # ordinary number
                    tokeneater(NUMBER, token, spos, epos, line)
                elif initial in '\r\n':
                    # Inside brackets a newline is NL, otherwise NEWLINE.
                    tokeneater(parenlev > 0 and NL or NEWLINE,
                               token, spos, epos, line)
                elif initial == '#':
                    tokeneater(COMMENT, token, spos, epos, line)
                elif token in ("'''", '"""',   # triple-quoted
                               "r'''", 'r"""', "R'''", 'R"""',
                               "u'''", 'u"""', "U'''", 'U"""',
                               "ur'''", 'ur"""', "Ur'''", 'Ur"""',
                               "uR'''", 'uR"""', "UR'''", 'UR"""'):
                    endprog = endprogs[token]
                    endmatch = endprog.match(line, pos)
                    if endmatch:               # all on one line
                        pos = endmatch.end(0)
                        token = line[start:pos]
                        tokeneater(STRING, token, spos, (lnum, pos), line)
                    else:
                        strstart = (lnum, start)  # multiple lines
                        contstr = line[start:]
                        contline = line
                        break
                elif initial in ("'", '"') or \
                     token[:2] in ("r'", 'r"', "R'", 'R"',
                                   "u'", 'u"', "U'", 'U"') or \
                     token[:3] in ("ur'", 'ur"', "Ur'", 'Ur"',
                                   "uR'", 'uR"', "UR'", 'UR"' ):
                    if token[-1] == '\n':      # continued string
                        strstart = (lnum, start)
                        endprog = (endprogs[initial] or endprogs[token[1]] or
                                   endprogs[token[2]])
                        contstr, needcont = line[start:], 1
                        contline = line
                        break
                    else:                      # ordinary string
                        tokeneater(STRING, token, spos, epos, line)
                elif initial in namechars:     # ordinary name
                    tokeneater(NAME, token, spos, epos, line)
                elif initial == '\\':          # continued stmt
                    continued = 1
                else:
                    if initial in '([{': parenlev = parenlev + 1
                    elif initial in ')]}': parenlev = parenlev - 1
                    tokeneater(OP, token, spos, epos, line)
            else:
                # Unmatched character: emit it as an error token and move on.
                tokeneater(ERRORTOKEN, line[pos],
                           (lnum, pos), (lnum, pos+1), line)
                pos = pos + 1
    for indent in indents[1:]:                 # pop remaining indent levels
        tokeneater(DEDENT, '', (lnum, 0), (lnum, 0), '')
    tokeneater(ENDMARKER, '', (lnum, 0), (lnum, 0), '')
if __name__ == '__main__':                     # testing
    # Tokenize the named file, or stdin when no argument is given.
    import sys
    if len(sys.argv) > 1: tokenize(open(sys.argv[1]).readline)
    else: tokenize(sys.stdin.readline)
| gpl-3.0 |
projectinterzone/ITZ | contrib/bitrpc/bitrpc.py | 17 | 9207 | from jsonrpc import ServiceProxy
import sys
import string
import getpass
# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======

# Build the JSON-RPC proxy to the local daemon.  When no credentials are
# configured the URL carries none and the server is expected to prompt.
# NOTE(review): credentials are embedded in the URL in clear text; acceptable
# only for a localhost connection like this one.
if rpcpass == "":
    access = ServiceProxy("http://127.0.0.1:9998")
else:
    access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:9998")

# First CLI argument selects the RPC command (case-insensitive).
cmd = sys.argv[1].lower()
# Interactive dispatch: each branch prompts for the command's arguments with
# raw_input and forwards them to the corresponding JSON-RPC call.  The nested
# try/except implements "optional" arguments: the call with all arguments is
# attempted first and, on any failure, retried without them.
# NOTE(review): the bare `except:` clauses swallow every error (including
# typos in responses and connection failures) behind a generic message.
if cmd == "backupwallet":
    try:
        path = raw_input("Enter destination path/filename: ")
        print access.backupwallet(path)
    except:
        print "\n---An error occurred---\n"

elif cmd == "getaccount":
    try:
        addr = raw_input("Enter a Bitcoin address: ")
        print access.getaccount(addr)
    except:
        print "\n---An error occurred---\n"

elif cmd == "getaccountaddress":
    try:
        acct = raw_input("Enter an account name: ")
        print access.getaccountaddress(acct)
    except:
        print "\n---An error occurred---\n"

elif cmd == "getaddressesbyaccount":
    try:
        acct = raw_input("Enter an account name: ")
        print access.getaddressesbyaccount(acct)
    except:
        print "\n---An error occurred---\n"

elif cmd == "getbalance":
    try:
        acct = raw_input("Enter an account (optional): ")
        mc = raw_input("Minimum confirmations (optional): ")
        try:
            print access.getbalance(acct, mc)
        except:
            print access.getbalance()
    except:
        print "\n---An error occurred---\n"

elif cmd == "getblockbycount":
    try:
        height = raw_input("Height: ")
        print access.getblockbycount(height)
    except:
        print "\n---An error occurred---\n"

# The following commands take no arguments: call and print the result.
elif cmd == "getblockcount":
    try:
        print access.getblockcount()
    except:
        print "\n---An error occurred---\n"

elif cmd == "getblocknumber":
    try:
        print access.getblocknumber()
    except:
        print "\n---An error occurred---\n"

elif cmd == "getconnectioncount":
    try:
        print access.getconnectioncount()
    except:
        print "\n---An error occurred---\n"

elif cmd == "getdifficulty":
    try:
        print access.getdifficulty()
    except:
        print "\n---An error occurred---\n"

elif cmd == "getgenerate":
    try:
        print access.getgenerate()
    except:
        print "\n---An error occurred---\n"

elif cmd == "gethashespersec":
    try:
        print access.gethashespersec()
    except:
        print "\n---An error occurred---\n"

elif cmd == "getinfo":
    try:
        print access.getinfo()
    except:
        print "\n---An error occurred---\n"

elif cmd == "getnewaddress":
    try:
        acct = raw_input("Enter an account name: ")
        try:
            print access.getnewaddress(acct)
        except:
            print access.getnewaddress()
    except:
        print "\n---An error occurred---\n"

elif cmd == "getreceivedbyaccount":
    try:
        acct = raw_input("Enter an account (optional): ")
        mc = raw_input("Minimum confirmations (optional): ")
        try:
            print access.getreceivedbyaccount(acct, mc)
        except:
            print access.getreceivedbyaccount()
    except:
        print "\n---An error occurred---\n"

elif cmd == "getreceivedbyaddress":
    try:
        addr = raw_input("Enter a Bitcoin address (optional): ")
        mc = raw_input("Minimum confirmations (optional): ")
        try:
            print access.getreceivedbyaddress(addr, mc)
        except:
            print access.getreceivedbyaddress()
    except:
        print "\n---An error occurred---\n"

elif cmd == "gettransaction":
    try:
        txid = raw_input("Enter a transaction ID: ")
        print access.gettransaction(txid)
    except:
        print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
try:
print access.gettransaction(data)
except:
print access.gettransaction()
except:
print "\n---An error occurred---\n"
# Continuation of the interactive dispatch chain.  Same conventions as above:
# prompt with raw_input, forward to the JSON-RPC proxy, and use the nested
# try/except to emulate optional arguments.
elif cmd == "help":
    try:
        cmd = raw_input("Command (optional): ")
        try:
            print access.help(cmd)
        except:
            print access.help()
    except:
        print "\n---An error occurred---\n"

elif cmd == "listaccounts":
    try:
        mc = raw_input("Minimum confirmations (optional): ")
        try:
            print access.listaccounts(mc)
        except:
            print access.listaccounts()
    except:
        print "\n---An error occurred---\n"

elif cmd == "listreceivedbyaccount":
    try:
        mc = raw_input("Minimum confirmations (optional): ")
        incemp = raw_input("Include empty? (true/false, optional): ")
        try:
            print access.listreceivedbyaccount(mc, incemp)
        except:
            print access.listreceivedbyaccount()
    except:
        print "\n---An error occurred---\n"

elif cmd == "listreceivedbyaddress":
    try:
        mc = raw_input("Minimum confirmations (optional): ")
        incemp = raw_input("Include empty? (true/false, optional): ")
        try:
            print access.listreceivedbyaddress(mc, incemp)
        except:
            print access.listreceivedbyaddress()
    except:
        print "\n---An error occurred---\n"

elif cmd == "listtransactions":
    try:
        acct = raw_input("Account (optional): ")
        count = raw_input("Number of transactions (optional): ")
        frm = raw_input("Skip (optional):")
        try:
            print access.listtransactions(acct, count, frm)
        except:
            print access.listtransactions()
    except:
        print "\n---An error occurred---\n"

elif cmd == "move":
    try:
        frm = raw_input("From: ")
        to = raw_input("To: ")
        amt = raw_input("Amount:")
        mc = raw_input("Minimum confirmations (optional): ")
        comment = raw_input("Comment (optional): ")
        try:
            print access.move(frm, to, amt, mc, comment)
        except:
            print access.move(frm, to, amt)
    except:
        print "\n---An error occurred---\n"

elif cmd == "sendfrom":
    try:
        frm = raw_input("From: ")
        to = raw_input("To: ")
        amt = raw_input("Amount:")
        mc = raw_input("Minimum confirmations (optional): ")
        comment = raw_input("Comment (optional): ")
        commentto = raw_input("Comment-to (optional): ")
        try:
            print access.sendfrom(frm, to, amt, mc, comment, commentto)
        except:
            print access.sendfrom(frm, to, amt)
    except:
        print "\n---An error occurred---\n"

elif cmd == "sendmany":
    try:
        frm = raw_input("From: ")
        to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
        mc = raw_input("Minimum confirmations (optional): ")
        comment = raw_input("Comment (optional): ")
        try:
            print access.sendmany(frm,to,mc,comment)
        except:
            print access.sendmany(frm,to)
    except:
        print "\n---An error occurred---\n"

elif cmd == "sendtoaddress":
    try:
        # NOTE(review): this prompt text looks copy-pasted from sendmany --
        # sendtoaddress takes a single destination address, not a list.
        # Verify before changing the user-facing string.
        to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
        amt = raw_input("Amount:")
        comment = raw_input("Comment (optional): ")
        commentto = raw_input("Comment-to (optional): ")
        try:
            print access.sendtoaddress(to,amt,comment,commentto)
        except:
            print access.sendtoaddress(to,amt)
    except:
        print "\n---An error occurred---\n"

elif cmd == "setaccount":
    try:
        addr = raw_input("Address: ")
        acct = raw_input("Account:")
        print access.setaccount(addr,acct)
    except:
        print "\n---An error occurred---\n"

elif cmd == "setgenerate":
    try:
        gen = raw_input("Generate? (true/false): ")
        cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
        try:
            print access.setgenerate(gen, cpus)
        except:
            print access.setgenerate(gen)
    except:
        print "\n---An error occurred---\n"

elif cmd == "settxfee":
    try:
        amt = raw_input("Amount:")
        print access.settxfee(amt)
    except:
        print "\n---An error occurred---\n"

elif cmd == "stop":
    try:
        print access.stop()
    except:
        print "\n---An error occurred---\n"

elif cmd == "validateaddress":
    try:
        addr = raw_input("Address: ")
        print access.validateaddress(addr)
    except:
        print "\n---An error occurred---\n"

elif cmd == "walletpassphrase":
    try:
        # getpass keeps the passphrase off the terminal echo; the wallet is
        # unlocked for a fixed 60 seconds.
        pwd = getpass.getpass(prompt="Enter wallet passphrase: ")
        access.walletpassphrase(pwd, 60)
        print "\n---Wallet unlocked---\n"
    except:
        print "\n---An error occurred---\n"

elif cmd == "walletpassphrasechange":
    try:
        pwd = getpass.getpass(prompt="Enter old wallet passphrase: ")
        pwd2 = getpass.getpass(prompt="Enter new wallet passphrase: ")
        access.walletpassphrasechange(pwd, pwd2)
        print
        print "\n---Passphrase changed---\n"
    except:
        print
        print "\n---An error occurred---\n"
        print

else:
    print "Command not found or not supported"
| mit |
nirmeshk/oh-mainline | vendor/packages/amqp/amqp/__init__.py | 20 | 2126 | """Low-level AMQP client for Python (fork of amqplib)"""
# Copyright (C) 2007-2008 Barry Pederson <bp@barryp.org>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
from __future__ import absolute_import
VERSION = (1, 4, 6)
__version__ = '.'.join(map(str, VERSION[0:3])) + ''.join(VERSION[3:])
__author__ = 'Barry Pederson'
__maintainer__ = 'Ask Solem'
__contact__ = 'pyamqp@celeryproject.org'
__homepage__ = 'http://github.com/celery/py-amqp'
__docformat__ = 'restructuredtext'
# -eof meta-
#
# Pull in the public items from the various sub-modules
#
from .basic_message import Message # noqa
from .channel import Channel # noqa
from .connection import Connection # noqa
from .exceptions import ( # noqa
AMQPError,
ConnectionError,
RecoverableConnectionError,
IrrecoverableConnectionError,
ChannelError,
RecoverableChannelError,
IrrecoverableChannelError,
ConsumerCancelled,
ContentTooLarge,
NoConsumers,
ConnectionForced,
InvalidPath,
AccessRefused,
NotFound,
ResourceLocked,
PreconditionFailed,
FrameError,
FrameSyntaxError,
InvalidCommand,
ChannelNotOpen,
UnexpectedFrame,
ResourceError,
NotAllowed,
AMQPNotImplementedError,
InternalError,
error_for_code,
__all__ as _all_exceptions,
)
from .utils import promise # noqa
__all__ = [
'Connection',
'Channel',
'Message',
] + _all_exceptions
| agpl-3.0 |
acourtney2015/boto | tests/integration/awslambda/__init__.py | 586 | 1123 | # Copyright (c) 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
| mit |
ndawe/rootpy | rootpy/ROOT.py | 2 | 4217 | """
:py:mod:`rootpy.ROOT`
=====================
This module is intended to be a drop-in replacement for ordinary
PyROOT imports by mimicking PyROOT's interface. If you find a case where it is
not, please report an issue to the rootpy developers.
Both ROOT and rootpy classes can be accessed in a harmonized way through this
module. This means you can take advantage of rootpy classes automatically by
replacing ``import ROOT`` with ``import rootpy.ROOT as ROOT`` or
``from rootpy import ROOT`` in your code, while maintaining backward
compatibility with existing use of ROOT's classes.
ROOT classes are automatically "asrootpy'd" *after* the constructor in ROOT has
been called:
.. sourcecode:: python
>>> import rootpy.ROOT as ROOT
>>> h = ROOT.TH1F('name', 'title', 10, 0, 1)
>>> h
Hist('name')
>>> h.TYPE
'F'
Also access rootpy classes under this same module without needing to remember
where to import them from in rootpy:
.. sourcecode:: python
>>> import rootpy.ROOT as ROOT
>>> h = ROOT.Hist(10, 0, 1, name='name', type='F')
>>> h
Hist('name')
>>> h.TYPE
'F'
Plain old ROOT can still be accessed through the ``R`` property:
.. sourcecode:: python
>>> from rootpy import ROOT
>>> ROOT.R.TFile
<class 'ROOT.TFile'>
"""
from __future__ import absolute_import
from copy import copy
import ROOT
from . import asrootpy, lookup_rootpy, ROOT_VERSION
from . import QROOT, stl
from .utils.module_facade import Facade
__all__ = []
def proxy_global(name, no_expand_macro=False, fname='func', args=()):
    """
    Build a class-level ``property`` that fetches ROOT's thread-local global
    ``name`` (e.g. ``gPad``) and passes it through asrootpy on every access.

    ``fname``/``args`` select which accessor of the ROOT global to call
    (e.g. ``GetPad(0)`` on newer ROOT); ``no_expand_macro`` selects a
    fallback for old ROOT versions whose globals are plain objects rather
    than _ExpandMacroFunction wrappers.
    """
    if no_expand_macro:  # pragma: no cover
        # handle older ROOT versions without _ExpandMacroFunction wrapping
        @property
        def gSomething_no_func(self):
            # `self` here is the Facade module object; calling it asrootpy's.
            glob = self(getattr(ROOT, name))
            # create a fake func() that just returns self
            def func():
                return glob
            glob.func = func
            return glob
        return gSomething_no_func

    @property
    def gSomething(self):
        # Look up the accessor (e.g. ROOT.gPad.GetPad) and call it.
        obj_func = getattr(getattr(ROOT, name), fname)
        try:
            obj = obj_func(*args)
        except ReferenceError:  # null pointer
            return None
        # asrootpy
        return self(obj)
    return gSomething
@Facade(__name__, expose_internal=False)
class Module(object):
    """
    Facade standing in for this module: attribute access first resolves
    against ROOT (asrootpy'ing the result), then against rootpy's own
    classes, so ``rootpy.ROOT`` is a drop-in replacement for ``ROOT``.
    """

    __version__ = ROOT_VERSION

    def __call__(self, arg, after_init=False):
        # Convert any ROOT object into its rootpy equivalent (no warning).
        return asrootpy(arg, warn=False, after_init=after_init)

    def __getattr__(self, what):
        try:
            # check ROOT
            result = self(getattr(ROOT, what), after_init=True)
        except AttributeError:
            # check rootpy
            result = lookup_rootpy(what)
            if result is None:
                raise AttributeError(
                    'ROOT does not have the attribute `{0}` '
                    'and rootpy does not contain the class `{0}`'.format(what))
            return result

        try:
            # Memoize
            setattr(self, what, result)
        except AttributeError:
            # Oops... Oh well. I tried.
            pass
        return result

    @property
    def R(self):
        # Escape hatch: plain, un-wrapped PyROOT.
        return ROOT

    # Thread-local ROOT globals, exposed as asrootpy'd properties.  The
    # accessor name/arguments changed across ROOT releases, hence the
    # version-dependent fname/args below.
    gPad = proxy_global("gPad",
        fname='GetPad' if ROOT_VERSION >= (6, 9, 2) else 'func',
        args=(0,) if ROOT_VERSION >= (6, 9, 2) else ())
    gVirtualX = proxy_global("gVirtualX")

    if ROOT_VERSION < (5, 32, 0):  # pragma: no cover
        gDirectory = proxy_global("gDirectory", no_expand_macro=True)
        gFile = proxy_global("gFile", no_expand_macro=True)
        gInterpreter = proxy_global("gInterpreter", no_expand_macro=True)
    else:
        gDirectory = proxy_global("gDirectory",
            fname='CurrentDirectory' if ROOT_VERSION >= (6, 9, 2) else 'func')
        gFile = proxy_global("gFile",
            fname='CurrentFile' if ROOT_VERSION >= (6, 9, 2) else 'func')
        gInterpreter = proxy_global("gInterpreter",
            no_expand_macro=ROOT_VERSION >= (6, 9, 2))

    # use the smart template STL types from rootpy.stl instead
    for t in QROOT.std.stlclasses:
        locals()[t] = getattr(stl, t)
    del t
| bsd-3-clause |
mjescobar/RF_Estimation | Clustering/helpers/clustering_mj/processAll.py | 2 | 2140 | #!/usr/bin/env python
import numpy as np
import os
import matplotlib.pyplot as plt
import scipy.io
import scipy
import sys
inmat = scipy.io.loadmat('temp_curves_150_50.mat')
tempCurves = inmat['tc']
tempCurvesSpl = inmat['tci']
idx = inmat['idx']
xc = inmat['xc']
print "Shape of tempCurves: ", np.shape(tempCurves)
print "Shape of tempCurvesSpl: ", np.shape(tempCurvesSpl)
print "Shape of idx: ", np.shape(idx)
ntime, ncells = np.shape(tempCurves)
ntimeSpl, ncells = np.shape(tempCurvesSpl)
print "nTime: ", ntime, " - nCells: ", ncells
nclusters = np.max(idx)
print "Number of clusters: ", nclusters
cluster_colors = ['blue', 'red', 'green', 'orange', 'black']
meanCurves = np.zeros( (nclusters,ntimeSpl) )
meanCount = np.zeros( (nclusters,1) )
# Computing mean values
for i in range(ncells):
if( idx[i] == 1 ):
meanCurves[0,:] += tempCurvesSpl[:,i]
meanCount[0] += 1
if( idx[i] == 2 ):
meanCurves[1,:] += tempCurvesSpl[:,i]
meanCount[1] += 1
if( idx[i] == 3 ):
meanCurves[2,:] += tempCurvesSpl[:,i]
meanCount[2] += 1
if( idx[i] == 4 ):
meanCurves[3,:] += tempCurvesSpl[:,i]
meanCount[3] += 1
if( idx[i] == 5 ):
meanCurves[4,:] += tempCurvesSpl[:,i]
meanCount[4] += 1
print meanCount[0], "-", cluster_colors[0]
print meanCount[1], "-", cluster_colors[1]
print meanCount[2], "-", cluster_colors[2]
print meanCount[3], "-", cluster_colors[3]
print meanCount[4], "-", cluster_colors[4]
for i in range(nclusters):
meanCurves[i,:] /= meanCount[i]
# Plotting figures
plt.figure()
for i in range(ncells):
plt.plot(tempCurves[:,i], cluster_colors[idx[i]-1], alpha=0.2)
plt.grid(True)
plt.figure()
for i in range(ncells):
plt.plot(xc, tempCurvesSpl[:,i], cluster_colors[idx[i]-1], linewidth=0.5, alpha=0.15)
for i in range(nclusters):
plt.plot(xc, meanCurves[i,:], cluster_colors[i], linewidth=4, label= "n = %d cells" % meanCount[i])
plt.grid(True)
plt.xlabel('Time before spike [ms]')
plt.ylim(-0.4,0.4)
plt.legend(loc=2)
plt.savefig('Clusters_50uM-150uM.pdf', format='pdf', bbox_inches='tight')
plt.show()
| gpl-2.0 |
arkmaxim/grpc | src/python/grpcio/grpc/beta/_server_adaptations.py | 16 | 13112 | # Copyright 2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Translates gRPC's server-side API into gRPC's server-side Beta API."""
import collections
import threading
import grpc
from grpc import _common
from grpc.beta import interfaces
from grpc.framework.common import cardinality
from grpc.framework.common import style
from grpc.framework.foundation import abandonment
from grpc.framework.foundation import logging_pool
from grpc.framework.foundation import stream
from grpc.framework.interfaces.face import face
_DEFAULT_POOL_SIZE = 8
class _ServerProtocolContext(interfaces.GRPCServicerContext):
  """Adapts a grpc.ServicerContext to the beta interfaces.GRPCServicerContext."""

  def __init__(self, servicer_context):
    self._servicer_context = servicer_context

  def peer(self):
    # Delegate straight to the wrapped grpc.ServicerContext.
    return self._servicer_context.peer()

  def disable_next_response_compression(self):
    pass  # TODO(https://github.com/grpc/grpc/issues/4078): design, implement.
class _FaceServicerContext(face.ServicerContext):
  """Adapts a grpc.ServicerContext to the legacy face.ServicerContext API."""

  def __init__(self, servicer_context):
    self._servicer_context = servicer_context

  def is_active(self):
    return self._servicer_context.is_active()

  def time_remaining(self):
    return self._servicer_context.time_remaining()

  def add_abortion_callback(self, abortion_callback):
    # Deliberately unsupported in the adaptation layer.
    raise NotImplementedError(
        'add_abortion_callback no longer supported server-side!')

  def cancel(self):
    self._servicer_context.cancel()

  def protocol_context(self):
    return _ServerProtocolContext(self._servicer_context)

  def invocation_metadata(self):
    # Normalize metadata through the cygrpc helper before exposing it.
    return _common.cygrpc_metadata(
        self._servicer_context.invocation_metadata())

  def initial_metadata(self, initial_metadata):
    self._servicer_context.send_initial_metadata(initial_metadata)

  def terminal_metadata(self, terminal_metadata):
    self._servicer_context.set_terminal_metadata(terminal_metadata)

  def code(self, code):
    self._servicer_context.set_code(code)

  def details(self, details):
    self._servicer_context.set_details(details)
def _adapt_unary_request_inline(unary_request_inline):
  """Wraps an inline unary-request method so it receives a face-style context.

  Args:
    unary_request_inline: A callable taking (request, face.ServicerContext).

  Returns:
    A callable taking (request, grpc.ServicerContext) usable as a gRPC
    method behavior.
  """
  def adaptation(request, servicer_context):
    face_context = _FaceServicerContext(servicer_context)
    return unary_request_inline(request, face_context)
  return adaptation
def _adapt_stream_request_inline(stream_request_inline):
  """Wraps an inline stream-request method so it receives a face-style context.

  Args:
    stream_request_inline: A callable taking
      (request_iterator, face.ServicerContext).

  Returns:
    A callable taking (request_iterator, grpc.ServicerContext) usable as a
    gRPC method behavior.
  """
  def adaptation(request_iterator, servicer_context):
    face_context = _FaceServicerContext(servicer_context)
    return stream_request_inline(request_iterator, face_context)
  return adaptation
class _Callback(stream.Consumer):
  """A stream.Consumer buffering values for consumption by another thread.

  One thread feeds values in via consume/terminate/consume_and_terminate (or
  aborts via cancel); another thread drains them with draw_one_value or
  draw_all_values.  All state is guarded by a single condition variable.
  Cancellation takes precedence over buffered values, which in turn take
  precedence over termination.
  """

  def __init__(self):
    self._condition = threading.Condition()
    self._values = []
    self._terminated = False
    self._cancelled = False

  def consume(self, value):
    """Buffers one value and wakes any waiting drawer."""
    with self._condition:
      self._values.append(value)
      self._condition.notify_all()

  def terminate(self):
    """Marks the value stream complete."""
    with self._condition:
      self._terminated = True
      self._condition.notify_all()

  def consume_and_terminate(self, value):
    """Buffers a final value and marks the stream complete in one step."""
    with self._condition:
      self._values.append(value)
      self._terminated = True
      self._condition.notify_all()

  def cancel(self):
    """Aborts the stream; subsequent draws raise abandonment.Abandoned."""
    with self._condition:
      self._cancelled = True
      self._condition.notify_all()

  def draw_one_value(self):
    """Blocks for and returns the next value, or None once terminated.

    Raises:
      abandonment.Abandoned: If the stream was cancelled.
    """
    with self._condition:
      while not (self._cancelled or self._values or self._terminated):
        self._condition.wait()
      if self._cancelled:
        raise abandonment.Abandoned()
      if self._values:
        return self._values.pop(0)
      return None

  def draw_all_values(self):
    """Blocks until termination and returns every buffered value as a tuple.

    Raises:
      abandonment.Abandoned: If the stream was cancelled.
    """
    with self._condition:
      while not (self._cancelled or self._terminated):
        self._condition.wait()
      if self._cancelled:
        raise abandonment.Abandoned()
      drained = tuple(self._values)
      self._values = None
      return drained
def _run_request_pipe_thread(request_iterator, request_consumer,
                             servicer_context):
  """Starts a background thread feeding requests into request_consumer.

  The thread stops early (without terminating the consumer) when the RPC is
  no longer active or when the CleanupThread's stop callback has fired.
  """
  thread_joined = threading.Event()

  def pipe_requests():
    for request in request_iterator:
      if not servicer_context.is_active() or thread_joined.is_set():
        return
      request_consumer.consume(request)
    # Only signal normal end-of-stream if the RPC is still live.
    if not servicer_context.is_active() or thread_joined.is_set():
      return
    request_consumer.terminate()

  def stop_request_pipe(timeout):
    thread_joined.set()

  request_pipe_thread = _common.CleanupThread(
      stop_request_pipe, target=pipe_requests)
  request_pipe_thread.start()
def _adapt_unary_unary_event(unary_unary_event):
  """Adapts an event-style unary-unary method into a blocking gRPC behavior.

  The event method deposits its single response through a callback; the
  returned behavior blocks until that response (or cancellation) arrives.
  """
  def adaptation(request, servicer_context):
    callback = _Callback()
    registered = servicer_context.add_callback(callback.cancel)
    if not registered:
      # The RPC already terminated before the callback could be attached.
      raise abandonment.Abandoned()
    face_context = _FaceServicerContext(servicer_context)
    unary_unary_event(request, callback.consume_and_terminate, face_context)
    values = callback.draw_all_values()
    return values[0]
  return adaptation
def _adapt_unary_stream_event(unary_stream_event):
  """Adapts an event-style unary-stream method into a response generator."""
  def adaptation(request, servicer_context):
    callback = _Callback()
    if not servicer_context.add_callback(callback.cancel):
      # The RPC already terminated before the callback could be attached.
      raise abandonment.Abandoned()
    face_context = _FaceServicerContext(servicer_context)
    unary_stream_event(request, callback, face_context)
    # Yield responses as the event method produces them; None marks the end.
    response = callback.draw_one_value()
    while response is not None:
      yield response
      response = callback.draw_one_value()
  return adaptation
def _adapt_stream_unary_event(stream_unary_event):
  """Adapts an event-style stream-unary method into a blocking gRPC behavior.

  Requests are pumped into the event method's consumer on a helper thread
  while the behavior blocks for the single response.
  """
  def adaptation(request_iterator, servicer_context):
    callback = _Callback()
    if not servicer_context.add_callback(callback.cancel):
      # The RPC already terminated before the callback could be attached.
      raise abandonment.Abandoned()
    face_context = _FaceServicerContext(servicer_context)
    request_consumer = stream_unary_event(
        callback.consume_and_terminate, face_context)
    _run_request_pipe_thread(
        request_iterator, request_consumer, servicer_context)
    values = callback.draw_all_values()
    return values[0]
  return adaptation
def _adapt_stream_stream_event(stream_stream_event):
  """Adapts an event-style stream-stream method into a response generator.

  Requests are pumped into the event method's consumer on a helper thread
  while responses are yielded as they are produced.
  """
  def adaptation(request_iterator, servicer_context):
    callback = _Callback()
    if not servicer_context.add_callback(callback.cancel):
      # The RPC already terminated before the callback could be attached.
      raise abandonment.Abandoned()
    face_context = _FaceServicerContext(servicer_context)
    request_consumer = stream_stream_event(callback, face_context)
    _run_request_pipe_thread(
        request_iterator, request_consumer, servicer_context)
    response = callback.draw_one_value()
    while response is not None:
      yield response
      response = callback.draw_one_value()
  return adaptation
class _SimpleMethodHandler(
    collections.namedtuple(
        '_MethodHandler',
        ('request_streaming', 'response_streaming', 'request_deserializer',
         'response_serializer', 'unary_unary', 'unary_stream', 'stream_unary',
         'stream_stream',)),
    grpc.RpcMethodHandler):
  """Immutable grpc.RpcMethodHandler: a namedtuple carrying the streaming
  flags, (de)serializers, and exactly one non-None behavior field matching
  the method's cardinality."""
  pass
def _simple_method_handler(
    implementation, request_deserializer, response_serializer):
  """Builds a _SimpleMethodHandler for a beta-API method implementation.

  Dispatches on the implementation's style (INLINE vs EVENT) and cardinality,
  wrapping the appropriate attribute with the matching adapter.  The four
  positional behavior slots after the serializers are, in order:
  unary_unary, unary_stream, stream_unary, stream_stream -- only the one
  matching the cardinality is populated.
  Falls through (returning None implicitly) for an unrecognized style;
  presumably unreachable -- TODO(review): confirm.
  """
  if implementation.style is style.Service.INLINE:
    if implementation.cardinality is cardinality.Cardinality.UNARY_UNARY:
      return _SimpleMethodHandler(
          False, False, request_deserializer, response_serializer,
          _adapt_unary_request_inline(implementation.unary_unary_inline), None,
          None, None)
    elif implementation.cardinality is cardinality.Cardinality.UNARY_STREAM:
      return _SimpleMethodHandler(
          False, True, request_deserializer, response_serializer, None,
          _adapt_unary_request_inline(implementation.unary_stream_inline), None,
          None)
    elif implementation.cardinality is cardinality.Cardinality.STREAM_UNARY:
      return _SimpleMethodHandler(
          True, False, request_deserializer, response_serializer, None, None,
          _adapt_stream_request_inline(implementation.stream_unary_inline),
          None)
    elif implementation.cardinality is cardinality.Cardinality.STREAM_STREAM:
      return _SimpleMethodHandler(
          True, True, request_deserializer, response_serializer, None, None,
          None,
          _adapt_stream_request_inline(implementation.stream_stream_inline))
  elif implementation.style is style.Service.EVENT:
    if implementation.cardinality is cardinality.Cardinality.UNARY_UNARY:
      return _SimpleMethodHandler(
          False, False, request_deserializer, response_serializer,
          _adapt_unary_unary_event(implementation.unary_unary_event), None,
          None, None)
    elif implementation.cardinality is cardinality.Cardinality.UNARY_STREAM:
      return _SimpleMethodHandler(
          False, True, request_deserializer, response_serializer, None,
          _adapt_unary_stream_event(implementation.unary_stream_event), None,
          None)
    elif implementation.cardinality is cardinality.Cardinality.STREAM_UNARY:
      return _SimpleMethodHandler(
          True, False, request_deserializer, response_serializer, None, None,
          _adapt_stream_unary_event(implementation.stream_unary_event), None)
    elif implementation.cardinality is cardinality.Cardinality.STREAM_STREAM:
      return _SimpleMethodHandler(
          True, True, request_deserializer, response_serializer, None, None,
          None, _adapt_stream_stream_event(implementation.stream_stream_event))
def _flatten_method_pair_map(method_pair_map):
  """Rekeys a {(group, method): value} map by fully-qualified method name.

  Accepts None (treated as empty).  Assumes keys are 2-tuples of
  (group, method) strings, as elsewhere in this module.
  """
  pairs = method_pair_map or {}
  return {
      _common.fully_qualified_method(group, method): value
      for (group, method), value in pairs.items()
  }
class _GenericRpcHandler(grpc.GenericRpcHandler):
  """grpc.GenericRpcHandler serving beta-API method implementations."""

  def __init__(
      self, method_implementations, multi_method_implementation,
      request_deserializers, response_serializers):
    # All three maps are rekeyed from (group, method) pairs to
    # fully-qualified method names for O(1) lookup in service().
    self._method_implementations = _flatten_method_pair_map(
        method_implementations)
    self._request_deserializers = _flatten_method_pair_map(
        request_deserializers)
    self._response_serializers = _flatten_method_pair_map(
        response_serializers)
    self._multi_method_implementation = multi_method_implementation

  def service(self, handler_call_details):
    """Returns an RpcMethodHandler for the requested method, or None."""
    method_implementation = self._method_implementations.get(
        handler_call_details.method)
    if method_implementation is not None:
      return _simple_method_handler(
          method_implementation,
          self._request_deserializers.get(handler_call_details.method),
          self._response_serializers.get(handler_call_details.method))
    elif self._multi_method_implementation is None:
      return None
    else:
      # Multi-method dispatch is not implemented yet; unknown methods are
      # simply unserviced.
      try:
        return None #TODO(nathaniel): call the multimethod.
      except face.NoSuchMethodError:
        return None
class _Server(interfaces.Server):
  """Beta interfaces.Server that delegates to a wrapped grpc.Server and
  additionally supports use as a context manager (start on enter, immediate
  stop on exit)."""

  def __init__(self, server):
    self._server = server

  def add_insecure_port(self, address):
    return self._server.add_insecure_port(address)

  def add_secure_port(self, address, server_credentials):
    return self._server.add_secure_port(address, server_credentials)

  def start(self):
    self._server.start()

  def stop(self, grace):
    return self._server.stop(grace)

  def __enter__(self):
    self._server.start()
    return self

  def __exit__(self, exc_type, exc_val, exc_tb):
    # grace=None: stop immediately.  Returning False propagates exceptions.
    self._server.stop(None)
    return False
def server(
    service_implementations, multi_method_implementation, request_deserializers,
    response_serializers, thread_pool, thread_pool_size):
  """Creates a beta-API Server wrapping a grpc.server.

  When no thread pool is supplied, a logging pool of ``thread_pool_size``
  workers (default _DEFAULT_POOL_SIZE) is created.
  """
  generic_rpc_handler = _GenericRpcHandler(
      service_implementations, multi_method_implementation,
      request_deserializers, response_serializers)
  if thread_pool is None:
    effective_thread_pool = logging_pool.pool(
        _DEFAULT_POOL_SIZE if thread_pool_size is None else thread_pool_size)
  else:
    effective_thread_pool = thread_pool
  return _Server(
      grpc.server(effective_thread_pool, handlers=(generic_rpc_handler,)))
| bsd-3-clause |
pchavanne/yadll | examples/updates_examples.py | 2 | 4039 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
This example will show you the difference between the updates function:
- sgd: Stochastic Gradient Descent
- momentum: Stochastic Gradient Descent with momentum
- nesterov_momentum: Stochastic Gradient Descent with Nesterov momentum
- adagrad: Adaptive gradient descent
- rmsprop: scaling with the Root mean square of the gradient
- adadelta: adaptive learning rate
- adam: Adaptive moment gradient descent
- adamax: adam with infinity norm
"""
import os
import yadll
import logging
from collections import OrderedDict
logging.basicConfig(level=logging.DEBUG, format='%(message)s')

# load the data
data = yadll.data.Data(yadll.data.mnist_loader())

# Per-optimizer hyperparameter search space.  Each entry is
# [name, default value, list of values to grid-search].
updates = OrderedDict([
    ('sgd', [['learning_rate', 0.1, [0.001, 0.01, 0.1]]]),
    ('momentum', [['learning_rate', 0.1, [0.001, 0.01, 0.1]],
                  ['momentum', 0.9, [0.85, 0.9, 0.95, 0.99]]]),
    ('nesterov_momentum', [['learning_rate', 0.1, [0.001, 0.01, 0.1]],
                           ['momentum', 0.9, [0.85, 0.9, 0.95, 0.99]]]),
    ('adagrad', [['learning_rate', 0.1, [0.001, 0.01, 0.1]]]),
    ('rmsprop', [['learning_rate', 0.1, [0.001, 0.01, 0.1]]]),
    ('adadelta', [['learning_rate', 0.1, [0.001, 0.01, 0.1]]]),
    ('adam', [['learning_rate', 0.1, [0.001, 0.01, 0.1]]]),
    ('adamax', [['learning_rate', 0.1, [0.001, 0.01, 0.1]]]),
])
def get_hps():
    """Build the fixed (non-searched) hyperparameters shared by every run."""
    hyperparams = yadll.hyperparameters.Hyperparameters()
    for name, value in (('batch_size', 50),
                        ('n_epochs', 500),
                        ('l1_reg', 0.001),
                        ('l2_reg', 0.00001),
                        ('patience', 5000)):
        hyperparams(name, value)
    return hyperparams
def get_model(hp):
    """Assemble the 2-hidden-layer MLP with dropout for one hyperparameter set.

    Uses the module-level `data`; regularization strengths and batch size
    come from `hp`.
    """
    model = yadll.model.Model(name='mlp with dropout', data=data)

    # Layer stack, input to output: 784 inputs, two dropout+dense(500, relu)
    # stages, then a 10-class logistic regression output.
    input_layer = yadll.layers.InputLayer(input_shape=(hp.batch_size, 28 * 28))
    dropout_1 = yadll.layers.Dropout(incoming=input_layer, corruption_level=0.5)
    dense_1 = yadll.layers.DenseLayer(incoming=dropout_1, n_units=500,
                                      W=yadll.init.glorot_uniform,
                                      l1=hp.l1_reg, l2=hp.l2_reg,
                                      activation=yadll.activations.relu)
    dropout_2 = yadll.layers.Dropout(incoming=dense_1, corruption_level=0.25)
    dense_2 = yadll.layers.DenseLayer(incoming=dropout_2, n_units=500,
                                      W=yadll.init.glorot_uniform,
                                      l1=hp.l1_reg, l2=hp.l2_reg,
                                      activation=yadll.activations.relu)
    output_layer = yadll.layers.LogisticRegression(incoming=dense_2, n_class=10,
                                                   l1=hp.l1_reg, l2=hp.l2_reg)

    net = yadll.network.Network('2 layers mlp with dropout')
    for layer in (input_layer, dropout_1, dense_1,
                  dropout_2, dense_2, output_layer):
        net.add(layer)

    model.network = net
    model.hp = hp
    return model
# Run every optimizer over its hyperparameter search grid, training one model
# per hyperparameter combination, and collect one flat result row per run.
report = list()
for update, hyperparams in updates.items():
    hps = get_hps()
    # Register this optimizer's searchable hyperparameters on top of the
    # fixed ones; iterating `hps` then yields every combination.
    for hyperparam in hyperparams:
        hps(*hyperparam)
    for hp in hps:
        model = get_model(hp)
        model.updates = getattr(yadll.updates, update)
        model.train()
        # Row layout: optimizer name, the hyperparameter name/value pairs
        # used for this run, then training-report metrics as name/value pairs.
        r = list()
        r.append(update)
        for hyperparam in hyperparams:
            r.append(hyperparam[0])
            r.append(hp.hp_value[hyperparam[0]])
        r.append('epoch')
        r.append(model.report['epoch'])
        r.append('early_stop')
        r.append(model.report['early_stop'])
        r.append('best_validation')
        r.append(round(model.report['best_validation'], 2))
        r.append('best_iter')
        r.append(model.report['best_iter'])
        r.append('test_score')
        r.append(round(model.report['test_score'], 2))
        r.append('training_duration')
        r.append(model.report['training_duration'])
        report.append(r)

print(report)
# Write every collected row, one per line.  (The previous version referenced
# the stale loop variable `r` after the loops, so only the final run's row
# was written, with no newline.)
with open('report', 'w') as f:
    for row in report:
        f.write(' '.join(str(e) for e in row) + '\n')
| mit |
amir-qayyum-khan/edx-platform | openedx/core/djangoapps/theming/template_loaders.py | 23 | 2625 | """
Theming aware template loaders.
"""
from django.utils._os import safe_join
from django.core.exceptions import SuspiciousFileOperation
from django.template.loaders.filesystem import Loader as FilesystemLoader
from edxmako.makoloader import MakoLoader
from openedx.core.djangoapps.theming.helpers import get_current_request, \
get_current_theme, get_all_theme_template_dirs
class ThemeTemplateLoader(MakoLoader):
    """
    Mako-aware template loader that resolves templates from the current
    site's theme directory before the default locations.
    """
    is_usable = True
    _accepts_engine_in_init = True

    def __init__(self, *args):
        # Delegate the actual filesystem lookup to the theme-aware
        # filesystem loader, wrapped by the Mako loader machinery.
        super(ThemeTemplateLoader, self).__init__(ThemeFilesystemLoader(*args))
class ThemeFilesystemLoader(FilesystemLoader):
    """
    Filesystem template loader that searches the current theme's template
    directories before the engine's configured directories.
    """
    is_usable = True
    _accepts_engine_in_init = True

    def get_template_sources(self, template_name, template_dirs=None):
        """
        Yield the absolute candidate path of ``template_name`` inside each
        search directory.  Paths that would escape their directory are
        skipped for security reasons.
        """
        search_dirs = template_dirs or self.engine.dirs
        theme_dirs = self.get_theme_template_sources()
        # Theme directories are prepended so theme templates shadow the
        # platform defaults.
        if isinstance(theme_dirs, list):
            search_dirs = theme_dirs + search_dirs
        for directory in search_dirs:
            try:
                yield safe_join(directory, template_name)
            except SuspiciousFileOperation:
                # The joined path escaped this directory; another search
                # directory may still contain the template, so keep going.
                continue

    @staticmethod
    def get_theme_template_sources():
        """
        Return the template directories for the current theme, or for every
        theme when there is no request (e.g. inside a management command,
        where templates are collected for compression).
        """
        request = get_current_request()
        if request:
            # A view is rendering: only the active theme's directories apply.
            theme = get_current_theme()
            return theme and theme.template_dirs
        return get_all_theme_template_dirs()
| agpl-3.0 |
sysadminmatmoz/odoo-clearcorp | TODO-6.1/ccorp_account/report/invoice.py | 4 | 2611 | # -*- encoding: utf-8 -*-
##############################################################################
#
# invoice.py
# ccorp_account
# First author: Carlos Vásquez <carlos.vasquez@clearcorp.co.cr> (ClearCorp S.A.)
# Second author: Mag Guevara <mag.guevara@clearcorp.co.cr> (ClearCorp S.A.)
# Copyright (c) 2010-TODAY ClearCorp S.A. (http://clearcorp.co.cr). All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are
# permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of
# conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list
# of conditions and the following disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY <COPYRIGHT HOLDER> ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those of the
# authors and should not be interpreted as representing official policies, either expressed
# or implied, of ClearCorp S.A..
#
##############################################################################
import time
import pooler
from report import report_sxw
import locale
class account_invoice_ccorp(report_sxw.rml_parse):
    """RML/Mako parser for the ClearCorp customized invoice report."""

    def __init__(self, cr, uid, name, context):
        super(account_invoice_ccorp, self).__init__(cr, uid, name, context=context)
        # Expose the database cursor, user id and the ``time`` module to the
        # Mako report template.
        self.localcontext.update(time=time, cr=cr, uid=uid)


# Register the report service under its XML report name, binding the Mako
# template to the parser above.
report_sxw.report_sxw(
    'report.account.invoice.layout_ccorp',
    'account.invoice',
    'addons/ccorp_account/report/invoice.mako',
    parser=account_invoice_ccorp,
)
| agpl-3.0 |
agaurav/ansible | test/units/plugins/test_plugins.py | 137 | 2968 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.compat.tests import unittest
from ansible.compat.tests import BUILTINS
from ansible.compat.tests.mock import mock_open, patch, MagicMock
from ansible.plugins import MODULE_CACHE, PATH_CACHE, PLUGIN_PATH_CACHE, _basedirs, push_basedir, PluginLoader
class TestErrors(unittest.TestCase):
    """Unit tests for PluginLoader's path-resolution helpers."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    @patch.object(PluginLoader, '_get_paths')
    def test_print_paths(self, mock_method):
        # print_paths() should join every configured path with os.pathsep.
        mock_method.return_value = ['/path/one', '/path/two', '/path/three']
        pl = PluginLoader('foo', 'foo', '', 'test_plugins')
        paths = pl.print_paths()
        expected_paths = os.pathsep.join(['/path/one', '/path/two', '/path/three'])
        self.assertEqual(paths, expected_paths)

    def test_plugins__get_package_paths_no_package(self):
        # Without a package name there is nothing to resolve.
        pl = PluginLoader('test', '', 'test', 'test_plugin')
        self.assertEqual(pl._get_package_paths(), [])

    def test_plugins__get_package_paths_with_package(self):
        # the _get_package_paths() call uses __import__ to load a
        # python library, and then uses the __file__ attribute of
        # the result for that to get the library path, so we mock
        # that here and patch the builtin to use our mocked result
        m = MagicMock()
        m.return_value.__file__ = '/path/to/my/test.py'
        pl = PluginLoader('test', 'foo.bar.bam', 'test', 'test_plugin')
        with patch('{0}.__import__'.format(BUILTINS), m):
            self.assertEqual(pl._get_package_paths(), ['/path/to/my/bar/bam'])

    def test_plugins__get_paths(self):
        # Pre-seeded _paths should be returned verbatim (no directory scan).
        pl = PluginLoader('test', '', 'test', 'test_plugin')
        pl._paths = ['/path/one', '/path/two']
        self.assertEqual(pl._get_paths(), ['/path/one', '/path/two'])

        # NOT YET WORKING
        #def fake_glob(path):
        #    if path == 'test/*':
        #        return ['test/foo', 'test/bar', 'test/bam']
        #    elif path == 'test/*/*'
        #m._paths = None
        #mock_glob = MagicMock()
        #mock_glob.return_value = []
        #with patch('glob.glob', mock_glob):
        #    pass
| gpl-3.0 |
40323226/26 | users/a/g4/ag4_40323138_task3abcd.py | 8 | 11219 | # 各組分別在各自的 .py 程式中建立應用程式 (第1步/總共3步)
from flask import Blueprint, render_template
# 利用 Blueprint建立 ag1, 並且 url 前綴為 /ag1, 並設定 template 存放目錄
# Blueprint for this group, mounted under /ag4_40323138task3 with templates
# served from the local ``templates`` directory.
ag4_40323138task3 = Blueprint('ag4_40323138task3', __name__, url_prefix='/ag4_40323138task3', template_folder='templates')


# Serve a page whose body is a Brython program drawing 2D shapes with Cango.
@ag4_40323138task3.route('/task3')
def task1():
    """Return an HTML page embedding a Brython/Cango 2D drawing program.

    NOTE(review): the endpoint function is named ``task1`` while the route
    and page are task3 -- presumably a copy/paste leftover; renaming would
    change the Flask endpoint name, so it is kept as-is.
    NOTE(review): the indentation inside the embedded Brython script below
    may have been lost in extraction -- verify against the original file.
    """
    outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>網際 2D 繪圖</title>
<!-- IE 9: display inline SVG -->
<meta http-equiv="X-UA-Compatible" content="IE=9">
<script type="text/javascript" src="http://brython.info/src/brython_dist.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/Cango-8v03.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/Cango2D-6v13.js"></script>
<script type="text/javascript" src="http://cptocadp-2015fallhw.rhcloud.com/static/CangoAxes-1v33.js"></script>
</head>
<body>
<script>
window.onload=function(){
brython(1);
}
</script>
<canvas id="plotarea" width="600" height="800"></canvas>
<script type="text/python">
from javascript import JSConstructor
from browser import window
import math
cango = JSConstructor(window.Cango)
cobj = JSConstructor(window.Cobj)
shapedefs = window.shapeDefs
obj2d = JSConstructor(window.Obj2D)
cgo = cango("plotarea")
cgo.setWorldCoords(-250, -250, 500, 500)
# 決定要不要畫座標軸線
cgo.drawAxes(0, 240, 0, 240, {
"strokeColor":"#aaaaaa",
"fillColor": "#aaaaaa",
"xTickInterval": 20,
"xLabelInterval": 20,
"yTickInterval": 20,
"yLabelInterval": 20})
#cgo.drawText("使用 Cango 繪圖程式庫!", 0, 0, {"fontSize":60, "fontWeight": 1200, "lorg":5 })
deg = math.pi/180
def O(x, y, rx, ry, rot, color, border, linewidth):
# 旋轉必須要針對相對中心 rot not working yet
chamber = "M -6.8397, -1.4894 \
A 7, 7, 0, 1, 0, 6.8397, -1.4894 \
A 40, 40, 0, 0, 1, 6.8397, -18.511 \
A 7, 7, 0, 1, 0, -6.8397, -18.511 \
A 40, 40, 0, 0, 1, -6.8397, -1.4894 z"
cgoChamber = window.svgToCgoSVG(chamber)
cmbr = cobj(cgoChamber, "SHAPE", {
"fillColor": color,
"border": border,
"strokeColor": "tan",
"lineWidth": linewidth })
# 複製 cmbr, 然後命名為 basic1
basic1 = cmbr.dup()
basic1.rotate(0)
basic1.translate(0, 0)
basic2 = cmbr.dup()
basic2.rotate(0)
basic2.translate(0, 20)
basic3 = cmbr.dup()
basic3.rotate(0)
basic3.translate(0, 40)
basic4 = cmbr.dup()
basic4.rotate(0)
basic4.translate(0, 60)
basic5 = cmbr.dup()
basic5.rotate(90)
basic5.translate(0, 60)
basic6 = cmbr.dup()
basic6.rotate(60)
basic6.translate(20, 60)
basic7 = cmbr.dup()
basic7.rotate(90)
basic7.translate(0, 20)
basic8 = cmbr.dup()
basic8.rotate(120)
basic8.translate(20, 20)
basic9 = cmbr.dup()
basic9.rotate(60)
basic9.translate(20, 20)
basic10 = cmbr.dup()
basic10.rotate(90)
basic10.translate(0, -20)
basic11 = cmbr.dup()
basic11.rotate(120)
basic11.translate(20, -20)
basic12 = cmbr.dup()
basic12.rotate(180)
basic12.translate((20*math.cos(30*deg)+20), (20*math.sin(30*deg)-20))
basic13 = cmbr.dup()
basic13.rotate(180)
basic13.translate((20*math.cos(30*deg)+20), (20*math.sin(30*deg)+20))
cmbr.appendPath(basic12)
cmbr.appendPath(basic13)
cmbr.appendPath(basic10)
cmbr.appendPath(basic11)
cmbr.appendPath(basic8)
cmbr.appendPath(basic9)
cmbr.appendPath(basic6)
cmbr.appendPath(basic7)
cmbr.appendPath(basic4)
cmbr.appendPath(basic5)
cmbr.appendPath(basic1)
cmbr.appendPath(basic2)
cmbr.appendPath(basic3)
# 表示放大 3 倍
#cgo.render(cmbr, x, y, 3, rot)
# 放大 5 倍
cgo.render(cmbr, x, y, 1, rot)
O(0, 20, 0, 0, 0, "green", True, 4)
cgo.setWorldCoords(-250, -250, 500, 500)
# 決定要不要畫座標軸線
cgo.drawAxes(0, 240, 0, 240, {
"strokeColor":"#aaaaaa",
"fillColor": "#aaaaaa",
"xTickInterval": 20,
"xLabelInterval": 20,
"yTickInterval": 20,
"yLabelInterval": 20})
#cgo.drawText("使用 Cango 繪圖程式庫!", 0, 0, {"fontSize":60, "fontWeight": 1200, "lorg":5 })
deg = math.pi/180
def O(x, y, rx, ry, rot, color, border, linewidth):
# 旋轉必須要針對相對中心 rot not working yet
chamber = "M -6.8397, -1.4894 \
A 7, 7, 0, 1, 0, 6.8397, -1.4894 \
A 40, 40, 0, 0, 1, 6.8397, -18.511 \
A 7, 7, 0, 1, 0, -6.8397, -18.511 \
A 40, 40, 0, 0, 1, -6.8397, -1.4894 z"
cgoChamber = window.svgToCgoSVG(chamber)
cmbr = cobj(cgoChamber, "SHAPE", {
"fillColor": color,
"border": border,
"strokeColor": "tan",
"lineWidth": linewidth })
# 複製 cmbr, 然後命名為 basic1
basic1 = cmbr.dup()
basic1.rotate(180)
basic2 = cmbr.dup()
basic2.rotate(90)
basic2.translate(0, 0)
basic3 = cmbr.dup()
basic3.rotate(90)
basic3.translate(20, 0)
basic4 = cmbr.dup()
basic4.rotate(0)
basic4.translate(40, 0)
basic5 = cmbr.dup()
basic5.rotate(0)
basic5.translate(40, 20)
basic6 = cmbr.dup()
basic6.rotate(150)
basic6.translate(0, 40)
basic7 = cmbr.dup()
basic7.rotate(210)
basic7.translate(40, 40)
basic8 = cmbr.dup()
basic8.rotate(90)
basic8.translate(20*math.cos(60*deg), (20*math.sin(60*deg)+40))
basic9 = cmbr.dup()
basic9.rotate(0)
basic9.translate(0, 40)
basic11 = cmbr.dup()
basic11.rotate(0)
basic11.translate(40, 40)
cmbr.appendPath(basic1)
cmbr.appendPath(basic2)
cmbr.appendPath(basic3)
cmbr.appendPath(basic4)
cmbr.appendPath(basic5)
cmbr.appendPath(basic6)
cmbr.appendPath(basic7)
cmbr.appendPath(basic8)
cmbr.appendPath(basic9)
cmbr.appendPath(basic11)
# hole 為原點位置
hole = cobj(shapedefs.circle(4), "PATH")
cmbr.appendPath(hole)
# 表示放大 3 倍
#cgo.render(cmbr, x, y, 3, rot)
# 放大 5 倍
cgo.render(cmbr, x, y, 1, rot)
O(0, 110, 0, 0, 0, "green", True, 4)
cgo.setWorldCoords(-250, -250, 500, 500)
# 決定要不要畫座標軸線
cgo.drawAxes(0, 240, 0, 240, {
"strokeColor":"#aaaaaa",
"fillColor": "#aaaaaa",
"xTickInterval": 20,
"xLabelInterval": 20,
"yTickInterval": 20,
"yLabelInterval": 20})
#cgo.drawText("使用 Cango 繪圖程式庫!", 0, 0, {"fontSize":60, "fontWeight": 1200, "lorg":5 })
deg = math.pi/180
def O(x, y, rx, ry, rot, color, border, linewidth):
# 旋轉必須要針對相對中心 rot not working yet
chamber = "M -6.8397, -1.4894 \
A -7,- 7, 0, 1, 0, 6.8397, -1.4894 \
A 40, 40, 0, 0, 1, 6.8397, -18.511 \
A -7, -7, 0, 1, 0, -6.8397, -18.511 \
A 40, 40, 0, 0, 1, -6.8397, -1.4894 z"
cgoChamber = window.svgToCgoSVG(chamber)
cmbr = cobj(cgoChamber, "SHAPE", {
"fillColor": color,
"border": border,
"strokeColor": "tan",
"lineWidth": linewidth })
# 複製 cmbr, 然後命名為 basic1
basic1 = cmbr.dup()
basic1.rotate(30)
basic1.translate(0, -20)
basic2 = cmbr.dup()
basic2.rotate(0)
basic2.translate(0, 0)
basic3 = cmbr.dup()
basic3.rotate(0)
basic3.translate(0, 20)
basic4 = cmbr.dup()
basic4.rotate(150)
basic4.translate(0, 20)
basic5 = cmbr.dup()
basic5.rotate(90)
basic5.translate((20*math.cos(60*deg)+0), (20*math.sin(60*deg)+20))
basic6 = cmbr.dup()
basic6.rotate(90)
basic6.translate((20*math.cos(60*deg)+0), -20*math.sin(60*deg)-20)
basic7 = cmbr.dup()
basic7.rotate(210)
basic7.translate(40, 20)
basic8 = cmbr.dup()
basic8.rotate(330)
basic8.translate(40, -20)
cmbr.appendPath(basic7)
cmbr.appendPath(basic8)
cmbr.appendPath(basic6)
cmbr.appendPath(basic5)
cmbr.appendPath(basic4)
cmbr.appendPath(basic3)
cmbr.appendPath(basic1)
cmbr.appendPath(basic2)
cmbr.appendPath(basic3)
# 表示放大 3 倍
#cgo.render(cmbr, x, y, 3, rot)
# 放大 5 倍
cgo.render(cmbr, x, y, 1, rot)
O(0, -60, 0, 0, 0, "green", True, 4)
cgo.setWorldCoords(-250, -250, 500, 500)
# 決定要不要畫座標軸線
cgo.drawAxes(0, 240, 0, 240, {
"strokeColor":"#aaaaaa",
"fillColor": "#aaaaaa",
"xTickInterval": 20,
"xLabelInterval": 20,
"yTickInterval": 20,
"yLabelInterval": 20})
#cgo.drawText("使用 Cango 繪圖程式庫!", 0, 0, {"fontSize":60, "fontWeight": 1200, "lorg":5 })
deg = math.pi/180
def O(x, y, rx, ry, rot, color, border, linewidth):
# 旋轉必須要針對相對中心 rot not working yet
chamber = "M -6.8397, -1.4894 \
A -7,- 7, 0, 1, 0, 6.8397, -1.4894 \
A 40, 40, 0, 0, 1, 6.8397, -18.511 \
A -7, -7, 0, 1, 0, -6.8397, -18.511 \
A 40, 40, 0, 0, 1, -6.8397, -1.4894 z"
cgoChamber = window.svgToCgoSVG(chamber)
cmbr = cobj(cgoChamber, "SHAPE", {
"fillColor": color,
"border": border,
"strokeColor": "tan",
"lineWidth": linewidth })
# 複製 cmbr, 然後命名為 basic1
basic1 = cmbr.dup()
basic1.rotate(0)
basic1.translate(0, 0)
basic2 = cmbr.dup()
basic2.rotate(0)
basic2.translate(0, 20)
basic3 = cmbr.dup()
basic3.rotate(0)
basic3.translate(0, 40)
basic4 = cmbr.dup()
basic4.rotate(0)
basic4.translate(0, 60)
basic5 = cmbr.dup()
basic5.rotate(90)
basic5.translate(0, 60)
basic6 = cmbr.dup()
basic6.rotate(90)
basic6.translate(0, -20)
basic7 = cmbr.dup()
basic7.rotate(60)
basic7.translate(20, 60)
basic8 = cmbr.dup()
basic8.rotate(120)
basic8.translate(20, -20)
basic9 = cmbr.dup()
basic9.rotate(180)
basic9.translate((20*math.cos(30*deg)+20), (20*math.sin(30*deg)-20))
basic10 = cmbr.dup()
basic10.rotate(180)
basic10.translate((20*math.cos(30*deg)+20), 20*math.sin(30*deg))
basic11 = cmbr.dup()
basic11.rotate(180)
basic11.translate((20*math.cos(30*deg)+20), (20*math.sin(30*deg)+20))
cmbr.appendPath(basic11)
cmbr.appendPath(basic9)
cmbr.appendPath(basic10)
cmbr.appendPath(basic7)
cmbr.appendPath(basic8)
cmbr.appendPath(basic5)
cmbr.appendPath(basic6)
cmbr.appendPath(basic1)
cmbr.appendPath(basic2)
cmbr.appendPath(basic4)
cmbr.appendPath(basic3)
# 表示放大 3 倍
#cgo.render(cmbr, x, y, 3, rot)
# 放大 5 倍
cgo.render(cmbr, x, y, 1, rot)
O(0, -160, 0, 0, 0, "green", True, 4)
'''
    return outstring
aseigneurin/ansible-modules-core | files/unarchive.py | 3 | 10575 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2013, Dylan Martin <dmartin@seattlecentral.edu>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: unarchive
version_added: 1.4
short_description: Copies an archive to a remote location and unpack it
extends_documentation_fragment: files
description:
- The M(unarchive) module copies an archive file from the local machine to a remote and unpacks it.
options:
src:
description:
- Local path to archive file to copy to the remote server; can be absolute or relative.
required: true
default: null
dest:
description:
- Remote absolute path where the archive should be unpacked
required: true
default: null
copy:
description:
- "if true, the file is copied from the 'master' to the target machine, otherwise, the plugin will look for src archive at the target machine."
required: false
choices: [ "yes", "no" ]
default: "yes"
creates:
description:
- a filename, when it already exists, this step will B(not) be run.
required: no
default: null
version_added: "1.6"
author: Dylan Martin
todo:
- detect changed/unchanged for .zip files
- handle common unarchive args, like preserve owner/timestamp etc...
notes:
- requires C(tar)/C(unzip) command on target host
- can handle I(gzip), I(bzip2) and I(xz) compressed as well as uncompressed tar files
- detects type of archive automatically
- uses tar's C(--diff arg) to calculate if changed or not. If this C(arg) is not
supported, it will always unpack the archive
- does not detect if a .zip file is different from destination - always unzips
- existing files/directories in the destination which are not in the archive
are not touched. This is the same behavior as a normal archive extraction
- existing files/directories in the destination which are not in the archive
are ignored for purposes of deciding if the archive should be unpacked or not
'''
EXAMPLES = '''
# Example from Ansible Playbooks
- unarchive: src=foo.tgz dest=/var/lib/foo
# Unarchive a file that is already on the remote machine
- unarchive: src=/tmp/foo.zip dest=/usr/local/bin copy=no
'''
import os
from zipfile import ZipFile
class UnarchiveError(Exception):
    """Raised when an archive's contents cannot be listed."""
    pass
# class to handle .zip files
class ZipArchive(object):
def __init__(self, src, dest, module):
self.src = src
self.dest = dest
self.module = module
self.cmd_path = self.module.get_bin_path('unzip')
self._files_in_archive = []
@property
def files_in_archive(self, force_refresh=False):
if self._files_in_archive and not force_refresh:
return self._files_in_archive
archive = ZipFile(self.src)
try:
self._files_in_archive = archive.namelist()
except:
raise UnarchiveError('Unable to list files in the archive')
return self._files_in_archive
def is_unarchived(self, mode, owner, group):
return dict(unarchived=False)
def unarchive(self):
cmd = '%s -o "%s" -d "%s"' % (self.cmd_path, self.src, self.dest)
rc, out, err = self.module.run_command(cmd)
return dict(cmd=cmd, rc=rc, out=out, err=err)
def can_handle_archive(self):
if not self.cmd_path:
return False
cmd = '%s -l "%s"' % (self.cmd_path, self.src)
rc, out, err = self.module.run_command(cmd)
if rc == 0:
return True
return False
# class to handle gzipped tar files
class TgzArchive(object):
def __init__(self, src, dest, module):
self.src = src
self.dest = dest
self.module = module
# Prefer gtar (GNU tar) as it supports the compression options -zjJ
self.cmd_path = self.module.get_bin_path('gtar', None)
if not self.cmd_path:
# Fallback to tar
self.cmd_path = self.module.get_bin_path('tar')
self.zipflag = 'z'
self._files_in_archive = []
@property
def files_in_archive(self, force_refresh=False):
if self._files_in_archive and not force_refresh:
return self._files_in_archive
cmd = '%s -t%sf "%s"' % (self.cmd_path, self.zipflag, self.src)
rc, out, err = self.module.run_command(cmd)
if rc != 0:
raise UnarchiveError('Unable to list files in the archive')
for filename in out.splitlines():
if filename:
self._files_in_archive.append(filename)
return self._files_in_archive
def is_unarchived(self, mode, owner, group):
cmd = '%s -C "%s" --diff -%sf "%s"' % (self.cmd_path, self.dest, self.zipflag, self.src)
rc, out, err = self.module.run_command(cmd)
unarchived = (rc == 0)
if not unarchived:
# Check whether the differences are in something that we're
# setting anyway
# What will be set
to_be_set = set()
for perm in (('Mode', mode), ('Gid', group), ('Uid', owner)):
if perm[1] is not None:
to_be_set.add(perm[0])
# What is different
changes = set()
difference_re = re.compile(r': (.*) differs$')
for line in out.splitlines():
match = difference_re.search(line)
if not match:
# Unknown tar output. Assume we have changes
return dict(unarchived=unarchived, rc=rc, out=out, err=err, cmd=cmd)
changes.add(match.groups()[0])
if changes and changes.issubset(to_be_set):
unarchived = True
return dict(unarchived=unarchived, rc=rc, out=out, err=err, cmd=cmd)
def unarchive(self):
cmd = '%s -x%sf "%s"' % (self.cmd_path, self.zipflag, self.src)
rc, out, err = self.module.run_command(cmd, cwd=self.dest)
return dict(cmd=cmd, rc=rc, out=out, err=err)
def can_handle_archive(self):
if not self.cmd_path:
return False
try:
if self.files_in_archive:
return True
except UnarchiveError:
pass
# Errors and no files in archive assume that we weren't able to
# properly unarchive it
return False
# class to handle tar files that aren't compressed
class TarArchive(TgzArchive):
def __init__(self, src, dest, module):
super(TarArchive, self).__init__(src, dest, module)
self.zipflag = ''
# class to handle bzip2 compressed tar files
class TarBzipArchive(TgzArchive):
def __init__(self, src, dest, module):
super(TarBzipArchive, self).__init__(src, dest, module)
self.zipflag = 'j'
# class to handle xz compressed tar files
class TarXzArchive(TgzArchive):
def __init__(self, src, dest, module):
super(TarXzArchive, self).__init__(src, dest, module)
self.zipflag = 'J'
# try handlers in order and return the one that works or bail if none work
def pick_handler(src, dest, module):
handlers = [TgzArchive, ZipArchive, TarArchive, TarBzipArchive, TarXzArchive]
for handler in handlers:
obj = handler(src, dest, module)
if obj.can_handle_archive():
return obj
module.fail_json(msg='Failed to find handler to unarchive. Make sure the required command to extract the file is installed.')
def main():
    """Entry point for the unarchive module.

    Validates src/dest, picks an archive handler, extracts the archive if
    the destination is not already up to date, then applies the requested
    file attributes to every extracted member.  Exits via module.exit_json
    / module.fail_json.
    """
    module = AnsibleModule(
        # not checking because of daisy chain to file module
        argument_spec = dict(
            src               = dict(required=True),
            original_basename = dict(required=False), # used to handle 'dest is a directory' via template, a slight hack
            dest              = dict(required=True),
            copy              = dict(default=True, type='bool'),
            creates           = dict(required=False),
        ),
        add_file_common_args=True,
    )

    src    = os.path.expanduser(module.params['src'])
    dest   = os.path.expanduser(module.params['dest'])
    copy   = module.params['copy']
    # Common file attributes (mode/owner/group/...) to apply after unpack.
    file_args = module.load_file_common_arguments(module.params)

    # did tar file arrive?
    if not os.path.exists(src):
        if copy:
            module.fail_json(msg="Source '%s' failed to transfer" % src)
        else:
            module.fail_json(msg="Source '%s' does not exist" % src)
    if not os.access(src, os.R_OK):
        module.fail_json(msg="Source '%s' not readable" % src)

    # is dest OK to receive tar file?
    if not os.path.isdir(dest):
        module.fail_json(msg="Destination '%s' is not a directory" % dest)
    if not os.access(dest, os.W_OK):
        module.fail_json(msg="Destination '%s' not writable" % dest)

    handler = pick_handler(src, dest, module)

    res_args = dict(handler=handler.__class__.__name__, dest=dest, src=src)

    # do we need to do unpack?
    res_args['check_results'] = handler.is_unarchived(file_args['mode'],
            file_args['owner'], file_args['group'])
    if res_args['check_results']['unarchived']:
        res_args['changed'] = False
    else:
        # do the unpack
        try:
            res_args['extract_results'] = handler.unarchive()
            if res_args['extract_results']['rc'] != 0:
                module.fail_json(msg="failed to unpack %s to %s" % (src, dest), **res_args)
        except IOError:
            module.fail_json(msg="failed to unpack %s to %s" % (src, dest))
        else:
            res_args['changed'] = True

    # do we need to change perms?
    for filename in handler.files_in_archive:
        file_args['path'] = os.path.join(dest, filename)
        res_args['changed'] = module.set_fs_attributes_if_different(file_args, res_args['changed'])

    module.exit_json(**res_args)
# import module snippets
# NOTE: this wildcard import supplies AnsibleModule (and, presumably, the
# ``re`` module used above -- verify) to the module's global namespace;
# it is the standard pattern for old-style Ansible modules.
from ansible.module_utils.basic import *

if __name__ == '__main__':
    main()
| gpl-3.0 |
sestrella/ansible | lib/ansible/modules/network/meraki/meraki_mx_l3_firewall.py | 24 | 11602 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Kevin Breit (@kbreit) <kevin.breit@kevinbreit.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Metadata consumed by ansible-doc: this module is community-supported and
# in preview status.
ANSIBLE_METADATA = {
    'metadata_version': '1.1',
    'status': ['preview'],
    'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: meraki_mx_l3_firewall
short_description: Manage MX appliance layer 3 firewalls in the Meraki cloud
version_added: "2.7"
description:
- Allows for creation, management, and visibility into layer 3 firewalls implemented on Meraki MX firewalls.
notes:
- Module assumes a complete list of firewall rules are passed as a parameter.
- If there is interest in this module allowing manipulation of a single firewall rule, please submit an issue against this module.
options:
state:
description:
- Create or modify an organization.
choices: ['present', 'query']
default: present
net_name:
description:
- Name of network which MX firewall is in.
net_id:
description:
- ID of network which MX firewall is in.
rules:
description:
- List of firewall rules.
suboptions:
policy:
description:
- Policy to apply if rule is hit.
choices: [allow, deny]
protocol:
description:
- Protocol to match against.
choices: [any, icmp, tcp, udp]
dest_port:
description:
- Comma separated list of destination port numbers to match against.
dest_cidr:
description:
- Comma separated list of CIDR notation destination networks.
src_port:
description:
- Comma separated list of source port numbers to match against.
src_cidr:
description:
- Comma separated list of CIDR notation source networks.
comment:
description:
- Optional comment to describe the firewall rule.
syslog_enabled:
description:
- Whether to log hints against the firewall rule.
- Only applicable if a syslog server is specified against the network.
syslog_default_rule:
description:
- Whether to log hits against the default firewall rule.
- Only applicable if a syslog server is specified against the network.
- This is not shown in response from Meraki. Instead, refer to the C(syslog_enabled) value in the default rule.
type: bool
default: no
author:
- Kevin Breit (@kbreit)
extends_documentation_fragment: meraki
'''
EXAMPLES = r'''
- name: Query firewall rules
meraki_mx_l3_firewall:
auth_key: abc123
org_name: YourOrg
net_name: YourNet
state: query
delegate_to: localhost
- name: Set two firewall rules
meraki_mx_l3_firewall:
auth_key: abc123
org_name: YourOrg
net_name: YourNet
state: present
rules:
- comment: Block traffic to server
src_cidr: 192.0.1.0/24
src_port: any
dest_cidr: 192.0.2.2/32
dest_port: any
protocol: any
policy: deny
- comment: Allow traffic to group of servers
src_cidr: 192.0.1.0/24
src_port: any
dest_cidr: 192.0.2.0/24
dest_port: any
protocol: any
policy: permit
delegate_to: localhost
- name: Set one firewall rule and enable logging of the default rule
meraki_mx_l3_firewall:
auth_key: abc123
org_name: YourOrg
net_name: YourNet
state: present
rules:
- comment: Block traffic to server
src_cidr: 192.0.1.0/24
src_port: any
dest_cidr: 192.0.2.2/32
dest_port: any
protocol: any
policy: deny
syslog_default_rule: yes
delegate_to: localhost
'''
RETURN = r'''
data:
description: Firewall rules associated to network.
returned: success
type: complex
contains:
comment:
description: Comment to describe the firewall rule.
returned: always
type: str
sample: Block traffic to server
src_cidr:
description: Comma separated list of CIDR notation source networks.
returned: always
type: str
sample: 192.0.1.1/32,192.0.1.2/32
src_port:
description: Comma separated list of source ports.
returned: always
type: str
sample: 80,443
dest_cidr:
description: Comma separated list of CIDR notation destination networks.
returned: always
type: str
sample: 192.0.1.1/32,192.0.1.2/32
dest_port:
description: Comma separated list of destination ports.
returned: always
type: str
sample: 80,443
protocol:
description: Network protocol for which to match against.
returned: always
type: str
sample: tcp
policy:
description: Action to take when rule is matched.
returned: always
type: str
syslog_enabled:
description: Whether to log to syslog when rule is matched.
returned: always
type: bool
sample: true
'''
import os
from ansible.module_utils.basic import AnsibleModule, json, env_fallback
from ansible.module_utils.urls import fetch_url
from ansible.module_utils._text import to_native
from ansible.module_utils.network.meraki.meraki import MerakiModule, meraki_argument_spec
def assemble_payload(meraki):
    """Translate the module's snake_case rule parameters into the camelCase
    request body expected by the Meraki L3 firewall API."""
    key_map = {'policy': 'policy',
               'protocol': 'protocol',
               'dest_port': 'destPort',
               'dest_cidr': 'destCidr',
               'src_port': 'srcPort',
               'src_cidr': 'srcCidr',
               'syslog_enabled': 'syslogEnabled',
               'comment': 'comment',
               }
    translated = [{key_map[key]: value for key, value in rule.items()}
                  for rule in meraki.params['rules']]
    return {'rules': translated}
def get_rules(meraki, net_id):
    """Query the network's l3FirewallRules endpoint and return the parsed
    response, or None when the request did not come back with HTTP 200."""
    url = meraki.construct_path('get_all', net_id=net_id)
    response = meraki.request(url, method='GET')
    return response if meraki.status == 200 else None
def main():
    """Entry point: query or replace a network's MX layer 3 firewall rules.

    Resolves the organization and network IDs, then either returns the
    current rule set (``state=query``) or compares the proposed rules
    against the live ones and issues a PUT only when they differ
    (``state=present``). Exits through ``meraki.exit_json``.
    """
    # Sub-spec describing each entry of the ``rules`` list option.
    fw_rules = dict(policy=dict(type='str', choices=['allow', 'deny']),
                    protocol=dict(type='str', choices=['tcp', 'udp', 'icmp', 'any']),
                    dest_port=dict(type='str'),
                    dest_cidr=dict(type='str'),
                    src_port=dict(type='str'),
                    src_cidr=dict(type='str'),
                    comment=dict(type='str'),
                    syslog_enabled=dict(type='bool', default=False),
                    )

    argument_spec = meraki_argument_spec()
    argument_spec.update(state=dict(type='str', choices=['present', 'query'], default='present'),
                         net_name=dict(type='str'),
                         net_id=dict(type='str'),
                         rules=dict(type='list', default=None, elements='dict', options=fw_rules),
                         syslog_default_rule=dict(type='bool'),
                         )

    # The AnsibleModule object is our abstraction for working with Ansible:
    # instantiation, argument parsing, and check-mode support.
    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True,
                           )
    meraki = MerakiModule(module, function='mx_l3_firewall')

    meraki.params['follow_redirects'] = 'all'

    query_urls = {'mx_l3_firewall': '/networks/{net_id}/l3FirewallRules/'}
    update_urls = {'mx_l3_firewall': '/networks/{net_id}/l3FirewallRules/'}

    meraki.url_catalog['get_all'].update(query_urls)
    meraki.url_catalog['update'] = update_urls

    payload = None

    # In check mode, make no changes and return the current state untouched.
    # FIXME: Work with Meraki so they can implement a check mode
    if module.check_mode:
        meraki.exit_json(**meraki.result)

    # Resolve org_id from org_name when it was not supplied directly.
    org_id = meraki.params['org_id']
    orgs = None
    if org_id is None:
        orgs = meraki.get_orgs()
        for org in orgs:
            if org['name'] == meraki.params['org_name']:
                org_id = org['id']
    # Likewise resolve net_id from net_name.
    net_id = meraki.params['net_id']
    if net_id is None:
        if orgs is None:
            orgs = meraki.get_orgs()
        net_id = meraki.get_net_id(net_name=meraki.params['net_name'],
                                   data=meraki.get_nets(org_id=org_id))

    if meraki.params['state'] == 'query':
        meraki.result['data'] = get_rules(meraki, net_id)
    elif meraki.params['state'] == 'present':
        rules = get_rules(meraki, net_id)
        path = meraki.construct_path('get_all', net_id=net_id)
        if meraki.params['rules']:
            payload = assemble_payload(meraki)
        else:
            payload = dict()
        update = False
        if meraki.params['syslog_default_rule'] is not None:
            payload['syslogDefaultRule'] = meraki.params['syslog_default_rule']
        try:
            # The API response includes the implicit trailing default rule,
            # the proposed payload does not — hence the ``- 1``.
            if len(rules) - 1 != len(payload['rules']):  # Quick and simple check to avoid more processing
                update = True
            if meraki.params['syslog_default_rule'] is not None:
                if rules[len(rules) - 1]['syslogEnabled'] != meraki.params['syslog_default_rule']:
                    update = True
            if update is False:
                default_rule = rules[len(rules) - 1].copy()
                del rules[len(rules) - 1]  # Remove default rule for comparison
                # Bugfix: compare *every* remaining rule. The previous bound,
                # ``range(len(rules) - 1)``, skipped the last user-defined
                # rule (the default rule was already removed above), so
                # changes to the final rule never triggered an update.
                for r in range(len(rules)):
                    if meraki.is_update_required(rules[r], payload['rules'][r]) is True:
                        update = True
                rules.append(default_rule)
        except KeyError:
            # payload has no 'rules' key (rules option omitted); nothing to diff.
            pass
        if update is True:
            response = meraki.request(path, method='PUT', payload=json.dumps(payload))
            if meraki.status == 200:
                meraki.result['data'] = response
                meraki.result['changed'] = True
        else:
            meraki.result['data'] = rules

    # On success, exit_json passes the key/value results back to Ansible.
    meraki.exit_json(**meraki.result)
# Invoke the module entry point only when executed directly by Ansible.
if __name__ == '__main__':
    main()
| gpl-3.0 |
ravigadila/django-init | common_func/forms.py | 1 | 1506 | import os
from django import forms
from django.contrib.auth import authenticate
from django.core.exceptions import ValidationError
from django.contrib.auth.forms import PasswordResetForm, AuthenticationForm
from common_func.models import User
class RegisterForm(forms.ModelForm):
    """Registration form for new users.

    Adds a ``confirm_password`` field on top of the ``User`` model fields
    and validates that both password entries match.
    """
    confirm_password = forms.CharField(widget=forms.PasswordInput)

    class Meta:
        model = User
        fields = ["first_name", "last_name", "email", "password", "confirm_password"]

    def clean_confirm_password(self):
        """Check that ``password`` and ``confirm_password`` match.

        Raises:
            forms.ValidationError: if the two entries differ.
        """
        password = self.cleaned_data.get("password")
        confirm_password = self.cleaned_data.get("confirm_password")
        if password != confirm_password:
            # Grammar fix: message previously read "Passwords doesn't match".
            raise forms.ValidationError("Passwords do not match")
        return confirm_password
class LoginForm(AuthenticationForm):
    """Login form; inherits the username/password fields and validation
    from Django's :class:`AuthenticationForm` unchanged."""
class ProfileForm(forms.ModelForm):
    """Form for a user to edit their own profile details."""

    class Meta:
        model = User
        fields = [
            "first_name",
            "last_name",
            "date_of_birth",
        ]
class PasswordResetEmailForm(PasswordResetForm):
    """Password-reset form that only accepts addresses of active users."""

    def clean_email(self):
        """Reject the address unless an active account is registered with it."""
        address = self.cleaned_data.get('email')
        has_active_user = User.objects.filter(
            email__iexact=address, is_active=True
        ).exists()
        if not has_active_user:
            raise ValidationError("Error: There is no user registered with the specified email address!")
        return address
varunagrawal/azure-services | varunagrawal/site-packages/django/core/management/commands/shell.py | 92 | 3142 | import os
from django.core.management.base import NoArgsCommand
from optparse import make_option
class Command(NoArgsCommand):
    # Management command that opens an interactive interpreter, preferring
    # IPython or bpython and falling back to the plain built-in shell.
    option_list = NoArgsCommand.option_list + (
        make_option('--plain', action='store_true', dest='plain',
            help='Tells Django to use plain Python, not IPython.'),
    )
    help = "Runs a Python interactive interpreter. Tries to use IPython, if it's available."

    # Preference order tried by run_shell(); each name is a method below.
    shells = ['ipython', 'bpython']
    requires_model_validation = False

    def ipython(self):
        """Start an IPython shell, supporting both the modern embed() API
        and the pre-0.11 IPShell API."""
        try:
            from IPython import embed
            embed()
        except ImportError:
            # IPython < 0.11
            # Explicitly pass an empty list as arguments, because otherwise
            # IPython would use sys.argv from this script.
            try:
                from IPython.Shell import IPShell
                shell = IPShell(argv=[])
                shell.mainloop()
            except ImportError:
                # IPython not found at all, raise ImportError
                raise

    def bpython(self):
        """Start a bpython shell; raises ImportError when not installed."""
        import bpython
        bpython.embed()

    def run_shell(self):
        """Try each shell in self.shells; raise ImportError if none works."""
        for shell in self.shells:
            try:
                return getattr(self, shell)()
            except ImportError:
                pass
        raise ImportError

    def handle_noargs(self, **options):
        """Command entry point: load models, then launch the chosen shell."""
        # XXX: (Temporary) workaround for ticket #1796: force early loading of all
        # models from installed apps.
        from django.db.models.loading import get_models
        get_models()

        use_plain = options.get('plain', False)

        try:
            if use_plain:
                # Don't bother loading IPython, because the user wants plain Python.
                raise ImportError
            self.run_shell()
        except ImportError:
            import code
            # Set up a dictionary to serve as the environment for the shell, so
            # that tab completion works on objects that are imported at runtime.
            # See ticket 5082.
            imported_objects = {}
            try:  # Try activating rlcompleter, because it's handy.
                import readline
            except ImportError:
                pass
            else:
                # We don't have to wrap the following import in a 'try', because
                # we already know 'readline' was imported successfully.
                import rlcompleter
                readline.set_completer(rlcompleter.Completer(imported_objects).complete)
                readline.parse_and_bind("tab:complete")

            # We want to honor both $PYTHONSTARTUP and .pythonrc.py, so follow system
            # conventions and get $PYTHONSTARTUP first then import user.
            if not use_plain:
                pythonrc = os.environ.get("PYTHONSTARTUP")
                if pythonrc and os.path.isfile(pythonrc):
                    try:
                        # NOTE: execfile is Python 2 only; this file predates py3.
                        execfile(pythonrc)
                    except NameError:
                        pass
                # This will import .pythonrc.py as a side-effect
                import user
            code.interact(local=imported_objects)
| gpl-2.0 |
pwoodworth/intellij-community | python/lib/Lib/site-packages/django/utils/unittest/loader.py | 353 | 13437 | """Loading unittests."""
import os
import re
import sys
import traceback
import types
import unittest
from fnmatch import fnmatch
from django.utils.unittest import case, suite
try:
from os.path import relpath
except ImportError:
from django.utils.unittest.compatibility import relpath
__unittest = True
def _CmpToKey(mycmp):
'Convert a cmp= function into a key= function'
class K(object):
def __init__(self, obj):
self.obj = obj
def __lt__(self, other):
return mycmp(self.obj, other.obj) == -1
return K
# what about .pyc or .pyo (etc)
# we would need to avoid loading the same tests multiple times
# from '.py', '.pyc' *and* '.pyo'
VALID_MODULE_NAME = re.compile(r'[_a-z]\w*\.py$', re.IGNORECASE)
def _make_failed_import_test(name, suiteClass):
    # Build a synthetic test case whose execution re-raises the import
    # failure for module *name*, so discovery errors surface as test errors.
    message = 'Failed to import test module: %s' % name
    if hasattr(traceback, 'format_exc'):
        # Python 2.3 compatibility
        # format_exc returns two frames of discover.py as well
        message += '\n%s' % traceback.format_exc()
    return _make_failed_test('ModuleImportFailure', name, ImportError(message),
                             suiteClass)
def _make_failed_load_tests(name, exception, suiteClass):
    # Wrap a load_tests() failure in a synthetic failing test case.
    return _make_failed_test('LoadTestsFailure', name, exception, suiteClass)
def _make_failed_test(classname, methodname, exception, suiteClass):
    """Create a one-test suite whose single test method raises *exception*."""
    def _raiser(self):
        raise exception

    failing_case = type(classname, (case.TestCase,), {methodname: _raiser})
    return suiteClass((failing_case(methodname),))
class TestLoader(unittest.TestLoader):
    """
    This class is responsible for loading tests according to various criteria
    and returning them wrapped in a TestSuite
    """
    # Only methods whose name starts with this prefix are collected as tests.
    testMethodPrefix = 'test'
    # Python 2 built-in cmp; names are sorted with it via _CmpToKey.
    sortTestMethodsUsing = cmp
    suiteClass = suite.TestSuite
    # Set by discover(); root from which module names are computed.
    _top_level_dir = None

    def loadTestsFromTestCase(self, testCaseClass):
        """Return a suite of all tests cases contained in testCaseClass"""
        if issubclass(testCaseClass, suite.TestSuite):
            raise TypeError("Test cases should not be derived from TestSuite."
                            " Maybe you meant to derive from TestCase?")
        testCaseNames = self.getTestCaseNames(testCaseClass)
        if not testCaseNames and hasattr(testCaseClass, 'runTest'):
            # Fall back to the single implicit runTest method.
            testCaseNames = ['runTest']
        loaded_suite = self.suiteClass(map(testCaseClass, testCaseNames))
        return loaded_suite

    def loadTestsFromModule(self, module, use_load_tests=True):
        """Return a suite of all tests cases contained in the given module"""
        tests = []
        for name in dir(module):
            obj = getattr(module, name)
            if isinstance(obj, type) and issubclass(obj, unittest.TestCase):
                tests.append(self.loadTestsFromTestCase(obj))

        # load_tests protocol: a module-level hook may replace the suite.
        load_tests = getattr(module, 'load_tests', None)
        tests = self.suiteClass(tests)
        if use_load_tests and load_tests is not None:
            try:
                return load_tests(self, tests, None)
            except Exception, e:
                return _make_failed_load_tests(module.__name__, e,
                                               self.suiteClass)
        return tests

    def loadTestsFromName(self, name, module=None):
        """Return a suite of all tests cases given a string specifier.

        The name may resolve either to a module, a test case class, a
        test method within a test case class, or a callable object which
        returns a TestCase or TestSuite instance.

        The method optionally resolves the names relative to a given module.
        """
        parts = name.split('.')
        if module is None:
            # Import the longest importable prefix of the dotted name.
            parts_copy = parts[:]
            while parts_copy:
                try:
                    module = __import__('.'.join(parts_copy))
                    break
                except ImportError:
                    del parts_copy[-1]
                    if not parts_copy:
                        raise
            parts = parts[1:]
        obj = module
        # Walk the remaining attribute path; keep the parent for the
        # unbound-method case below.
        for part in parts:
            parent, obj = obj, getattr(obj, part)

        if isinstance(obj, types.ModuleType):
            return self.loadTestsFromModule(obj)
        elif isinstance(obj, type) and issubclass(obj, unittest.TestCase):
            return self.loadTestsFromTestCase(obj)
        elif (isinstance(obj, types.UnboundMethodType) and
              isinstance(parent, type) and
              issubclass(parent, case.TestCase)):
            # A single test method: instantiate its case around it.
            return self.suiteClass([parent(obj.__name__)])
        elif isinstance(obj, unittest.TestSuite):
            return obj
        elif hasattr(obj, '__call__'):
            test = obj()
            if isinstance(test, unittest.TestSuite):
                return test
            elif isinstance(test, unittest.TestCase):
                return self.suiteClass([test])
            else:
                raise TypeError("calling %s returned %s, not a test" %
                                (obj, test))
        else:
            raise TypeError("don't know how to make test from: %s" % obj)

    def loadTestsFromNames(self, names, module=None):
        """Return a suite of all tests cases found using the given sequence
        of string specifiers. See 'loadTestsFromName()'.
        """
        suites = [self.loadTestsFromName(name, module) for name in names]
        return self.suiteClass(suites)

    def getTestCaseNames(self, testCaseClass):
        """Return a sorted sequence of method names found within testCaseClass
        """
        def isTestMethod(attrname, testCaseClass=testCaseClass,
                         prefix=self.testMethodPrefix):
            return attrname.startswith(prefix) and \
                hasattr(getattr(testCaseClass, attrname), '__call__')
        # Python 2: filter() returns a list here, so .sort() is valid.
        testFnNames = filter(isTestMethod, dir(testCaseClass))
        if self.sortTestMethodsUsing:
            testFnNames.sort(key=_CmpToKey(self.sortTestMethodsUsing))
        return testFnNames

    def discover(self, start_dir, pattern='test*.py', top_level_dir=None):
        """Find and return all test modules from the specified start
        directory, recursing into subdirectories to find them. Only test files
        that match the pattern will be loaded. (Using shell style pattern
        matching.)

        All test modules must be importable from the top level of the project.
        If the start directory is not the top level directory then the top
        level directory must be specified separately.

        If a test package name (directory with '__init__.py') matches the
        pattern then the package will be checked for a 'load_tests' function. If
        this exists then it will be called with loader, tests, pattern.

        If load_tests exists then discovery does *not* recurse into the package,
        load_tests is responsible for loading all tests in the package.

        The pattern is deliberately not stored as a loader attribute so that
        packages can continue discovery themselves. top_level_dir is stored so
        load_tests does not need to pass this argument in to loader.discover().
        """
        set_implicit_top = False
        if top_level_dir is None and self._top_level_dir is not None:
            # make top_level_dir optional if called from load_tests in a package
            top_level_dir = self._top_level_dir
        elif top_level_dir is None:
            set_implicit_top = True
            top_level_dir = start_dir

        top_level_dir = os.path.abspath(top_level_dir)

        if not top_level_dir in sys.path:
            # all test modules must be importable from the top level directory
            # should we *unconditionally* put the start directory in first
            # in sys.path to minimise likelihood of conflicts between installed
            # modules and development versions?
            sys.path.insert(0, top_level_dir)
        self._top_level_dir = top_level_dir

        is_not_importable = False
        if os.path.isdir(os.path.abspath(start_dir)):
            start_dir = os.path.abspath(start_dir)
            if start_dir != top_level_dir:
                is_not_importable = not os.path.isfile(os.path.join(start_dir, '__init__.py'))
        else:
            # support for discovery from dotted module names
            try:
                __import__(start_dir)
            except ImportError:
                is_not_importable = True
            else:
                the_module = sys.modules[start_dir]
                top_part = start_dir.split('.')[0]
                start_dir = os.path.abspath(os.path.dirname((the_module.__file__)))
                if set_implicit_top:
                    # Derive the top level from the imported package's location.
                    self._top_level_dir = os.path.abspath(os.path.dirname(os.path.dirname(sys.modules[top_part].__file__)))
                    sys.path.remove(top_level_dir)

        if is_not_importable:
            raise ImportError('Start directory is not importable: %r' % start_dir)

        tests = list(self._find_tests(start_dir, pattern))
        return self.suiteClass(tests)

    def _get_name_from_path(self, path):
        # Convert a filesystem path under _top_level_dir into a dotted
        # module name (extension stripped).
        path = os.path.splitext(os.path.normpath(path))[0]

        _relpath = relpath(path, self._top_level_dir)
        assert not os.path.isabs(_relpath), "Path must be within the project"
        assert not _relpath.startswith('..'), "Path must be within the project"

        name = _relpath.replace(os.path.sep, '.')
        return name

    def _get_module_from_name(self, name):
        # Import by dotted name and return the leaf module object.
        __import__(name)
        return sys.modules[name]

    def _match_path(self, path, full_path, pattern):
        # override this method to use alternative matching strategy
        return fnmatch(path, pattern)

    def _find_tests(self, start_dir, pattern):
        """Used by discovery. Yields test suites it loads."""
        paths = os.listdir(start_dir)

        for path in paths:
            full_path = os.path.join(start_dir, path)
            if os.path.isfile(full_path):
                if not VALID_MODULE_NAME.match(path):
                    # valid Python identifiers only
                    continue
                if not self._match_path(path, full_path, pattern):
                    continue
                # if the test file matches, load it
                name = self._get_name_from_path(full_path)
                try:
                    module = self._get_module_from_name(name)
                except:
                    # Any import-time failure becomes a synthetic failing test.
                    yield _make_failed_import_test(name, self.suiteClass)
                else:
                    # Guard against importing a same-named module from a
                    # different location (e.g. a globally installed copy).
                    mod_file = os.path.abspath(getattr(module, '__file__', full_path))
                    realpath = os.path.splitext(mod_file)[0]
                    fullpath_noext = os.path.splitext(full_path)[0]
                    if realpath.lower() != fullpath_noext.lower():
                        module_dir = os.path.dirname(realpath)
                        mod_name = os.path.splitext(os.path.basename(full_path))[0]
                        expected_dir = os.path.dirname(full_path)
                        msg = ("%r module incorrectly imported from %r. Expected %r. "
                               "Is this module globally installed?")
                        raise ImportError(msg % (mod_name, module_dir, expected_dir))
                    yield self.loadTestsFromModule(module)
            elif os.path.isdir(full_path):
                if not os.path.isfile(os.path.join(full_path, '__init__.py')):
                    continue

                load_tests = None
                tests = None
                if fnmatch(path, pattern):
                    # only check load_tests if the package directory itself matches the filter
                    name = self._get_name_from_path(full_path)
                    package = self._get_module_from_name(name)
                    load_tests = getattr(package, 'load_tests', None)
                    tests = self.loadTestsFromModule(package, use_load_tests=False)

                if load_tests is None:
                    if tests is not None:
                        # tests loaded from package file
                        yield tests
                    # recurse into the package
                    for test in self._find_tests(full_path, pattern):
                        yield test
                else:
                    try:
                        yield load_tests(self, tests, pattern)
                    except Exception, e:
                        yield _make_failed_load_tests(package.__name__, e,
                                                      self.suiteClass)
defaultTestLoader = TestLoader()
def _makeLoader(prefix, sortUsing, suiteClass=None):
    """Build a TestLoader configured with the given prefix, sort function,
    and (optionally) suite class."""
    configured = TestLoader()
    configured.testMethodPrefix = prefix
    configured.sortTestMethodsUsing = sortUsing
    if suiteClass:
        configured.suiteClass = suiteClass
    return configured
def getTestCaseNames(testCaseClass, prefix, sortUsing=cmp):
    # Convenience wrapper around TestLoader.getTestCaseNames.
    return _makeLoader(prefix, sortUsing).getTestCaseNames(testCaseClass)
def makeSuite(testCaseClass, prefix='test', sortUsing=cmp,
              suiteClass=suite.TestSuite):
    # Convenience wrapper: build a suite from a single TestCase class.
    return _makeLoader(prefix, sortUsing, suiteClass).loadTestsFromTestCase(testCaseClass)
def findTestCases(module, prefix='test', sortUsing=cmp,
                  suiteClass=suite.TestSuite):
    # Convenience wrapper: build a suite from every TestCase in a module.
    return _makeLoader(prefix, sortUsing, suiteClass).loadTestsFromModule(module)
| apache-2.0 |
JioCloud/python-neutronclient | neutronclient/tests/unit/test_cli20_credential.py | 10 | 3348 | # Copyright 2013 Cisco Systems Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Kyle Mestery, Cisco Systems, Inc.
#
import sys
from neutronclient.neutron.v2_0 import credential
from neutronclient.tests.unit import test_cli20
class CLITestV20Credential(test_cli20.CLITestV20Base):
    # CLI tests for the "credential" resource commands; each test delegates
    # to the shared _test_* helpers from CLITestV20Base.

    def test_create_credential(self):
        """Create credential: myid."""
        resource = 'credential'
        cmd = credential.CreateCredential(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        myid = 'myid'
        # NOTE: shadows the built-in 'type' within this test only.
        type = 'mytype'
        args = [name, type]
        position_names = ['credential_name', 'type']
        position_values = [name, type]
        self._test_create_resource(resource, cmd, name, myid, args,
                                   position_names, position_values)

    def test_list_credentials_detail(self):
        """List credentials: -D."""
        resources = 'credentials'
        cmd = credential.ListCredential(test_cli20.MyApp(sys.stdout), None)
        contents = [{'credential_name': 'myname', 'type': 'mytype'}]
        self._test_list_resources(resources, cmd, True,
                                  response_contents=contents)

    def test_list_credential_known_option_after_unknown(self):
        """List credential: -- --tags a b --request-format xml."""
        resources = 'credentials'
        cmd = credential.ListCredential(test_cli20.MyApp(sys.stdout), None)
        contents = [{'credential_name': 'myname', 'type': 'mytype'}]
        self._test_list_resources(resources, cmd, tags=['a', 'b'],
                                  response_contents=contents)

    def test_list_credential_fields(self):
        """List credential: --fields a --fields b -- --fields c d."""
        resources = 'credentials'
        cmd = credential.ListCredential(test_cli20.MyApp(sys.stdout), None)
        contents = [{'credential_name': 'myname', 'type': 'mytype'}]
        self._test_list_resources(resources, cmd,
                                  fields_1=['a', 'b'], fields_2=['c', 'd'],
                                  response_contents=contents)

    def test_show_credential(self):
        """Show credential: --fields id --fields name myid."""
        resource = 'credential'
        cmd = credential.ShowCredential(test_cli20.MyApp(sys.stdout), None)
        args = ['--fields', 'id', '--fields', 'name', self.test_id]
        self._test_show_resource(resource, cmd, self.test_id, args,
                                 ['id', 'name'])

    def test_delete_credential(self):
        """Delete credential: myid."""
        resource = 'credential'
        cmd = credential.DeleteCredential(test_cli20.MyApp(sys.stdout), None)
        myid = 'myid'
        args = [myid]
        self._test_delete_resource(resource, cmd, myid, args)
| apache-2.0 |
scifiswapnil/Project-LoCatr | lib/python2.7/site-packages/django/contrib/admindocs/views.py | 39 | 17893 | import inspect
import os
from importlib import import_module
from django.apps import apps
from django.conf import settings
from django.contrib import admin
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.admindocs import utils
from django.contrib.admindocs.utils import (
replace_named_groups, replace_unnamed_groups,
)
from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
from django.db import models
from django.http import Http404
from django.template.engine import Engine
from django.urls import get_mod_func, get_resolver, get_urlconf, reverse
from django.utils import six
from django.utils.decorators import method_decorator
from django.utils.inspect import (
func_accepts_kwargs, func_accepts_var_args, func_has_no_args,
get_func_full_args,
)
from django.utils.translation import ugettext as _
from django.views.generic import TemplateView
# Exclude methods starting with these strings from documentation
MODEL_METHODS_EXCLUDE = ('_', 'add_', 'delete', 'save', 'set_')
class BaseAdminDocsView(TemplateView):
    """
    Base view for admindocs views.
    """
    @method_decorator(staff_member_required)
    def dispatch(self, request, *args, **kwargs):
        # admindocs requires docutils for reST rendering; fall back to an
        # explanatory template when it is missing.
        if not utils.docutils_is_available:
            # Display an error message for people without docutils
            self.template_name = 'admin_doc/missing_docutils.html'
            return self.render_to_response(admin.site.each_context(request))
        return super(BaseAdminDocsView, self).dispatch(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        # Provide the admin index URL plus the standard admin site context
        # to every admindocs template.
        kwargs.update({'root_path': reverse('admin:index')})
        kwargs.update(admin.site.each_context(self.request))
        return super(BaseAdminDocsView, self).get_context_data(**kwargs)
class BookmarkletsView(BaseAdminDocsView):
    """Render the admindocs bookmarklets page."""
    template_name = 'admin_doc/bookmarklets.html'

    def get_context_data(self, **kwargs):
        context = super(BookmarkletsView, self).get_context_data(**kwargs)
        # Absolute URL of the admin index, e.g. "https://host/admin/".
        admin_url = "{0}://{1}{2}".format(
            self.request.scheme, self.request.get_host(), context['root_path'])
        context['admin_url'] = admin_url
        return context
class TemplateTagIndexView(BaseAdminDocsView):
    """List every registered template tag with its rendered docstring."""
    template_name = 'admin_doc/template_tag_index.html'

    def get_context_data(self, **kwargs):
        tags = []
        try:
            engine = Engine.get_default()
        except ImproperlyConfigured:
            # Non-trivial TEMPLATES settings aren't supported (#24125).
            pass
        else:
            app_libs = sorted(engine.template_libraries.items())
            # Builtins have no library module name, hence the '' key.
            builtin_libs = [('', lib) for lib in engine.template_builtins]
            for module_name, library in builtin_libs + app_libs:
                for tag_name, tag_func in library.tags.items():
                    # Split the docstring into summary / body / metadata and
                    # render each part from reST to HTML.
                    title, body, metadata = utils.parse_docstring(tag_func.__doc__)
                    if title:
                        title = utils.parse_rst(title, 'tag', _('tag:') + tag_name)
                    if body:
                        body = utils.parse_rst(body, 'tag', _('tag:') + tag_name)
                    for key in metadata:
                        metadata[key] = utils.parse_rst(metadata[key], 'tag', _('tag:') + tag_name)
                    tag_library = module_name.split('.')[-1]
                    tags.append({
                        'name': tag_name,
                        'title': title,
                        'body': body,
                        'meta': metadata,
                        'library': tag_library,
                    })
        kwargs.update({'tags': tags})
        return super(TemplateTagIndexView, self).get_context_data(**kwargs)
class TemplateFilterIndexView(BaseAdminDocsView):
    """List every registered template filter with its rendered docstring."""
    template_name = 'admin_doc/template_filter_index.html'

    def get_context_data(self, **kwargs):
        filters = []
        try:
            engine = Engine.get_default()
        except ImproperlyConfigured:
            # Non-trivial TEMPLATES settings aren't supported (#24125).
            pass
        else:
            app_libs = sorted(engine.template_libraries.items())
            # Builtins have no library module name, hence the '' key.
            builtin_libs = [('', lib) for lib in engine.template_builtins]
            for module_name, library in builtin_libs + app_libs:
                for filter_name, filter_func in library.filters.items():
                    # Split the docstring into summary / body / metadata and
                    # render each part from reST to HTML.
                    title, body, metadata = utils.parse_docstring(filter_func.__doc__)
                    if title:
                        title = utils.parse_rst(title, 'filter', _('filter:') + filter_name)
                    if body:
                        body = utils.parse_rst(body, 'filter', _('filter:') + filter_name)
                    for key in metadata:
                        metadata[key] = utils.parse_rst(metadata[key], 'filter', _('filter:') + filter_name)
                    tag_library = module_name.split('.')[-1]
                    filters.append({
                        'name': filter_name,
                        'title': title,
                        'body': body,
                        'meta': metadata,
                        'library': tag_library,
                    })
        kwargs.update({'filters': filters})
        return super(TemplateFilterIndexView, self).get_context_data(**kwargs)
class ViewIndexView(BaseAdminDocsView):
    """List every view reachable from ROOT_URLCONF with its URL."""
    template_name = 'admin_doc/view_index.html'

    @staticmethod
    def _get_full_name(func):
        # Dotted path of the view callable, e.g. "app.views.my_view".
        mod_name = func.__module__
        if six.PY3:
            return '%s.%s' % (mod_name, func.__qualname__)
        else:
            # PY2 does not support __qualname__
            func_name = getattr(func, '__name__', func.__class__.__name__)
            return '%s.%s' % (mod_name, func_name)

    def get_context_data(self, **kwargs):
        views = []
        urlconf = import_module(settings.ROOT_URLCONF)
        view_functions = extract_views_from_urlpatterns(urlconf.urlpatterns)
        for (func, regex, namespace, name) in view_functions:
            views.append({
                'full_name': self._get_full_name(func),
                'url': simplify_regex(regex),
                # "ns1:ns2:name" when namespaced; empty pieces are dropped.
                'url_name': ':'.join((namespace or []) + (name and [name] or [])),
                'namespace': ':'.join((namespace or [])),
                'name': name,
            })
        kwargs.update({'views': views})
        return super(ViewIndexView, self).get_context_data(**kwargs)
class ViewDetailView(BaseAdminDocsView):
    """Show the rendered docstring of a single view callable."""
    template_name = 'admin_doc/view_detail.html'

    @staticmethod
    def _get_view_func(view):
        # Resolve the dotted view path to the actual callable, or None when
        # it cannot be resolved (see the PY2 caveat below).
        urlconf = get_urlconf()
        if get_resolver(urlconf)._is_callback(view):
            mod, func = get_mod_func(view)
            try:
                # Separate the module and function, e.g.
                # 'mymodule.views.myview' -> 'mymodule.views', 'myview').
                return getattr(import_module(mod), func)
            except ImportError:
                # Import may fail because view contains a class name, e.g.
                # 'mymodule.views.ViewContainer.my_view', so mod takes the form
                # 'mymodule.views.ViewContainer'. Parse it again to separate
                # the module and class.
                mod, klass = get_mod_func(mod)
                return getattr(getattr(import_module(mod), klass), func)
            except AttributeError:
                # PY2 generates incorrect paths for views that are methods,
                # e.g. 'mymodule.views.ViewContainer.my_view' will be
                # listed as 'mymodule.views.my_view' because the class name
                # can't be detected. This causes an AttributeError when
                # trying to resolve the view.
                return None

    def get_context_data(self, **kwargs):
        view = self.kwargs['view']
        view_func = self._get_view_func(view)
        if view_func is None:
            raise Http404
        # Render the view's docstring (summary/body/metadata) from reST.
        title, body, metadata = utils.parse_docstring(view_func.__doc__)
        if title:
            title = utils.parse_rst(title, 'view', _('view:') + view)
        if body:
            body = utils.parse_rst(body, 'view', _('view:') + view)
        for key in metadata:
            metadata[key] = utils.parse_rst(metadata[key], 'model', _('view:') + view)
        kwargs.update({
            'name': view,
            'summary': title,
            'body': body,
            'meta': metadata,
        })
        return super(ViewDetailView, self).get_context_data(**kwargs)
class ModelIndexView(BaseAdminDocsView):
    """List the _meta options of every installed model."""
    template_name = 'admin_doc/model_index.html'

    def get_context_data(self, **kwargs):
        kwargs['models'] = [model._meta for model in apps.get_models()]
        return super(ModelIndexView, self).get_context_data(**kwargs)
class ModelDetailView(BaseAdminDocsView):
    """Document one model: its fields, accessor methods, and relations."""
    template_name = 'admin_doc/model_detail.html'

    def get_context_data(self, **kwargs):
        model_name = self.kwargs['model_name']
        # Get the model class.
        try:
            app_config = apps.get_app_config(self.kwargs['app_label'])
        except LookupError:
            raise Http404(_("App %(app_label)r not found") % self.kwargs)
        try:
            model = app_config.get_model(model_name)
        except LookupError:
            raise Http404(_("Model %(model_name)r not found in app %(app_label)r") % self.kwargs)

        opts = model._meta

        # Render the model docstring (summary/body) from reST.
        title, body, metadata = utils.parse_docstring(model.__doc__)
        if title:
            title = utils.parse_rst(title, 'model', _('model:') + model_name)
        if body:
            body = utils.parse_rst(body, 'model', _('model:') + model_name)

        # Gather fields/field descriptions.
        fields = []
        for field in opts.fields:
            # ForeignKey is a special case since the field will actually be a
            # descriptor that returns the other object
            if isinstance(field, models.ForeignKey):
                data_type = field.remote_field.model.__name__
                app_label = field.remote_field.model._meta.app_label
                verbose = utils.parse_rst(
                    (_("the related `%(app_label)s.%(data_type)s` object") % {
                        'app_label': app_label, 'data_type': data_type,
                    }),
                    'model',
                    _('model:') + data_type,
                )
            else:
                data_type = get_readable_field_data_type(field)
                verbose = field.verbose_name
            fields.append({
                'name': field.name,
                'data_type': data_type,
                'verbose': verbose or '',
                'help_text': field.help_text,
            })

        # Gather many-to-many fields.
        for field in opts.many_to_many:
            data_type = field.remote_field.model.__name__
            app_label = field.remote_field.model._meta.app_label
            verbose = _("related `%(app_label)s.%(object_name)s` objects") % {
                'app_label': app_label,
                'object_name': data_type,
            }
            # Expose the manager's .all and .count as pseudo-fields.
            fields.append({
                'name': "%s.all" % field.name,
                "data_type": 'List',
                'verbose': utils.parse_rst(_("all %s") % verbose, 'model', _('model:') + opts.model_name),
            })
            fields.append({
                'name': "%s.count" % field.name,
                'data_type': 'Integer',
                'verbose': utils.parse_rst(_("number of %s") % verbose, 'model', _('model:') + opts.model_name),
            })

        methods = []
        # Gather model methods.
        for func_name, func in model.__dict__.items():
            if inspect.isfunction(func):
                try:
                    # StopIteration is (ab)used as a multi-level "skip this
                    # method" jump when the name matches an excluded prefix.
                    for exclude in MODEL_METHODS_EXCLUDE:
                        if func_name.startswith(exclude):
                            raise StopIteration
                except StopIteration:
                    continue
                verbose = func.__doc__
                if verbose:
                    verbose = utils.parse_rst(utils.trim_docstring(verbose), 'model', _('model:') + opts.model_name)
                # If a method has no arguments, show it as a 'field', otherwise
                # as a 'method with arguments'.
                if func_has_no_args(func) and not func_accepts_kwargs(func) and not func_accepts_var_args(func):
                    fields.append({
                        'name': func_name,
                        'data_type': get_return_data_type(func_name),
                        'verbose': verbose or '',
                    })
                else:
                    arguments = get_func_full_args(func)
                    # Join arguments with ', ' and in case of default value,
                    # join it with '='. Use repr() so that strings will be
                    # correctly displayed.
                    print_arguments = ', '.join([
                        '='.join(list(arg_el[:1]) + [repr(el) for el in arg_el[1:]])
                        for arg_el in arguments
                    ])
                    methods.append({
                        'name': func_name,
                        'arguments': print_arguments,
                        'verbose': verbose or '',
                    })

        # Gather related objects
        for rel in opts.related_objects:
            verbose = _("related `%(app_label)s.%(object_name)s` objects") % {
                'app_label': rel.related_model._meta.app_label,
                'object_name': rel.related_model._meta.object_name,
            }
            accessor = rel.get_accessor_name()
            fields.append({
                'name': "%s.all" % accessor,
                'data_type': 'List',
                'verbose': utils.parse_rst(_("all %s") % verbose, 'model', _('model:') + opts.model_name),
            })
            fields.append({
                'name': "%s.count" % accessor,
                'data_type': 'Integer',
                'verbose': utils.parse_rst(_("number of %s") % verbose, 'model', _('model:') + opts.model_name),
            })
        kwargs.update({
            'name': '%s.%s' % (opts.app_label, opts.object_name),
            'summary': title,
            'description': body,
            'fields': fields,
            'methods': methods,
        })
        return super(ModelDetailView, self).get_context_data(**kwargs)
class TemplateDetailView(BaseAdminDocsView):
    """Show where a template name resolves among the engine's directories."""
    template_name = 'admin_doc/template_detail.html'

    def get_context_data(self, **kwargs):
        template = self.kwargs['template']
        templates = []
        try:
            default_engine = Engine.get_default()
        except ImproperlyConfigured:
            # Non-trivial TEMPLATES settings aren't supported (#24125).
            pass
        else:
            # This doesn't account for template loaders (#24128).
            for index, directory in enumerate(default_engine.dirs):
                template_file = os.path.join(directory, template)
                if os.path.exists(template_file):
                    with open(template_file) as f:
                        template_contents = f.read()
                else:
                    template_contents = ''
                templates.append({
                    'file': template_file,
                    'exists': os.path.exists(template_file),
                    'contents': template_contents,
                    'order': index,
                })
        kwargs.update({
            'name': template,
            'templates': templates,
        })
        return super(TemplateDetailView, self).get_context_data(**kwargs)
####################
# Helper functions #
####################
def get_return_data_type(func_name):
    """Guess a display data type from an accessor-style function name.

    ``get_*_list`` -> 'List', ``get_*_count`` -> 'Integer', anything
    else -> '' (unknown).
    """
    if not func_name.startswith('get_'):
        return ''
    for suffix, data_type in (('_list', 'List'), ('_count', 'Integer')):
        if func_name.endswith(suffix):
            return data_type
    return ''
def get_readable_field_data_type(field):
    """Return the human-readable description of a field's type, if it exists.

    A field's ``description`` may contain %-style format placeholders; these
    are interpolated against the field instance's ``__dict__`` before being
    returned.
    """
    description_template = field.description
    return description_template % field.__dict__
def extract_views_from_urlpatterns(urlpatterns, base='', namespace=None):
    """
    Return a list of views from a list of urlpatterns.

    Each object in the returned list is a four-tuple:
    (view_func, regex, namespace, name).

    ``base`` accumulates the regex prefix of enclosing includes and
    ``namespace`` accumulates their namespaces (as a list) during recursion.
    """
    views = []
    for p in urlpatterns:
        if hasattr(p, 'url_patterns'):
            # An included URLconf: recurse into it, extending the base
            # regex and the namespace chain.
            try:
                patterns = p.url_patterns
            except ImportError:
                # The included module failed to import; skip it.
                continue
            views.extend(extract_views_from_urlpatterns(
                patterns,
                base + p.regex.pattern,
                (namespace or []) + (p.namespace and [p.namespace] or [])
            ))
        elif hasattr(p, 'callback'):
            try:
                views.append((p.callback, base + p.regex.pattern,
                              namespace, p.name))
            except ViewDoesNotExist:
                # The view the pattern points at no longer exists; skip it.
                continue
        else:
            raise TypeError(_("%s does not appear to be a urlpattern object") % p)
    return views
def simplify_regex(pattern):
    r"""
    Clean up urlpattern regexes into something more readable by humans. For
    example, turn "^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$"
    into "/<sport_slug>/athletes/<athlete_slug>/".
    """
    # Replace named and unnamed capture groups with readable placeholders.
    for replacer in (replace_named_groups, replace_unnamed_groups):
        pattern = replacer(pattern)
    # Strip any remaining regex metacharacters.
    for metachar in ('^', '$', '?'):
        pattern = pattern.replace(metachar, '')
    if not pattern.startswith('/'):
        pattern = '/' + pattern
    return pattern
| mit |
ging/horizon | openstack_dashboard/dashboards/admin/volumes/volumes/tables.py | 7 | 2138 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from horizon import tables
from openstack_dashboard.dashboards.project.volumes \
.volumes import tables as volumes_tables
class VolumesFilterAction(tables.FilterAction):
    """Client-side table filter matching on the volume name."""

    def filter(self, table, volumes, filter_string):
        """Naive case-insensitive search."""
        query = filter_string.lower()
        matches = []
        for volume in volumes:
            if query in volume.name.lower():
                matches.append(volume)
        return matches
class UpdateVolumeStatusAction(tables.LinkAction):
    """Row action opening the admin "Update Volume Status" modal dialog."""
    name = "update_status"
    verbose_name = _("Update Volume Status")
    url = "horizon:admin:volumes:volumes:update_status"
    classes = ("ajax-modal",)
    icon = "pencil"
    # Visible only to users allowed the Cinder admin reset-status action.
    policy_rules = (("volume",
                     "volume_extension:volume_admin_actions:reset_status"),)
class VolumesTable(volumes_tables.VolumesTable):
    """Admin variant of the project volumes table.

    Extends the project table with the owning project and backing host
    columns and links volume names to the admin detail view.
    """
    name = tables.Column("name",
                         verbose_name=_("Name"),
                         link="horizon:admin:volumes:volumes:detail")
    # "os-vol-host-attr:host" is the Cinder admin-only host attribute.
    host = tables.Column("os-vol-host-attr:host", verbose_name=_("Host"))
    tenant = tables.Column("tenant_name", verbose_name=_("Project"))

    class Meta:
        name = "volumes"
        verbose_name = _("Volumes")
        status_columns = ["status"]
        row_class = volumes_tables.UpdateRow
        table_actions = (volumes_tables.DeleteVolume, VolumesFilterAction)
        row_actions = (volumes_tables.DeleteVolume, UpdateVolumeStatusAction)
        columns = ('tenant', 'host', 'name', 'size', 'status', 'volume_type',
                   'attachments', 'bootable', 'encryption',)
| apache-2.0 |
Aeva/voxelpress | old_stuff/old_python_stuff/hardware/serial_device.py | 1 | 1834 |
from serial import Serial
from device_kind import DeviceKind
import drivers
class SerialDevice(DeviceKind):
"""Serial device backend."""
baud_rates = (
2400,
9600,
19200,
38400,
57600,
115200,
250000,
)
def __init__(self, *args, **kwords):
DeviceKind.__init__(self, *args, **kwords)
self.__path = None
self.__baud = None
self.__usb = None
self.__com = None
self.state = "busy"
def __detect_driver(self, baud):
"""Used by on_connect to attempt to discover the baud rate of
the port and applicable firmware."""
def connect():
return Serial(self.__path, baud)
return drivers.select_driver("serial", connect)
def warm_up(self, config):
"""Notifies the driver to run any setup code."""
if not self.__com:
self.__com = Serial(self.__path, self.__baud)
self.state = "busy"
driver = drivers.DeviceDriver(self.driver, self.__com)
driver.warm_up(config)
self.state = "ready"
def run_job(self, config, job_file):
"""Runs a print job."""
self.state = "busy"
driver = drivers.DeviceDriver(self.driver, self.__com)
driver.run_job(config, job_file)
self.state = "jammed"
def on_connect(self, tty_path):
self.__path = tty_path
for baud in self.baud_rates[::-1]:
try:
print "trying baud", baud
self.driver = self.__detect_driver(baud)
if self.driver:
self.__baud = baud
self.state = "ready"
break
except IOError:
continue
if self.driver:
return True
| gpl-3.0 |
dakoner/keras-molecules | sample_latent.py | 4 | 4038 | from __future__ import print_function
import argparse
import os, sys
import h5py
import numpy as np
from molecules.model import MoleculeVAE
from molecules.utils import one_hot_array, one_hot_index, from_one_hot_array, \
decode_smiles_from_indexes, load_dataset
from sklearn.manifold import TSNE
from sklearn.decomposition import PCA
from pylab import figure, axes, scatter, title, show
from rdkit import Chem
from rdkit.Chem import Draw
from keras.models import Sequential, Model, load_model
# Default hyper-parameters for sampling and visualizing the latent space.
# Each can be overridden from the command line (see get_arguments()).
LATENT_DIM = 292
PCA_COMPONENTS = 50
TSNE_LEARNING_RATE = 750.0
TSNE_ITERATIONS = 1000
TSNE_COMPONENTS = 2
TSNE_PERPLEXITY = 30.0
def get_arguments():
    """Build and parse the command line for the latent-space sampler."""
    parser = argparse.ArgumentParser(description='Molecular autoencoder network')
    add = parser.add_argument
    # Required inputs.
    add('data', type=str, help='HDF5 file to read input data from.')
    add('model', type=str, help='Trained Keras model to use.')
    # Output and model options.
    add('--save_h5', type=str, help='Name of a file to write HDF5 output to.')
    add('--latent_dim', type=int, metavar='N', default=LATENT_DIM,
        help='Dimensionality of the latent representation.')
    # t-SNE options.
    add('--tsne_lr', metavar='LR', type=float, default=TSNE_LEARNING_RATE,
        help='Learning to use for t-SNE.')
    add('--tsne_components', metavar='N', type=int, default=TSNE_COMPONENTS,
        help='Number of components to use for t-SNE.')
    add('--tsne_perplexity', metavar='P', type=float, default=TSNE_PERPLEXITY)
    add('--tsne_iterations', metavar='N', type=int, default=TSNE_ITERATIONS)
    # Visualization / PCA options.
    add('--visualize', dest='visualize', action='store_true',
        help='Fit manifold and render a visualization. If this flag is not used, the sampled data will simply be returned with no further processing.')
    add('--skip-pca', dest='use_pca', action='store_false',
        help='Skip PCA preprocessing of data to feed into t-SNE.')
    add('--pca_components', metavar='N', type=int, default=PCA_COMPONENTS,
        help='Number of components to use for PCA.')
    parser.set_defaults(use_pca=True)
    parser.set_defaults(visualize=False)
    return parser.parse_args()
def visualize_latent_rep(args, model, x_latent):
    """Project latent vectors to 2D (optional PCA, then t-SNE) and plot them."""
    print("pca_on=%r pca_comp=%d tsne_comp=%d tsne_perplexity=%f tsne_lr=%f" % (
        args.use_pca,
        args.pca_components,
        args.tsne_components,
        args.tsne_perplexity,
        args.tsne_lr
    ))
    if args.use_pca:
        # Reduce dimensionality first and show the leading two components.
        x_latent = PCA(n_components=args.pca_components).fit_transform(x_latent)
        figure(figsize=(6, 6))
        scatter(x_latent[:, 0], x_latent[:, 1], marker='.')
        show()
    projector = TSNE(n_components=args.tsne_components,
                     perplexity=args.tsne_perplexity,
                     learning_rate=args.tsne_lr,
                     n_iter=args.tsne_iterations,
                     verbose=4)
    projection = projector.fit_transform(x_latent)
    # Free the (potentially large) input before plotting.
    del x_latent
    figure(figsize=(6, 6))
    scatter(projection[:, 0], projection[:, 1], marker='.')
    show()
def main():
    """Encode the dataset into latent vectors, then dump or visualize them."""
    args = get_arguments()
    model = MoleculeVAE()
    data, data_test, charset = load_dataset(args.data)

    if not os.path.isfile(args.model):
        raise ValueError("Model file %s doesn't exist" % args.model)
    model.load(charset, args.model, latent_rep_size=args.latent_dim)

    x_latent = model.encoder.predict(data)
    if args.visualize:
        visualize_latent_rep(args, model, x_latent)
    elif args.save_h5:
        # Persist the latent vectors (and charset) to an HDF5 file.
        h5f = h5py.File(args.save_h5, 'w')
        h5f.create_dataset('charset', data=charset)
        h5f.create_dataset('latent_vectors', data=x_latent)
        h5f.close()
    else:
        # No visualization and no output file: dump tab-separated to stdout.
        np.savetxt(sys.stdout, x_latent, delimiter='\t')
# Script entry point.
if __name__ == '__main__':
    main()
| mit |
arcturusannamalai/open-tamil | solthiruthi/morphology.py | 1 | 6362 | ## -*- coding: utf-8 -*-
## (C) 2015 Muthiah Annamalai,
## This module is part of solthiruthi project under open-tamil umbrella.
## This code maybe used/distributed under MIT LICENSE.
from __future__ import print_function
import abc
import codecs
import copy
from tamil import utf8
# Suffix removal algorithm
# Suffix removal algorithm
class RemoveSuffix(object):
    """Abstract base for suffix strippers.

    Subclasses implement setSuffixes() to populate possible_suffixes (the
    candidate suffixes) and replace_suffixes (optional replacement text per
    suffix); removeSuffix() then strips the longest matching suffix.
    """
    __metaclass__ = abc.ABCMeta  # Python 2 style abstract base class

    def __init__(self):
        self.possible_suffixes = None  # candidate suffixes; set lazily via setSuffixes()
        self.replace_suffixes = {}  # valid dictionary: suffix -> replacement text
        self.reversed_suffixes = []  # suffixes with their letters reversed

    @abc.abstractmethod
    def setSuffixes(self):
        pass

    @abc.abstractmethod
    def apply(self, word):
        pass

    def prepareSuffixes(self):
        """Cache each suffix in reversed letter order for prefix matching."""
        assert self.possible_suffixes
        # reverse the words in each letter.
        for word in self.possible_suffixes:
            self.reversed_suffixes.append(utf8.reverse_word(word))
        return

    def removeSuffix(self, word):
        """Strip the longest configured suffix from ``word``.

        Returns (word, removed) where ``removed`` is True if a suffix was
        stripped; any replacement from replace_suffixes is appended.
        """
        removed = False
        if not self.possible_suffixes:
            # init once
            self.setSuffixes()
            self.prepareSuffixes()
        word_lett = utf8.get_letters(word)
        rword_lett = copy.copy(word_lett)
        rword_lett.reverse()
        #print('rev word ->',rword_lett)
        rword = u"".join(rword_lett)
        longest_match = ""
        # Reversing the word turns the suffix search into a startswith()
        # (prefix) search against the reversed suffixes.
        for itr in range(len(self.reversed_suffixes)):
            suffix = self.reversed_suffixes[itr]
            #print(itr,utf8.get_letters(suffix))
            if rword.startswith(suffix):
                if len(longest_match) <= len(suffix):
                    longest_match = suffix
                #print('L-match-->',utf8.get_letters(longest_match))
                continue
        if len(longest_match) > 0:
            removed = True
            sfx = []
            # Pop one (Tamil) letter per matched letter off the word's end.
            for itr in range(len(utf8.get_letters(longest_match))):
                sfx.append(word_lett.pop())
            word = u"".join(word_lett)
            sfx.reverse()
            sfx = u"".join(sfx)
            # rule to replace suffix
            alt_suffix = self.replace_suffixes.get(sfx, None)
            if alt_suffix:
                word = word + alt_suffix
        return word, removed
# remove prefix using the suffix removal algorithm via reversal of word
class RemovePrefix(RemoveSuffix):
    """Strip common prefixes by reversing the word and reusing the
    suffix-removal machinery of RemoveSuffix."""

    def __init__(self):
        super(RemovePrefix, self).__init__()

    def setSuffixes(self):
        # Keys are the (unreversed) prefixes to remove; possible_suffixes
        # holds their reversed forms because removeSuffix() operates on the
        # reversed word.  All replacements are empty strings.
        self.replace_suffixes = {u"மா":u"",u"பேர்":u"",u"அதி":u"",u"பெரிய":u"",u"பெரு":u"",u"சின்ன":u"",\
            u"ஆதி":u"",u"சிறு":u"",u"அக்":u"",u"இக்":u"",u"எக்":u""}
        self.possible_suffixes = [utf8.reverse_word(word) for word in self.replace_suffixes.keys()]

    def apply(self, word):
        return self.removePrefix(word)

    def removePrefix(self, word):
        """Return [word_without_prefix, removed_flag]."""
        word_lett = utf8.get_letters(word)
        word_lett.reverse()
        a, b = self.removeSuffix(u"".join(word_lett))
        return [utf8.reverse_word(a), b]
class RemoveCaseSuffix(RemoveSuffix):
    """Strip Tamil case (vetrumai) markers and a few related endings."""

    def __init__(self):
        super(RemoveCaseSuffix, self).__init__()

    def apply(self, word):
        return self.removeSuffix(word)

    def setSuffixes(self):
        # The Tamil case endings, named locally for readability.
        accusative = u"ை"
        instrumental = u"ஆல்"
        associative = u"ஓடு"
        dative = u"க்கு"
        genitive = u"இன்"
        possessive = u"உடைய"
        locative = u"இடம்"
        ablative = u"இடமிருந்து"
        self.possible_suffixes = [u"உக்கு", u"க்கு", u"ளை", u"கள்",
                                  accusative, instrumental, associative,
                                  dative, genitive, possessive, locative, ablative]
        # Every case marker is removed outright (replaced with nothing).
        self.replace_suffixes = dict()
        for w in self.possible_suffixes:
            self.replace_suffixes[w] = u""
        return
class RemoveHyphenatesNumberDate(RemoveCaseSuffix):
    """ Done correctly (மேல்) 65536-மேல்,
    ivan paritchayil இரண்டாவது, 2-வது """
    # NOTE(review): placeholder subclass -- apparently intended to handle
    # hyphenated number/date suffixes (e.g. "2-வது") but it currently
    # inherits RemoveCaseSuffix behavior unchanged.
    pass
class RemoveVerbSuffixTense(RemoveCaseSuffix):
    """Strip personal endings from conjugated Tamil verbs."""

    def __init__(self):
        # Fix: the original called super(RemoveCaseSuffix, self), which
        # starts the MRO lookup *past* RemoveCaseSuffix; name this class.
        super(RemoveVerbSuffixTense, self).__init__()
        self.tenses = {"present": u"கிற்",
                       "past": u"த",
                       "future": u"வ"}

    def setSuffixes(self):
        """Register the personal verb endings as removable suffixes."""
        tense_endings = [u"ஏன்", u"ஆய்", u"ஆர்", u"ஆன்", u"ஆள்", u"அது", u"ஓம்", u"அன"]
        self.possible_suffixes = tense_endings
        # Fix: replace_suffixes must be a mapping -- RemoveSuffix.removeSuffix()
        # calls .get() on it.  The original assigned the list itself, which
        # raised AttributeError as soon as a suffix matched.  Each ending
        # maps to the empty string (removed outright).
        self.replace_suffixes = dict((ending, u"") for ending in tense_endings)
class RemovePluralSuffix(RemoveSuffix):
    """Reduce Tamil plural forms back to their singular stem."""

    def __init__(self):
        super(RemovePluralSuffix, self).__init__()

    def apply(self, word):
        return self.removeSuffix(word)

    def setSuffixes(self):
        # Plural marker -> singular stem ending (e.g. ...ங்கள் -> ...ம்).
        self.replace_suffixes = {u"ற்கள்":u"ல்",u"கள்":u"",u"ல்":u"", u"ட்கள்": u"ள்", u"ங்கள்":u"ம்"}
        self.possible_suffixes = list(self.replace_suffixes.keys())
class RemoveNegationSuffix(RemoveCaseSuffix):
    """Strip negation endings (e.g. -ல்லை, -ாமல்) from verbs."""

    def __init__(self):
        super(RemoveNegationSuffix, self).__init__()

    def setSuffixes(self):
        # Negation endings are removed outright (replaced with nothing).
        self.replace_suffixes = {u"கே":u"",u"ல்லை":u"",u"ாதே":u"", u"ாமல்":u""}
        self.possible_suffixes = list(self.replace_suffixes.keys())
class CaseFilter(object):
    """Chain several removal filters and apply them to a word in order.

    Each filter's apply() is expected to return an indexable pair whose
    first element is the transformed word.
    """

    def __init__(self, *filter_obj_list):
        object.__init__(self)
        self.filters = filter_obj_list

    def apply(self, word_in):
        current = word_in
        for filter_obj in self.filters:
            # Feed the previous filter's word into the next one.
            current = filter_obj.apply(current)[0]
        return current
def xkcd():
    """Self-test: plural suffix removal on a few known word/stem pairs."""
    obj = RemovePluralSuffix()
    objf = CaseFilter(obj)
    expected = [u"பதிவி",u"கட்டளை",u"அவர்",u"பள்ளி"]
    words_list = [u"பதிவில்",u"கட்டளைகள்",u"அவர்கள்",u"பள்ளிகள்"]
    for w, x in zip(words_list, expected):
        rval = obj.removeSuffix(w)
        trunc_word = objf.apply(w)
        # The filter chain must agree with calling removeSuffix() directly.
        assert (trunc_word == rval[0])
        assert (rval[1])
        print(utf8.get_letters(w), '->', rval[1])
        assert (rval[0] == x)
    return
# Run the self-test when executed as a script.
if __name__ == "__main__":
    xkcd()
| mit |
rtucker-mozilla/mozpackager | vendor-local/lib/python/kombu/tests/transport/test_mongodb.py | 7 | 3328 | from __future__ import absolute_import
from nose import SkipTest
from kombu import Connection
from kombu.tests.utils import TestCase, skip_if_not_module
class MockConnection(dict):
    """Test double: a dict that records attribute writes as items."""

    def __setattr__(self, name, val):
        # Divert attribute assignment into the mapping instead of __dict__.
        self[name] = val
class test_mongodb(TestCase):
    """URL-parsing checks for kombu's MongoDB transport.

    Functional by design: running the body would connect to a real mongod,
    so SkipTest is raised immediately after the imports.
    """

    @skip_if_not_module('pymongo')
    def test_url_parser(self):
        from kombu.transport import mongodb
        from pymongo.errors import ConfigurationError
        raise SkipTest(
            'Test is functional: it actually connects to mongod')

        class Transport(mongodb.Transport):
            # MockConnection records connection attributes instead of
            # opening a real broker connection.
            Connection = MockConnection

        # No host/db: fall back to localhost and the default database name.
        url = 'mongodb://'
        c = Connection(url, transport=Transport).connect()
        client = c.channels[0].client
        self.assertEquals(client.name, 'kombu_default')
        self.assertEquals(client.connection.host, '127.0.0.1')

        url = 'mongodb://localhost'
        c = Connection(url, transport=Transport).connect()
        client = c.channels[0].client
        self.assertEquals(client.name, 'kombu_default')

        # Explicit database name in the path.
        url = 'mongodb://localhost/dbname'
        c = Connection(url, transport=Transport).connect()
        client = c.channels[0].client
        self.assertEquals(client.name, 'dbname')

        url = 'mongodb://localhost,localhost:29017/dbname'
        c = Connection(url, transport=Transport).connect()
        client = c.channels[0].client
        nodes = client.connection.nodes
        # If there's just 1 node it is because we're connecting to a single
        # server instead of a repl / mongoss.
        if len(nodes) == 2:
            self.assertTrue(('localhost', 29017) in nodes)
            self.assertEquals(client.name, 'dbname')

        # Passing options breaks kombu's _init_params method
        # url = 'mongodb://localhost,localhost2:29017/dbname?safe=true'
        # c = Connection(url, transport=Transport).connect()
        # client = c.channels[0].client

        url = 'mongodb://localhost:27017,localhost2:29017/dbname'
        c = Connection(url, transport=Transport).connect()
        client = c.channels[0].client

        # Login to admin db since there's no db specified
        url = "mongodb://adminusername:adminpassword@localhost"
        c = Connection(url, transport=Transport).connect()
        client = c.channels[0].client
        self.assertEquals(client.name, "kombu_default")

        # Lets make sure that using admin db doesn't break anything
        # when no user is specified
        url = "mongodb://localhost"
        c = Connection(url, transport=Transport).connect()
        client = c.channels[0].client

        # Assuming there's user 'username' with password 'password'
        # configured in mongodb
        url = "mongodb://username:password@localhost/dbname"
        c = Connection(url, transport=Transport).connect()
        client = c.channels[0].client

        # Assuming there's no user 'nousername' with password 'nopassword'
        # configured in mongodb
        url = "mongodb://nousername:nopassword@localhost/dbname"
        c = Connection(url, transport=Transport).connect()

        # Needed, otherwise the error would be rose before
        # the assertRaises is called
        def get_client():
            c.channels[0].client
        self.assertRaises(ConfigurationError, get_client)
| bsd-3-clause |
davidharrigan/django | django/db/backends/oracle/features.py | 356 | 2345 | from django.db.backends.base.features import BaseDatabaseFeatures
from django.db.utils import InterfaceError
try:
import pytz
except ImportError:
pytz = None
class DatabaseFeatures(BaseDatabaseFeatures):
    """Feature flags describing what the Oracle backend supports."""
    empty_fetchmany_value = ()
    # Oracle treats '' and NULL as the same value in VARCHAR columns.
    interprets_empty_strings_as_nulls = True
    uses_savepoints = True
    has_select_for_update = True
    has_select_for_update_nowait = True
    can_return_id_from_insert = True
    allow_sliced_subqueries = False
    supports_subqueries_in_group_by = False
    supports_transactions = True
    supports_timezones = False
    # Timezone support requires pytz to be installed.
    has_zoneinfo_database = pytz is not None
    supports_bitwise_or = False
    has_native_duration_field = True
    can_defer_constraint_checks = True
    supports_partially_nullable_unique_constraints = False
    # Oracle truncates identifiers longer than 30 characters.
    truncates_names = True
    has_bulk_insert = True
    supports_tablespaces = True
    supports_sequence_reset = False
    can_introspect_default = False  # Pending implementation by an interested person.
    can_introspect_max_length = False
    can_introspect_time_field = False
    atomic_transactions = False
    supports_combined_alters = False
    nulls_order_largest = True
    requires_literal_defaults = True
    closed_cursor_error_class = InterfaceError
    # Oracle has no bare SELECT; constant selects go through the DUAL table.
    bare_select_suffix = " FROM DUAL"
    uppercases_column_names = True
    # select for update with limit can be achieved on Oracle, but not with the current backend.
    supports_select_for_update_with_limit = False

    def introspected_boolean_field_type(self, field=None, created_separately=False):
        """
        Some versions of Oracle -- we've seen this on 11.2.0.1 and suspect
        it goes back -- have a weird bug where, when an integer column is
        added to an existing table with a default, its precision is later
        reported on introspection as 0, regardless of the real precision.
        For Django introspection, this means that such columns are reported
        as IntegerField even if they are really BigIntegerField or BooleanField.

        The bug is solved in Oracle 11.2.0.2 and up.
        """
        # NOTE(review): the version comparison is lexicographic on strings;
        # it works for these particular values but would misorder e.g.
        # '11.10' vs '11.2' -- confirm acceptable.
        if self.connection.oracle_full_version < '11.2.0.2' and field and field.has_default() and created_separately:
            return 'IntegerField'
        return super(DatabaseFeatures, self).introspected_boolean_field_type(field, created_separately)
| bsd-3-clause |
richard-willowit/Pentaho-reports-for-OpenERP | odoo_addon/pentaho_report_selection_sets/wizard/store_selections.py | 2 | 9016 | # -*- encoding: utf-8 -*-
import json
from openerp import models, fields, api, _
from openerp.exceptions import UserError, ValidationError
from openerp.addons.pentaho_reports.core import VALID_OUTPUT_TYPES
from openerp.addons.pentaho_reports.java_oe import OPENERP_DATA_TYPES, parameter_resolve_column_name
from ..report_formulae import *
class store_selections_wizard(models.TransientModel):
    """Wizard that saves a report prompt-wizard's selections as a named,
    reusable selection set (optionally shared with users/groups)."""
    _name = "ir.actions.store.selections.wiz"
    _description = "Store Report Selections Wizard"

    existing_selectionset_id = fields.Many2one('ir.actions.report.set.header', string='Selection Set', ondelete='set null')
    name = fields.Char(string='Selection Set Description', size=64, required=True)
    report_action_id = fields.Many2one('ir.actions.report.xml', string='Report Name', readonly=True)
    output_type = fields.Selection(VALID_OUTPUT_TYPES, string='Report format', help='Choose the format for the output')
    parameters_dictionary = fields.Text(string='parameter dictionary')
    detail_ids = fields.One2many('ir.actions.store.selections.detail.wiz', 'header_id', string='Selection Details')
    def_user_ids = fields.Many2many('res.users', 'ir_actions_store_selections_def_user_rel', 'header_id', 'user_id', string='Users (Default)')
    def_group_ids = fields.Many2many('res.groups', 'ir_actions_store_selections_def_group_rel', 'header_id', 'group_id', string='Groups (Default)')
    passing_wizard_id = fields.Many2one('ir.actions.report.promptwizard', string='Screen wizard - kept for "Cancel" button')

    @api.model
    def default_get(self, fields):
        """Seed this wizard from the prompt wizard passed via active_id:
        copy its report/output settings and one detail line per parameter."""
        if not self.env.context.get('active_id'):
            raise UserError(_('No active id passed.'))
        screen_wizard = self.env['ir.actions.report.promptwizard'].browse(self.env.context['active_id'])
        parameters_dictionary = json.loads(screen_wizard.parameters_dictionary)
        res = super(store_selections_wizard, self).default_get(fields)
        res.update({'existing_selectionset_id': screen_wizard.selectionset_id.id,
                    'name': screen_wizard.selectionset_id.name,
                    'report_action_id': screen_wizard.report_action_id.id,
                    'output_type': screen_wizard.output_type,
                    'parameters_dictionary': screen_wizard.parameters_dictionary,
                    'detail_ids': [],
                    'def_user_ids': [],
                    'def_group_ids': [],
                    'passing_wizard_id': screen_wizard.id,
                    })
        # One (0, 0, vals) detail line per report parameter, capturing both
        # the displayed value and any stored formula.
        for index in range(0, len(parameters_dictionary)):
            res['detail_ids'].append((0, 0, {'variable': parameters_dictionary[index]['variable'],
                                             'label': parameters_dictionary[index]['label'],
                                             'counter': index,
                                             'type': parameters_dictionary[index]['type'],
                                             'x2m': parameter_can_2m(parameters_dictionary, index),
                                             'display_value': self.env['ir.actions.report.set.detail'].wizard_value_to_display(getattr(screen_wizard, parameter_resolve_column_name(parameters_dictionary, index)), parameters_dictionary, index),
                                             'calc_formula': getattr(screen_wizard, parameter_resolve_formula_column_name(parameters_dictionary, index)),
                                             }))
        if screen_wizard.selectionset_id:
            res['def_user_ids'] = [(6, 0, [u.id for u in screen_wizard.selectionset_id.def_user_ids])]
            res['def_group_ids'] = [(6, 0, [g.id for g in screen_wizard.selectionset_id.def_group_ids])]
        return res

    @api.multi
    def button_store_new(self):
        """Store as a brand-new selection set (never overwrite)."""
        return self.button_store(replace=False)

    @api.multi
    def button_store_replace(self):
        """Store, overwriting the existing selection set if one is linked."""
        return self.button_store(replace=True)

    def button_store(self, replace=True):
        """Create or update the selection-set header and its detail lines,
        then reopen the prompt wizard pre-populated with the stored set."""
        header_obj = self.env['ir.actions.report.set.header']
        for wizard in self:
            clash_reports = header_obj.search([('name', '=', wizard.name)])
            if clash_reports and (not replace or len(clash_reports) > 1 or any(x.id != wizard.existing_selectionset_id.id for x in clash_reports)):
                # We enforce this so that users can uniquely identify a selection set.
                raise UserError(_('Selection Sets must have unique names across all reports.'))
            # (5,) clears existing detail lines before re-creating them below.
            vals = {'name': wizard.name,
                    'report_action_id': wizard.report_action_id.id,
                    'output_type': wizard.output_type,
                    'parameters_dictionary': wizard.parameters_dictionary,
                    'detail_ids': [(5,)],
                    'def_user_ids': [(6, 0, [u.id for u in wizard.def_user_ids])],
                    'def_group_ids': [(6, 0, [g.id for g in wizard.def_group_ids])],
                    }
            if replace and wizard.existing_selectionset_id:
                wizard.existing_selectionset_id.write(vals)
                header = wizard.existing_selectionset_id
            else:
                header = header_obj.create(vals)
            for detail in wizard.detail_ids:
                self.env['ir.actions.report.set.detail'].create({'header_id': header.id,
                                                                 'variable': detail.variable,
                                                                 'label': detail.label,
                                                                 'counter': detail.counter,
                                                                 'type': detail.type,
                                                                 'x2m': detail.x2m,
                                                                 'display_value': detail.display_value,
                                                                 'calc_formula': detail.calc_formula,
                                                                 })
        new_context = self.env.context.copy()
        new_context['populate_selectionset_id'] = header.id
        new_context['active_ids'] = []  # DEBUG - client will pass the active_ids on to the report call - This is behaviour we do not want, as the active_ids are from this wizard model.
        return {
            'view_mode': 'form',
            'res_model': 'ir.actions.report.promptwizard',
            'type': 'ir.actions.act_window',
            'target': 'new',
            'context': new_context,
        }

    @api.multi
    def button_delete(self):
        """Delete the linked selection set, then behave like Cancel."""
        if self.existing_selectionset_id:
            self.existing_selectionset_id.unlink()
        return self.button_cancel()

    @api.multi
    def button_cancel(self):
        """Return to the originating prompt wizard (or just close)."""
        if self.passing_wizard_id:
            new_context = self.env.context.copy()
            new_context['active_ids'] = []  # DEBUG - client will pass the active_ids on to the report call - This is behaviour we do not want, as the active_ids are from this wizard model.
            return {
                'view_mode': 'form',
                'res_model': 'ir.actions.report.promptwizard',
                'type': 'ir.actions.act_window',
                'target': 'new',
                'res_id': self.passing_wizard_id.id,
                'context': new_context,
            }
        return {'type': 'ir.actions.act_window_close'}
class store_selections_dets_wizard(models.TransientModel):
    """One selection line (a single report parameter) of the store wizard."""
    _name = 'ir.actions.store.selections.detail.wiz'
    _description = "Store Report Selections Wizard"

    header_id = fields.Many2one('ir.actions.store.selections.wiz', string='Selections Set')
    variable = fields.Char(string='Variable Name', size=64)
    label = fields.Char(string='Label', size=64)
    counter = fields.Integer(string='Parameter Number')
    type = fields.Selection(OPENERP_DATA_TYPES, string='Data Type')
    x2m = fields.Boolean(string='Data List Type')
    display_value = fields.Text(string='Value')
    calc_formula = fields.Char(string='Formula')

    _order = 'counter'

    @api.onchange('calc_formula')
    def _onchange_calc_formula(self):
        """Validate an entered formula against the report's parameter set,
        raising a UserError with the parser's message when it is invalid."""
        if self.calc_formula:
            parameters = json.loads(self.header_id.parameters_dictionary)
            known_variables = {}
            for index in range(0, len(parameters)):
                known_variables[parameters[index]['variable']] = {'type': parameters[index]['type'],
                                                                  'x2m': parameter_can_2m(parameters, index),
                                                                  'calculated': False,
                                                                  }
            parsed_formula = self.env['ir.actions.report.set.formula'].validate_formula(self.calc_formula, self.type, self.x2m, known_variables)
            if parsed_formula.get('error'):
                raise UserError(parsed_formula['error'])
| gpl-2.0 |
seibert-media/Highton | setup.py | 1 | 1251 | from setuptools import setup
# Distribution metadata for the Highton package (a Highrise CRM API wrapper).
setup(
    name='Highton',
    version='2.3.0',
    license='Apache License 2.0',
    description='A Python library for Highrise',
    long_description='A beautiful Python - Highrise - Wrapper.',
    url='https://github.com/seibert-media/Highton',
    author='Julia Giebelhausen, Jakob Löhnertz, Michael Bykovski',
    author_email='brogrammers@seibert-media.net',
    packages=[
        'highton',
        'highton.call_mixins',
        'highton.fields',
        'highton.models',
        'highton.parsing',
    ],
    # Runtime dependency: HTTP calls to the Highrise API.
    install_requires=[
        'requests'
    ],
    keywords='seibertmedia seibert media python api wrapper highrise highton',
    # NOTE(review): the Django environment/framework classifiers look
    # inherited from a template -- confirm they apply to this package.
    classifiers=[
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Topic :: Internet :: WWW/HTTP',
    ]
)
| apache-2.0 |
oemof/examples | oemof_examples/oemof.solph/v0.4.x/storage_balanced_unbalanced/storage.py | 1 | 5395 | # -*- coding: utf-8 -*-
"""
General description
-------------------
Example that shows the parameter `balanced` of `GenericStorage`.
Installation requirements
-------------------------
This example requires the version v0.4.x of oemof. Install by:
pip install 'oemof.solph>=0.4,<0.5'
Optional to see the plots:
pip install matplotlib
Copyright / Licence Info
------------------------
This file is part of project oemof (github.com/oemof/oemof). It's copyrighted
by the contributors recorded in the version control history of the file,
available from its original location oemof/tests/test_scripts/test_solph/
test_generic_offsettransformer/test_generic_offsettransformer.py
SPDX-License-Identifier: GPL-3.0-or-later
"""
__copyright__ = "oemof developer group"
__license__ = "GPLv3"
import os
import pandas as pd
from oemof import solph
try:
from matplotlib import pyplot as plt
except ImportError:
print("Install matplotlib to see the plots.")
plt = None
# The four storage scenarios compared by the example: balanced vs. unbalanced
# end-of-horizon behavior, each with and without an explicit initial level.
DATA = [
    {"name": "unbalanced", "initial_storage_level": 0.2, "balanced": False},
    {
        "name": "unbalanced_None",
        "initial_storage_level": None,
        "balanced": False,
    },
    {"name": "balanced", "initial_storage_level": 0.2, "balanced": True},
    {"name": "balanced_None", "initial_storage_level": None, "balanced": True},
]

# Shared scenario parameters: grid electricity price, shunt (dump) price and
# the storage's nominal capacity.
PARAMETER = {"el_price": 10, "sh_price": 5, "nominal_storage_capacity": 7}
def storage_example():
    """Build, solve and report the four storage scenarios defined in DATA.

    Each scenario gets its own bus, grid source, PV source, demand sink,
    shunt sink and storage; all are solved in a single optimization model
    and the resulting costs and storage balances are printed (and plotted
    when matplotlib is available).
    """
    # read time series
    timeseries = pd.read_csv(os.path.join(os.getcwd(), "storage_data.csv"))

    # create an energy system
    idx = pd.date_range("1/1/2017", periods=len(timeseries), freq="H")
    es = solph.EnergySystem(timeindex=idx)

    for data_set in DATA:
        name = data_set["name"]

        # power bus
        bel = solph.Bus(label="bel_{0}".format(name))
        es.add(bel)
        # Grid supply at the configured electricity price.
        es.add(
            solph.Source(
                label="source_el_{0}".format(name),
                outputs={
                    bel: solph.Flow(variable_costs=PARAMETER["el_price"])
                },
            )
        )
        # PV feed-in following the time series.
        es.add(
            solph.Source(
                label="pv_el_{0}".format(name),
                outputs={
                    bel: solph.Flow(fix=timeseries["pv_el"], nominal_value=1)
                },
            )
        )
        # Fixed electricity demand.
        es.add(
            solph.Sink(
                label="demand_el_{0}".format(name),
                inputs={
                    bel: solph.Flow(
                        fix=timeseries["demand_el"], nominal_value=1
                    )
                },
            )
        )
        # Shunt sink absorbing surplus energy at a penalty price.
        es.add(
            solph.Sink(
                label="shunt_el_{0}".format(name),
                inputs={bel: solph.Flow(variable_costs=PARAMETER["sh_price"])},
            )
        )
        # Electric Storage
        es.add(
            solph.components.GenericStorage(
                label="storage_elec_{0}".format(name),
                nominal_storage_capacity=PARAMETER["nominal_storage_capacity"],
                inputs={bel: solph.Flow()},
                outputs={bel: solph.Flow()},
                initial_storage_level=data_set["initial_storage_level"],
                balanced=data_set["balanced"],
            )
        )

    # create an optimization problem and solve it
    om = solph.Model(es)

    # solve model
    om.solve(solver="cbc")

    # create result object
    results = solph.processing.results(om)

    # Split result keys into flows (two-node keys) and components.
    flows = [x for x in results if x[1] is not None]
    components = [x for x in results if x[1] is None]

    storage_cap = pd.DataFrame()
    costs = pd.Series(dtype=float)
    balance = pd.Series(dtype=float)

    # Supply costs per scenario (scenario name is the label's tail).
    for flow in [x for x in flows if "source_el" in x[0].label]:
        name = "_".join(flow[0].label.split("_")[2:])
        print(name, float(results[flow]["sequences"].sum()))
        costs[name] = float(
            results[flow]["sequences"].sum() * PARAMETER["el_price"]
        )

    # Add the cost of energy dumped into the shunt.
    for flow in [x for x in flows if "shunt_el" in x[1].label]:
        name = "_".join(flow[1].label.split("_")[2:])
        costs[name] += float(
            results[flow]["sequences"].sum() * PARAMETER["sh_price"]
        )

    storages = [x[0] for x in components if "storage" in x[0].label]
    idx = results[storages[0], None]["sequences"]["storage_content"].index
    last = idx[-1]
    # Synthetic timestamp one step before the horizon for the initial level.
    prev = idx[0] - 1 * idx.freq
    for s in storages:
        name = s.label
        storage_cap[name] = results[s, None]["sequences"]["storage_content"]
        storage_cap.loc[prev, name] = results[s, None]["scalars"][
            "init_content"
        ]
        # Net change of stored energy over the horizon.
        balance[name] = (
            storage_cap.loc[last][name] - storage_cap.loc[prev][name]
        )

    if plt is not None:
        storage_cap.plot(drawstyle="steps-mid", subplots=False, sharey=True)
        storage_cap.plot(drawstyle="steps-mid", subplots=True, sharey=True)
        costs.plot(kind="bar", ax=plt.subplots()[1], rot=0)
        balance.index = [
            "balanced",
            "balanced_None",
            "unbalanced",
            "unbalanced_None",
        ]
        balance.plot(
            kind="bar",
            linewidth=1,
            edgecolor="#000000",
            rot=0,
            ax=plt.subplots()[1],
        )
        plt.show()
    print(storage_cap)
    print(costs)
    print(balance)
# Run the example when executed as a script.
if __name__ == "__main__":
    storage_example()
| gpl-3.0 |
rynomster/django | tests/save_delete_hooks/models.py | 409 | 1030 | """
Adding hooks before/after saving and deleting
To execute arbitrary code around ``save()`` and ``delete()``, just subclass
the methods.
"""
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Person(models.Model):
    """Test model that records hook execution around save()/delete().

    The ``data`` list collects trace strings so tests can assert the order
    in which the custom code runs relative to the "real" ORM methods.
    """
    first_name = models.CharField(max_length=20)
    last_name = models.CharField(max_length=20)

    def __init__(self, *args, **kwargs):
        super(Person, self).__init__(*args, **kwargs)
        # Per-instance trace of hook execution (not persisted).
        self.data = []

    def __str__(self):
        return "%s %s" % (self.first_name, self.last_name)

    def save(self, *args, **kwargs):
        self.data.append("Before save")
        # Call the "real" save() method
        super(Person, self).save(*args, **kwargs)
        self.data.append("After save")

    def delete(self):
        self.data.append("Before deletion")
        # Call the "real" delete() method
        super(Person, self).delete()
        self.data.append("After deletion")
| bsd-3-clause |
3dfxmadscientist/CBSS | addons/document_page/wizard/wiki_make_index.py | 38 | 3557 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields, osv
from tools.translate import _
class wiki_make_index(osv.osv_memory):
    """ Create Index For Selected Page """
    _name = "wiki.make.index"
    _description = "Create Index"
    def wiki_do_index(self, cr, uid, ids, context=None):
        """ Makes Index according to page hierarchy
        @param cr: the current row, from the database cursor,
        @param uid: the current user’s ID for security checks,
        @param ids: list of wiki index’s IDs
        """
        if context is None:
            context = {}
        # The pages to renumber come in via active_ids of the calling
        # context; ids only identifies this transient wizard record.
        data = context and context.get('active_ids', []) or []
        if not data:
            return {'type': 'ir.actions.act_window_close'}
        for index_obj in self.browse(cr, uid, ids, context=context):
            wiki_pool = self.pool.get('wiki.wiki')
            cr.execute("Select id, section from wiki_wiki where id IN %s \
                        order by section ", (tuple(data),))
            lst0 = cr.fetchall()
            if not lst0[0][1]:
                raise osv.except_osv(_('Warning!'), _('There is no section in this Page.'))
            lst = []
            s_ids = {}
            # Map each dotted section string (e.g. '1.2.3') back to its page id.
            for l in lst0:
                s_ids[l[1]] = l[0]
                lst.append(l[1])
            lst.sort()
            val = None
            def toint(x):
                # Any non-numeric section component is treated as 1.
                try:
                    return int(x)
                except:
                    return 1
            # NOTE: Python 2 semantics — map() returns a list here, so lst
            # becomes a list of lists of ints.
            lst = map(lambda x: map(toint, x.split('.')), lst)
            result = []
            current = ['0']
            current2 = []
            # Walk the sorted section paths and recompute a consecutive
            # numbering; `current` is the number being built, `current2` the
            # previous page's path.
            for l in lst:
                for pos in range(len(l)):
                    if pos >= len(current):
                        current.append('1')
                        continue
                    if (pos == len(l) - 1) or (pos >= len(current2)) or (toint(l[pos]) > toint(current2[pos])):
                        current[pos] = str(toint(current[pos]) + 1)
                        current = current[:pos + 1]
                    if pos == len(l) - 1:
                        break
                key = ('.'.join([str(x) for x in l]))
                id = s_ids[key]
                val = ('.'.join([str(x) for x in current[:]]), id)
                if val:
                    result.append(val)
                current2 = l
            # Write the recomputed section numbers back onto the pages.
            for rs in result:
                wiki_pool.write(cr, uid, [rs[1]], {'section':rs[0]})
        return {'type': 'ir.actions.act_window_close'}
wiki_make_index()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ns950/calibre | src/calibre/gui2/tweak_book/editor/smarts/html.py | 14 | 31083 | #!/usr/bin/env python2
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2014, Kovid Goyal <kovid at kovidgoyal.net>'
import sys, re
from operator import itemgetter
from cssutils import parseStyle
from PyQt5.Qt import QTextEdit, Qt
from calibre import prepare_string_for_xml, xml_entity_to_unicode
from calibre.ebooks.oeb.polish.container import OEB_DOCS
from calibre.gui2 import error_dialog
from calibre.gui2.tweak_book.editor.syntax.html import ATTR_NAME, ATTR_END, ATTR_START, ATTR_VALUE
from calibre.gui2.tweak_book import tprefs, current_container
from calibre.gui2.tweak_book.editor.smarts import NullSmarts
from calibre.gui2.tweak_book.editor.smarts.utils import (
no_modifiers, get_leading_whitespace_on_block, get_text_before_cursor,
get_text_after_cursor, smart_home, smart_backspace, smart_tab, expand_tabs)
from calibre.utils.icu import utf16_length
get_offset = itemgetter(0)  # boundary tuples sort by their first field, the offset
PARAGRAPH_SEPARATOR = '\u2029'  # block separator used in QTextCursor.selectedText(); see select_tag()
class Tag(object):

    ''' A matched tag definition: the text blocks and offsets that delimit it,
    its (possibly prefixed) name, and whether it is self closing. '''

    def __init__(self, start_block, tag_start, end_block, tag_end, self_closing=False):
        self.start_block = start_block
        self.end_block = end_block
        self.start_offset = tag_start.offset
        self.end_offset = tag_end.offset
        # Qualify the name with its namespace prefix, if any (e.g. svg:svg).
        name = tag_start.name
        if tag_start.prefix:
            name = tag_start.prefix + ':' + name
        self.name = name
        self.self_closing = self_closing

    def __repr__(self):
        return '<%s start_block=%s start_offset=%s end_block=%s end_offset=%s self_closing=%s>' % (
            self.name, self.start_block.blockNumber(), self.start_offset, self.end_block.blockNumber(), self.end_offset, self.self_closing)

    __str__ = __repr__
def next_tag_boundary(block, offset, forward=True, max_lines=10000):
    ''' Return (block, boundary) for the first tag boundary strictly after
    (forward=True) or strictly before (forward=False) offset, scanning at most
    max_lines blocks. Returns (None, None) if no boundary is found. '''
    while block.isValid() and max_lines > 0:
        ud = block.userData()
        if ud is not None:
            # Boundaries are scanned in offset order (reversed when going backwards).
            tags = sorted(ud.tags, key=get_offset, reverse=not forward)
            for boundary in tags:
                if forward and boundary.offset > offset:
                    return block, boundary
                if not forward and boundary.offset < offset:
                    return block, boundary
        # Move to the adjacent block, resetting offset past either end.
        block = block.next() if forward else block.previous()
        offset = -1 if forward else sys.maxint
        max_lines -= 1
    return None, None
def next_attr_boundary(block, offset, forward=True):
    ''' Return (block, boundary) for the first attribute boundary at or after
    (forward=True) / at or before (forward=False) offset. Note that unlike
    next_tag_boundary() the comparison is inclusive and there is no line
    limit. Returns (None, None) if no boundary is found. '''
    while block.isValid():
        ud = block.userData()
        if ud is not None:
            attributes = sorted(ud.attributes, key=get_offset, reverse=not forward)
            for boundary in attributes:
                if forward and boundary.offset >= offset:
                    return block, boundary
                if not forward and boundary.offset <= offset:
                    return block, boundary
        block = block.next() if forward else block.previous()
        offset = -1 if forward else sys.maxint
    return None, None
def find_closest_containing_tag(block, offset, max_tags=sys.maxint):
    ''' Find the closest containing tag. To find it, we search for the first
    opening tag that does not have a matching closing tag before the specified
    position. Search through at most max_tags. '''
    prev_tag_boundary = lambda b, o: next_tag_boundary(b, o, forward=False)
    block, boundary = prev_tag_boundary(block, offset)
    if block is None:
        return None
    if boundary.is_start:
        # We are inside a tag already
        if boundary.closing:
            # Restart the search from just before this closing tag.
            return find_closest_containing_tag(block, boundary.offset)
        eblock, eboundary = next_tag_boundary(block, boundary.offset)
        if eblock is None or eboundary is None or eboundary.is_start:
            return None
        if eboundary.self_closing:
            return Tag(block, boundary, eblock, eboundary, self_closing=True)
        return find_closest_containing_tag(eblock, eboundary.offset + 1)
    # Walk backwards, pushing closing tags and popping them when their
    # opening tag is seen; an unmatched opening tag is the container.
    stack = []
    block, tag_end = block, boundary
    while block is not None and max_tags > 0:
        sblock, tag_start = prev_tag_boundary(block, tag_end.offset)
        if sblock is None or not tag_start.is_start:
            break
        if tag_start.closing:  # A closing tag of the form </a>
            stack.append((tag_start.prefix, tag_start.name))
        elif tag_end.self_closing:  # A self closing tag of the form <a/>
            pass  # Ignore it
        else:  # An opening tag, hurray
            try:
                prefix, name = stack.pop()
            except IndexError:
                prefix = name = None
            if (prefix, name) != (tag_start.prefix, tag_start.name):
                # Either we have an unbalanced opening tag or a syntax error, in
                # either case terminate
                return Tag(sblock, tag_start, block, tag_end)
        block, tag_end = prev_tag_boundary(sblock, tag_start.offset)
        max_tags -= 1
    return None  # Could not find a containing tag
def find_tag_definition(block, offset):
    ''' Return the <tag | > definition, if any, that (block, offset) is inside,
    as (tag_name, closing). tag_name is None when the position is not inside
    a tag definition. '''
    block, boundary = next_tag_boundary(block, offset, forward=False)
    # next_tag_boundary() returns (None, None) when no boundary exists before
    # the position (e.g. at the very start of the document); guard against
    # dereferencing None.
    if boundary is None or not boundary.is_start:
        return None, False
    tag_start = boundary
    closing = tag_start.closing
    tag = tag_start.name
    if tag_start.prefix:
        # Qualify with the namespace prefix, matching Tag.name.
        tag = tag_start.prefix + ':' + tag
    return tag, closing
def find_containing_attribute(block, offset):
    ''' Return the name of the attribute whose value contains (block, offset),
    or None if the position is not inside an attribute value. '''
    block, boundary = next_attr_boundary(block, offset, forward=False)
    if block is None:
        return None
    if boundary.type is ATTR_NAME or boundary.data is ATTR_END:
        return None  # offset is not inside an attribute value
    # Step back once more to find the attribute name this value belongs to.
    block, boundary = next_attr_boundary(block, boundary.offset - 1, forward=False)
    if block is not None and boundary.type == ATTR_NAME:
        return boundary.data
    return None
def find_attribute_in_tag(block, offset, attr_name):
    ''' Return the start of the value of the attribute named attr_name, inside
    the tag definition beginning at (block, offset), as (block, offset), or
    (None, None) if the attribute is not found. The name comparison is
    case-insensitive. '''
    end_block, boundary = next_tag_boundary(block, offset)
    # next_tag_boundary() can return (None, None) when no boundary is found
    # within its search window; guard before dereferencing boundary.
    if end_block is None or boundary.is_start:
        return None, None
    end_offset = boundary.offset
    end_pos = (end_block.blockNumber(), end_offset)
    current_block, current_offset = block, offset
    found_attr = False
    while True:
        current_block, boundary = next_attr_boundary(current_block, current_offset)
        if current_block is None or (current_block.blockNumber(), boundary.offset) > end_pos:
            # Ran past the end of this tag definition without a match.
            return None, None
        current_offset = boundary.offset
        if found_attr:
            # The boundary right after the matched name must be the value start.
            if boundary.type is not ATTR_VALUE or boundary.data is not ATTR_START:
                return None, None
            return current_block, current_offset
        else:
            if boundary.type is ATTR_NAME and boundary.data.lower() == attr_name.lower():
                found_attr = True
            current_offset += 1
def find_end_of_attribute(block, offset):
    ''' Find the end of an attribute that occurs somewhere after the position
    specified by (block, offset), returning it as (block, offset) or
    (None, None) if there is no attribute-value end boundary there. '''
    nblock, boundary = next_attr_boundary(block, offset)
    if nblock is not None and boundary is not None:
        if boundary.type is ATTR_VALUE and boundary.data is ATTR_END:
            return nblock, boundary.offset
    return None, None
def find_closing_tag(tag, max_tags=sys.maxint):
    ''' Find the closing tag corresponding to the specified tag. To find it we
    search for the first closing tag after the specified tag that does not
    match a previous opening tag. Search through at most max_tags. '''
    if tag.self_closing:
        return None
    # Walk forwards, pushing opening tags and popping them when their closing
    # tag is seen; an unmatched closing tag is the one we want.
    stack = []
    block, offset = tag.end_block, tag.end_offset
    while block.isValid() and max_tags > 0:
        block, tag_start = next_tag_boundary(block, offset)
        if block is None or not tag_start.is_start:
            break
        endblock, tag_end = next_tag_boundary(block, tag_start.offset)
        if endblock is None or tag_end.is_start:
            break
        if tag_start.closing:
            try:
                prefix, name = stack.pop()
            except IndexError:
                prefix = name = None
            if (prefix, name) != (tag_start.prefix, tag_start.name):
                # Unbalanced closing tag: this is the match.
                return Tag(block, tag_start, endblock, tag_end)
        elif tag_end.self_closing:
            pass
        else:
            stack.append((tag_start.prefix, tag_start.name))
        block, offset = endblock, tag_end.offset
        max_tags -= 1
    return None
def select_tag(cursor, tag):
    ''' Select the full text of *tag* with *cursor* and return it, with Qt
    paragraph separators converted to newlines and trailing NULs stripped. '''
    cursor.setPosition(tag.start_block.position() + tag.start_offset)
    cursor.setPosition(tag.end_block.position() + tag.end_offset + 1, cursor.KeepAnchor)
    return unicode(cursor.selectedText()).replace(PARAGRAPH_SEPARATOR, '\n').rstrip('\0')
def rename_tag(cursor, opening_tag, closing_tag, new_name, insert=False):
    ''' Rename the tag pair opening_tag/closing_tag to new_name, as a single
    undo step. If insert is True, new opening/closing tags are inserted just
    inside the existing pair instead of renaming it (callers such as
    rename_block_tag use this for body/td/th/li). The closing tag is rewritten
    first so the opening tag's positions remain valid. '''
    cursor.beginEditBlock()
    text = select_tag(cursor, closing_tag)
    if insert:
        text = '</%s>%s' % (new_name, text)
    else:
        text = re.sub(r'^<\s*/\s*[a-zA-Z0-9]+', '</%s' % new_name, text)
    cursor.insertText(text)
    text = select_tag(cursor, opening_tag)
    if insert:
        text += '<%s>' % new_name
    else:
        text = re.sub(r'^<\s*[a-zA-Z0-9]+', '<%s' % new_name, text)
    cursor.insertText(text)
    cursor.endEditBlock()
def ensure_not_within_tag_definition(cursor, forward=True):
    ''' Ensure the cursor is not inside a tag definition <>. Returns True iff the cursor was moved. '''
    block, offset = cursor.block(), cursor.positionInBlock()
    b, boundary = next_tag_boundary(block, offset, forward=False)
    if b is None:
        return False
    if boundary.is_start:
        # We are inside a tag
        if forward:
            # Move just past the tag's closing >.
            block, boundary = next_tag_boundary(block, offset)
            if block is not None:
                cursor.setPosition(block.position() + boundary.offset + 1)
                return True
        else:
            # Move to just before the tag's opening <.
            cursor.setPosition(b.position() + boundary.offset)
            return True
    return False
# Tag names treated as block-level by operations such as rename_block_tag()
# and set_text_alignment().
BLOCK_TAG_NAMES = frozenset((
    'address', 'article', 'aside', 'blockquote', 'center', 'dir', 'fieldset',
    'isindex', 'menu', 'noframes', 'hgroup', 'noscript', 'pre', 'section',
    'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'header', 'p', 'div', 'dd', 'dl', 'ul',
    'ol', 'li', 'body', 'td', 'th'))
def find_closest_containing_block_tag(block, offset, block_tag_names=BLOCK_TAG_NAMES):
    ''' Walk outwards through the containing tags of (block, offset) and
    return the innermost one whose name is in block_tag_names, or None. '''
    tag = find_closest_containing_tag(block, offset)
    while tag is not None:
        if tag.name in block_tag_names:
            return tag
        # Not a block tag: continue the search from just before it.
        block, offset = tag.start_block, tag.start_offset
        tag = find_closest_containing_tag(block, offset)
    return None
def set_style_property(tag, property_name, value, editor):
    '''
    Set a style property, i.e. a CSS property inside the style attribute of the tag.
    Any existing style attribute is updated or a new attribute is inserted.
    '''
    block, offset = find_attribute_in_tag(tag.start_block, tag.start_offset + 1, 'style')
    c = editor.textCursor()
    def css(d):
        # Serialize the cssutils style declaration on a single line.
        return d.cssText.replace('\n', ' ')
    if block is None or offset is None:
        # No existing style attribute: insert a new one at the end of the tag.
        d = parseStyle('')
        d.setProperty(property_name, value)
        c.setPosition(tag.end_block.position() + tag.end_offset)
        c.insertText(' style="%s"' % css(d))
    else:
        # Replace the existing quoted attribute value in place.
        c.setPosition(block.position() + offset - 1)
        end_block, end_offset = find_end_of_attribute(block, offset + 1)
        if end_block is None:
            return error_dialog(editor, _('Invalid markup'), _(
                'The current block tag has an existing unclosed style attribute. Run the Fix HTML'
                ' tool first.'), show=True)
        c.setPosition(end_block.position() + end_offset, c.KeepAnchor)
        # [1:-1] strips the surrounding quotes before parsing the CSS.
        d = parseStyle(editor.selected_text_from_cursor(c)[1:-1])
        d.setProperty(property_name, value)
        c.insertText('"%s"' % css(d))
# Matches what looks like an HTML entity (e.g. &amp; or &#8212;) ending at the
# cursor position; used by Smarts.replace_possible_entity().
entity_pat = re.compile(r'&(#{0,1}[a-zA-Z0-9]{1,8});$')
class Smarts(NullSmarts):

    ''' HTML/XML-aware smart-editing behaviour for the code editor: matching
    tag highlighting, tag renaming, smart selection, auto-close/auto-indent,
    entity replacement and completion support. '''

    def __init__(self, *args, **kwargs):
        # The regexps are shared by all instances, so compile them once on
        # the class the first time an instance is created.
        if not hasattr(Smarts, 'regexps_compiled'):
            Smarts.regexps_compiled = True
            Smarts.tag_pat = re.compile(r'<[^>]+>')
            Smarts.closing_tag_pat = re.compile(r'<\s*/[^>]+>')
            Smarts.closing_pat = re.compile(r'<\s*/')
            Smarts.self_closing_pat = re.compile(r'/\s*>')
            Smarts.complete_attr_pat = re.compile(r'''([a-zA-Z0-9_-]+)\s*=\s*(?:'([^']*)|"([^"]*))$''')
        NullSmarts.__init__(self, *args, **kwargs)
        self.last_matched_tag = None
    def get_extra_selections(self, editor):
        # Highlight the containing tag and its matching closing tag.
        ans = []
        def add_tag(tag):
            a = QTextEdit.ExtraSelection()
            a.cursor, a.format = editor.textCursor(), editor.match_paren_format
            # Convert the Python string offsets into UTF-16 offsets, as
            # required by QTextCursor positions.
            a.cursor.setPosition(tag.start_block.position()), a.cursor.movePosition(a.cursor.EndOfBlock, a.cursor.KeepAnchor)
            text = unicode(a.cursor.selectedText())
            start_pos = utf16_length(text[:tag.start_offset])
            a.cursor.setPosition(tag.end_block.position()), a.cursor.movePosition(a.cursor.EndOfBlock, a.cursor.KeepAnchor)
            text = unicode(a.cursor.selectedText())
            end_pos = utf16_length(text[:tag.end_offset + 1])
            a.cursor.setPosition(tag.start_block.position() + start_pos)
            a.cursor.setPosition(tag.end_block.position() + end_pos, a.cursor.KeepAnchor)
            ans.append(a)
        c = editor.textCursor()
        block, offset = c.block(), c.positionInBlock()
        # Cache the match for reuse by cursor_position_with_sourceline().
        tag = self.last_matched_tag = find_closest_containing_tag(block, offset, max_tags=2000)
        if tag is not None:
            add_tag(tag)
            tag = find_closing_tag(tag, max_tags=4000)
            if tag is not None:
                add_tag(tag)
        return ans
    def rename_block_tag(self, editor, new_name):
        # Rename the closest containing block-level tag to new_name.
        editor.highlighter.join()
        c = editor.textCursor()
        block, offset = c.block(), c.positionInBlock()
        tag = find_closest_containing_block_tag(block, offset)
        if tag is not None:
            closing_tag = find_closing_tag(tag)
            if closing_tag is None:
                return error_dialog(editor, _('Invalid HTML'), _(
                    'There is an unclosed %s tag. You should run the Fix HTML tool'
                    ' before trying to rename tags.') % tag.name, show=True)
            # For structural tags we insert a new pair inside instead of renaming.
            rename_tag(c, tag, closing_tag, new_name, insert=tag.name in {'body', 'td', 'th', 'li'})
        else:
            # NOTE(review): dialog title 'No found' looks like a typo for
            # 'Not found' -- left unchanged here as it is a runtime string.
            return error_dialog(editor, _('No found'), _(
                'No suitable block level tag was found to rename'), show=True)
    def get_smart_selection(self, editor, update=True):
        # Shrink the selection so that neither end lies inside a tag definition.
        editor.highlighter.join()
        cursor = editor.textCursor()
        if not cursor.hasSelection():
            return ''
        left = min(cursor.anchor(), cursor.position())
        right = max(cursor.anchor(), cursor.position())
        cursor.setPosition(left)
        ensure_not_within_tag_definition(cursor)
        left = cursor.position()
        cursor.setPosition(right)
        ensure_not_within_tag_definition(cursor, forward=False)
        right = cursor.position()
        cursor.setPosition(left), cursor.setPosition(right, cursor.KeepAnchor)
        if update:
            editor.setTextCursor(cursor)
        return editor.selected_text_from_cursor(cursor)
    def insert_hyperlink(self, editor, target, text):
        # Insert <a href="target">text</a> at the cursor, replacing any selection.
        editor.highlighter.join()
        c = editor.textCursor()
        if c.hasSelection():
            c.insertText('')  # delete any existing selected text
        ensure_not_within_tag_definition(c)
        c.insertText('<a href="%s">' % prepare_string_for_xml(target, True))
        p = c.position()
        c.insertText('</a>')
        c.setPosition(p)  # ensure cursor is positioned inside the newly created tag
        if text:
            c.insertText(text)
        editor.setTextCursor(c)
    def insert_tag(self, editor, name):
        # Wrap the smart selection in <name>...</cname>; the closing name is
        # the leading identifier of name (which may carry attributes).
        editor.highlighter.join()
        name = name.lstrip()
        text = self.get_smart_selection(editor, update=True)
        c = editor.textCursor()
        pos = min(c.position(), c.anchor())
        m = re.match(r'[a-zA-Z0-9:-]+', name)
        cname = name if m is None else m.group()
        c.insertText('<{0}>{1}</{2}>'.format(name, text, cname))
        c.setPosition(pos + 2 + len(name))
        editor.setTextCursor(c)
    def verify_for_spellcheck(self, cursor, highlighter):
        # Return True iff the cursor is in a location where spelling is
        # checked (inside a tag or inside a checked attribute)
        highlighter.join()
        block = cursor.block()
        start_pos = cursor.anchor() - block.position()
        end_pos = cursor.position() - block.position()
        start_tag, closing = find_tag_definition(block, start_pos)
        if closing:
            return False
        end_tag, closing = find_tag_definition(block, end_pos)
        if closing:
            return False
        if start_tag is None and end_tag is None:
            # We are in normal text, check that the containing tag is
            # allowed for spell checking.
            tag = find_closest_containing_tag(block, start_pos)
            if tag is not None and highlighter.tag_ok_for_spell(tag.name.split(':')[-1]):
                return True
        if start_tag != end_tag:
            return False
        # Now we check if we are in an allowed attribute
        sa = find_containing_attribute(block, start_pos)
        ea = find_containing_attribute(block, end_pos)
        if sa == ea and sa in highlighter.spell_attributes:
            return True
        return False
    def cursor_position_with_sourceline(self, cursor, for_position_sync=True, use_matched_tag=True):
        ''' Return the tag just before the current cursor as a source line
        number and a list of tags defined on that line upto and including the
        containing tag. If ``for_position_sync`` is False then the tag
        *containing* the cursor is returned instead of the tag just before the
        cursor. Note that finding the containing tag is expensive, so
        use with care. As an optimization, the last tag matched by
        get_extra_selections is used, unless use_matched_tag is False. '''
        block, offset = cursor.block(), cursor.positionInBlock()
        if for_position_sync:
            nblock, boundary = next_tag_boundary(block, offset, forward=False)
            if boundary is None:
                return None, None
            if boundary.is_start:
                # We are inside a tag, use this tag
                start_block, start_offset = nblock, boundary.offset
            else:
                # Scan backwards for the closest preceding opening tag.
                start_block = None
                while start_block is None and block.isValid():
                    ud = block.userData()
                    if ud is not None:
                        for boundary in reversed(ud.tags):
                            if boundary.is_start and not boundary.closing and boundary.offset <= offset:
                                start_block, start_offset = block, boundary.offset
                                break
                    block, offset = block.previous(), sys.maxint
            end_block = None
            if start_block is not None:
                end_block, boundary = next_tag_boundary(start_block, start_offset)
                if boundary is None or boundary.is_start:
                    return None, None
        else:
            tag = None
            if use_matched_tag:
                tag = self.last_matched_tag
            if tag is None:
                tag = find_closest_containing_tag(block, offset, max_tags=2000)
            if tag is None:
                return None, None
            start_block, start_offset, end_block = tag.start_block, tag.start_offset, tag.end_block
        if start_block is None or end_block is None:
            return None, None
        sourceline = end_block.blockNumber() + 1  # blockNumber() is zero based
        ud = start_block.userData()
        if ud is None:
            return None, None
        tags = [t.name for t in ud.tags if (t.is_start and not t.closing and t.offset <= start_offset)]
        if start_block.blockNumber() != end_block.blockNumber():
            # Multiline opening tag, it must be the first tag on the line with the closing >
            del tags[:-1]
        return sourceline, tags
    def goto_sourceline(self, editor, sourceline, tags, attribute=None):
        ''' Move the cursor to the tag identified by sourceline and tags (a
        list of tags names on the specified line). If attribute is specified
        the cursor will be placed at the start of the attribute value. '''
        found_tag = False
        if sourceline is None:
            return found_tag
        block = editor.document().findBlockByNumber(sourceline - 1)  # blockNumber() is zero based
        if not block.isValid():
            return found_tag
        c = editor.textCursor()
        ud = block.userData()
        all_tags = [] if ud is None else [t for t in ud.tags if (t.is_start and not t.closing)]
        tag_names = [t.name for t in all_tags]
        if tag_names[:len(tags)] == tags:
            # The line's opening tags match: position at the last listed tag.
            c.setPosition(block.position() + all_tags[len(tags)-1].offset)
            found_tag = True
        else:
            c.setPosition(block.position())
        if found_tag and attribute is not None:
            start_offset = c.position() - block.position()
            nblock, offset = find_attribute_in_tag(block, start_offset, attribute)
            if nblock is not None:
                c.setPosition(nblock.position() + offset)
        editor.setTextCursor(c)
        return found_tag
    def get_inner_HTML(self, editor):
        ''' Select the inner HTML of the current tag. Return a cursor with the
        inner HTML selected or None. '''
        editor.highlighter.join()
        c = editor.textCursor()
        block = c.block()
        offset = c.position() - block.position()
        # NOTE(review): next_tag_boundary() can return (None, None); the
        # boundary.is_start below would then raise -- confirm callers
        # guarantee a boundary exists after the cursor.
        nblock, boundary = next_tag_boundary(block, offset)
        if boundary.is_start:
            # We are within the contents of a tag already
            tag = find_closest_containing_tag(block, offset)
        else:
            # We are inside a tag definition < | >
            if boundary.self_closing:
                return None  # self closing tags have no inner html
            tag = find_closest_containing_tag(nblock, boundary.offset + 1)
        if tag is None:
            return None
        ctag = find_closing_tag(tag)
        if ctag is None:
            return None
        c.setPosition(tag.end_block.position() + tag.end_offset + 1)
        c.setPosition(ctag.start_block.position() + ctag.start_offset, c.KeepAnchor)
        return c
    def set_text_alignment(self, editor, value):
        ''' Set the text-align property on the current block tag(s) '''
        editor.highlighter.join()
        block_tag_names = BLOCK_TAG_NAMES - {'body'}  # ignore body since setting text-align globally on body is almost never what is wanted
        tags = []
        c = editor.textCursor()
        if c.hasSelection():
            # Collect every complete block tag that starts inside the selection.
            start, end = min(c.anchor(), c.position()), max(c.anchor(), c.position())
            c.setPosition(start)
            block = c.block()
            while block.isValid() and block.position() < end:
                ud = block.userData()
                if ud is not None:
                    for tb in ud.tags:
                        if tb.is_start and not tb.closing and tb.name.lower() in block_tag_names:
                            nblock, boundary = next_tag_boundary(block, tb.offset)
                            if boundary is not None and not boundary.is_start and not boundary.self_closing:
                                tags.append(Tag(block, tb, nblock, boundary))
                block = block.next()
        if not tags:
            # No selection (or no tags in it): use the containing block tag.
            c = editor.textCursor()
            block, offset = c.block(), c.positionInBlock()
            tag = find_closest_containing_block_tag(block, offset, block_tag_names)
            if tag is None:
                return error_dialog(editor, _('Not in a block tag'), _(
                    'Cannot change text alignment as the cursor is not inside a block level tag, such as a <p> or <div> tag.'), show=True)
            tags = [tag]
        # Apply in reverse so earlier tag positions are not invalidated.
        for tag in reversed(tags):
            set_style_property(tag, 'text-align', value, editor)
    def handle_key_press(self, ev, editor):
        # Implement smart behaviour for entities, Enter (auto-indent),
        # / (auto-close) and Home/Tab/Backspace. Returns True if handled.
        ev_text = ev.text()
        key = ev.key()
        is_xml = editor.syntax == 'xml'
        if tprefs['replace_entities_as_typed'] and (key == Qt.Key_Semicolon or ';' in ev_text):
            self.replace_possible_entity(editor)
            return True
        if key in (Qt.Key_Enter, Qt.Key_Return) and no_modifiers(ev, Qt.ControlModifier, Qt.AltModifier):
            ls = get_leading_whitespace_on_block(editor)
            if ls == ' ':
                ls = ''  # Do not consider a single leading space as indentation
            if is_xml:
                # Increase the indent if there are unbalanced opening tags
                # before the cursor on this line.
                count = 0
                for m in self.tag_pat.finditer(get_text_before_cursor(editor)[1]):
                    text = m.group()
                    if self.closing_pat.search(text) is not None:
                        count -= 1
                    elif self.self_closing_pat.search(text) is None:
                        count += 1
                if self.closing_tag_pat.match(get_text_after_cursor(editor)[1].lstrip()):
                    count -= 1
                if count > 0:
                    ls += editor.tw * ' '
            editor.textCursor().insertText('\n' + ls)
            return True
        if key == Qt.Key_Slash:
            cursor, text = get_text_before_cursor(editor)
            if not text.rstrip().endswith('<'):
                return False
            text = expand_tabs(text.rstrip()[:-1], editor.tw)
            pls = get_leading_whitespace_on_block(editor, previous=True)
            if is_xml and not text.lstrip() and len(text) > 1 and len(text) >= len(pls):
                # Auto-dedent
                text = text[:-editor.tw] + '</'
                cursor.insertText(text)
                editor.setTextCursor(cursor)
                self.auto_close_tag(editor)
                return True
            if self.auto_close_tag(editor):
                return True
        if key == Qt.Key_Home and smart_home(editor, ev):
            return True
        if key == Qt.Key_Tab and smart_tab(editor, ev):
            return True
        if key == Qt.Key_Backspace and smart_backspace(editor, ev):
            return True
        return False
    def replace_possible_entity(self, editor):
        # If the text just before the cursor looks like an entity, replace it
        # with the corresponding unicode character.
        c = editor.textCursor()
        c.insertText(';')
        c.setPosition(c.position() - min(c.positionInBlock(), 10), c.KeepAnchor)
        text = editor.selected_text_from_cursor(c)
        m = entity_pat.search(text)
        if m is not None:
            ent = m.group()
            repl = xml_entity_to_unicode(m)
            if repl != ent:
                c.setPosition(c.position() + m.start(), c.KeepAnchor)
                c.insertText(repl)
                editor.setTextCursor(c)
    def auto_close_tag(self, editor):
        # After typing </, insert the name of the containing tag plus the
        # closing >. Returns True if a tag was closed.
        if not tprefs['auto_close_tags']:
            return False
        def check_if_in_tag(block, offset=0):
            if block.isValid():
                text = block.text()
                close_pos = text.find('>', offset)
                open_pos = text.find('<', offset)
                if (close_pos > -1 and open_pos == -1) or (close_pos < open_pos):
                    return True
            return False
        c = editor.textCursor()
        block, offset = c.block(), c.positionInBlock()
        if check_if_in_tag(block, offset) or check_if_in_tag(block.next()):
            return False
        tag = find_closest_containing_tag(block, offset - 1, max_tags=4000)
        if tag is None:
            return False
        c.insertText('/%s>' % tag.name)
        editor.setTextCursor(c)
        return True
    def get_completion_data(self, editor, ev=None):
        # Return completion information when the cursor is inside an href/src
        # attribute of an opening tag, or None otherwise.
        c = editor.textCursor()
        block, offset = c.block(), c.positionInBlock()
        oblock, boundary = next_tag_boundary(block, offset, forward=False, max_lines=5)
        if boundary is None or not boundary.is_start or boundary.closing:
            # Not inside a opening tag definition
            return
        tagname = boundary.name.lower()
        startpos = oblock.position() + boundary.offset
        c.setPosition(c.position()), c.setPosition(startpos, c.KeepAnchor)
        text = c.selectedText()
        m = self.complete_attr_pat.search(text)
        if m is None:
            return
        attr = m.group(1).lower().split(':')[-1]
        doc_name = editor.completion_doc_name
        if doc_name and attr in {'href', 'src'}:
            # A link
            query = m.group(2) or m.group(3) or ''
            c = current_container()
            names_type = {'a':'text_link', 'img':'image', 'image':'image', 'link':'stylesheet'}.get(tagname)
            idx = query.find('#')
            if idx > -1 and names_type in (None, 'text_link'):
                # The query contains a fragment: offer anchor completion.
                href, query = query[:idx], query[idx+1:]
                name = c.href_to_name(href) if href else doc_name
                if c.mime_map.get(name) in OEB_DOCS:
                    return 'complete_anchor', name, query
            return 'complete_names', (names_type, doc_name, c.root), query
if __name__ == '__main__':  # {{{
    # Manual smoke test: open a sample document in the tweak-book editor so
    # the highlighting and smart-editing behaviour can be checked by eye.
    from calibre.gui2.tweak_book.editor.widget import launch_editor
    launch_editor('''\
<!DOCTYPE html>
<html xml:lang="en" lang="en">
<!--
-->
<head>
<meta charset="utf-8" />
<title>A title with a tag <span> in it, the tag is treated as normal text</title>
<style type="text/css">
body {
color: green;
font-size: 12pt;
}
</style>
<style type="text/css">p.small { font-size: x-small; color:gray }</style>
</head id="invalid attribute on closing tag">
<body lang="en_IN"><p:
<!-- The start of the actual body text -->
<h1 lang="en_US">A heading that should appear in bold, with an <i>italic</i> word</h1>
<p>Some text with inline formatting, that is syntax highlighted. A <b>bold</b> word, and an <em>italic</em> word. \
<i>Some italic text with a <b>bold-italic</b> word in </i>the middle.</p>
<!-- Let's see what exotic constructs like namespace prefixes and empty attributes look like -->
<svg:svg xmlns:svg="http://whatever" />
<input disabled><input disabled /><span attr=<></span>
<!-- Non-breaking spaces are rendered differently from normal spaces, so that they stand out -->
<p>Some\xa0words\xa0separated\xa0by\xa0non\u2011breaking\xa0spaces and non\u2011breaking hyphens.</p>
<p>Some non-BMP unicode text:\U0001f431\U0001f431\U0001f431</p>
</body>
</html>
''', path_is_raw=True, syntax='xml')
# }}}
| gpl-3.0 |
hoihu/micropython | tools/make-frozen.py | 6 | 2285 | #!/usr/bin/env python
#
# Create frozen modules structure for MicroPython.
#
# Usage:
#
# Have a directory with modules to be frozen (only modules, not packages
# supported so far):
#
# frozen/foo.py
# frozen/bar.py
#
# Run script, passing path to the directory above:
#
# ./make-frozen.py frozen > frozen.c
#
# Include frozen.c in your build, having defined MICROPY_MODULE_FROZEN_STR in
# config.
#
from __future__ import print_function
import sys
import os
def module_name(f):
    # The frozen module's name is simply its path relative to the root
    # directory (extension included); no package translation is performed.
    return f
# Collect (relative_path, stat_result) for every file under the given root.
modules = []
if len(sys.argv) > 1:
    root = sys.argv[1].rstrip("/")
    root_len = len(root)
    for dirpath, dirnames, filenames in os.walk(root):
        for f in filenames:
            fullpath = dirpath + "/" + f
            st = os.stat(fullpath)
            modules.append((fullpath[root_len + 1 :], st))
# Emit the generated C source on stdout: a NUL-separated table of module
# names (terminated by an empty name), then the file sizes, then the
# escaped file contents.
print("#include <stdint.h>")
print("const char mp_frozen_str_names[] = {")
for f, st in modules:
    m = module_name(f)
    print('"%s\\0"' % m)
print('"\\0"};')
print("const uint32_t mp_frozen_str_sizes[] = {")
for f, st in modules:
    print("%d," % st.st_size)
print("};")
print("const char mp_frozen_str_content[] = {")
for f, st in modules:
    data = open(sys.argv[1] + "/" + f, "rb").read()
    # We need to properly escape the script data to create a C string.
    # When C parses hex characters of the form \x00 it keeps parsing the hex
    # data until it encounters a non-hex character. Thus one must create
    # strings of the form "data\x01" "abc" to properly encode this kind of
    # data. We could just encode all characters as hex digits but it's nice
    # to be able to read the resulting C code as ASCII when possible.
    data = bytearray(data)  # so Python2 extracts each byte as an integer
    esc_dict = {ord("\n"): "\\n", ord("\r"): "\\r", ord('"'): '\\"', ord("\\"): "\\\\"}
    chrs = ['"']
    break_str = False
    for c in data:
        try:
            chrs.append(esc_dict[c])
        except KeyError:
            if 32 <= c <= 126:
                if break_str:
                    # Close and reopen the string after a hex escape so the
                    # following printable character is not parsed as hex.
                    chrs.append('" "')
                    break_str = False
                chrs.append(chr(c))
            else:
                chrs.append("\\x%02x" % c)
                break_str = True
    chrs.append('\\0"')
    print("".join(chrs))
print("};")
| mit |
tempbottle/kbengine | kbe/src/lib/python/Lib/encodings/mac_farsi.py | 272 | 15170 | """ Python Character Mapping Codec mac_farsi generated from 'MAPPINGS/VENDORS/APPLE/FARSI.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless encoder/decoder pair backed by the generated charmap tables.
    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_table)
    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # charmap encoding is stateless, so each chunk is encoded independently.
    def encode(self, input, final=False):
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    # charmap decoding is stateless, so each chunk is decoded independently.
    def decode(self, input, final=False):
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Inherits everything from Codec and codecs.StreamWriter.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Inherits everything from Codec and codecs.StreamReader.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo consumed by the codecs registry for 'mac-farsi'."""
    return codecs.CodecInfo(
        name='mac-farsi',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> CONTROL CHARACTER
'\x01' # 0x01 -> CONTROL CHARACTER
'\x02' # 0x02 -> CONTROL CHARACTER
'\x03' # 0x03 -> CONTROL CHARACTER
'\x04' # 0x04 -> CONTROL CHARACTER
'\x05' # 0x05 -> CONTROL CHARACTER
'\x06' # 0x06 -> CONTROL CHARACTER
'\x07' # 0x07 -> CONTROL CHARACTER
'\x08' # 0x08 -> CONTROL CHARACTER
'\t' # 0x09 -> CONTROL CHARACTER
'\n' # 0x0A -> CONTROL CHARACTER
'\x0b' # 0x0B -> CONTROL CHARACTER
'\x0c' # 0x0C -> CONTROL CHARACTER
'\r' # 0x0D -> CONTROL CHARACTER
'\x0e' # 0x0E -> CONTROL CHARACTER
'\x0f' # 0x0F -> CONTROL CHARACTER
'\x10' # 0x10 -> CONTROL CHARACTER
'\x11' # 0x11 -> CONTROL CHARACTER
'\x12' # 0x12 -> CONTROL CHARACTER
'\x13' # 0x13 -> CONTROL CHARACTER
'\x14' # 0x14 -> CONTROL CHARACTER
'\x15' # 0x15 -> CONTROL CHARACTER
'\x16' # 0x16 -> CONTROL CHARACTER
'\x17' # 0x17 -> CONTROL CHARACTER
'\x18' # 0x18 -> CONTROL CHARACTER
'\x19' # 0x19 -> CONTROL CHARACTER
'\x1a' # 0x1A -> CONTROL CHARACTER
'\x1b' # 0x1B -> CONTROL CHARACTER
'\x1c' # 0x1C -> CONTROL CHARACTER
'\x1d' # 0x1D -> CONTROL CHARACTER
'\x1e' # 0x1E -> CONTROL CHARACTER
'\x1f' # 0x1F -> CONTROL CHARACTER
' ' # 0x20 -> SPACE, left-right
'!' # 0x21 -> EXCLAMATION MARK, left-right
'"' # 0x22 -> QUOTATION MARK, left-right
'#' # 0x23 -> NUMBER SIGN, left-right
'$' # 0x24 -> DOLLAR SIGN, left-right
'%' # 0x25 -> PERCENT SIGN, left-right
'&' # 0x26 -> AMPERSAND, left-right
"'" # 0x27 -> APOSTROPHE, left-right
'(' # 0x28 -> LEFT PARENTHESIS, left-right
')' # 0x29 -> RIGHT PARENTHESIS, left-right
'*' # 0x2A -> ASTERISK, left-right
'+' # 0x2B -> PLUS SIGN, left-right
',' # 0x2C -> COMMA, left-right; in Arabic-script context, displayed as 0x066C ARABIC THOUSANDS SEPARATOR
'-' # 0x2D -> HYPHEN-MINUS, left-right
'.' # 0x2E -> FULL STOP, left-right; in Arabic-script context, displayed as 0x066B ARABIC DECIMAL SEPARATOR
'/' # 0x2F -> SOLIDUS, left-right
'0' # 0x30 -> DIGIT ZERO; in Arabic-script context, displayed as 0x06F0 EXTENDED ARABIC-INDIC DIGIT ZERO
'1' # 0x31 -> DIGIT ONE; in Arabic-script context, displayed as 0x06F1 EXTENDED ARABIC-INDIC DIGIT ONE
'2' # 0x32 -> DIGIT TWO; in Arabic-script context, displayed as 0x06F2 EXTENDED ARABIC-INDIC DIGIT TWO
'3' # 0x33 -> DIGIT THREE; in Arabic-script context, displayed as 0x06F3 EXTENDED ARABIC-INDIC DIGIT THREE
'4' # 0x34 -> DIGIT FOUR; in Arabic-script context, displayed as 0x06F4 EXTENDED ARABIC-INDIC DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE; in Arabic-script context, displayed as 0x06F5 EXTENDED ARABIC-INDIC DIGIT FIVE
'6' # 0x36 -> DIGIT SIX; in Arabic-script context, displayed as 0x06F6 EXTENDED ARABIC-INDIC DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN; in Arabic-script context, displayed as 0x06F7 EXTENDED ARABIC-INDIC DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT; in Arabic-script context, displayed as 0x06F8 EXTENDED ARABIC-INDIC DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE; in Arabic-script context, displayed as 0x06F9 EXTENDED ARABIC-INDIC DIGIT NINE
':' # 0x3A -> COLON, left-right
';' # 0x3B -> SEMICOLON, left-right
'<' # 0x3C -> LESS-THAN SIGN, left-right
'=' # 0x3D -> EQUALS SIGN, left-right
'>' # 0x3E -> GREATER-THAN SIGN, left-right
'?' # 0x3F -> QUESTION MARK, left-right
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET, left-right
'\\' # 0x5C -> REVERSE SOLIDUS, left-right
']' # 0x5D -> RIGHT SQUARE BRACKET, left-right
'^' # 0x5E -> CIRCUMFLEX ACCENT, left-right
'_' # 0x5F -> LOW LINE, left-right
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET, left-right
'|' # 0x7C -> VERTICAL LINE, left-right
'}' # 0x7D -> RIGHT CURLY BRACKET, left-right
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> CONTROL CHARACTER
'\xc4' # 0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xa0' # 0x81 -> NO-BREAK SPACE, right-left
'\xc7' # 0x82 -> LATIN CAPITAL LETTER C WITH CEDILLA
'\xc9' # 0x83 -> LATIN CAPITAL LETTER E WITH ACUTE
'\xd1' # 0x84 -> LATIN CAPITAL LETTER N WITH TILDE
'\xd6' # 0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xdc' # 0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\xe1' # 0x87 -> LATIN SMALL LETTER A WITH ACUTE
'\xe0' # 0x88 -> LATIN SMALL LETTER A WITH GRAVE
'\xe2' # 0x89 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe4' # 0x8A -> LATIN SMALL LETTER A WITH DIAERESIS
'\u06ba' # 0x8B -> ARABIC LETTER NOON GHUNNA
'\xab' # 0x8C -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left
'\xe7' # 0x8D -> LATIN SMALL LETTER C WITH CEDILLA
'\xe9' # 0x8E -> LATIN SMALL LETTER E WITH ACUTE
'\xe8' # 0x8F -> LATIN SMALL LETTER E WITH GRAVE
'\xea' # 0x90 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
'\xeb' # 0x91 -> LATIN SMALL LETTER E WITH DIAERESIS
'\xed' # 0x92 -> LATIN SMALL LETTER I WITH ACUTE
'\u2026' # 0x93 -> HORIZONTAL ELLIPSIS, right-left
'\xee' # 0x94 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\xef' # 0x95 -> LATIN SMALL LETTER I WITH DIAERESIS
'\xf1' # 0x96 -> LATIN SMALL LETTER N WITH TILDE
'\xf3' # 0x97 -> LATIN SMALL LETTER O WITH ACUTE
'\xbb' # 0x98 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK, right-left
'\xf4' # 0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf6' # 0x9A -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf7' # 0x9B -> DIVISION SIGN, right-left
'\xfa' # 0x9C -> LATIN SMALL LETTER U WITH ACUTE
'\xf9' # 0x9D -> LATIN SMALL LETTER U WITH GRAVE
'\xfb' # 0x9E -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xfc' # 0x9F -> LATIN SMALL LETTER U WITH DIAERESIS
' ' # 0xA0 -> SPACE, right-left
'!' # 0xA1 -> EXCLAMATION MARK, right-left
'"' # 0xA2 -> QUOTATION MARK, right-left
'#' # 0xA3 -> NUMBER SIGN, right-left
'$' # 0xA4 -> DOLLAR SIGN, right-left
'\u066a' # 0xA5 -> ARABIC PERCENT SIGN
'&' # 0xA6 -> AMPERSAND, right-left
"'" # 0xA7 -> APOSTROPHE, right-left
'(' # 0xA8 -> LEFT PARENTHESIS, right-left
')' # 0xA9 -> RIGHT PARENTHESIS, right-left
'*' # 0xAA -> ASTERISK, right-left
'+' # 0xAB -> PLUS SIGN, right-left
'\u060c' # 0xAC -> ARABIC COMMA
'-' # 0xAD -> HYPHEN-MINUS, right-left
'.' # 0xAE -> FULL STOP, right-left
'/' # 0xAF -> SOLIDUS, right-left
'\u06f0' # 0xB0 -> EXTENDED ARABIC-INDIC DIGIT ZERO, right-left (need override)
'\u06f1' # 0xB1 -> EXTENDED ARABIC-INDIC DIGIT ONE, right-left (need override)
'\u06f2' # 0xB2 -> EXTENDED ARABIC-INDIC DIGIT TWO, right-left (need override)
'\u06f3' # 0xB3 -> EXTENDED ARABIC-INDIC DIGIT THREE, right-left (need override)
'\u06f4' # 0xB4 -> EXTENDED ARABIC-INDIC DIGIT FOUR, right-left (need override)
'\u06f5' # 0xB5 -> EXTENDED ARABIC-INDIC DIGIT FIVE, right-left (need override)
'\u06f6' # 0xB6 -> EXTENDED ARABIC-INDIC DIGIT SIX, right-left (need override)
'\u06f7' # 0xB7 -> EXTENDED ARABIC-INDIC DIGIT SEVEN, right-left (need override)
'\u06f8' # 0xB8 -> EXTENDED ARABIC-INDIC DIGIT EIGHT, right-left (need override)
'\u06f9' # 0xB9 -> EXTENDED ARABIC-INDIC DIGIT NINE, right-left (need override)
':' # 0xBA -> COLON, right-left
'\u061b' # 0xBB -> ARABIC SEMICOLON
'<' # 0xBC -> LESS-THAN SIGN, right-left
'=' # 0xBD -> EQUALS SIGN, right-left
'>' # 0xBE -> GREATER-THAN SIGN, right-left
'\u061f' # 0xBF -> ARABIC QUESTION MARK
'\u274a' # 0xC0 -> EIGHT TEARDROP-SPOKED PROPELLER ASTERISK, right-left
'\u0621' # 0xC1 -> ARABIC LETTER HAMZA
'\u0622' # 0xC2 -> ARABIC LETTER ALEF WITH MADDA ABOVE
'\u0623' # 0xC3 -> ARABIC LETTER ALEF WITH HAMZA ABOVE
'\u0624' # 0xC4 -> ARABIC LETTER WAW WITH HAMZA ABOVE
'\u0625' # 0xC5 -> ARABIC LETTER ALEF WITH HAMZA BELOW
'\u0626' # 0xC6 -> ARABIC LETTER YEH WITH HAMZA ABOVE
'\u0627' # 0xC7 -> ARABIC LETTER ALEF
'\u0628' # 0xC8 -> ARABIC LETTER BEH
'\u0629' # 0xC9 -> ARABIC LETTER TEH MARBUTA
'\u062a' # 0xCA -> ARABIC LETTER TEH
'\u062b' # 0xCB -> ARABIC LETTER THEH
'\u062c' # 0xCC -> ARABIC LETTER JEEM
'\u062d' # 0xCD -> ARABIC LETTER HAH
'\u062e' # 0xCE -> ARABIC LETTER KHAH
'\u062f' # 0xCF -> ARABIC LETTER DAL
'\u0630' # 0xD0 -> ARABIC LETTER THAL
'\u0631' # 0xD1 -> ARABIC LETTER REH
'\u0632' # 0xD2 -> ARABIC LETTER ZAIN
'\u0633' # 0xD3 -> ARABIC LETTER SEEN
'\u0634' # 0xD4 -> ARABIC LETTER SHEEN
'\u0635' # 0xD5 -> ARABIC LETTER SAD
'\u0636' # 0xD6 -> ARABIC LETTER DAD
'\u0637' # 0xD7 -> ARABIC LETTER TAH
'\u0638' # 0xD8 -> ARABIC LETTER ZAH
'\u0639' # 0xD9 -> ARABIC LETTER AIN
'\u063a' # 0xDA -> ARABIC LETTER GHAIN
'[' # 0xDB -> LEFT SQUARE BRACKET, right-left
'\\' # 0xDC -> REVERSE SOLIDUS, right-left
']' # 0xDD -> RIGHT SQUARE BRACKET, right-left
'^' # 0xDE -> CIRCUMFLEX ACCENT, right-left
'_' # 0xDF -> LOW LINE, right-left
'\u0640' # 0xE0 -> ARABIC TATWEEL
'\u0641' # 0xE1 -> ARABIC LETTER FEH
'\u0642' # 0xE2 -> ARABIC LETTER QAF
'\u0643' # 0xE3 -> ARABIC LETTER KAF
'\u0644' # 0xE4 -> ARABIC LETTER LAM
'\u0645' # 0xE5 -> ARABIC LETTER MEEM
'\u0646' # 0xE6 -> ARABIC LETTER NOON
'\u0647' # 0xE7 -> ARABIC LETTER HEH
'\u0648' # 0xE8 -> ARABIC LETTER WAW
'\u0649' # 0xE9 -> ARABIC LETTER ALEF MAKSURA
'\u064a' # 0xEA -> ARABIC LETTER YEH
'\u064b' # 0xEB -> ARABIC FATHATAN
'\u064c' # 0xEC -> ARABIC DAMMATAN
'\u064d' # 0xED -> ARABIC KASRATAN
'\u064e' # 0xEE -> ARABIC FATHA
'\u064f' # 0xEF -> ARABIC DAMMA
'\u0650' # 0xF0 -> ARABIC KASRA
'\u0651' # 0xF1 -> ARABIC SHADDA
'\u0652' # 0xF2 -> ARABIC SUKUN
'\u067e' # 0xF3 -> ARABIC LETTER PEH
'\u0679' # 0xF4 -> ARABIC LETTER TTEH
'\u0686' # 0xF5 -> ARABIC LETTER TCHEH
'\u06d5' # 0xF6 -> ARABIC LETTER AE
'\u06a4' # 0xF7 -> ARABIC LETTER VEH
'\u06af' # 0xF8 -> ARABIC LETTER GAF
'\u0688' # 0xF9 -> ARABIC LETTER DDAL
'\u0691' # 0xFA -> ARABIC LETTER RREH
'{' # 0xFB -> LEFT CURLY BRACKET, right-left
'|' # 0xFC -> VERTICAL LINE, right-left
'}' # 0xFD -> RIGHT CURLY BRACKET, right-left
'\u0698' # 0xFE -> ARABIC LETTER JEH
'\u06d2' # 0xFF -> ARABIC LETTER YEH BARREE
)
### Encoding table
# Inverse of decoding_table: maps each Unicode character back to its
# single-byte mac-farsi code point.
encoding_table=codecs.charmap_build(decoding_table)
| lgpl-3.0 |
team147/CanadaCoin | contrib/bitrpc/bitrpc.py | 2348 | 7835 | from jsonrpc import ServiceProxy
import sys
import string
# ===== BEGIN USER SETTINGS =====
# if you do not set these you will be prompted for a password for every command
rpcuser = ""
rpcpass = ""
# ====== END USER SETTINGS ======

# Connect to the local bitcoind JSON-RPC server on the default port;
# embed credentials in the URL only when they were configured above.
if rpcpass == "":
    access = ServiceProxy("http://127.0.0.1:8332")
else:
    access = ServiceProxy("http://"+rpcuser+":"+rpcpass+"@127.0.0.1:8332")

# The RPC method to invoke is the first command-line argument.
cmd = sys.argv[1].lower()
if cmd == "backupwallet":
try:
path = raw_input("Enter destination path/filename: ")
print access.backupwallet(path)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccount":
try:
addr = raw_input("Enter a Bitcoin address: ")
print access.getaccount(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "getaccountaddress":
try:
acct = raw_input("Enter an account name: ")
print access.getaccountaddress(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getaddressesbyaccount":
try:
acct = raw_input("Enter an account name: ")
print access.getaddressesbyaccount(acct)
except:
print "\n---An error occurred---\n"
elif cmd == "getbalance":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getbalance(acct, mc)
except:
print access.getbalance()
except:
print "\n---An error occurred---\n"
elif cmd == "getblockbycount":
try:
height = raw_input("Height: ")
print access.getblockbycount(height)
except:
print "\n---An error occurred---\n"
elif cmd == "getblockcount":
try:
print access.getblockcount()
except:
print "\n---An error occurred---\n"
elif cmd == "getblocknumber":
try:
print access.getblocknumber()
except:
print "\n---An error occurred---\n"
elif cmd == "getconnectioncount":
try:
print access.getconnectioncount()
except:
print "\n---An error occurred---\n"
elif cmd == "getdifficulty":
try:
print access.getdifficulty()
except:
print "\n---An error occurred---\n"
elif cmd == "getgenerate":
try:
print access.getgenerate()
except:
print "\n---An error occurred---\n"
elif cmd == "gethashespersec":
try:
print access.gethashespersec()
except:
print "\n---An error occurred---\n"
elif cmd == "getinfo":
try:
print access.getinfo()
except:
print "\n---An error occurred---\n"
elif cmd == "getnewaddress":
try:
acct = raw_input("Enter an account name: ")
try:
print access.getnewaddress(acct)
except:
print access.getnewaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaccount":
try:
acct = raw_input("Enter an account (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaccount(acct, mc)
except:
print access.getreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "getreceivedbyaddress":
try:
addr = raw_input("Enter a Bitcoin address (optional): ")
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.getreceivedbyaddress(addr, mc)
except:
print access.getreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "gettransaction":
try:
txid = raw_input("Enter a transaction ID: ")
print access.gettransaction(txid)
except:
print "\n---An error occurred---\n"
elif cmd == "getwork":
try:
data = raw_input("Data (optional): ")
try:
print access.gettransaction(data)
except:
print access.gettransaction()
except:
print "\n---An error occurred---\n"
elif cmd == "help":
try:
cmd = raw_input("Command (optional): ")
try:
print access.help(cmd)
except:
print access.help()
except:
print "\n---An error occurred---\n"
elif cmd == "listaccounts":
try:
mc = raw_input("Minimum confirmations (optional): ")
try:
print access.listaccounts(mc)
except:
print access.listaccounts()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaccount":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaccount(mc, incemp)
except:
print access.listreceivedbyaccount()
except:
print "\n---An error occurred---\n"
elif cmd == "listreceivedbyaddress":
try:
mc = raw_input("Minimum confirmations (optional): ")
incemp = raw_input("Include empty? (true/false, optional): ")
try:
print access.listreceivedbyaddress(mc, incemp)
except:
print access.listreceivedbyaddress()
except:
print "\n---An error occurred---\n"
elif cmd == "listtransactions":
try:
acct = raw_input("Account (optional): ")
count = raw_input("Number of transactions (optional): ")
frm = raw_input("Skip (optional):")
try:
print access.listtransactions(acct, count, frm)
except:
print access.listtransactions()
except:
print "\n---An error occurred---\n"
elif cmd == "move":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.move(frm, to, amt, mc, comment)
except:
print access.move(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendfrom":
try:
frm = raw_input("From: ")
to = raw_input("To: ")
amt = raw_input("Amount:")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendfrom(frm, to, amt, mc, comment, commentto)
except:
print access.sendfrom(frm, to, amt)
except:
print "\n---An error occurred---\n"
elif cmd == "sendmany":
try:
frm = raw_input("From: ")
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
mc = raw_input("Minimum confirmations (optional): ")
comment = raw_input("Comment (optional): ")
try:
print access.sendmany(frm,to,mc,comment)
except:
print access.sendmany(frm,to)
except:
print "\n---An error occurred---\n"
elif cmd == "sendtoaddress":
try:
to = raw_input("To (in format address1:amount1,address2:amount2,...): ")
amt = raw_input("Amount:")
comment = raw_input("Comment (optional): ")
commentto = raw_input("Comment-to (optional): ")
try:
print access.sendtoaddress(to,amt,comment,commentto)
except:
print access.sendtoaddress(to,amt)
except:
print "\n---An error occurred---\n"
elif cmd == "setaccount":
try:
addr = raw_input("Address: ")
acct = raw_input("Account:")
print access.setaccount(addr,acct)
except:
print "\n---An error occurred---\n"
elif cmd == "setgenerate":
try:
gen= raw_input("Generate? (true/false): ")
cpus = raw_input("Max processors/cores (-1 for unlimited, optional):")
try:
print access.setgenerate(gen, cpus)
except:
print access.setgenerate(gen)
except:
print "\n---An error occurred---\n"
elif cmd == "settxfee":
try:
amt = raw_input("Amount:")
print access.settxfee(amt)
except:
print "\n---An error occurred---\n"
elif cmd == "stop":
try:
print access.stop()
except:
print "\n---An error occurred---\n"
elif cmd == "validateaddress":
try:
addr = raw_input("Address: ")
print access.validateaddress(addr)
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrase":
try:
pwd = raw_input("Enter wallet passphrase: ")
access.walletpassphrase(pwd, 60)
print "\n---Wallet unlocked---\n"
except:
print "\n---An error occurred---\n"
elif cmd == "walletpassphrasechange":
try:
pwd = raw_input("Enter old wallet passphrase: ")
pwd2 = raw_input("Enter new wallet passphrase: ")
access.walletpassphrasechange(pwd, pwd2)
print
print "\n---Passphrase changed---\n"
except:
print
print "\n---An error occurred---\n"
print
else:
print "Command not found or not supported" | mit |
livc/Paddle | benchmark/tensorflow/rnn/rnn.py | 21 | 8285 | #!/usr/bin/env python
from six.moves import xrange # pylint: disable=redefined-builtin
import math
import time
import numpy as np
from datetime import datetime
import reader
import tensorflow as tf
from tensorflow.python.ops import rnn
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_integer('batch_size', 128, """Batch size.""")
tf.app.flags.DEFINE_integer('num_batches', 100, """Number of batches to run.""")
tf.app.flags.DEFINE_integer('num_layers', 1, """Number of batches to run.""")
tf.app.flags.DEFINE_integer('max_len', 100, """Number of batches to run.""")
tf.app.flags.DEFINE_boolean('forward_only', False,
"""Only run the forward pass.""")
tf.app.flags.DEFINE_boolean('forward_backward_only', False,
"""Only run the forward-forward pass.""")
tf.app.flags.DEFINE_integer('hidden_size', 128, """Number of batches to run.""")
tf.app.flags.DEFINE_integer('emb_size', 128, """Number of batches to run.""")
tf.app.flags.DEFINE_boolean('log_device_placement', False,
"""Whether to log device placement.""")
VOCAB_SIZE = 30000
NUM_CLASS = 2
def get_feed_dict(x_data, y_data=None):
    """Build a feed dict mapping placeholders to numpy batches.

    NOTE(review): this references module-level ``x_input``/``y_input``,
    but those names are only created inside run_benchmark()'s local scope
    below — as written this helper appears unused/broken; confirm before
    calling.  It also assumes ``x_input`` is a list of per-time-step
    placeholders and x_data is [time_steps, batch, feature] — TODO confirm.
    """
    feed_dict = {}
    if y_data is not None:
        feed_dict[y_input] = y_data
    for i in xrange(x_data.shape[0]):
        feed_dict[x_input[i]] = x_data[i, :, :]
    return feed_dict
def get_incoming_shape(incoming):
    """Return the shape of *incoming*.

    Numpy arrays, lists and tuples are measured with np.shape (returns a
    tuple); tf.Tensor inputs return their static shape as a list.
    Anything else raises.
    """
    # Check plain containers first so the helper works on numpy/list input
    # without ever touching the TensorFlow name.
    if isinstance(incoming, (np.ndarray, list, tuple)):
        # Bug fix: the original tested ``type(incoming) in [np.array, list,
        # tuple]`` — np.array is a factory *function*, not a type, so real
        # ndarrays never matched and fell through to the exception.  Using
        # isinstance with np.ndarray also accepts subclasses.
        return np.shape(incoming)
    if isinstance(incoming, tf.Tensor):
        return incoming.get_shape().as_list()
    raise Exception("Invalid incoming layer.")
# Note input * W is done in LSTMCell,
# which is different from PaddlePaddle
def single_lstm(name,
                incoming,
                n_units,
                use_peepholes=True,
                return_seq=False,
                return_state=False):
    """Single-layer LSTM over *incoming* (a list of per-step tensors).

    Returns the full output sequence when return_seq is True, otherwise
    only the last step's output; optionally also the final cell state.
    """
    with tf.name_scope(name):
        lstm_cell = tf.nn.rnn_cell.LSTMCell(n_units, use_peepholes=use_peepholes)
        outputs, final_state = rnn.rnn(lstm_cell, incoming, dtype=tf.float32)
        result = outputs if return_seq else outputs[-1]
        if return_state:
            return result, final_state
        return result
def lstm(name,
         incoming,
         n_units,
         use_peepholes=True,
         return_seq=False,
         return_state=False,
         num_layers=1):
    """Stacked LSTM of *num_layers* layers over *incoming*.

    incoming may be a [batch, time_step, emb_size] tensor (split into a
    per-step list here) or an existing list of per-step tensors.  Returns
    the output sequence or last output, optionally with the final state.
    """
    with tf.name_scope(name) as scope:
        lstm_cell = tf.nn.rnn_cell.LSTMCell(
            n_units, use_peepholes=use_peepholes)
        cell = tf.nn.rnn_cell.MultiRNNCell([lstm_cell] * num_layers)
        initial_state = cell.zero_state(FLAGS.batch_size, dtype=tf.float32)
        if not isinstance(incoming, list):
            # if the input is embedding, the Tensor shape : [None, time_step, emb_size]
            incoming = [
                tf.squeeze(input_, [1])
                for input_ in tf.split(1, FLAGS.max_len, incoming)
            ]
        outputs, state = tf.nn.rnn(cell,
                                   incoming,
                                   initial_state=initial_state,
                                   dtype=tf.float32)
        out = outputs if return_seq else outputs[-1]
        # Bug fix: the original returned the undefined name ``_cell_state``
        # here, raising NameError whenever return_state=True; the final
        # state from tf.nn.rnn is bound to ``state`` above.
        return (out, state) if return_state else out
def embedding(name, incoming, vocab_size, emb_size):
    """Look up trainable [vocab_size, emb_size] embeddings for the integer
    ids in *incoming*."""
    with tf.name_scope(name):
        table = tf.get_variable(
            name + '_emb', [vocab_size, emb_size], dtype=tf.float32)
        return tf.nn.embedding_lookup(table, incoming)
def fc(name, inpOp, nIn, nOut, act=True):
    """Fully connected layer [nIn -> nOut]; ReLU-activated when *act*."""
    with tf.name_scope(name):
        weights = tf.get_variable(
            name + '_w', [nIn, nOut],
            initializer=tf.truncated_normal_initializer(
                stddev=0.01, dtype=tf.float32),
            dtype=tf.float32)
        biases = tf.get_variable(
            name + '_b', [nOut],
            initializer=tf.constant_initializer(
                value=0.0, dtype=tf.float32),
            dtype=tf.float32,
            trainable=True)
        if act:
            # Fused matmul + bias + ReLU.
            return tf.nn.relu_layer(inpOp, weights, biases, name=name)
        return tf.matmul(inpOp, weights) + biases
def inference(seq):
    # Forward pass: embedding -> (stacked) LSTM -> 2-way FC classifier.
    # The prints trace intermediate shapes (Python 2 print statements,
    # consistent with the rest of this legacy benchmark).
    net = embedding('emb', seq, VOCAB_SIZE, FLAGS.emb_size)
    print "emb:", get_incoming_shape(net)
    net = lstm('lstm', net, FLAGS.hidden_size, num_layers=FLAGS.num_layers)
    print "lstm:", get_incoming_shape(net)
    net = fc('fc1', net, FLAGS.hidden_size, 2)
    return net
def loss(logits, labels):
    """Mean softmax cross-entropy over the batch, accumulated into the
    'losses' collection; returns the summed total loss op."""
    # one label index for one sample
    labels = tf.cast(labels, tf.float32)
    per_example = tf.nn.softmax_cross_entropy_with_logits(
        logits, labels, name='cross_entropy_per_example')
    mean_loss = tf.reduce_mean(per_example, name='cross_entropy')
    tf.add_to_collection('losses', mean_loss)
    return tf.add_n(tf.get_collection('losses'), name='total_loss')
def time_tensorflow_run(session, target, x_input, y_input, info_string):
    """Run *target* for FLAGS.num_batches timed steps and print stats.

    Feeds IMDB batches from reader into the given placeholders; the first
    num_steps_burn_in iterations are warm-up and excluded from the stats.
    """
    num_steps_burn_in = 50
    total_duration = 0.0
    total_duration_squared = 0.0
    if not isinstance(target, list):
        target = [target]
    # Group the ops so a single session.run drives all of them.
    target_op = tf.group(*target)
    train_dataset = reader.create_datasets("imdb.pkl", VOCAB_SIZE)
    for i in xrange(FLAGS.num_batches + num_steps_burn_in):
        start_time = time.time()
        data, label = train_dataset.next_batch(FLAGS.batch_size)
        _ = session.run(target_op, feed_dict={x_input: data, y_input: label})
        duration = time.time() - start_time
        # NOTE(review): the strict '>' also skips step num_steps_burn_in
        # itself, so only num_batches - 1 durations are accumulated while
        # the mean divides by num_batches — confirm whether intended.
        if i > num_steps_burn_in:
            if not i % 10:
                print('%s: step %d, duration = %.3f' %
                      (datetime.now(), i - num_steps_burn_in, duration))
            total_duration += duration
            total_duration_squared += duration * duration
    # Mean and standard deviation of per-batch wall time.
    mn = total_duration / FLAGS.num_batches
    vr = total_duration_squared / FLAGS.num_batches - mn * mn
    sd = math.sqrt(vr)
    print('%s: %s across %d steps, %.3f +/- %.3f sec / batch' %
          (datetime.now(), info_string, FLAGS.num_batches, mn, sd))
def run_benchmark():
    """Build the graph and time the forward and/or forward-backward pass,
    as selected by the --forward_only / --forward_backward_only flags."""
    with tf.Graph().as_default():
        global_step = 0
        with tf.device('/cpu:0'):
            # Step counter incremented by apply_gradients; kept on CPU.
            global_step = tf.Variable(0, trainable=False)
        with tf.device('/gpu:0'):
            #x_input = tf.placeholder(tf.int32, [None, FLAGS.max_len], name="x_input")
            #y_input = tf.placeholder(tf.int32, [None, NUM_CLASS], name="y_input")
            x_input = tf.placeholder(
                tf.int32, [FLAGS.batch_size, FLAGS.max_len], name="x_input")
            y_input = tf.placeholder(
                tf.int32, [FLAGS.batch_size, NUM_CLASS], name="y_input")
            # Build model, loss, and a plain Adam training op.
            last_layer = inference(x_input)
            objective = loss(last_layer, y_input)

            opt = tf.train.AdamOptimizer(0.001)
            grads = opt.compute_gradients(objective)
            apply_gradient_op = opt.apply_gradients(
                grads, global_step=global_step)

            init = tf.initialize_all_variables()
            sess = tf.Session(config=tf.ConfigProto(
                allow_soft_placement=True,
                log_device_placement=FLAGS.log_device_placement))
            sess.run(init)

            run_forward = True
            run_forward_backward = True
            if FLAGS.forward_only and FLAGS.forward_backward_only:
                raise ValueError("Cannot specify --forward_only and "
                                 "--forward_backward_only at the same time.")
            if FLAGS.forward_only:
                run_forward_backward = False
            elif FLAGS.forward_backward_only:
                run_forward = False

            if run_forward:
                time_tensorflow_run(sess, last_layer, x_input, y_input,
                                    "Forward")

            if run_forward_backward:
                # Tie the gradient application to a no-op so timing the
                # no-op also times the backward pass.
                with tf.control_dependencies([apply_gradient_op]):
                    train_op = tf.no_op(name='train')
                time_tensorflow_run(sess, [train_op, objective], x_input,
                                    y_input, "Forward-backward")
def main(_):
    # Entry point invoked by tf.app.run(); the unused argument receives
    # the parsed argv list.
    run_benchmark()


if __name__ == '__main__':
    tf.app.run()
| apache-2.0 |
dracorpg/python-ivi | ivi/agilent/agilentDSA90804A.py | 7 | 1686 | """
Python Interchangeable Virtual Instrument Library
Copyright (c) 2012-2014 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .agilent90000 import *
class agilentDSA90804A(agilent90000):
    "Agilent Infiniium DSA90804A IVI oscilloscope driver"

    def __init__(self, *args, **kwargs):
        # Default the expected *IDN? model string unless the caller has
        # already set one.
        # NOTE(review): 'DSO90804A' differs from the class name (DSA...);
        # DSO and DSA are distinct Infiniium product lines — confirm the
        # intended model string.
        self.__dict__.setdefault('_instrument_id', 'DSO90804A')

        super(agilentDSA90804A, self).__init__(*args, **kwargs)

        # Model-specific capabilities: 4 analog channels, no digital
        # channels, 8 GHz bandwidth.
        self._analog_channel_count = 4
        self._digital_channel_count = 0
        self._channel_count = self._analog_channel_count + self._digital_channel_count
        self._bandwidth = 8e9

        self._init_channels()
| mit |
40223220/worktogether | static/Brython3.1.1-20150328-091302/Lib/unittest/suite.py | 748 | 9715 | """TestSuite"""
import sys
from . import case
from . import util
__unittest = True
def _call_if_exists(parent, attr):
func = getattr(parent, attr, lambda: None)
func()
class BaseTestSuite(object):
    """A simple test suite that doesn't provide class or module shared fixtures.
    """
    def __init__(self, tests=()):
        self._tests = []  # flat list of tests and/or nested suites
        self.addTests(tests)

    def __repr__(self):
        return "<%s tests=%s>" % (util.strclass(self.__class__), list(self))

    def __eq__(self, other):
        # Suites compare equal when they hold the same tests in the same
        # order and are of the same class.
        if not isinstance(other, self.__class__):
            return NotImplemented
        return list(self) == list(other)

    def __ne__(self, other):
        return not self == other

    def __iter__(self):
        return iter(self._tests)

    def countTestCases(self):
        # Recursive: nested suites report their own totals.
        cases = 0
        for test in self:
            cases += test.countTestCases()
        return cases

    def addTest(self, test):
        # sanity checks: reject non-callables and un-instantiated
        # TestCase/TestSuite *classes* (a common user mistake).
        if not callable(test):
            raise TypeError("{} is not callable".format(repr(test)))
        if isinstance(test, type) and issubclass(test,
                                                 (case.TestCase, TestSuite)):
            raise TypeError("TestCases and TestSuites must be instantiated "
                            "before passing them to addTest()")
        self._tests.append(test)

    def addTests(self, tests):
        # A string is iterable but is never a valid collection of tests.
        if isinstance(tests, str):
            raise TypeError("tests must be an iterable of tests, not a string")
        for test in tests:
            self.addTest(test)

    def run(self, result):
        # Run each test in insertion order, honoring result.shouldStop
        # (set e.g. by failfast or Ctrl-C handling).
        for test in self:
            if result.shouldStop:
                break
            test(result)
        return result

    def __call__(self, *args, **kwds):
        return self.run(*args, **kwds)

    def debug(self):
        """Run the tests without collecting errors in a TestResult"""
        for test in self:
            test.debug()
class TestSuite(BaseTestSuite):
"""A test suite is a composite test consisting of a number of TestCases.
For use, create an instance of TestSuite, then add test case instances.
When all tests have been added, the suite can be passed to a test
runner, such as TextTestRunner. It will run the individual test cases
in the order in which they were added, aggregating the results. When
subclassing, do not forget to call the base class constructor.
"""
def run(self, result, debug=False):
topLevel = False
if getattr(result, '_testRunEntered', False) is False:
result._testRunEntered = topLevel = True
for test in self:
if result.shouldStop:
break
if _isnotsuite(test):
self._tearDownPreviousClass(test, result)
self._handleModuleFixture(test, result)
self._handleClassSetUp(test, result)
result._previousTestClass = test.__class__
if (getattr(test.__class__, '_classSetupFailed', False) or
getattr(result, '_moduleSetUpFailed', False)):
continue
if not debug:
test(result)
else:
test.debug()
if topLevel:
self._tearDownPreviousClass(None, result)
self._handleModuleTearDown(result)
result._testRunEntered = False
return result
def debug(self):
"""Run the tests without collecting errors in a TestResult"""
debug = _DebugResult()
self.run(debug, True)
################################
def _handleClassSetUp(self, test, result):
previousClass = getattr(result, '_previousTestClass', None)
currentClass = test.__class__
if currentClass == previousClass:
return
if result._moduleSetUpFailed:
return
if getattr(currentClass, "__unittest_skip__", False):
return
try:
currentClass._classSetupFailed = False
except TypeError:
# test may actually be a function
# so its class will be a builtin-type
pass
setUpClass = getattr(currentClass, 'setUpClass', None)
if setUpClass is not None:
_call_if_exists(result, '_setupStdout')
try:
setUpClass()
except Exception as e:
if isinstance(result, _DebugResult):
raise
currentClass._classSetupFailed = True
className = util.strclass(currentClass)
errorName = 'setUpClass (%s)' % className
self._addClassOrModuleLevelException(result, e, errorName)
finally:
_call_if_exists(result, '_restoreStdout')
def _get_previous_module(self, result):
previousModule = None
previousClass = getattr(result, '_previousTestClass', None)
if previousClass is not None:
previousModule = previousClass.__module__
return previousModule
def _handleModuleFixture(self, test, result):
previousModule = self._get_previous_module(result)
currentModule = test.__class__.__module__
if currentModule == previousModule:
return
self._handleModuleTearDown(result)
result._moduleSetUpFailed = False
try:
module = sys.modules[currentModule]
except KeyError:
return
setUpModule = getattr(module, 'setUpModule', None)
if setUpModule is not None:
_call_if_exists(result, '_setupStdout')
try:
setUpModule()
except Exception as e:
if isinstance(result, _DebugResult):
raise
result._moduleSetUpFailed = True
errorName = 'setUpModule (%s)' % currentModule
self._addClassOrModuleLevelException(result, e, errorName)
finally:
_call_if_exists(result, '_restoreStdout')
    def _addClassOrModuleLevelException(self, result, exception, errorName):
        """Record a class/module fixture exception on *result*.

        Must be called from within an ``except`` block: the traceback is
        taken from sys.exc_info(), not from *exception* itself.
        """
        error = _ErrorHolder(errorName)
        addSkip = getattr(result, 'addSkip', None)
        # SkipTest raised inside a fixture is reported as a skip, not an error.
        if addSkip is not None and isinstance(exception, case.SkipTest):
            addSkip(error, str(exception))
        else:
            result.addError(error, sys.exc_info())
    def _handleModuleTearDown(self, result):
        """Run tearDownModule for the previously-run module, if any."""
        previousModule = self._get_previous_module(result)
        if previousModule is None:
            return
        # setUpModule failed, so tearDownModule must not run.
        if result._moduleSetUpFailed:
            return
        try:
            module = sys.modules[previousModule]
        except KeyError:
            # Module vanished from sys.modules; nothing to tear down.
            return
        tearDownModule = getattr(module, 'tearDownModule', None)
        if tearDownModule is not None:
            _call_if_exists(result, '_setupStdout')
            try:
                tearDownModule()
            except Exception as e:
                # Debug mode propagates fixture errors immediately.
                if isinstance(result, _DebugResult):
                    raise
                errorName = 'tearDownModule (%s)' % previousModule
                self._addClassOrModuleLevelException(result, e, errorName)
            finally:
                _call_if_exists(result, '_restoreStdout')
    def _tearDownPreviousClass(self, test, result):
        """Run tearDownClass for the previous class when *test* belongs to
        a different class (and the previous setup actually succeeded)."""
        previousClass = getattr(result, '_previousTestClass', None)
        currentClass = test.__class__
        # Same class: not leaving it yet.
        if currentClass == previousClass:
            return
        # Teardown is skipped if the matching setup never ran successfully.
        if getattr(previousClass, '_classSetupFailed', False):
            return
        if getattr(result, '_moduleSetUpFailed', False):
            return
        if getattr(previousClass, "__unittest_skip__", False):
            return
        tearDownClass = getattr(previousClass, 'tearDownClass', None)
        if tearDownClass is not None:
            _call_if_exists(result, '_setupStdout')
            try:
                tearDownClass()
            except Exception as e:
                # Debug mode propagates fixture errors immediately.
                if isinstance(result, _DebugResult):
                    raise
                className = util.strclass(previousClass)
                errorName = 'tearDownClass (%s)' % className
                self._addClassOrModuleLevelException(result, e, errorName)
            finally:
                _call_if_exists(result, '_restoreStdout')
class _ErrorHolder(object):
"""
Placeholder for a TestCase inside a result. As far as a TestResult
is concerned, this looks exactly like a unit test. Used to insert
arbitrary errors into a test suite run.
"""
# Inspired by the ErrorHolder from Twisted:
# http://twistedmatrix.com/trac/browser/trunk/twisted/trial/runner.py
# attribute used by TestResult._exc_info_to_string
failureException = None
def __init__(self, description):
self.description = description
def id(self):
return self.description
def shortDescription(self):
return None
def __repr__(self):
return "<ErrorHolder description=%r>" % (self.description,)
def __str__(self):
return self.id()
def run(self, result):
# could call result.addError(...) - but this test-like object
# shouldn't be run anyway
pass
def __call__(self, result):
return self.run(result)
def countTestCases(self):
return 0
def _isnotsuite(test):
"A crude way to tell apart testcases and suites with duck-typing"
try:
iter(test)
except TypeError:
return True
return False
class _DebugResult(object):
    "Used by the TestSuite to hold previous class when running in debug."
    # Mirrors the TestResult attributes that TestSuite's run/fixture
    # machinery reads and writes.
    _previousTestClass = None
    _moduleSetUpFailed = False
    shouldStop = False
| gpl-3.0 |
home-assistant/home-assistant | tests/components/vera/test_light.py | 5 | 3015 | """Vera tests."""
from unittest.mock import MagicMock
import pyvera as pv
from homeassistant.components.light import ATTR_BRIGHTNESS, ATTR_HS_COLOR
from homeassistant.core import HomeAssistant
from .common import ComponentFactory, new_simple_controller_config
async def test_light(
    hass: HomeAssistant, vera_component_factory: ComponentFactory
) -> None:
    """Exercise a vera dimmer end to end: on/off, color and brightness.

    Drives the HA light services against a mocked pyvera dimmer, then
    simulates the controller push callback and checks the resulting state.
    """
    # Build a mock dimmer with the minimal attribute surface the
    # integration reads.
    vera_device: pv.VeraDimmer = MagicMock(spec=pv.VeraDimmer)
    vera_device.device_id = 1
    vera_device.vera_device_id = vera_device.device_id
    vera_device.comm_failure = False
    vera_device.name = "dev1"
    vera_device.category = pv.CATEGORY_DIMMER
    vera_device.is_switched_on = MagicMock(return_value=False)
    vera_device.get_brightness = MagicMock(return_value=0)
    vera_device.get_color = MagicMock(return_value=[0, 0, 0])
    vera_device.is_dimmable = True
    entity_id = "light.dev1_1"
    component_data = await vera_component_factory.configure_component(
        hass=hass,
        controller_config=new_simple_controller_config(devices=(vera_device,)),
    )
    # update_callback simulates the controller pushing a device update.
    update_callback = component_data.controller_data[0].update_callback
    assert hass.states.get(entity_id).state == "off"
    # Turn on: service call reaches the device, state follows the push.
    await hass.services.async_call(
        "light",
        "turn_on",
        {"entity_id": entity_id},
    )
    await hass.async_block_till_done()
    vera_device.switch_on.assert_called()
    vera_device.is_switched_on.return_value = True
    update_callback(vera_device)
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == "on"
    # Set color: HS (300, 70) is converted to RGB before hitting the device.
    await hass.services.async_call(
        "light",
        "turn_on",
        {"entity_id": entity_id, ATTR_HS_COLOR: [300, 70]},
    )
    await hass.async_block_till_done()
    vera_device.set_color.assert_called_with((255, 76, 255))
    vera_device.is_switched_on.return_value = True
    vera_device.get_color.return_value = (255, 76, 255)
    update_callback(vera_device)
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == "on"
    # Round-trip back to HS loses a little precision (70 -> 70.196).
    assert hass.states.get(entity_id).attributes["hs_color"] == (300.0, 70.196)
    # Set brightness: passed through unchanged.
    await hass.services.async_call(
        "light",
        "turn_on",
        {"entity_id": entity_id, ATTR_BRIGHTNESS: 55},
    )
    await hass.async_block_till_done()
    vera_device.set_brightness.assert_called_with(55)
    vera_device.is_switched_on.return_value = True
    vera_device.get_brightness.return_value = 55
    update_callback(vera_device)
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == "on"
    assert hass.states.get(entity_id).attributes["brightness"] == 55
    # Turn off.
    await hass.services.async_call(
        "light",
        "turn_off",
        {"entity_id": entity_id},
    )
    await hass.async_block_till_done()
    vera_device.switch_off.assert_called()
    vera_device.is_switched_on.return_value = False
    update_callback(vera_device)
    await hass.async_block_till_done()
    assert hass.states.get(entity_id).state == "off"
| apache-2.0 |
postlund/home-assistant | tests/components/withings/common.py | 2 | 13697 | """Common data for for the withings component tests."""
import re
import time
from typing import List
import requests_mock
from withings_api import AbstractWithingsApi
from withings_api.common import (
MeasureGetMeasGroupAttrib,
MeasureGetMeasGroupCategory,
MeasureType,
SleepModel,
SleepState,
)
from homeassistant import data_entry_flow
import homeassistant.components.api as api
import homeassistant.components.http as http
import homeassistant.components.withings.const as const
from homeassistant.config import async_process_ha_core_config
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import CONF_UNIT_SYSTEM, CONF_UNIT_SYSTEM_METRIC
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_entry_oauth2_flow
from homeassistant.setup import async_setup_component
from homeassistant.util import slugify
def get_entity_id(measure, profile) -> str:
    """Get an entity id for a measure and profile."""
    return f"sensor.{const.DOMAIN}_{measure}_{slugify(profile)}"
def assert_state_equals(
    hass: HomeAssistant, profile: str, measure: str, expected
) -> None:
    """Assert the state of a withings sensor."""
    entity_id = get_entity_id(measure, profile)
    state_obj = hass.states.get(entity_id)
    # Entity must exist before its state can be compared.
    assert state_obj, f"Expected entity {entity_id} to exist but it did not"
    # States are stored as strings, so compare against str(expected).
    expected_state = str(expected)
    assert state_obj.state == expected_state, (
        f"Expected {expected} but was {state_obj.state} "
        f"for measure {measure}, {entity_id}"
    )
async def setup_hass(hass: HomeAssistant) -> dict:
    """Configure Home Assistant.

    Sets up the http, api and withings components with five test
    profiles and returns the config dict used, so tests can reuse it.
    """
    profiles = ["Person0", "Person1", "Person2", "Person3", "Person4"]
    hass_config = {
        "homeassistant": {CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_METRIC},
        api.DOMAIN: {"base_url": "http://localhost/"},
        http.DOMAIN: {"server_port": 8080},
        const.DOMAIN: {
            const.CLIENT_ID: "my_client_id",
            const.CLIENT_SECRET: "my_client_secret",
            const.PROFILES: profiles,
        },
    }
    # Core config must be processed before the components are set up.
    await async_process_ha_core_config(hass, hass_config.get("homeassistant"))
    assert await async_setup_component(hass, http.DOMAIN, hass_config)
    assert await async_setup_component(hass, api.DOMAIN, hass_config)
    assert await async_setup_component(hass, const.DOMAIN, hass_config)
    await hass.async_block_till_done()
    return hass_config
async def configure_integration(
    hass: HomeAssistant,
    aiohttp_client,
    aioclient_mock,
    profiles: List[str],
    profile_index: int,
    get_device_response: dict,
    getmeasures_response: dict,
    get_sleep_response: dict,
    get_sleep_summary_response: dict,
) -> None:
    """Configure the integration for a specific profile.

    Mocks all Withings API endpoints with the supplied canned responses,
    then walks the OAuth2 config flow from start to the created entry,
    selecting ``profiles[profile_index]`` when prompted.
    """
    selected_profile = profiles[profile_index]
    # Mock every Withings REST endpoint the integration hits during setup.
    with requests_mock.mock() as rqmck:
        rqmck.get(
            re.compile(AbstractWithingsApi.URL + "/v2/user?.*action=getdevice(&.*|$)"),
            status_code=200,
            json=get_device_response,
        )
        rqmck.get(
            re.compile(AbstractWithingsApi.URL + "/v2/sleep?.*action=get(&.*|$)"),
            status_code=200,
            json=get_sleep_response,
        )
        rqmck.get(
            re.compile(
                AbstractWithingsApi.URL + "/v2/sleep?.*action=getsummary(&.*|$)"
            ),
            status_code=200,
            json=get_sleep_summary_response,
        )
        rqmck.get(
            re.compile(AbstractWithingsApi.URL + "/measure?.*action=getmeas(&.*|$)"),
            status_code=200,
            json=getmeasures_response,
        )
        # Get the withings config flow.
        result = await hass.config_entries.flow.async_init(
            const.DOMAIN, context={"source": SOURCE_USER}
        )
        assert result
        # pylint: disable=protected-access
        # The flow id is carried through the OAuth redirect as a JWT state.
        state = config_entry_oauth2_flow._encode_jwt(
            hass, {"flow_id": result["flow_id"]}
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_EXTERNAL_STEP
        assert result["url"] == (
            "https://account.withings.com/oauth2_user/authorize2?"
            "response_type=code&client_id=my_client_id&"
            "redirect_uri=http://127.0.0.1:8080/auth/external/callback&"
            f"state={state}"
            "&scope=user.info,user.metrics,user.activity"
        )
        # Simulate user being redirected from withings site.
        client = await aiohttp_client(hass.http.app)
        resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
        assert resp.status == 200
        assert resp.headers["content-type"] == "text/html; charset=utf-8"
        # Mock the token exchange that follows the redirect.
        aioclient_mock.post(
            "https://account.withings.com/oauth2/token",
            json={
                "refresh_token": "mock-refresh-token",
                "access_token": "mock-access-token",
                "type": "Bearer",
                "expires_in": 60,
                "userid": "myuserid",
            },
        )
        # Present user with a list of profiles to choose from.
        result = await hass.config_entries.flow.async_configure(result["flow_id"])
        assert result.get("type") == "form"
        assert result.get("step_id") == "profile"
        assert result.get("data_schema").schema["profile"].container == profiles
        # Select the user profile.
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], {const.PROFILE: selected_profile}
        )
        # Finish the config flow by calling it again.
        assert result.get("type") == "create_entry"
        assert result.get("result")
        config_data = result.get("result").data
        assert config_data.get(const.PROFILE) == profiles[profile_index]
        assert config_data.get("auth_implementation") == const.DOMAIN
        assert config_data.get("token")
        # Ensure all the flows are complete.
        flows = hass.config_entries.flow.async_progress()
        assert not flows
        # Wait for remaining tasks to complete.
        await hass.async_block_till_done()
# Canned Withings API payloads used by the tests above.  Shapes mirror the
# real REST responses consumed by the withings_api library.
WITHINGS_GET_DEVICE_RESPONSE_EMPTY = {"status": 0, "body": {"devices": []}}
# One registered device.
WITHINGS_GET_DEVICE_RESPONSE = {
    "status": 0,
    "body": {
        "devices": [
            {
                "type": "type1",
                "model": "model1",
                "battery": "battery1",
                "deviceid": "deviceid1",
                "timezone": "UTC",
            }
        ]
    },
}
WITHINGS_MEASURES_RESPONSE_EMPTY = {
    "status": 0,
    "body": {"updatetime": "2019-08-01", "timezone": "UTC", "measuregrps": []},
}
# Two measure groups: one unambiguous (used) and one ambiguous (ignored).
WITHINGS_MEASURES_RESPONSE = {
    "status": 0,
    "body": {
        "updatetime": "2019-08-01",
        "timezone": "UTC",
        "measuregrps": [
            # Un-ambiguous groups.
            {
                "grpid": 1,
                "attrib": MeasureGetMeasGroupAttrib.DEVICE_ENTRY_FOR_USER.real,
                "date": time.time(),
                "created": time.time(),
                "category": MeasureGetMeasGroupCategory.REAL.real,
                "deviceid": "DEV_ID",
                "more": False,
                "offset": 0,
                "measures": [
                    {"type": MeasureType.WEIGHT, "value": 70, "unit": 0},
                    {"type": MeasureType.FAT_MASS_WEIGHT, "value": 5, "unit": 0},
                    {"type": MeasureType.FAT_FREE_MASS, "value": 60, "unit": 0},
                    {"type": MeasureType.MUSCLE_MASS, "value": 50, "unit": 0},
                    {"type": MeasureType.BONE_MASS, "value": 10, "unit": 0},
                    {"type": MeasureType.HEIGHT, "value": 2, "unit": 0},
                    {"type": MeasureType.TEMPERATURE, "value": 40, "unit": 0},
                    {"type": MeasureType.BODY_TEMPERATURE, "value": 40, "unit": 0},
                    {"type": MeasureType.SKIN_TEMPERATURE, "value": 20, "unit": 0},
                    {"type": MeasureType.FAT_RATIO, "value": 70, "unit": -3},
                    {
                        "type": MeasureType.DIASTOLIC_BLOOD_PRESSURE,
                        "value": 70,
                        "unit": 0,
                    },
                    {
                        "type": MeasureType.SYSTOLIC_BLOOD_PRESSURE,
                        "value": 100,
                        "unit": 0,
                    },
                    {"type": MeasureType.HEART_RATE, "value": 60, "unit": 0},
                    {"type": MeasureType.SP02, "value": 95, "unit": -2},
                    {"type": MeasureType.HYDRATION, "value": 95, "unit": -2},
                    {"type": MeasureType.PULSE_WAVE_VELOCITY, "value": 100, "unit": 0},
                ],
            },
            # Ambiguous groups (we ignore these)
            {
                "grpid": 1,
                "attrib": MeasureGetMeasGroupAttrib.DEVICE_ENTRY_FOR_USER.real,
                "date": time.time(),
                "created": time.time(),
                "category": MeasureGetMeasGroupCategory.REAL.real,
                "deviceid": "DEV_ID",
                "more": False,
                "offset": 0,
                "measures": [
                    {"type": MeasureType.WEIGHT, "value": 71, "unit": 0},
                    {"type": MeasureType.FAT_MASS_WEIGHT, "value": 4, "unit": 0},
                    {"type": MeasureType.FAT_FREE_MASS, "value": 40, "unit": 0},
                    {"type": MeasureType.MUSCLE_MASS, "value": 51, "unit": 0},
                    {"type": MeasureType.BONE_MASS, "value": 11, "unit": 0},
                    {"type": MeasureType.HEIGHT, "value": 201, "unit": 0},
                    {"type": MeasureType.TEMPERATURE, "value": 41, "unit": 0},
                    {"type": MeasureType.BODY_TEMPERATURE, "value": 34, "unit": 0},
                    {"type": MeasureType.SKIN_TEMPERATURE, "value": 21, "unit": 0},
                    {"type": MeasureType.FAT_RATIO, "value": 71, "unit": -3},
                    {
                        "type": MeasureType.DIASTOLIC_BLOOD_PRESSURE,
                        "value": 71,
                        "unit": 0,
                    },
                    {
                        "type": MeasureType.SYSTOLIC_BLOOD_PRESSURE,
                        "value": 101,
                        "unit": 0,
                    },
                    {"type": MeasureType.HEART_RATE, "value": 61, "unit": 0},
                    {"type": MeasureType.SP02, "value": 98, "unit": -2},
                    {"type": MeasureType.HYDRATION, "value": 96, "unit": -2},
                    {"type": MeasureType.PULSE_WAVE_VELOCITY, "value": 102, "unit": 0},
                ],
            },
        ],
    },
}
WITHINGS_SLEEP_RESPONSE_EMPTY = {
    "status": 0,
    "body": {"model": SleepModel.TRACKER.real, "series": []},
}
# Four consecutive one-hour sleep phases covering each SleepState.
WITHINGS_SLEEP_RESPONSE = {
    "status": 0,
    "body": {
        "model": SleepModel.TRACKER.real,
        "series": [
            {
                "startdate": "2019-02-01 00:00:00",
                "enddate": "2019-02-01 01:00:00",
                "state": SleepState.AWAKE.real,
            },
            {
                "startdate": "2019-02-01 01:00:00",
                "enddate": "2019-02-01 02:00:00",
                "state": SleepState.LIGHT.real,
            },
            {
                "startdate": "2019-02-01 02:00:00",
                "enddate": "2019-02-01 03:00:00",
                "state": SleepState.REM.real,
            },
            {
                "startdate": "2019-02-01 03:00:00",
                "enddate": "2019-02-01 04:00:00",
                "state": SleepState.DEEP.real,
            },
        ],
    },
}
WITHINGS_SLEEP_SUMMARY_RESPONSE_EMPTY = {
    "status": 0,
    "body": {"more": False, "offset": 0, "series": []},
}
# Two summary entries for the same night with offset values.
WITHINGS_SLEEP_SUMMARY_RESPONSE = {
    "status": 0,
    "body": {
        "more": False,
        "offset": 0,
        "series": [
            {
                "timezone": "UTC",
                "model": SleepModel.SLEEP_MONITOR.real,
                "startdate": "2019-02-01",
                "enddate": "2019-02-02",
                "date": "2019-02-02",
                "modified": 12345,
                "data": {
                    "wakeupduration": 110,
                    "lightsleepduration": 210,
                    "deepsleepduration": 310,
                    "remsleepduration": 410,
                    "wakeupcount": 510,
                    "durationtosleep": 610,
                    "durationtowakeup": 710,
                    "hr_average": 810,
                    "hr_min": 910,
                    "hr_max": 1010,
                    "rr_average": 1110,
                    "rr_min": 1210,
                    "rr_max": 1310,
                },
            },
            {
                "timezone": "UTC",
                "model": SleepModel.SLEEP_MONITOR.real,
                "startdate": "2019-02-01",
                "enddate": "2019-02-02",
                "date": "2019-02-02",
                "modified": 12345,
                "data": {
                    "wakeupduration": 210,
                    "lightsleepduration": 310,
                    "deepsleepduration": 410,
                    "remsleepduration": 510,
                    "wakeupcount": 610,
                    "durationtosleep": 710,
                    "durationtowakeup": 810,
                    "hr_average": 910,
                    "hr_min": 1010,
                    "hr_max": 1110,
                    "rr_average": 1210,
                    "rr_min": 1310,
                    "rr_max": 1410,
                },
            },
        ],
    },
}
| apache-2.0 |
tylert/chirp.hg | chirp/ui/mainapp.py | 1 | 80685 | # Copyright 2008 Dan Smith <dsmith@danplanet.com>
# Copyright 2012 Tom Hayward <tom@tomh.us>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from datetime import datetime
import os
import tempfile
import urllib
import webbrowser
from glob import glob
import shutil
import time
import logging
import gtk
import gobject
import sys
from chirp.ui import inputdialog, common
from chirp import platform, directory, util
from chirp.drivers import generic_xml, generic_csv, repeaterbook
from chirp.drivers import ic9x, kenwood_live, idrp, vx7, vx5, vx6
from chirp.drivers import icf, ic9x_icf
from chirp import CHIRP_VERSION, chirp_common, detect, errors
from chirp.ui import editorset, clone, miscwidgets, config, reporting, fips
from chirp.ui import bandplans
# GTK/gobject must be told we use threads before any thread work starts.
gobject.threads_init()
LOG = logging.getLogger(__name__)
if __name__ == "__main__":
    # Allow running this module directly from the source tree.
    sys.path.insert(0, "..")
try:
    import serial
except ImportError, e:
    # Fail early with a user-visible message if pyserial is missing.
    common.log_exception()
    common.show_error("\nThe Pyserial module is not installed!")
CONF = config.get()
# Number of entries kept in the File -> Recent menu.
KEEP_RECENT = 8
# RepeaterBook band labels mapped to their query codes.
RB_BANDS = {
    "--All--": 0,
    "10 meters (29MHz)": 29,
    "6 meters (54MHz)": 5,
    "2 meters (144MHz)": 14,
    "1.25 meters (220MHz)": 22,
    "70 centimeters (440MHz)": 4,
    "33 centimeters (900MHz)": 9,
    "23 centimeters (1.2GHz)": 12,
}
def key_bands(band):
    """Sort key for RB_BANDS labels.

    The "--All--" pseudo-band sorts first (-1); real bands then sort
    from longest wavelength to shortest.

    :param band: a label such as "2 meters (144MHz)"
    :returns: a numeric sort key
    """
    if band.startswith("-"):
        return -1
    amount, units, mhz = band.split(" ")
    # Normalize meters to the centimeter scale before ranking.  A
    # conditional expression replaces the fragile "and/or" idiom, which
    # silently breaks if the truthy operand is ever falsy (e.g. 0).
    scale = 100 if units == "meters" else 1
    return 100000 - (float(amount) * scale)
class ModifiedError(Exception):
    # NOTE(review): marker exception, presumably raised when a tab has
    # unsaved modifications -- usage is outside this chunk, confirm.
    pass
class ChirpMain(gtk.Window):
def get_current_editorset(self):
page = self.tabs.get_current_page()
if page is not None:
return self.tabs.get_nth_page(page)
else:
return None
    def ev_tab_switched(self, pagenum=None):
        """Update menu action sensitivity to match the newly-active tab."""
        def set_action_sensitive(action, sensitive):
            # Helper: toggle a single named action in the action group.
            self.menu_ag.get_action(action).set_sensitive(sensitive)
        if pagenum is not None:
            eset = self.tabs.get_nth_page(pagenum)
        else:
            eset = self.get_current_editorset()
        # Upload only makes sense for clone-mode (image-based) radios.
        upload_sens = bool(eset and
                           isinstance(eset.radio, chirp_common.CloneModeRadio))
        # Live and network-source radios cannot be saved to an image file.
        if not eset or isinstance(eset.radio, chirp_common.LiveRadio):
            save_sens = False
        elif isinstance(eset.radio, chirp_common.NetworkSourceRadio):
            save_sens = False
        else:
            save_sens = True
        for i in ["import", "importsrc", "stock"]:
            set_action_sensitive(i,
                                 eset is not None and not eset.get_read_only())
        for i in ["save", "saveas"]:
            set_action_sensitive(i, save_sens)
        for i in ["upload"]:
            set_action_sensitive(i, upload_sens)
        # "Cancel queued" only applies to radios that apply changes live.
        for i in ["cancelq"]:
            set_action_sensitive(i, eset is not None and not save_sens)
        for i in ["export", "close", "columns", "irbook", "irfinder",
                  "move_up", "move_dn", "exchange", "iradioreference",
                  "cut", "copy", "paste", "delete", "viewdeveloper",
                  "all", "properties"]:
            set_action_sensitive(i, eset is not None)
    def ev_status(self, editorset, msg):
        """Replace the radio-status message shown on the status bar."""
        self.sb_radio.pop(0)
        self.sb_radio.push(0, msg)
    def ev_usermsg(self, editorset, msg):
        """Replace the general user message shown on the status bar."""
        self.sb_general.pop(0)
        self.sb_general.push(0, msg)
def ev_editor_selected(self, editorset, editortype):
mappings = {
"memedit": ["view", "edit"],
}
for _editortype, actions in mappings.items():
for _action in actions:
action = self.menu_ag.get_action(_action)
action.set_sensitive(editortype.startswith(_editortype))
def _connect_editorset(self, eset):
eset.connect("want-close", self.do_close)
eset.connect("status", self.ev_status)
eset.connect("usermsg", self.ev_usermsg)
eset.connect("editor-selected", self.ev_editor_selected)
    def do_diff_radio(self):
        """Prompt for two open tabs and show a diff of their memories.

        Special memory numbers: -1 in either field diffs the whole image
        as a hex dump; -2 in the first field shows only differing lines.
        """
        if self.tabs.get_n_pages() < 2:
            common.show_error("Diff tabs requires at least two open tabs!")
            return
        # Collect every open EditorSet so the user can pick two.
        esets = []
        for i in range(0, self.tabs.get_n_pages()):
            esets.append(self.tabs.get_nth_page(i))
        d = gtk.Dialog(title="Diff Radios",
                       buttons=(gtk.STOCK_OK, gtk.RESPONSE_OK,
                                gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL),
                       parent=self)
        label = gtk.Label("")
        label.set_markup("<b>-1</b> for either Mem # does a full-file hex " +
                         "dump with diffs highlighted.\n" +
                         "<b>-2</b> for first Mem # shows " +
                         "<b>only</b> the diffs.")
        d.vbox.pack_start(label, True, True, 0)
        label.show()
        choices = []
        for eset in esets:
            choices.append("%s %s (%s)" % (eset.rthread.radio.VENDOR,
                                           eset.rthread.radio.MODEL,
                                           eset.filename))
        # First radio selector plus its memory-number spinner.
        choice_a = miscwidgets.make_choice(choices, False, choices[0])
        choice_a.show()
        chan_a = gtk.SpinButton()
        chan_a.get_adjustment().set_all(1, -2, 999, 1, 10, 0)
        chan_a.show()
        hbox = gtk.HBox(False, 3)
        hbox.pack_start(choice_a, 1, 1, 1)
        hbox.pack_start(chan_a, 0, 0, 0)
        hbox.show()
        d.vbox.pack_start(hbox, 0, 0, 0)
        # Second radio selector (minimum -1: no diffs-only mode here).
        choice_b = miscwidgets.make_choice(choices, False, choices[1])
        choice_b.show()
        chan_b = gtk.SpinButton()
        chan_b.get_adjustment().set_all(1, -1, 999, 1, 10, 0)
        chan_b.show()
        hbox = gtk.HBox(False, 3)
        hbox.pack_start(choice_b, 1, 1, 1)
        hbox.pack_start(chan_b, 0, 0, 0)
        hbox.show()
        d.vbox.pack_start(hbox, 0, 0, 0)
        r = d.run()
        sel_a = choice_a.get_active_text()
        sel_chan_a = chan_a.get_value()
        sel_b = choice_b.get_active_text()
        sel_chan_b = chan_b.get_value()
        d.destroy()
        if r == gtk.RESPONSE_CANCEL:
            return
        if sel_a == sel_b:
            common.show_error("Can't diff the same tab!")
            return
        LOG.debug("Selected %s@%i and %s@%i" %
                  (sel_a, sel_chan_a, sel_b, sel_chan_b))
        # Build a window title from the two selected filenames.
        name_a = os.path.basename(sel_a)
        name_a = name_a[:name_a.rindex(")")]
        name_b = os.path.basename(sel_b)
        name_b = name_b[:name_b.rindex(")")]
        diffwintitle = "%s@%i diff %s@%i" % (
            name_a, sel_chan_a, name_b, sel_chan_b)
        eset_a = esets[choices.index(sel_a)]
        eset_b = esets[choices.index(sel_b)]
        def _show_diff(mem_b, mem_a):
            # Step 3: Show the diff
            diff = common.simple_diff(mem_a, mem_b)
            common.show_diff_blob(diffwintitle, diff)
        def _get_mem_b(mem_a):
            # Step 2: Get memory b
            job = common.RadioJob(_show_diff, "get_raw_memory",
                                  int(sel_chan_b))
            job.set_cb_args(mem_a)
            eset_b.rthread.submit(job)
        if sel_chan_a >= 0 and sel_chan_b >= 0:
            # Diff numbered memory
            # Step 1: Get memory a
            job = common.RadioJob(_get_mem_b, "get_raw_memory",
                                  int(sel_chan_a))
            eset_a.rthread.submit(job)
        elif isinstance(eset_a.rthread.radio, chirp_common.CloneModeRadio) and\
                isinstance(eset_b.rthread.radio, chirp_common.CloneModeRadio):
            # Diff whole (can do this without a job, since both are clone-mode)
            try:
                addrfmt = CONF.get('hexdump_addrfmt', section='developer',
                                   raw=True)
            except:
                # NOTE(review): bare except; addrfmt may be undefined below
                # if CONF.get raises -- consider narrowing and defaulting.
                pass
            a = util.hexprint(eset_a.rthread.radio._mmap.get_packed(),
                              addrfmt=addrfmt)
            b = util.hexprint(eset_b.rthread.radio._mmap.get_packed(),
                              addrfmt=addrfmt)
            if sel_chan_a == -2:
                diffsonly = True
            else:
                diffsonly = False
            common.show_diff_blob(diffwintitle,
                                  common.simple_diff(a, b, diffsonly))
        else:
            common.show_error("Cannot diff whole live-mode radios!")
    def do_new(self):
        """Create a new untitled CSV editor tab and make it current."""
        eset = editorset.EditorSet(_("Untitled") + ".csv", self)
        self._connect_editorset(eset)
        eset.prime()
        eset.show()
        tab = self.tabs.append_page(eset, eset.get_tab_label())
        self.tabs.set_current_page(tab)
    def _do_manual_select(self, filename):
        """Ask the user to pick a radio model when auto-detect fails.

        Returns an instantiated radio for *filename*, or None if the user
        cancels or the chosen driver cannot open the file.
        """
        # Only clone-mode radios can open an image file from disk.
        radiolist = {}
        for drv, radio in directory.DRV_TO_RADIO.items():
            if not issubclass(radio, chirp_common.CloneModeRadio):
                continue
            radiolist["%s %s" % (radio.VENDOR, radio.MODEL)] = drv
        lab = gtk.Label("""<b><big>Unable to detect model!</big></b>
If you think that it is valid, you can select a radio model below to
force an open attempt. If selecting the model manually works, please
file a bug on the website and attach your image. If selecting the model
does not work, it is likely that you are trying to open some other type
of file.
""")
        lab.set_justify(gtk.JUSTIFY_FILL)
        lab.set_line_wrap(True)
        lab.set_use_markup(True)
        lab.show()
        choice = miscwidgets.make_choice(sorted(radiolist.keys()), False,
                                         sorted(radiolist.keys())[0])
        d = gtk.Dialog(title="Detection Failed",
                       buttons=(gtk.STOCK_OK, gtk.RESPONSE_OK,
                                gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL))
        d.vbox.pack_start(lab, 0, 0, 0)
        d.vbox.pack_start(choice, 0, 0, 0)
        d.vbox.set_spacing(5)
        choice.show()
        d.set_default_size(400, 200)
        # d.set_resizable(False)
        r = d.run()
        d.destroy()
        if r != gtk.RESPONSE_OK:
            return
        try:
            rc = directory.DRV_TO_RADIO[radiolist[choice.get_active_text()]]
            return rc(filename)
        except:
            # NOTE(review): bare except silently swallows driver errors;
            # consider narrowing to Exception and logging.
            return
    def do_open(self, fname=None, tempname=None):
        """Open a radio image file in a new tab.

        If *fname* is not given, prompt the user with a file chooser.
        ICF files are opened read-only; detection failures fall back to
        manual model selection.
        """
        if not fname:
            types = [(_("All files") + " (*.*)", "*"),
                     (_("CHIRP Radio Images") + " (*.img)", "*.img"),
                     (_("CHIRP Files") + " (*.chirp)", "*.chirp"),
                     (_("CSV Files") + " (*.csv)", "*.csv"),
                     (_("DAT Files") + " (*.dat)", "*.dat"),
                     (_("EVE Files (VX5)") + " (*.eve)", "*.eve"),
                     (_("ICF Files") + " (*.icf)", "*.icf"),
                     (_("VX5 Commander Files") + " (*.vx5)", "*.vx5"),
                     (_("VX6 Commander Files") + " (*.vx6)", "*.vx6"),
                     (_("VX7 Commander Files") + " (*.vx7)", "*.vx7"),
                     ]
            fname = platform.get_platform().gui_open_file(types=types)
            if not fname:
                return
        self.record_recent_file(fname)
        # Icom ICF files cannot be modified; offer a read-only open.
        if icf.is_icf_file(fname):
            a = common.ask_yesno_question(
                _("ICF files cannot be edited, only displayed or imported "
                  "into another file. Open in read-only mode?"),
                self)
            if not a:
                return
            read_only = True
        else:
            read_only = False
        if icf.is_9x_icf(fname):
            # We have to actually instantiate the IC9xICFRadio to get its
            # sub-devices
            radio = ic9x_icf.IC9xICFRadio(fname)
        else:
            try:
                radio = directory.get_radio_by_image(fname)
            except errors.ImageMetadataInvalidModel as e:
                # Image came from a CHIRP we cannot fully understand;
                # distinguish "newer CHIRP" from plain unsupported.
                version = e.metadata.get('chirp_version')
                if version:
                    newer = chirp_common.is_version_newer(version)
                    LOG.error('Image is from newer CHIRP with a model we '
                              'do not support')
                    common.show_error(
                        _('Unable to open this image. It was generated '
                          'with a newer version of CHIRP and thus may '
                          'be for a radio model that is not supported '
                          'by this version. Please update to the latest '
                          'version of CHIRP and try again.'))
                else:
                    LOG.error('Image has metadata but has no chirp_version '
                              'and we do not support the model')
                    common.show_error(
                        _('Unable to open this image: unsupported model'))
                return
            except errors.ImageDetectFailed:
                # Fall back to asking the user which model this is.
                radio = self._do_manual_select(fname)
                if not radio:
                    return
                LOG.debug("Manually selected %s" % radio)
            except Exception, e:
                common.log_exception()
                common.show_error(os.path.basename(fname) + ": " + str(e))
                return
        first_tab = False
        try:
            eset = editorset.EditorSet(radio, self,
                                       filename=fname,
                                       tempname=tempname)
        except Exception, e:
            common.log_exception()
            common.show_error(
                _("There was an error opening {fname}: {error}").format(
                    fname=fname,
                    error=e))
            return
        eset.set_read_only(read_only)
        self._connect_editorset(eset)
        eset.show()
        self.tabs.append_page(eset, eset.get_tab_label())
        # Surface any non-fatal errors the driver collected during open.
        if hasattr(eset.rthread.radio, "errors") and \
                eset.rthread.radio.errors:
            msg = _("{num} errors during open:").format(
                num=len(eset.rthread.radio.errors))
            common.show_error_text(msg,
                                   "\r\n".join(eset.rthread.radio.errors))
        self._show_information(radio)
    def do_live_warning(self, radio):
        """Show the one-time "live mode" explanation dialog for *radio*.

        The user's "Don't show this again" choice is persisted in the
        noconfirm config section.
        """
        d = gtk.MessageDialog(parent=self, buttons=gtk.BUTTONS_OK)
        d.set_markup("<big><b>" + _("Note:") + "</b></big>")
        msg = _("The {vendor} {model} operates in <b>live mode</b>. "
                "This means that any changes you make are immediately sent "
                "to the radio. Because of this, you cannot perform the "
                "<u>Save</u> or <u>Upload</u> operations. If you wish to "
                "edit the contents offline, please <u>Export</u> to a CSV "
                "file, using the <b>File menu</b>.")
        msg = msg.format(vendor=radio.VENDOR, model=radio.MODEL)
        d.format_secondary_markup(msg)
        again = gtk.CheckButton(_("Don't show this again"))
        again.show()
        d.vbox.pack_start(again, 0, 0, 0)
        d.run()
        CONF.set_bool("live_mode", again.get_active(), "noconfirm")
        d.destroy()
    def do_open_live(self, radio, tempname=None, read_only=False):
        """Open an already-instantiated radio (live or just-cloned) in a tab."""
        eset = editorset.EditorSet(radio, self, tempname=tempname)
        eset.connect("want-close", self.do_close)
        eset.connect("status", self.ev_status)
        eset.set_read_only(read_only)
        eset.show()
        self.tabs.append_page(eset, eset.get_tab_label())
        if isinstance(radio, chirp_common.LiveRadio):
            reporting.report_model_usage(radio, "live", True)
            # Warn about live-mode semantics unless the user opted out.
            if not CONF.get_bool("live_mode", "noconfirm"):
                self.do_live_warning(radio)
    def do_save(self, eset=None):
        """Save the given (or current) editor set to its existing file."""
        if not eset:
            eset = self.get_current_editorset()
        # For usability, allow Ctrl-S to short-circuit to Save-As if
        # we are working on a yet-to-be-saved image
        if not os.path.exists(eset.filename):
            return self.do_saveas()
        eset.save()
    def do_saveas(self):
        """Prompt for a filename and save the current editor set to it.

        The default name comes from the configurable default_filename
        template; extra export formats are offered for Yaesu models.
        """
        eset = self.get_current_editorset()
        label = _("{vendor} {model} image file").format(
            vendor=eset.radio.VENDOR,
            model=eset.radio.MODEL)
        defname_format = CONF.get("default_filename", "global") or \
            "{vendor}_{model}_{date}"
        defname = defname_format.format(
            vendor=eset.radio.VENDOR,
            model=eset.radio.MODEL,
            date=datetime.now().strftime('%Y%m%d')
        ).replace('/', '_')
        types = [(label + " (*.%s)" % eset.radio.FILE_EXTENSION,
                  eset.radio.FILE_EXTENSION)]
        # Offer third-party export formats for the Yaesu VX series.
        if isinstance(eset.radio, vx7.VX7Radio):
            types += [(_("VX7 Commander") + " (*.vx7)", "vx7")]
        elif isinstance(eset.radio, vx6.VX6Radio):
            types += [(_("VX6 Commander") + " (*.vx6)", "vx6")]
        elif isinstance(eset.radio, vx5.VX5Radio):
            types += [(_("EVE") + " (*.eve)", "eve")]
            types += [(_("VX5 Commander") + " (*.vx5)", "vx5")]
        # Loop until the user picks an unused name, confirms overwrite,
        # or cancels the dialog.
        while True:
            fname = platform.get_platform().gui_save_file(default_name=defname,
                                                          types=types)
            if not fname:
                return
            if os.path.exists(fname):
                dlg = inputdialog.OverwriteDialog(fname)
                owrite = dlg.run()
                dlg.destroy()
                if owrite == gtk.RESPONSE_OK:
                    break
            else:
                break
        try:
            eset.save(fname)
        except Exception, e:
            d = inputdialog.ExceptionDialog(e)
            d.run()
            d.destroy()
    def cb_clonein(self, radio, emsg=None):
        """Completion callback for a download (clone-in) operation.

        On success the cloned radio is opened in a new untitled tab; on
        failure the error is shown in a dialog.
        """
        radio.pipe.close()
        # NOTE(review): the third argument is bool(emsg) here but True in
        # cb_cloneout -- verify against report_model_usage's signature.
        reporting.report_model_usage(radio, "download", bool(emsg))
        if not emsg:
            self.do_open_live(radio, tempname="(" + _("Untitled") + ")")
        else:
            d = inputdialog.ExceptionDialog(emsg)
            d.run()
            d.destroy()
    def cb_cloneout(self, radio, emsg=None):
        """Completion callback for an upload (clone-out) operation."""
        radio.pipe.close()
        reporting.report_model_usage(radio, "upload", True)
        # Only failures need user attention on upload.
        if emsg:
            d = inputdialog.ExceptionDialog(emsg)
            d.run()
            d.destroy()
def _get_recent_list(self):
recent = []
for i in range(0, KEEP_RECENT):
fn = CONF.get("recent%i" % i, "state")
if fn:
recent.append(fn)
return recent
def _set_recent_list(self, recent):
for fn in recent:
CONF.set("recent%i" % recent.index(fn), fn, "state")
    def update_recent_files(self):
        """Rebuild the File -> Recent menu from the stored recent list."""
        i = 0
        for fname in self._get_recent_list():
            action_name = "recent%i" % i
            path = "/MenuBar/file/recent"
            # Remove any stale action occupying this slot first.
            old_action = self.menu_ag.get_action(action_name)
            if old_action:
                self.menu_ag.remove_action(old_action)
            # Escape underscores so they are not treated as mnemonics.
            file_basename = os.path.basename(fname).replace("_", "__")
            action = gtk.Action(
                action_name, "_%i. %s" % (i + 1, file_basename),
                _("Open recent file {name}").format(name=fname), "")
            action.connect("activate", lambda a, f: self.do_open(f), fname)
            mid = self.menu_uim.new_merge_id()
            self.menu_uim.add_ui(mid, path,
                                 action_name, action_name,
                                 gtk.UI_MANAGER_MENUITEM, False)
            self.menu_ag.add_action(action)
            i += 1
    def record_recent_file(self, filename):
        """Add *filename* to the front of the recent list and refresh the menu.

        A file already present keeps its position (it is not moved to
        the front); the list is capped at KEEP_RECENT entries.
        """
        recent_files = self._get_recent_list()
        if filename not in recent_files:
            if len(recent_files) == KEEP_RECENT:
                # Drop the oldest entry to make room.
                del recent_files[-1]
            recent_files.insert(0, filename)
            self._set_recent_list(recent_files)
        self.update_recent_files()
def import_stock_config(self, action, config):
eset = self.get_current_editorset()
count = eset.do_import(config)
    def copy_shipped_stock_configs(self, stock_dir):
        """Copy the stock config CSVs shipped with CHIRP into *stock_dir*.

        Existing files are left untouched.  Returns True on success,
        False if any copy fails.
        """
        basepath = platform.get_platform().find_resource("stock_configs")
        files = glob(os.path.join(basepath, "*.csv"))
        for fn in files:
            if os.path.exists(os.path.join(stock_dir, os.path.basename(fn))):
                # Never clobber a user's (possibly edited) copy.
                LOG.info("Skipping existing stock config")
                continue
            try:
                shutil.copy(fn, stock_dir)
                LOG.debug("Copying %s -> %s" % (fn, stock_dir))
            except Exception, e:
                LOG.error("Unable to copy %s to %s: %s" % (fn, stock_dir, e))
                return False
        return True
    def update_stock_configs(self):
        """Populate the menus with one import and one open entry per
        stock configuration file in the user's config directory."""
        stock_dir = platform.get_platform().config_file("stock_configs")
        if not os.path.isdir(stock_dir):
            try:
                os.mkdir(stock_dir)
            except Exception, e:
                LOG.error("Unable to create directory: %s" % stock_dir)
                return
        # Seed the directory with the configs shipped with CHIRP.
        if not self.copy_shipped_stock_configs(stock_dir):
            return
        def _do_import_action(config):
            # Add a "Radio -> Import from stock config" menu entry.
            name = os.path.splitext(os.path.basename(config))[0]
            action_name = "stock-%i" % configs.index(config)
            path = "/MenuBar/radio/stock"
            action = gtk.Action(action_name,
                                name,
                                _("Import stock "
                                  "configuration {name}").format(name=name),
                                "")
            action.connect("activate", self.import_stock_config, config)
            mid = self.menu_uim.new_merge_id()
            mid = self.menu_uim.add_ui(mid, path,
                                       action_name, action_name,
                                       gtk.UI_MANAGER_MENUITEM, False)
            self.menu_ag.add_action(action)
        def _do_open_action(config):
            # Add a "File -> Open stock config" menu entry.
            name = os.path.splitext(os.path.basename(config))[0]
            action_name = "openstock-%i" % configs.index(config)
            path = "/MenuBar/file/openstock"
            action = gtk.Action(action_name,
                                name,
                                _("Open stock "
                                  "configuration {name}").format(name=name),
                                "")
            action.connect("activate", lambda a, c: self.do_open(c), config)
            mid = self.menu_uim.new_merge_id()
            mid = self.menu_uim.add_ui(mid, path,
                                       action_name, action_name,
                                       gtk.UI_MANAGER_MENUITEM, False)
            self.menu_ag.add_action(action)
        configs = glob(os.path.join(stock_dir, "*.csv"))
        for config in configs:
            _do_import_action(config)
            _do_open_action(config)
def _confirm_experimental(self, rclass):
    """Ask the user to confirm use of an experimental driver.

    A previously squelched "yes" (stored as False under the per-class
    state key) skips the prompt.  Returns True when the user agreed,
    now or previously.
    """
    sql_key = "warn_experimental_%s" % directory.radio_class_id(rclass)
    # Squelched earlier: stored flag is False -> proceed silently
    if CONF.is_defined(sql_key, "state") and \
            not CONF.get_bool(sql_key, "state"):
        return True
    resp, squelch = common.show_warning(
        _("This radio's driver is experimental. "
          "Do you want to proceed?"),
        rclass.get_prompts().experimental,
        title=_("Proceed with experimental driver?"),
        buttons=gtk.BUTTONS_YES_NO,
        can_squelch=True)
    agreed = resp == gtk.RESPONSE_YES
    if agreed:
        # Remember the squelch choice so we only nag when asked to
        CONF.set_bool(sql_key, not squelch, "state")
    return agreed
def _show_information(self, radio):
    """Show the driver's informational prompt for *radio*, if any.

    Honors the global "don't show again" preference and offers a
    checkbox to set it from the dialog itself.
    """
    message = radio.get_prompts().info
    if message is None:
        return
    if CONF.get_bool("clone_information", "noconfirm"):
        return
    d = gtk.MessageDialog(parent=self, buttons=gtk.BUTTONS_OK)
    d.set_markup("<big><b>" + _("{name} Information").format(
                 name=radio.get_name()) + "</b></big>")
    msg = _("{information}").format(information=message)
    d.format_secondary_markup(msg)
    again = gtk.CheckButton(
        _("Don't show information for any radio again"))
    again.show()
    # Keep the menu toggle in sync with the checkbox (inverted sense)
    again.connect("toggled", lambda action:
                  self.infomenu.set_active(not action.get_active()))
    d.vbox.pack_start(again, 0, 0, 0)
    # NOTE(review): relies on MessageDialog's internal child layout --
    # index 2 is assumed to be the action-button box
    h_button_box = d.vbox.get_children()[2]
    try:
        ok_button = h_button_box.get_children()[0]
        ok_button.grab_default()
        ok_button.grab_focus()
    except AttributeError:
        # don't grab focus on GTK+ 2.0
        pass
    d.run()
    d.destroy()
def _show_instructions(self, radio, message):
    """Show pre-clone instructions (*message*) for *radio*, if any.

    Honors the global "don't show again" preference and offers a
    checkbox to set it from the dialog itself.
    """
    if message is None:
        return
    if CONF.get_bool("clone_instructions", "noconfirm"):
        return
    d = gtk.MessageDialog(parent=self, buttons=gtk.BUTTONS_OK)
    d.set_markup("<big><b>" + _("{name} Instructions").format(
                 name=radio.get_name()) + "</b></big>")
    msg = _("{instructions}").format(instructions=message)
    d.format_secondary_markup(msg)
    again = gtk.CheckButton(
        _("Don't show instructions for any radio again"))
    again.show()
    # Keep the menu toggle in sync with the checkbox (inverted sense)
    again.connect("toggled", lambda action:
                  self.clonemenu.set_active(not action.get_active()))
    d.vbox.pack_start(again, 0, 0, 0)
    # NOTE(review): relies on MessageDialog's internal child layout --
    # index 2 is assumed to be the action-button box
    h_button_box = d.vbox.get_children()[2]
    try:
        ok_button = h_button_box.get_children()[0]
        ok_button.grab_default()
        ok_button.grab_focus()
    except AttributeError:
        # don't grab focus on GTK+ 2.0
        pass
    d.run()
    d.destroy()
def do_download(self, port=None, rtype=None):
d = clone.CloneSettingsDialog(parent=self)
settings = d.run()
d.destroy()
if not settings:
return
rclass = settings.radio_class
if issubclass(rclass, chirp_common.ExperimentalRadio) and \
not self._confirm_experimental(rclass):
# User does not want to proceed with experimental driver
return
self._show_instructions(rclass, rclass.get_prompts().pre_download)
LOG.debug("User selected %s %s on port %s" %
(rclass.VENDOR, rclass.MODEL, settings.port))
try:
ser = serial.Serial(port=settings.port,
baudrate=rclass.BAUD_RATE,
rtscts=rclass.HARDWARE_FLOW,
timeout=0.25)
ser.flushInput()
except serial.SerialException, e:
d = inputdialog.ExceptionDialog(e)
d.run()
d.destroy()
return
radio = settings.radio_class(ser)
fn = tempfile.mktemp()
if isinstance(radio, chirp_common.CloneModeRadio):
ct = clone.CloneThread(radio, "in", cb=self.cb_clonein,
parent=self)
ct.start()
else:
self.do_open_live(radio)
self._show_information(rclass) # show Info prompt now
def do_upload(self, port=None, rtype=None):
    """Upload the current tab's image to its radio.

    port/rtype are accepted for menu-handler compatibility.  The
    driver's display_pre_upload_prompt_before_opening_port flag decides
    whether the pre-upload instructions are shown before or after the
    serial port is opened (some radios must be in clone mode before the
    port is touched).
    """
    eset = self.get_current_editorset()
    radio = eset.radio
    settings = clone.CloneSettings()
    settings.radio_class = radio.__class__
    d = clone.CloneSettingsDialog(settings, parent=self)
    settings = d.run()
    d.destroy()
    if not settings:
        return
    prompts = radio.get_prompts()
    # Prompt first, then open the port (driver asked for this order)
    if prompts.display_pre_upload_prompt_before_opening_port is True:
        LOG.debug("Opening port after pre_upload prompt.")
        self._show_instructions(radio, prompts.pre_upload)
    if isinstance(radio, chirp_common.ExperimentalRadio) and \
            not self._confirm_experimental(radio.__class__):
        # User does not want to proceed with experimental driver
        return
    try:
        ser = serial.Serial(port=settings.port,
                            baudrate=radio.BAUD_RATE,
                            rtscts=radio.HARDWARE_FLOW,
                            timeout=0.25)
        ser.flushInput()
    except serial.SerialException, e:
        d = inputdialog.ExceptionDialog(e)
        d.run()
        d.destroy()
        return
    # Port already open; show the instructions now (the other ordering)
    if prompts.display_pre_upload_prompt_before_opening_port is False:
        LOG.debug("Opening port before pre_upload prompt.")
        self._show_instructions(radio, prompts.pre_upload)
    radio.set_pipe(ser)
    ct = clone.CloneThread(radio, "out", cb=self.cb_cloneout, parent=self)
    ct.start()
def do_close(self, tab_child=None):
    """Close *tab_child* (or the current tab), prompting to save.

    Raises ModifiedError when the user cancels out of the save prompt.
    Returns True on success, False when there is no tab to close.
    """
    if tab_child:
        eset = tab_child
    else:
        eset = self.get_current_editorset()
    if not eset:
        return False
    if eset.is_modified():
        dlg = miscwidgets.YesNoDialog(
            title=_("Save Changes?"), parent=self,
            buttons=(gtk.STOCK_YES, gtk.RESPONSE_YES,
                     gtk.STOCK_NO, gtk.RESPONSE_NO,
                     gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL))
        dlg.set_text(_("File is modified, save changes before closing?"))
        res = dlg.run()
        dlg.destroy()
        if res == gtk.RESPONSE_YES:
            self.do_save(eset)
        elif res != gtk.RESPONSE_NO:
            # Cancel (or dialog dismissed): abort the close entirely
            raise ModifiedError()
    # Teardown order matters: stop/join the radio thread before
    # closing its pipe or removing the page
    eset.rthread.stop()
    eset.rthread.join()
    eset.prepare_close()
    if eset.radio.pipe:
        eset.radio.pipe.close()
    if isinstance(eset.radio, chirp_common.LiveRadio):
        # A live radio tab is gone, so "open live" becomes legal again
        action = self.menu_ag.get_action("openlive")
        if action:
            action.set_sensitive(True)
    page = self.tabs.page_num(eset)
    if page is not None:
        self.tabs.remove_page(page)
    return True
def do_import(self):
    """Prompt for a source file and import its memories into the
    current tab, reporting model usage on success."""
    file_types = [(_("All files") + " (*.*)", "*"),
                  (_("CHIRP Files") + " (*.chirp)", "*.chirp"),
                  (_("CHIRP Radio Images") + " (*.img)", "*.img"),
                  (_("CSV Files") + " (*.csv)", "*.csv"),
                  (_("DAT Files") + " (*.dat)", "*.dat"),
                  (_("EVE Files (VX5)") + " (*.eve)", "*.eve"),
                  (_("ICF Files") + " (*.icf)", "*.icf"),
                  (_("Kenwood HMK Files") + " (*.hmk)", "*.hmk"),
                  (_("Kenwood ITM Files") + " (*.itm)", "*.itm"),
                  (_("Travel Plus Files") + " (*.tpe)", "*.tpe"),
                  (_("VX5 Commander Files") + " (*.vx5)", "*.vx5"),
                  (_("VX6 Commander Files") + " (*.vx6)", "*.vx6"),
                  (_("VX7 Commander Files") + " (*.vx7)", "*.vx7")]
    chosen = platform.get_platform().gui_open_file(types=file_types)
    if not chosen:
        return
    editorset = self.get_current_editorset()
    imported = editorset.do_import(chosen)
    reporting.report_model_usage(editorset.rthread.radio, "import",
                                 imported > 0)
def do_dmrmarc_prompt(self):
    """Prompt for DMR-MARC query parameters (city/state/country).

    Values are pre-filled from and persisted to the "dmrmarc" config
    section.  Returns True when the user confirmed, False on cancel.
    Field keys carry a leading digit only to force display order;
    k[1:] strips it for labels and config keys.
    """
    fields = {"1City": (gtk.Entry(), lambda x: x),
              "2State": (gtk.Entry(), lambda x: x),
              "3Country": (gtk.Entry(), lambda x: x),
              }
    d = inputdialog.FieldDialog(title=_("DMR-MARC Repeater Database Dump"),
                                parent=self)
    for k in sorted(fields.keys()):
        d.add_field(k[1:], fields[k][0])
        fields[k][0].set_text(CONF.get(k[1:], "dmrmarc") or "")
    while d.run() == gtk.RESPONSE_OK:
        for k in sorted(fields.keys()):
            widget, validator = fields[k]
            try:
                # Persist only values the (permissive) validator accepts
                if validator(widget.get_text()):
                    CONF.set(k[1:], widget.get_text(), "dmrmarc")
                    continue
            except Exception:
                pass
        d.destroy()
        return True
    d.destroy()
    return False
def do_dmrmarc(self, do_import):
    """Query DMR-MARC and import into the current tab (do_import=True)
    or open the results read-only in a new live tab."""
    self.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
    if not self.do_dmrmarc_prompt():
        self.window.set_cursor(None)
        return
    city = CONF.get("city", "dmrmarc")
    state = CONF.get("state", "dmrmarc")
    country = CONF.get("country", "dmrmarc")
    # Do this in case the import process is going to take a while
    # to make sure we process events leading up to this
    gtk.gdk.window_process_all_updates()
    while gtk.events_pending():
        gtk.main_iteration(False)
    if do_import:
        eset = self.get_current_editorset()
        # The editorset understands the dmrmarc:// pseudo-URL scheme
        dmrmarcstr = "dmrmarc://%s/%s/%s" % (city, state, country)
        eset.do_import(dmrmarcstr)
    else:
        try:
            from chirp import dmrmarc
            radio = dmrmarc.DMRMARCRadio(None)
            radio.set_params(city, state, country)
            self.do_open_live(radio, read_only=True)
        except errors.RadioError, e:
            common.show_error(e)
    self.window.set_cursor(None)
def do_repeaterbook_political_prompt(self):
    """Prompt for a RepeaterBook state/county/band query.

    Shows a one-time credit dialog, pre-selects defaults from the
    "repeaterbook" config section (falling back to Oregon / --All--),
    and persists the chosen FIPS codes and band on OK.  Returns True
    when the user confirmed, False on cancel.
    """
    if not CONF.get_bool("has_seen_credit", "repeaterbook"):
        d = gtk.MessageDialog(parent=self, buttons=gtk.BUTTONS_OK)
        d.set_markup("<big><big><b>RepeaterBook</b></big>\r\n" +
                     "<i>North American Repeater Directory</i></big>")
        d.format_secondary_markup("For more information about this " +
                                  "free service, please go to\r\n" +
                                  "http://www.repeaterbook.com")
        d.run()
        d.destroy()
        CONF.set_bool("has_seen_credit", True, "repeaterbook")
    default_state = "Oregon"
    default_county = "--All--"
    default_band = "--All--"
    # Best-effort restore of the previous selection; any lookup failure
    # simply leaves the defaults above in place
    try:
        try:
            code = int(CONF.get("state", "repeaterbook"))
        except:
            code = CONF.get("state", "repeaterbook")
        for k, v in fips.FIPS_STATES.items():
            if code == v:
                default_state = k
                break
        code = CONF.get("county", "repeaterbook")
        items = fips.FIPS_COUNTIES[fips.FIPS_STATES[default_state]].items()
        for k, v in items:
            if code == v:
                default_county = k
                break
        code = int(CONF.get("band", "repeaterbook"))
        for k, v in RB_BANDS.items():
            if code == v:
                default_band = k
                break
    except:
        pass
    state = miscwidgets.make_choice(sorted(fips.FIPS_STATES.keys()),
                                    False, default_state)
    county = miscwidgets.make_choice(
        sorted(fips.FIPS_COUNTIES[fips.FIPS_STATES[default_state]].keys()),
        False, default_county)
    band = miscwidgets.make_choice(sorted(RB_BANDS.keys(), key=key_bands),
                                   False, default_band)
    def _changed(box, county):
        # Repopulate the county list whenever the state changes
        state = fips.FIPS_STATES[box.get_active_text()]
        county.get_model().clear()
        for fips_county in sorted(fips.FIPS_COUNTIES[state].keys()):
            county.append_text(fips_county)
        county.set_active(0)
    state.connect("changed", _changed, county)
    d = inputdialog.FieldDialog(title=_("RepeaterBook Query"), parent=self)
    d.add_field("State", state)
    d.add_field("County", county)
    d.add_field("Band", band)
    r = d.run()
    d.destroy()
    if r != gtk.RESPONSE_OK:
        return False
    # Persist as FIPS codes / band number, not display names
    code = fips.FIPS_STATES[state.get_active_text()]
    county_id = fips.FIPS_COUNTIES[code][county.get_active_text()]
    freq = RB_BANDS[band.get_active_text()]
    CONF.set("state", str(code), "repeaterbook")
    CONF.set("county", str(county_id), "repeaterbook")
    CONF.set("band", str(freq), "repeaterbook")
    return True
def do_repeaterbook_political(self, do_import):
self.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
if not self.do_repeaterbook_political_prompt():
self.window.set_cursor(None)
return
try:
code = "%02i" % int(CONF.get("state", "repeaterbook"))
except:
try:
code = CONF.get("state", "repeaterbook")
except:
code = '41' # Oregon default
try:
county = CONF.get("county", "repeaterbook")
except:
county = '%' # --All-- default
try:
band = int(CONF.get("band", "repeaterbook"))
except:
band = 14 # 2m default
query = "http://chirp.danplanet.com/query/rb/1.0/chirp" + \
"?func=default&state_id=%s&band=%s&freq=%%&band6=%%&loc=%%" + \
"&county_id=%s&status_id=%%&features=%%&coverage=%%&use=%%"
query = query % (code,
band and band or "%%",
county and county or "%%")
print query
# Do this in case the import process is going to take a while
# to make sure we process events leading up to this
gtk.gdk.window_process_all_updates()
while gtk.events_pending():
gtk.main_iteration(False)
fn = tempfile.mktemp(".csv")
filename, headers = urllib.urlretrieve(query, fn)
if not os.path.exists(filename):
LOG.error("Failed, headers were: %s", headers)
common.show_error(_("RepeaterBook query failed"))
self.window.set_cursor(None)
return
try:
# Validate CSV
radio = repeaterbook.RBRadio(filename)
if radio.errors:
reporting.report_misc_error("repeaterbook",
("query=%s\n" % query) +
("\n") +
("\n".join(radio.errors)))
except errors.InvalidDataError, e:
common.show_error(str(e))
self.window.set_cursor(None)
return
except Exception, e:
common.log_exception()
reporting.report_model_usage(radio, "import", True)
self.window.set_cursor(None)
if do_import:
eset = self.get_current_editorset()
count = eset.do_import(filename)
else:
self.do_open_live(radio, read_only=True)
def do_repeaterbook_proximity_prompt(self):
    """Prompt for a RepeaterBook proximity query (location/distance/band).

    Values are pre-filled from and persisted to the "repeaterbook"
    config section.  Returns True when all fields validated and the
    user confirmed, False on cancel.  Field keys carry a leading digit
    only to force display order; k[1:] strips it.
    """
    default_band = "--All--"
    # Best-effort restore of the previously selected band
    try:
        code = int(CONF.get("band", "repeaterbook"))
        for k, v in RB_BANDS.items():
            if code == v:
                default_band = k
                break
    except:
        pass
    fields = {"1Location": (gtk.Entry(), lambda x: x.get_text()),
              "2Distance": (gtk.Entry(), lambda x: x.get_text()),
              "3Band": (miscwidgets.make_choice(
                        sorted(RB_BANDS.keys(), key=key_bands),
                        False, default_band),
                        lambda x: RB_BANDS[x.get_active_text()]),
              }
    d = inputdialog.FieldDialog(title=_("RepeaterBook Query"),
                                parent=self)
    for k in sorted(fields.keys()):
        d.add_field(k[1:], fields[k][0])
        if isinstance(fields[k][0], gtk.Entry):
            fields[k][0].set_text(
                CONF.get(k[1:].lower(), "repeaterbook") or "")
    # Keep re-showing the dialog until every field validates or the
    # user cancels
    while d.run() == gtk.RESPONSE_OK:
        valid = True
        for k, (widget, fn) in fields.items():
            try:
                CONF.set(k[1:].lower(), str(fn(widget)), "repeaterbook")
                continue
            except:
                pass
            common.show_error("Invalid value for %s" % k[1:])
            valid = False
            break
        if valid:
            d.destroy()
            return True
    d.destroy()
    return False
def do_repeaterbook_proximity(self, do_import):
self.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
if not self.do_repeaterbook_proximity_prompt():
self.window.set_cursor(None)
return
loc = CONF.get("location", "repeaterbook")
try:
dist = int(CONF.get("distance", "repeaterbook"))
except:
dist = 20
try:
band = int(CONF.get("band", "repeaterbook")) or '%'
band = str(band)
except:
band = '%'
query = "http://chirp.danplanet.com/query/rb/1.0/app_direct" \
"?loc=%s&band=%s&dist=%s" % (loc, band, dist)
print query
# Do this in case the import process is going to take a while
# to make sure we process events leading up to this
gtk.gdk.window_process_all_updates()
while gtk.events_pending():
gtk.main_iteration(False)
fn = tempfile.mktemp(".csv")
filename, headers = urllib.urlretrieve(query, fn)
if not os.path.exists(filename):
LOG.error("Failed, headers were: %s", headers)
common.show_error(_("RepeaterBook query failed"))
self.window.set_cursor(None)
return
try:
# Validate CSV
radio = repeaterbook.RBRadio(filename)
if radio.errors:
reporting.report_misc_error("repeaterbook",
("query=%s\n" % query) +
("\n") +
("\n".join(radio.errors)))
except errors.InvalidDataError, e:
common.show_error(str(e))
self.window.set_cursor(None)
return
except Exception, e:
common.log_exception()
reporting.report_model_usage(radio, "import", True)
self.window.set_cursor(None)
if do_import:
eset = self.get_current_editorset()
count = eset.do_import(filename)
else:
self.do_open_live(radio, read_only=True)
def do_przemienniki_prompt(self):
    """Prompt for przemienniki.net query parameters and build the URL.

    Returns the complete chirp.csv export URL on OK with valid fields,
    or None when the user cancels.  Each field's validator callable is
    run on its widget; a ValueError marks the field invalid.
    """
    d = inputdialog.FieldDialog(title='przemienniki.net query',
                                parent=self)
    fields = {
        "Country":
            (miscwidgets.make_choice(
                ['at', 'bg', 'by', 'ch', 'cz', 'de', 'dk', 'es', 'fi',
                 'fr', 'hu', 'it', 'lt', 'lv', 'no', 'pl', 'ro', 'se',
                 'sk', 'ua', 'uk'], False),
             lambda x: str(x.get_active_text())),
        "Band":
            (miscwidgets.make_choice(['10m', '4m', '6m', '2m', '70cm',
                                      '23cm', '13cm', '3cm'], False, '2m'),
             lambda x: str(x.get_active_text())),
        "Mode":
            (miscwidgets.make_choice(['fm', 'dv'], False),
             lambda x: str(x.get_active_text())),
        "Only Working":
            (miscwidgets.make_choice(['', 'yes'], False),
             lambda x: str(x.get_active_text())),
        "Latitude": (gtk.Entry(), lambda x: float(x.get_text())),
        "Longitude": (gtk.Entry(), lambda x: float(x.get_text())),
        "Range": (gtk.Entry(), lambda x: int(x.get_text())),
    }
    for name in sorted(fields.keys()):
        value, fn = fields[name]
        d.add_field(name, value)
    # Initialize up front: previously an immediate cancel hit the final
    # 'return query' with 'query' unbound (NameError)
    query = None
    while d.run() == gtk.RESPONSE_OK:
        query = "http://przemienniki.net/export/chirp.csv?"
        args = []
        for name, (value, fn) in fields.items():
            if isinstance(value, gtk.Entry):
                contents = value.get_text()
            else:
                contents = value.get_active_text()
            if contents:
                try:
                    # Validation only; the raw text goes into the URL
                    fn(value)
                except ValueError:
                    common.show_error(_("Invalid value for %s") % name)
                    query = None
                    continue
                args.append("=".join((name.replace(" ", "").lower(),
                                      contents)))
        if query is None:
            # At least one field was invalid: previously this fell
            # through to 'query += ...' and crashed; re-prompt instead
            continue
        query += "&".join(args)
        LOG.debug(query)
        d.destroy()
        return query
    d.destroy()
    return query
def do_przemienniki(self, do_import):
    """Fetch a przemienniki.net CSV export and import it into the
    current tab (do_import=True) or open it read-only."""
    url = self.do_przemienniki_prompt()
    if not url:
        return
    fn = tempfile.mktemp(".csv")
    filename, headers = urllib.urlretrieve(url, fn)
    if not os.path.exists(filename):
        LOG.error("Failed, headers were: %s", str(headers))
        common.show_error(_("Query failed"))
        return
    # Ad-hoc radio class so the result is attributed to this source
    class PRRadio(generic_csv.CSVRadio,
                  chirp_common.NetworkSourceRadio):
        VENDOR = "przemienniki.net"
        MODEL = ""
    try:
        radio = PRRadio(filename)
    except Exception, e:
        common.show_error(str(e))
        return
    if do_import:
        eset = self.get_current_editorset()
        count = eset.do_import(filename)
    else:
        self.do_open_live(radio, read_only=True)
def do_rfinder_prompt(self):
    """Prompt for RFinder credentials and query parameters.

    Values are pre-filled from and persisted to the "rfinder" config
    section; the password entry is masked.  Returns True when all
    fields validated and the user confirmed, False on cancel.  Field
    keys carry a leading digit only to force display order; k[1:]
    strips it.
    """
    fields = {"1Email": (gtk.Entry(), lambda x: "@" in x),
              "2Password": (gtk.Entry(), lambda x: x),
              "3Latitude": (gtk.Entry(),
                            lambda x: float(x) < 90 and float(x) > -90),
              "4Longitude": (gtk.Entry(),
                             lambda x: float(x) < 180 and float(x) > -180),
              "5Range_in_Miles": (gtk.Entry(),
                                  lambda x: int(x) > 0 and int(x) < 5000),
              }
    d = inputdialog.FieldDialog(title="RFinder Login", parent=self)
    for k in sorted(fields.keys()):
        d.add_field(k[1:].replace("_", " "), fields[k][0])
        fields[k][0].set_text(CONF.get(k[1:], "rfinder") or "")
        # Mask only the password entry
        fields[k][0].set_visibility(k != "2Password")
    # Keep re-showing the dialog until every field validates or the
    # user cancels
    while d.run() == gtk.RESPONSE_OK:
        valid = True
        for k in sorted(fields.keys()):
            widget, validator = fields[k]
            try:
                if validator(widget.get_text()):
                    CONF.set(k[1:], widget.get_text(), "rfinder")
                    continue
            except Exception:
                pass
            common.show_error("Invalid value for %s" % k[1:])
            valid = False
            break
        if valid:
            d.destroy()
            return True
    d.destroy()
    return False
def do_rfinder(self, do_import):
    """Query RFinder and import into the current tab (do_import=True)
    or open the results read-only in a new live tab."""
    self.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
    if not self.do_rfinder_prompt():
        self.window.set_cursor(None)
        return
    lat = CONF.get_float("Latitude", "rfinder")
    lon = CONF.get_float("Longitude", "rfinder")
    passwd = CONF.get("Password", "rfinder")
    email = CONF.get("Email", "rfinder")
    miles = CONF.get_int("Range_in_Miles", "rfinder")
    # Do this in case the import process is going to take a while
    # to make sure we process events leading up to this
    gtk.gdk.window_process_all_updates()
    while gtk.events_pending():
        gtk.main_iteration(False)
    if do_import:
        eset = self.get_current_editorset()
        # The editorset understands the rfinder:// pseudo-URL scheme
        rfstr = "rfinder://%s/%s/%f/%f/%i" % \
                (email, passwd, lat, lon, miles)
        count = eset.do_import(rfstr)
    else:
        from chirp.drivers import rfinder
        radio = rfinder.RFinderRadio(None)
        radio.set_params((lat, lon), miles, email, passwd)
        self.do_open_live(radio, read_only=True)
    self.window.set_cursor(None)
def do_radioreference_prompt(self):
    """Prompt for RadioReference.com credentials and zipcode.

    Values are pre-filled from and persisted to the "radioreference"
    config section; the password entry is masked.  Returns True when
    all fields validated and the user confirmed, False on cancel.
    Field keys carry a leading digit only to force display order;
    k[1:] strips it.
    """
    fields = {"1Username": (gtk.Entry(), lambda x: x),
              "2Password": (gtk.Entry(), lambda x: x),
              "3Zipcode": (gtk.Entry(), lambda x: x),
              }
    d = inputdialog.FieldDialog(title=_("RadioReference.com Query"),
                                parent=self)
    for k in sorted(fields.keys()):
        d.add_field(k[1:], fields[k][0])
        fields[k][0].set_text(CONF.get(k[1:], "radioreference") or "")
        # Mask only the password entry
        fields[k][0].set_visibility(k != "2Password")
    # Keep re-showing the dialog until every field validates or the
    # user cancels
    while d.run() == gtk.RESPONSE_OK:
        valid = True
        for k in sorted(fields.keys()):
            widget, validator = fields[k]
            try:
                if validator(widget.get_text()):
                    CONF.set(k[1:], widget.get_text(), "radioreference")
                    continue
            except Exception:
                pass
            common.show_error("Invalid value for %s" % k[1:])
            valid = False
            break
        if valid:
            d.destroy()
            return True
    d.destroy()
    return False
def do_radioreference(self, do_import):
    """Query RadioReference.com and import into the current tab
    (do_import=True) or open the results read-only."""
    self.window.set_cursor(gtk.gdk.Cursor(gtk.gdk.WATCH))
    if not self.do_radioreference_prompt():
        self.window.set_cursor(None)
        return
    username = CONF.get("Username", "radioreference")
    passwd = CONF.get("Password", "radioreference")
    zipcode = CONF.get("Zipcode", "radioreference")
    # Do this in case the import process is going to take a while
    # to make sure we process events leading up to this
    gtk.gdk.window_process_all_updates()
    while gtk.events_pending():
        gtk.main_iteration(False)
    if do_import:
        eset = self.get_current_editorset()
        # The editorset understands the radioreference:// pseudo-URL
        rrstr = "radioreference://%s/%s/%s" % (zipcode, username, passwd)
        count = eset.do_import(rrstr)
    else:
        try:
            from chirp import radioreference
            radio = radioreference.RadioReferenceRadio(None)
            radio.set_params(zipcode, username, passwd)
            self.do_open_live(radio, read_only=True)
        except errors.RadioError, e:
            common.show_error(e)
    self.window.set_cursor(None)
def do_export(self):
    """Export the current tab's memories to a user-chosen CSV file.

    The suggested file name is derived from the current file (minus
    its extension); an existing target requires overwrite confirmation
    and is removed before export.
    """
    types = [(_("CSV Files") + " (*.csv)", "csv"),
             ]
    eset = self.get_current_editorset()
    defname = "radio"
    if os.path.exists(eset.filename):
        defname = os.path.basename(eset.filename)
        if "." in defname:
            defname = defname[:defname.rindex(".")]
    filen = platform.get_platform().gui_save_file(default_name=defname,
                                                  types=types)
    if not filen:
        return
    if os.path.exists(filen):
        dlg = inputdialog.OverwriteDialog(filen)
        response = dlg.run()
        dlg.destroy()
        if response != gtk.RESPONSE_OK:
            return
        os.remove(filen)
    count = eset.do_export(filen)
    reporting.report_model_usage(eset.rthread.radio, "export", count > 0)
def do_about(self):
    """Show the About dialog with version, credit and translator info."""
    d = gtk.AboutDialog()
    d.set_transient_for(self)
    import sys
    # Toolkit/interpreter versions shown in the comments area
    verinfo = "GTK %s\nPyGTK %s\nPython %s\n" % (
        ".".join([str(x) for x in gtk.gtk_version]),
        ".".join([str(x) for x in gtk.pygtk_version]),
        sys.version.split()[0])
    # Set url hook to handle user activating a URL link in the about dialog
    gtk.about_dialog_set_url_hook(lambda dlg, url: webbrowser.open(url))
    d.set_name("CHIRP")
    d.set_version(CHIRP_VERSION)
    d.set_copyright("Copyright 2019 CHIRP Software LLC")
    d.set_website("http://chirp.danplanet.com")
    d.set_authors(("Dan Smith KK7DS <dsmith@danplanet.com>",
                   _("With significant contributions from:"),
                   "Tom KD7LXL",
                   "Marco IZ3GME",
                   "Jim KC9HI"
                   ))
    d.set_translator_credits("Polish: Grzegorz SQ2RBY" +
                             os.linesep +
                             "Italian: Fabio IZ2QDH" +
                             os.linesep +
                             "Dutch: Michael PD4MT" +
                             os.linesep +
                             "German: Benjamin HB9EUK" +
                             os.linesep +
                             "Hungarian: Attila HA5JA" +
                             os.linesep +
                             "Russian: Dmitry Slukin" +
                             os.linesep +
                             "Portuguese (BR): Crezivando PP7CJ")
    d.set_comments(verinfo)
    d.run()
    d.destroy()
def do_gethelp(self):
    """Open the CHIRP homepage in the default web browser."""
    webbrowser.open("http://chirp.danplanet.com")
def do_columns(self):
    """Show the per-driver column-visibility chooser and apply/persist
    the selection.

    Visible column names are stored comma-joined under the driver's
    key in the "memedit_columns" config section.
    """
    eset = self.get_current_editorset()
    driver = directory.get_driver(eset.rthread.radio.__class__)
    radio_name = "%s %s %s" % (eset.rthread.radio.VENDOR,
                               eset.rthread.radio.MODEL,
                               eset.rthread.radio.VARIANT)
    d = gtk.Dialog(title=_("Select Columns"),
                   parent=self,
                   buttons=(gtk.STOCK_OK, gtk.RESPONSE_OK,
                            gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL))
    vbox = gtk.VBox()
    vbox.show()
    sw = gtk.ScrolledWindow()
    sw.set_policy(gtk.POLICY_NEVER, gtk.POLICY_AUTOMATIC)
    sw.add_with_viewport(vbox)
    sw.show()
    d.vbox.pack_start(sw, 1, 1, 1)
    d.set_size_request(-1, 300)
    d.set_resizable(False)
    labelstr = _("Visible columns for {radio}").format(radio=radio_name)
    label = gtk.Label(labelstr)
    label.show()
    vbox.pack_start(label)
    fields = []
    memedit = eset.get_current_editor()  # .editors["memedit"]
    unsupported = memedit.get_unsupported_columns()
    for colspec in memedit.cols:
        # Skip internal ("_"-prefixed) and driver-unsupported columns
        if colspec[0].startswith("_"):
            continue
        elif colspec[0] in unsupported:
            continue
        label = colspec[0]
        visible = memedit.get_column_visible(memedit.col(label))
        widget = gtk.CheckButton(label)
        widget.set_active(visible)
        fields.append(widget)
        vbox.pack_start(widget, 1, 1, 1)
        widget.show()
    res = d.run()
    selected_columns = []
    if res == gtk.RESPONSE_OK:
        for widget in fields:
            colnum = memedit.col(widget.get_label())
            memedit.set_column_visible(colnum, widget.get_active())
            if widget.get_active():
                selected_columns.append(widget.get_label())
    d.destroy()
    # Persist even on cancel (selected_columns is then empty)
    CONF.set(driver, ",".join(selected_columns), "memedit_columns")
def do_hide_unused(self, action):
    """Toggle hiding of unused memory fields.

    With no tab open the preference is only persisted; otherwise it is
    applied to every memedit editor in the current tab.
    """
    hide = action.get_active()
    eset = self.get_current_editorset()
    if eset is None:
        # No open tab: just record the preference
        config.get("memedit").set_bool("hide_unused", hide)
        return
    for editortype, editor in eset.editors.iteritems():
        if "memedit" in editortype:
            editor.set_hide_unused(hide)
def do_clearq(self):
    """Flush any queued jobs on the current tab's radio thread."""
    self.get_current_editorset().rthread.flush()
def do_copy(self, cut):
    """Copy (or cut, when *cut* is True) the current editor selection."""
    editor = self.get_current_editorset().get_current_editor()
    editor.copy_selection(cut)
def do_paste(self):
    """Paste the clipboard into the current editor's selection."""
    editor = self.get_current_editorset().get_current_editor()
    editor.paste_selection()
def do_delete(self):
    """Delete the current editor selection.

    Implemented as a cut (copy_selection(True)); delegate to do_copy
    rather than duplicating its body.
    """
    self.do_copy(True)
def do_toggle_report(self, action):
    """Handle the usage-reporting toggle, confirming before disabling.

    Declining the confirmation flips the toggle back; the final state
    is persisted (inverted) as the "no_report" config flag.
    """
    if not action.get_active():
        d = gtk.MessageDialog(buttons=gtk.BUTTONS_YES_NO, parent=self)
        markup = "<b><big>" + _("Reporting is disabled") + "</big></b>"
        d.set_markup(markup)
        msg = _("The reporting feature of CHIRP is designed to help "
                "<u>improve quality</u> by allowing the authors to focus "
                "on the radio drivers used most often and errors "
                "experienced by the users. The reports contain no "
                "identifying information and are used only for "
                "statistical purposes by the authors. Your privacy is "
                "extremely important, but <u>please consider leaving "
                "this feature enabled to help make CHIRP better!</u>\n\n"
                "<b>Are you sure you want to disable this feature?</b>")
        d.format_secondary_markup(msg.replace("\n", "\r\n"))
        r = d.run()
        d.destroy()
        if r == gtk.RESPONSE_NO:
            # User backed out: restore the previous toggle state
            action.set_active(not action.get_active())
    conf = config.get()
    conf.set_bool("no_report", not action.get_active())
def do_toggle_no_smart_tmode(self, action):
    """Persist the smart tone-mode toggle (stored inverted)."""
    enabled = action.get_active()
    # The config key records the negative ("no_smart_tmode")
    CONF.set_bool("no_smart_tmode", not enabled, "memedit")
def do_toggle_developer(self, action):
    """Persist developer mode and show/hide developer menu entries."""
    active = action.get_active()
    config.get().set_bool("developer", active, "state")
    for name in ["viewdeveloper", "loadmod"]:
        self.menu_ag.get_action(name).set_visible(active)
def do_toggle_clone_information(self, action):
    """Persist suppression of per-radio info prompts (stored inverted)."""
    show = action.get_active()
    CONF.set_bool("clone_information", not show, "noconfirm")
def do_toggle_clone_instructions(self, action):
    """Persist suppression of clone instructions (stored inverted)."""
    show = action.get_active()
    CONF.set_bool("clone_instructions", not show, "noconfirm")
def do_change_language(self):
    """Let the user pick a UI language; takes effect after a restart.

    The chosen display name is stored under "language" in the "state"
    config section ("Auto" defers to the OS locale).
    """
    langs = ["Auto", "English", "Polish", "Italian", "Dutch", "German",
             "Hungarian", "Russian", "Portuguese (BR)", "French",
             "Spanish"]
    d = inputdialog.ChoiceDialog(langs, parent=self,
                                 title="Choose Language")
    d.label.set_text(_("Choose a language or Auto to use the "
                       "operating system default. You will need to "
                       "restart the application before the change "
                       "will take effect"))
    d.label.set_line_wrap(True)
    r = d.run()
    if r == gtk.RESPONSE_OK:
        LOG.debug("Chose language %s" % d.choice.get_active_text())
        conf = config.get()
        conf.set("language", d.choice.get_active_text(), "state")
    d.destroy()
def load_module(self, filen=None):
    """Load a driver module file into the running application (developer
    feature).

    Prompts for a file when *filen* is not given, then compiles and
    executes it in this module's globals so its radio classes register
    themselves.  The window background is tinted red as a visual cue
    that a local module is overriding shipped drivers.
    """
    types = [(_("Python Modules") + " *.py", "*.py"),
             (_("Modules") + " *.mod", "*.mod")]
    if filen is None:
        filen = platform.get_platform().gui_open_file(types=types)
        if not filen:
            return
    # We're in development mode, so we need to tell the directory to
    # allow a loaded module to override an existing driver, against
    # its normal better judgement
    directory.enable_reregistrations()
    self.modify_bg(gtk.STATE_NORMAL, gtk.gdk.Color('#ea6262'))
    try:
        # NOTE: file() is the Python 2 builtin open()
        with file(filen) as module:
            code = module.read()
        pyc = compile(code, filen, 'exec')
        # See this for why:
        # http://stackoverflow.com/questions/2904274/globals-and-locals-in-python-exec
        exec(pyc, globals(), globals())
    except Exception, e:
        common.log_exception()
        common.show_error("Unable to load module: %s" % e)
def mh(self, _action, *args):
    """Central menu handler: dispatch a gtk.Action to its do_* method.

    Action-name conventions: "i"-prefixed query actions import into
    the current tab while "q"-prefixed ones open results in a new tab
    (hence the action[0] == "i" checks).  Editor-level operations are
    forwarded to the current editor's hotkey() handler.  Known actions
    end by refreshing tab-dependent UI state via ev_tab_switched().
    """
    action = _action.get_name()
    if action == "quit":
        gtk.main_quit()
    elif action == "new":
        self.do_new()
    elif action == "open":
        self.do_open()
    elif action == "save":
        self.do_save()
    elif action == "saveas":
        self.do_saveas()
    elif action.startswith("download"):
        self.do_download(*args)
    elif action.startswith("upload"):
        self.do_upload(*args)
    elif action == "close":
        self.do_close()
    elif action == "import":
        self.do_import()
    elif action in ["qdmrmarc", "idmrmarc"]:
        self.do_dmrmarc(action[0] == "i")
    elif action in ["qrfinder", "irfinder"]:
        self.do_rfinder(action[0] == "i")
    elif action in ["qradioreference", "iradioreference"]:
        self.do_radioreference(action[0] == "i")
    elif action == "export":
        self.do_export()
    elif action in ["qrbookpolitical", "irbookpolitical"]:
        self.do_repeaterbook_political(action[0] == "i")
    elif action in ["qrbookproximity", "irbookproximity"]:
        self.do_repeaterbook_proximity(action[0] == "i")
    elif action in ["qpr", "ipr"]:
        self.do_przemienniki(action[0] == "i")
    elif action == "about":
        self.do_about()
    elif action == "gethelp":
        self.do_gethelp()
    elif action == "columns":
        self.do_columns()
    elif action == "hide_unused":
        self.do_hide_unused(_action)
    elif action == "cancelq":
        self.do_clearq()
    elif action == "report":
        self.do_toggle_report(_action)
    elif action == "channel_defaults":
        # The memedit thread also has an instance of bandplans.
        bp = bandplans.BandPlans(CONF)
        bp.select_bandplan(self)
    elif action == "no_smart_tmode":
        self.do_toggle_no_smart_tmode(_action)
    elif action == "developer":
        self.do_toggle_developer(_action)
    elif action == "clone_information":
        self.do_toggle_clone_information(_action)
    elif action == "clone_instructions":
        self.do_toggle_clone_instructions(_action)
    elif action in ["cut", "copy", "paste", "delete",
                    "move_up", "move_dn", "exchange", "all",
                    "devshowraw", "devdiffraw", "properties"]:
        # Editor-level operations: let the active editor handle them
        self.get_current_editorset().get_current_editor().hotkey(_action)
    elif action == "devdifftab":
        self.do_diff_radio()
    elif action == "language":
        self.do_change_language()
    elif action == "loadmod":
        self.load_module()
    else:
        # Unknown action: skip the tab-state refresh below
        return
    self.ev_tab_switched()
def make_menubar(self):
    """Build the application's menu bar.

    Constructs a gtk.UIManager from an XML layout plus matching action
    tables, wires every item to the self.mh dispatcher, and returns the
    menubar widget for packing.  Also stores the UIManager and action
    group on self for later lookups (hotkeys, toggles).
    """
    # UI layout consumed by gtk.UIManager; every action name referenced
    # here must exist in `actions` or `toggles` below.
    menu_xml = """
<ui>
<menubar name="MenuBar">
<menu action="file">
<menuitem action="new"/>
<menuitem action="open"/>
<menu action="openstock" name="openstock"/>
<menu action="recent" name="recent"/>
<menuitem action="save"/>
<menuitem action="saveas"/>
<menuitem action="loadmod"/>
<separator/>
<menuitem action="import"/>
<menuitem action="export"/>
<separator/>
<menuitem action="close"/>
<menuitem action="quit"/>
</menu>
<menu action="edit">
<menuitem action="cut"/>
<menuitem action="copy"/>
<menuitem action="paste"/>
<menuitem action="delete"/>
<separator/>
<menuitem action="all"/>
<separator/>
<menuitem action="move_up"/>
<menuitem action="move_dn"/>
<menuitem action="exchange"/>
<separator/>
<menuitem action="properties"/>
</menu>
<menu action="view">
<menuitem action="columns"/>
<menuitem action="hide_unused"/>
<menuitem action="no_smart_tmode"/>
<menu action="viewdeveloper">
<menuitem action="devshowraw"/>
<menuitem action="devdiffraw"/>
<menuitem action="devdifftab"/>
</menu>
<menuitem action="language"/>
</menu>
<menu action="radio" name="radio">
<menuitem action="download"/>
<menuitem action="upload"/>
<menu action="importsrc" name="importsrc">
<menuitem action="idmrmarc"/>
<menuitem action="iradioreference"/>
<menu action="irbook" name="irbook">
<menuitem action="irbookpolitical"/>
<menuitem action="irbookproximity"/>
</menu>
<menuitem action="ipr"/>
<menuitem action="irfinder"/>
</menu>
<menu action="querysrc" name="querysrc">
<menuitem action="qdmrmarc"/>
<menuitem action="qradioreference"/>
<menu action="qrbook" name="qrbook">
<menuitem action="qrbookpolitical"/>
<menuitem action="qrbookproximity"/>
</menu>
<menuitem action="qpr"/>
<menuitem action="qrfinder"/>
</menu>
<menu action="stock" name="stock"/>
<separator/>
<menuitem action="channel_defaults"/>
<separator/>
<menuitem action="cancelq"/>
</menu>
<menu action="help">
<menuitem action="gethelp"/>
<separator/>
<menuitem action="report"/>
<menuitem action="clone_information"/>
<menuitem action="clone_instructions"/>
<menuitem action="developer"/>
<separator/>
<menuitem action="about"/>
</menu>
</menubar>
</ui>
"""
    # On MacOS the conventional modifier is Command (mapped by GTK to
    # <Meta>); elsewhere use the usual Alt/Ctrl keys.
    ALT_KEY = "<Alt>"
    CTRL_KEY = "<Ctrl>"
    if sys.platform == 'darwin':
        ALT_KEY = "<Meta>"
        CTRL_KEY = "<Meta>"
    # (name, stock_icon, label, accelerator, tooltip, callback) tuples,
    # all routed through the single self.mh menu handler.
    actions = [
        ('file', None, _("_File"), None, None, self.mh),
        ('new', gtk.STOCK_NEW, None, None, None, self.mh),
        ('open', gtk.STOCK_OPEN, None, None, None, self.mh),
        ('openstock', None, _("Open stock config"), None, None, self.mh),
        ('recent', None, _("_Recent"), None, None, self.mh),
        ('save', gtk.STOCK_SAVE, None, None, None, self.mh),
        ('saveas', gtk.STOCK_SAVE_AS, None, None, None, self.mh),
        ('loadmod', None, _("Load Module"), None, None, self.mh),
        ('close', gtk.STOCK_CLOSE, None, None, None, self.mh),
        ('quit', gtk.STOCK_QUIT, None, None, None, self.mh),
        ('edit', None, _("_Edit"), None, None, self.mh),
        ('cut', None, _("_Cut"), "%sx" % CTRL_KEY, None, self.mh),
        ('copy', None, _("_Copy"), "%sc" % CTRL_KEY, None, self.mh),
        ('paste', None, _("_Paste"), "%sv" % CTRL_KEY, None, self.mh),
        ('delete', None, _("_Delete"), "Delete", None, self.mh),
        ('all', None, _("Select _All"), None, None, self.mh),
        ('move_up', None, _("Move _Up"), "%sUp" % CTRL_KEY, None, self.mh),
        ('move_dn', None, _("Move Dow_n"), "%sDown" % CTRL_KEY, None, self.mh),
        ('exchange', None, _("E_xchange"), "%s<Shift>x" % CTRL_KEY, None, self.mh),
        ('properties', None, _("P_roperties"), None, None, self.mh),
        ('view', None, _("_View"), None, None, self.mh),
        ('columns', None, _("Columns"), None, None, self.mh),
        ('viewdeveloper', None, _("Developer"), None, None, self.mh),
        ('devshowraw', None, _('Show raw memory'), "%s<Shift>r" % CTRL_KEY, None, self.mh),
        ('devdiffraw', None, _("Diff raw memories"), "%s<Shift>d" % CTRL_KEY, None, self.mh),
        ('devdifftab', None, _("Diff tabs"), "%s<Shift>t" % CTRL_KEY, None, self.mh),
        ('language', None, _("Change language"), None, None, self.mh),
        ('radio', None, _("_Radio"), None, None, self.mh),
        ('download', None, _("Download From Radio"), "%sd" % ALT_KEY, None, self.mh),
        ('upload', None, _("Upload To Radio"), "%su" % ALT_KEY, None, self.mh),
        ('import', None, _("Import"), "%si" % ALT_KEY, None, self.mh),
        ('export', None, _("Export"), "%se" % ALT_KEY, None, self.mh),
        ('importsrc', None, _("Import from data source"), None, None, self.mh),
        ('idmrmarc', None, _("DMR-MARC Repeaters"), None, None, self.mh),
        ('iradioreference', None, _("RadioReference.com"), None, None, self.mh),
        ('irfinder', None, _("RFinder"), None, None, self.mh),
        ('irbook', None, _("RepeaterBook"), None, None, self.mh),
        ('irbookpolitical', None, _("RepeaterBook political query"), None, None, self.mh),
        ('irbookproximity', None, _("RepeaterBook proximity query"), None, None, self.mh),
        ('ipr', None, _("przemienniki.net"), None, None, self.mh),
        ('querysrc', None, _("Query data source"), None, None, self.mh),
        ('qdmrmarc', None, _("DMR-MARC Repeaters"), None, None, self.mh),
        ('qradioreference', None, _("RadioReference.com"), None, None, self.mh),
        ('qrfinder', None, _("RFinder"), None, None, self.mh),
        ('qpr', None, _("przemienniki.net"), None, None, self.mh),
        ('qrbook', None, _("RepeaterBook"), None, None, self.mh),
        ('qrbookpolitical', None, _("RepeaterBook political query"), None, None, self.mh),
        ('qrbookproximity', None, _("RepeaterBook proximity query"), None, None, self.mh),
        ('export_chirp', None, _("CHIRP Native File"), None, None, self.mh),
        ('export_csv', None, _("CSV File"), None, None, self.mh),
        ('stock', None, _("Import from stock config"), None, None, self.mh),
        ('channel_defaults', None, _("Channel defaults"), None, None, self.mh),
        ('cancelq', gtk.STOCK_STOP, None, "Escape", None, self.mh),
        ('help', None, _('Help'), None, None, self.mh),
        ('about', gtk.STOCK_ABOUT, None, None, None, self.mh),
        ('gethelp', None, _("Get Help Online..."), None, None, self.mh),
    ]
    # Initial state of the checkable menu items comes from saved config.
    # NOTE(review): the local `re` shadows the stdlib regex module name if
    # this file imports it; harmless inside this method.
    conf = config.get()
    re = not conf.get_bool("no_report")
    hu = conf.get_bool("hide_unused", "memedit", default=True)
    dv = conf.get_bool("developer", "state")
    cf = not conf.get_bool("clone_information", "noconfirm")
    ci = not conf.get_bool("clone_instructions", "noconfirm")
    st = not conf.get_bool("no_smart_tmode", "memedit")
    # Toggle actions carry an extra trailing bool: the initial state.
    toggles = [('report', None, _("Report Statistics"),
                None, None, self.mh, re),
               ('hide_unused', None, _("Hide Unused Fields"),
                None, None, self.mh, hu),
               ('no_smart_tmode', None, _("Smart Tone Modes"),
                None, None, self.mh, st),
               ('clone_information', None, _("Show Information"),
                None, None, self.mh, cf),
               ('clone_instructions', None, _("Show Instructions"),
                None, None, self.mh, ci),
               ('developer', None, _("Enable Developer Functions"),
                None, None, self.mh, dv),
               ]
    self.menu_uim = gtk.UIManager()
    self.menu_ag = gtk.ActionGroup("MenuBar")
    self.menu_ag.add_actions(actions)
    self.menu_ag.add_toggle_actions(toggles)
    self.menu_uim.insert_action_group(self.menu_ag, 0)
    self.menu_uim.add_ui_from_string(menu_xml)
    self.add_accel_group(self.menu_uim.get_accel_group())
    # Keep handles to the clone-prompt items so other code can flip them.
    self.infomenu = self.menu_uim.get_widget(
        "/MenuBar/help/clone_information")
    self.clonemenu = self.menu_uim.get_widget(
        "/MenuBar/help/clone_instructions")
    # Initialize developer-menu visibility from the saved toggle state.
    self.do_toggle_developer(self.menu_ag.get_action("developer"))
    return self.menu_uim.get_widget("/MenuBar")
def make_tabs(self):
    """Create the scrollable notebook that hosts the editor tabs.

    The widget is stored on self.tabs and also returned for packing.
    """
    notebook = gtk.Notebook()
    notebook.set_scrollable(True)
    self.tabs = notebook
    return notebook
def close_out(self):
    """Close every open tab (last to first) and quit the main loop.

    Returns False (and aborts the shutdown) if any tab refuses to close
    because of unsaved modifications; returns True once quit is issued.
    """
    for index in reversed(range(self.tabs.get_n_pages())):
        LOG.debug("Closing %i" % index)
        try:
            self.do_close(self.tabs.get_nth_page(index))
        except ModifiedError:
            return False
    gtk.main_quit()
    return True
def make_status_bar(self):
    """Build the two-pane status bar (general messages + radio status)."""
    container = gtk.HBox(False, 2)
    self.sb_general = gtk.Statusbar()
    self.sb_general.set_has_resize_grip(False)
    self.sb_radio = gtk.Statusbar()
    # Only the right-hand (radio) bar carries the window resize grip.
    self.sb_radio.set_has_resize_grip(True)
    for bar in (self.sb_general, self.sb_radio):
        bar.show()
        container.pack_start(bar, 1, 1, 1)
    container.show()
    return container
def ev_delete(self, window, event):
    """Window delete-event handler: veto the close if tabs refuse it."""
    closed_ok = self.close_out()
    if not closed_ok:
        return True  # stop the default handler; don't exit
def ev_destroy(self, window):
    """Window destroy handler: veto teardown if tabs refuse to close."""
    closed_ok = self.close_out()
    if not closed_ok:
        return True  # stop the default handler; don't exit
def setup_extra_hotkeys(self):
    """Register accelerator-only actions outside the menu bar.

    The action table is currently empty; the plumbing is kept so new
    (name, key, handler) entries can be added easily.
    """
    accel_group = self.menu_uim.get_accel_group()

    def memedit(action):
        # Forward a hotkey to the memory editor of the active editor set.
        self.get_current_editorset().editors["memedit"].hotkey(action)

    extra_actions = []  # (action_name, key, handler) triples
    for name, key, handler in extra_actions:
        action = gtk.Action(name, name, name, "")
        action.connect("activate", handler)
        self.menu_ag.add_action_with_accel(action, key)
        action.set_accel_group(accel_group)
        action.connect_accelerator()
def _set_icon(self):
    """Set the window icon from the bundled chirp.png, if it can be found.

    Looks for the icon both at the resource root and under pixmaps/.
    """
    this_platform = platform.get_platform()
    path = (this_platform.find_resource("chirp.png") or
            this_platform.find_resource(os.path.join("pixmaps",
                                                     "chirp.png")))
    # BUG FIX: find_resource() may return None/empty when the icon is not
    # shipped; the original passed that straight to os.path.exists(),
    # which raises TypeError for None instead of logging a warning.
    if path and os.path.exists(path):
        self.set_icon_from_file(path)
    else:
        LOG.warn("Icon %s not found" % path)
def _updates(self, version):
    """Notify the user when the server reports a newer CHIRP version.

    `version` is the latest version string reported by the update
    server; a falsy value or a match with the running CHIRP_VERSION is
    ignored.  Prompts at most once every three days and opens the
    download page if the user accepts.
    """
    if not version:
        return
    if version == CHIRP_VERSION:
        return

    LOG.info("Server reports version %s is available" % version)

    # Report new updates every three days at most.
    intv = 3600 * 24 * 3

    if CONF.is_defined("last_update_check", "state") and \
            (time.time() - CONF.get_int("last_update_check", "state")) < intv:
        return

    CONF.set_int("last_update_check", int(time.time()), "state")
    d = gtk.MessageDialog(buttons=gtk.BUTTONS_OK_CANCEL, parent=self,
                          type=gtk.MESSAGE_INFO)
    d.label.set_markup(
        _('A new version of CHIRP is available: ' +
          '{ver}. '.format(ver=version) +
          'It is recommended that you upgrade as soon as possible. '
          'Please go to: \r\n\r\n<a href="http://chirp.danplanet.com">' +
          'http://chirp.danplanet.com</a>'))
    response = d.run()
    d.destroy()
    # OK means "take me to the download page"; Cancel just dismisses.
    if response == gtk.RESPONSE_OK:
        webbrowser.open('http://chirp.danplanet.com/'
                        'projects/chirp/wiki/Download')
def _init_macos(self, menu_bar):
    """Integrate the application with the MacOS menu bar and dock.

    Tries the two known GTK/OSX integration modules in turn; if neither
    is importable, logs an error and leaves the stock UI in place.
    `menu_bar` is the gtk menubar widget produced by make_menubar().
    """
    macapp = None

    # for KK7DS runtime <= R10
    try:
        import gtk_osxapplication
        macapp = gtk_osxapplication.OSXApplication()
    except ImportError:
        pass

    # for gtk-mac-integration >= 2.0.7
    try:
        import gtkosx_application
        macapp = gtkosx_application.Application()
    except ImportError:
        pass

    if macapp is None:
        # BUG FIX: the original formatted an undefined name `e` into this
        # message ("No MacOS support: %s" % e), raising NameError instead
        # of logging when neither integration module was importable.
        LOG.error("No MacOS support")
        return

    this_platform = platform.get_platform()
    icon = (this_platform.find_resource("chirp.png") or
            this_platform.find_resource(os.path.join("pixmaps",
                                                     "chirp.png")))
    # Guard against find_resource() returning None (see _set_icon).
    if icon and os.path.exists(icon):
        icon_pixmap = gtk.gdk.pixbuf_new_from_file(icon)
        macapp.set_dock_icon_pixbuf(icon_pixmap)

    # Move the GTK menubar into the Mac global menu and relocate the
    # Quit/About/Help items to their conventional places.
    menu_bar.hide()
    macapp.set_menu_bar(menu_bar)

    quititem = self.menu_uim.get_widget("/MenuBar/file/quit")
    quititem.hide()

    aboutitem = self.menu_uim.get_widget("/MenuBar/help/about")
    macapp.insert_app_menu_item(aboutitem, 0)

    documentationitem = self.menu_uim.get_widget("/MenuBar/help/gethelp")
    macapp.insert_app_menu_item(documentationitem, 0)

    macapp.set_use_quartz_accelerators(False)
    macapp.ready()

    LOG.debug("Initialized MacOS support")
def __init__(self, *args, **kwargs):
    """Construct the main CHIRP window: menus, tabs, status bar, state.

    Restores the saved window geometry and last directory, shows the
    one-time error-reporting notice, and kicks off the asynchronous
    update check.
    """
    gtk.Window.__init__(self, *args, **kwargs)

    def expose(window, event):
        # Persist the current window size on every redraw.
        allocation = window.get_allocation()
        CONF.set_int("window_w", allocation.width, "state")
        CONF.set_int("window_h", allocation.height, "state")
    self.connect("expose_event", expose)

    def state_change(window, event):
        # Remember whether the window was maximized for the next start.
        CONF.set_bool(
            "window_maximized",
            event.new_window_state == gtk.gdk.WINDOW_STATE_MAXIMIZED,
            "state")
    self.connect("window-state-event", state_change)

    # Restore the last-used directory for file dialogs.
    d = CONF.get("last_dir", "state")
    if d and os.path.isdir(d):
        platform.get_platform().set_last_dir(d)

    vbox = gtk.VBox(False, 2)

    self._recent = []

    self.menu_ag = None
    mbar = self.make_menubar()

    if os.name != "nt":
        self._set_icon()  # Windows gets the icon from the exe
        if os.uname()[0] == "Darwin":
            self._init_macos(mbar)
    vbox.pack_start(mbar, 0, 0, 0)

    self.tabs = None
    tabs = self.make_tabs()
    tabs.connect("switch-page", lambda n, _, p: self.ev_tab_switched(p))
    tabs.connect("page-removed", lambda *a: self.ev_tab_switched())
    tabs.show()
    self.ev_tab_switched()
    vbox.pack_start(tabs, 1, 1, 1)

    vbox.pack_start(self.make_status_bar(), 0, 0, 0)

    vbox.show()
    self.add(vbox)

    # Fall back to 800x600 if no geometry was saved yet.
    try:
        width = CONF.get_int("window_w", "state")
        height = CONF.get_int("window_h", "state")
    except Exception:
        width = 800
        height = 600

    self.set_default_size(width, height)
    if CONF.get_bool("window_maximized", "state"):
        self.maximize()
    self.set_title("CHIRP")

    self.connect("delete_event", self.ev_delete)
    self.connect("destroy", self.ev_destroy)

    # One-time notice that anonymous usage reporting is on by default.
    if not CONF.get_bool("warned_about_reporting") and \
            not CONF.get_bool("no_report"):
        d = gtk.MessageDialog(buttons=gtk.BUTTONS_OK, parent=self)
        d.set_markup("<b><big>" +
                     _("Error reporting is enabled") +
                     "</big></b>")
        d.format_secondary_markup(
            _("If you wish to disable this feature you may do so in "
              "the <u>Help</u> menu"))
        d.run()
        d.destroy()
    CONF.set_bool("warned_about_reporting", True)

    self.update_recent_files()
    try:
        self.update_stock_configs()
    except UnicodeDecodeError:
        # Known upstream issue with unicode paths; startup continues
        # without copying the stock configs.
        LOG.exception('We hit bug #272 while working with unicode paths. '
                      'Not copying stock configs so we can continue '
                      'startup.')
    self.setup_extra_hotkeys()

    def updates_callback(ver):
        # Marshal the update notification back onto the GTK main loop.
        gobject.idle_add(self._updates, ver)
    if not CONF.get_bool("skip_update_check", "state"):
        reporting.check_for_updates(updates_callback)
| gpl-3.0 |
DDEFISHER/servo | tests/wpt/harness/wptrunner/wptmanifest/tests/test_serializer.py | 131 | 4691 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import unittest
from cStringIO import StringIO
from .. import parser, serializer
class TokenizerTest(unittest.TestCase):
    """Round-trip tests for the wptmanifest serializer.

    Each test parses a manifest snippet and serializes it back; unless a
    second argument is given to compare(), the output must reproduce the
    input exactly.  (The class name is historical -- these tests exercise
    the serializer, not the tokenizer.)
    """

    def setUp(self):
        self.serializer = serializer.ManifestSerializer()
        self.parser = parser.Parser()

    def serialize(self, input_str):
        # Parse the manifest text and serialize the resulting AST back out.
        return self.serializer.serialize(self.parser.parse(input_str))

    def compare(self, input_str, expected=None):
        # When `expected` is omitted the round-trip must be lossless.
        if expected is None:
            expected = input_str
        expected = expected.encode("utf8")
        actual = self.serialize(input_str)
        self.assertEquals(actual, expected)

    def test_0(self):
        self.compare("""key: value
[Heading 1]
other_key: other_value
""")

    def test_1(self):
        self.compare("""key: value
[Heading 1]
other_key:
if a or b: other_value
""")

    def test_2(self):
        self.compare("""key: value
[Heading 1]
other_key:
if a or b: other_value
fallback_value
""")

    def test_3(self):
        self.compare("""key: value
[Heading 1]
other_key:
if a == 1: other_value
fallback_value
""")

    def test_4(self):
        self.compare("""key: value
[Heading 1]
other_key:
if a == "1": other_value
fallback_value
""")

    def test_5(self):
        self.compare("""key: value
[Heading 1]
other_key:
if a == "abc"[1]: other_value
fallback_value
""")

    def test_6(self):
        self.compare("""key: value
[Heading 1]
other_key:
if a == "abc"[c]: other_value
fallback_value
""")

    # Redundant parentheses are dropped when precedence already binds
    # the same way.
    def test_7(self):
        self.compare("""key: value
[Heading 1]
other_key:
if (a or b) and c: other_value
fallback_value
""",
                     """key: value
[Heading 1]
other_key:
if a or b and c: other_value
fallback_value
""")

    def test_8(self):
        self.compare("""key: value
[Heading 1]
other_key:
if a or (b and c): other_value
fallback_value
""")

    def test_9(self):
        self.compare("""key: value
[Heading 1]
other_key:
if not (a and b): other_value
fallback_value
""")

    def test_10(self):
        self.compare("""key: value
[Heading 1]
some_key: some_value

[Heading 2]
other_key: other_value
""")

    def test_11(self):
        self.compare("""key:
if not a and b and c and d: true
""")

    def test_12(self):
        self.compare("""[Heading 1]
key: [a:1, b:2]
""")

    def test_13(self):
        self.compare("""key: [a:1, "b:#"]
""")

    def test_14(self):
        self.compare("""key: [","]
""")

    def test_15(self):
        self.compare("""key: ,
""")

    def test_16(self):
        self.compare("""key: ["]", b]
""")

    def test_17(self):
        self.compare("""key: ]
""")

    # An escaped ']' outside a list serializes back unescaped.
    def test_18(self):
        self.compare("""key: \]
""", """key: ]
""")

    # Escape sequences: control characters are normalized to \xNN or the
    # short forms \n/\r/\t on output.
    def test_escape_0(self):
        self.compare(r"""k\t\:y: \a\b\f\n\r\t\v""",
                     r"""k\t\:y: \x07\x08\x0c\n\r\t\x0b
""")

    def test_escape_1(self):
        self.compare(r"""k\x00: \x12A\x45""",
                     r"""k\x00: \x12AE
""")

    def test_escape_2(self):
        self.compare(r"""k\u0045y: \u1234A\uABc6""",
                     u"""kEy: \u1234A\uabc6
""")

    def test_escape_3(self):
        self.compare(r"""k\u0045y: \u1234A\uABc6""",
                     u"""kEy: \u1234A\uabc6
""")

    def test_escape_4(self):
        self.compare(r"""key: '\u1234A\uABc6'""",
                     u"""key: \u1234A\uabc6
""")

    def test_escape_5(self):
        self.compare(r"""key: [\u1234A\uABc6]""",
                     u"""key: [\u1234A\uabc6]
""")

    def test_escape_6(self):
        self.compare(r"""key: [\u1234A\uABc6\,]""",
                     u"""key: ["\u1234A\uabc6,"]
""")

    def test_escape_7(self):
        self.compare(r"""key: [\,\]\#]""",
                     r"""key: [",]#"]
""")

    def test_escape_8(self):
        self.compare(r"""key: \#""",
                     r"""key: "#"
""")

    def test_escape_9(self):
        self.compare(r"""key: \U10FFFFabc""",
                     u"""key: \U0010FFFFabc
""")

    def test_escape_10(self):
        self.compare(r"""key: \u10FFab""",
                     u"""key: \u10FFab
""")

    def test_escape_11(self):
        self.compare(r"""key: \\ab
""")

    # Atoms (@True/@False/@Reset) round-trip unchanged.
    def test_atom_1(self):
        self.compare(r"""key: @True
""")

    def test_atom_2(self):
        self.compare(r"""key: @False
""")

    def test_atom_3(self):
        self.compare(r"""key: @Reset
""")

    def test_atom_4(self):
        self.compare(r"""key: [a, @Reset, b]
""")
| mpl-2.0 |
Jgarcia-IAS/localizacion | openerp/addons-extra/odoo-pruebas/odoo-server/addons/account/wizard/account_change_currency.py | 385 | 3751 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class account_change_currency(osv.osv_memory):
    """Wizard (transient model) that changes the currency of a draft invoice.

    Recomputes every invoice line's unit price with the selected
    currency's rate before writing the new currency on the invoice.
    Uses the OpenERP 7 osv API (cr/uid/ids signatures).
    """
    _name = 'account.change.currency'
    _description = 'Change Currency'
    # Wizard form field: the target currency.
    _columns = {
        'currency_id': fields.many2one('res.currency', 'Change to', required=True, help="Select a currency to apply on the invoice"),
    }

    def view_init(self, cr, uid, fields_list, context=None):
        """Refuse to open the wizard for invoices that are not drafts."""
        obj_inv = self.pool.get('account.invoice')
        if context is None:
            context = {}
        if context.get('active_id', False):
            if obj_inv.browse(cr, uid, context['active_id']).state != 'draft':
                raise osv.except_osv(_('Error!'), _('You can only change currency for Draft Invoice.'))
        pass

    def change_currency(self, cr, uid, ids, context=None):
        """Convert the active invoice's line prices to the chosen currency.

        Three cases per line, depending on how the invoice currency
        relates to the company currency:
        - invoice currency == company currency: multiply by the new rate;
        - converting back to the company currency: divide by the old rate;
        - converting between two foreign currencies: divide by the old
          rate, then multiply by the new one.
        Raises except_osv when a rate is missing/zero (misconfigured
        currency).  Returns the standard act_window_close action.
        """
        obj_inv = self.pool.get('account.invoice')
        obj_inv_line = self.pool.get('account.invoice.line')
        obj_currency = self.pool.get('res.currency')
        if context is None:
            context = {}
        data = self.browse(cr, uid, ids, context=context)[0]
        new_currency = data.currency_id.id

        invoice = obj_inv.browse(cr, uid, context['active_id'], context=context)
        if invoice.currency_id.id == new_currency:
            # Nothing to do when the currency is unchanged.
            return {}

        rate = obj_currency.browse(cr, uid, new_currency, context=context).rate

        for line in invoice.invoice_line:
            new_price = 0
            if invoice.company_id.currency_id.id == invoice.currency_id.id:
                new_price = line.price_unit * rate
                if new_price <= 0:
                    raise osv.except_osv(_('Error!'), _('New currency is not configured properly.'))

            if invoice.company_id.currency_id.id != invoice.currency_id.id and invoice.company_id.currency_id.id == new_currency:
                old_rate = invoice.currency_id.rate
                if old_rate <= 0:
                    raise osv.except_osv(_('Error!'), _('Current currency is not configured properly.'))
                new_price = line.price_unit / old_rate

            if invoice.company_id.currency_id.id != invoice.currency_id.id and invoice.company_id.currency_id.id != new_currency:
                old_rate = invoice.currency_id.rate
                if old_rate <= 0:
                    raise osv.except_osv(_('Error!'), _('Current currency is not configured properly.'))
                new_price = (line.price_unit / old_rate) * rate
            obj_inv_line.write(cr, uid, [line.id], {'price_unit': new_price})
        obj_inv.write(cr, uid, [invoice.id], {'currency_id': new_currency}, context=context)
        return {'type': 'ir.actions.act_window_close'}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Arafatk/sympy | sympy/combinatorics/tests/test_subsets.py | 120 | 1918 | from sympy.combinatorics import Subset
def test_subset():
    """Exercise sympy.combinatorics.Subset ordering, ranking and unranking."""
    # Successor/predecessor in the three orderings, plus ranks, for a
    # small subset of a 4-element superset.
    a = Subset(['c', 'd'], ['a', 'b', 'c', 'd'])
    assert a.next_binary() == Subset(['b'], ['a', 'b', 'c', 'd'])
    assert a.prev_binary() == Subset(['c'], ['a', 'b', 'c', 'd'])
    assert a.next_lexicographic() == Subset(['d'], ['a', 'b', 'c', 'd'])
    assert a.prev_lexicographic() == Subset(['c'], ['a', 'b', 'c', 'd'])
    assert a.next_gray() == Subset(['c'], ['a', 'b', 'c', 'd'])
    assert a.prev_gray() == Subset(['d'], ['a', 'b', 'c', 'd'])
    assert a.rank_binary == 3
    assert a.rank_lexicographic == 14
    assert a.rank_gray == 2
    assert a.cardinality == 16

    # Same operations on a larger (7-element, integer) superset.
    a = Subset([2, 5, 7], [1, 2, 3, 4, 5, 6, 7])
    assert a.next_binary() == Subset([2, 5, 6], [1, 2, 3, 4, 5, 6, 7])
    assert a.prev_binary() == Subset([2, 5], [1, 2, 3, 4, 5, 6, 7])
    assert a.next_lexicographic() == Subset([2, 6], [1, 2, 3, 4, 5, 6, 7])
    assert a.prev_lexicographic() == Subset([2, 5, 6, 7], [1, 2, 3, 4, 5, 6, 7])
    assert a.next_gray() == Subset([2, 5, 6, 7], [1, 2, 3, 4, 5, 6, 7])
    assert a.prev_gray() == Subset([2, 5], [1, 2, 3, 4, 5, 6, 7])
    assert a.rank_binary == 37
    assert a.rank_lexicographic == 93
    assert a.rank_gray == 57
    assert a.cardinality == 128

    # unrank_* must be the inverse of the corresponding rank_* property.
    superset = ['a', 'b', 'c', 'd']
    assert Subset.unrank_binary(4, superset).rank_binary == 4
    assert Subset.unrank_gray(10, superset).rank_gray == 10

    superset = [1, 2, 3, 4, 5, 6, 7, 8, 9]
    assert Subset.unrank_binary(33, superset).rank_binary == 33
    assert Subset.unrank_gray(25, superset).rank_gray == 25

    # Walking the full lexicographic cycle of a 4-element superset takes
    # 16 steps in each direction (2**4 subsets).
    a = Subset([], ['a', 'b', 'c', 'd'])
    i = 1
    while a.subset != Subset(['d'], ['a', 'b', 'c', 'd']).subset:
        a = a.next_lexicographic()
        i = i + 1
    assert i == 16

    i = 1
    while a.subset != Subset([], ['a', 'b', 'c', 'd']).subset:
        a = a.prev_lexicographic()
        i = i + 1
    assert i == 16
| bsd-3-clause |
semiautomaticgit/SemiAutomaticClassificationPlugin | maininterface/cloudmasking.py | 1 | 9986 | # -*- coding: utf-8 -*-
'''
/**************************************************************************************************************************
SemiAutomaticClassificationPlugin
The Semi-Automatic Classification Plugin for QGIS allows for the supervised classification of remote sensing images,
providing tools for the download, the preprocessing and postprocessing of images.
-------------------
begin : 2012-12-29
copyright : (C) 2012-2021 by Luca Congedo
email : ing.congedoluca@gmail.com
**************************************************************************************************************************/
/**************************************************************************************************************************
*
* This file is part of Semi-Automatic Classification Plugin
*
* Semi-Automatic Classification Plugin is free software: you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software Foundation,
* version 3 of the License.
*
* Semi-Automatic Classification Plugin is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* Semi-Automatic Classification Plugin. If not, see <http://www.gnu.org/licenses/>.
*
**************************************************************************************************************************/
'''
cfg = __import__(str(__name__).split('.')[0] + '.core.config', fromlist=[''])
class CloudMasking:
    """SCP tool that masks cloud-class pixels out of a band set.

    Reads the cloud class values from the plugin UI, builds a per-band
    numpy expression that sets those classification values to NaN, and
    runs it through the plugin's multiprocess raster calculator.  All
    plugin state is reached through the shared `cfg` module.
    """

    def __init__(self):
        pass

    # value text changed
    def textChanged(self):
        # Re-validate the class-value line edit on every keystroke.
        self.checkValueList()

    # check value list
    def checkValueList(self):
        """Parse the class values from the UI line edit.

        Colors the widget green on success and red on a parse failure;
        returns the parsed list (empty on failure).
        """
        try:
            # class value list
            valueList = cfg.utls.textToValueList(cfg.ui.cloud_mask_classes_lineEdit.text())
            cfg.ui.cloud_mask_classes_lineEdit.setStyleSheet('color : green')
            # logger
            cfg.utls.logCondition(str(__name__) + '-' + str(cfg.inspectSCP.stack()[0][3]) + ' ' + cfg.utls.lineOfCode())
        except Exception as err:
            cfg.ui.cloud_mask_classes_lineEdit.setStyleSheet('color : red')
            valueList = []
            # logger
            cfg.utls.logCondition(str(__name__) + '-' + (cfg.inspectSCP.stack()[0][3]) + ' ' + cfg.utls.lineOfCode(), ' ERROR exception: ' + str(err))
        return valueList

    # mask band sets
    def maskAction(self):
        # UI button entry point: run the masking with interactive defaults.
        self.cloudMaskingBandSet()

    # cloud masking
    def cloudMaskingBandSet(self, batch='No', bandSetNumber=None, inputClassification=None, outputDirectory=None):
        """Mask the active band set using a cloud classification raster.

        batch='Yes' suppresses the progress bar/dialogs and takes the
        classification and output directory from the arguments instead
        of the UI.  Returns 'No' on any failure, None otherwise.
        """
        # class value list
        valueList = self.checkValueList()
        if len(valueList) > 0:
            if bandSetNumber is None:
                bandSet = cfg.ui.band_set_comb_spinBox_9.value()
                bandSetNumber = bandSet - 1
            if bandSetNumber >= len(cfg.bandSetsList):
                cfg.mx.msgWar25(bandSetNumber + 1)
                return 'No'
            if inputClassification is None:
                clssfctnNm = cfg.ui.classification_name_combo_4.currentText()
                clss = cfg.utls.selectLayerbyName(clssfctnNm, 'Yes')
                inputClassification = cfg.utls.layerSource(clss)
            if batch == 'No':
                o = cfg.utls.getExistingDirectory(None, cfg.QtWidgetsSCP.QApplication.translate('semiautomaticclassificationplugin', 'Select a directory'))
            else:
                o = outputDirectory
            if len(o) > 0:
                if batch == 'No':
                    cfg.uiUtls.addProgressBar()
                bndSetSources = []
                # create list of rasters
                # NOTE(review): broad except -- the band set table may be
                # missing or shorter than bandSetNumber.
                try:
                    cfg.bandSetsList[bandSetNumber][0]
                except:
                    if batch == 'No':
                        cfg.uiUtls.removeProgressBar()
                    cfg.mx.msgWar28()
                    # logger
                    cfg.utls.logCondition(str(__name__) + '-' + str(cfg.inspectSCP.stack()[0][3]) + ' ' + cfg.utls.lineOfCode(), ' Warning')
                    return 'No'
                if cfg.bandSetsList[bandSetNumber][0] == 'Yes':
                    ckB = cfg.utls.checkBandSet(bandSetNumber)
                else:
                    if batch == 'No':
                        cfg.uiUtls.removeProgressBar()
                    cfg.mx.msgWar29()
                    # logger
                    cfg.utls.logCondition(str(__name__) + '-' + str(cfg.inspectSCP.stack()[0][3]) + ' ' + cfg.utls.lineOfCode(), ' Warning')
                    return 'No'
                # bndSetIf records whether the band set is a list of
                # single-band rasters ('Yes') or one multiband image ('No').
                if cfg.bandSetsList[bandSetNumber][0] == 'Yes':
                    ckB = cfg.utls.checkBandSet(bandSetNumber)
                    bndSetIf = 'Yes'
                else:
                    ckB = cfg.utls.checkImageBandSet(bandSetNumber)
                    bndSetIf = 'No'
                if ckB == 'Yes':
                    bndSetSources.append(cfg.bndSetLst)
                if len(cfg.bndSetLst) == 0:
                    if batch == 'No':
                        cfg.uiUtls.removeProgressBar()
                    cfg.mx.msgWar28()
                    # logger
                    cfg.utls.logCondition(str(__name__) + '-' + str(cfg.inspectSCP.stack()[0][3]) + ' ' + cfg.utls.lineOfCode(), ' Warning')
                    return 'No'
                cfg.uiUtls.updateBar(10)
                # Reference CRS comes from the first band of the band set.
                rCrs = cfg.utls.getCrsGDAL(cfg.bndSetLst[0])
                rEPSG = cfg.osrSCP.SpatialReference()
                rEPSG.ImportFromWkt(rCrs)
                if rEPSG is None:
                    if batch == 'No':
                        cfg.uiUtls.removeProgressBar()
                    cfg.mx.msgWar28()
                    # logger
                    cfg.utls.logCondition(str(__name__) + '-' + str(cfg.inspectSCP.stack()[0][3]) + ' ' + cfg.utls.lineOfCode(), ' Warning')
                    return 'No'
                cfg.uiUtls.updateBar(20)
                cfg.utls.makeDirectory(o)
                # Reproject the classification (as a warped VRT) when its
                # CRS differs from the band set's.
                eCrs = cfg.utls.getCrsGDAL(inputClassification)
                EPSG = cfg.osrSCP.SpatialReference()
                EPSG.ImportFromWkt(eCrs)
                if EPSG.IsSame(rEPSG) != 1:
                    nD = cfg.utls.imageNoDataValue(inputClassification)
                    if nD is None:
                        nD = cfg.NoDataVal
                    #tPMD = cfg.utls.createTempRasterPath('tif')
                    #cfg.utls.GDALReprojectRaster(inputClassification, tPMD, 'GTiff', None, 'EPSG:' + str(rEPSG), '-ot Float32 -dstnodata ' + str(nD))
                    tPMD = cfg.utls.createTempRasterPath('vrt')
                    cfg.utls.createWarpedVrt(inputClassification, tPMD, str(rCrs))
                    cfg.mx.msg9()
                    if cfg.osSCP.path.isfile(tPMD):
                        inputClassification = tPMD
                    else:
                        if batch == 'No':
                            cfg.uiUtls.removeProgressBar()
                        cfg.mx.msgErr60()
                        # logger
                        cfg.utls.logCondition(str(__name__) + '-' + str(cfg.inspectSCP.stack()[0][3]) + ' ' + cfg.utls.lineOfCode(), " Warning")
                        return 'No'
                # Optional cloud buffer: dilate the masked classes by the
                # requested number of 3x3 morphological passes.
                if cfg.ui.cloud_buffer_checkBox.isChecked() is True:
                    size = cfg.ui.cloud_buffer_spinBox.value()
                    struct = cfg.utls.create3x3Window()
                    input = cfg.utls.imageNoDataValue if False else inputClassification  # see note below
                    input = inputClassification
                    ndM = cfg.utls.imageNoDataValue(input)
                    dType = cfg.utls.getRasterDataTypeName(input)
                    for s in range(0, size):
                        tPMD = cfg.utls.createTempRasterPath('vrt')
                        # process calculation
                        oP = cfg.utls.multiProcessRaster(rasterPath = input, functionBand = 'No', functionRaster = cfg.utls.rasterDilation, outputRasterList = [tPMD], functionBandArgument = struct, functionVariable = valueList, progressMessage = cfg.QtWidgetsSCP.QApplication.translate('semiautomaticclassificationplugin', 'Dilation '), virtualRaster = 'Yes', compress = 'No', outputNoDataValue = ndM, dataType = dType, boundarySize = 3)
                        input = tPMD
                    if cfg.osSCP.path.isfile(tPMD):
                        inputClassification = tPMD
                    else:
                        if batch == 'No':
                            cfg.uiUtls.removeProgressBar()
                        cfg.mx.msgErr60()
                        # logger
                        cfg.utls.logCondition(str(__name__) + '-' + str(cfg.inspectSCP.stack()[0][3]) + ' ' + cfg.utls.lineOfCode(), " Error")
                        return 'No'
                # No data value
                NoDataVal = cfg.ui.nodata_spinBox_11.value()
                nD = NoDataVal
                outputName = cfg.ui.mask_output_name_lineEdit.text()
                if len(outputName) > 0:
                    # ASCII-sanitize the prefix ([2:-1] strips the b'...'
                    # wrapper from the bytes repr).
                    outputName = str(outputName.encode('ascii', 'replace'))[2:-1] + "_"
                cfg.uiUtls.updateBar(40)
                # create functions
                bList = []
                bandNumberList = []
                bList.append(inputClassification)
                bandNumberList.append(1)
                outputList = []
                argumentList = []
                variableList = []
                varList = []
                # "im0" is the classification; "im1".."imN" the band set.
                varList.append('"im0"')
                for x in range(1, len(cfg.bndSetLst) + 1):
                    varList.append('"im' + str(x) + '"')
                for x in range(1, len(cfg.bndSetLst) + 1):
                    if bndSetIf == 'Yes':
                        bList.append(cfg.bndSetLst[x-1])
                        bandNumberList.append(1)
                    else:
                        bList.append(cfg.bndSetLst[x])
                        bandNumberList.append(x)
                    rstrOut = o + '/' + outputName + cfg.utls.fileNameNoExt(cfg.bndSetLst[x-1]) + '.tif'
                    outputList.append(rstrOut)
                    # function: nested np.where setting every selected
                    # class value to NaN, else the band value.
                    e = ''
                    end = ''
                    for c in valueList:
                        e = e + 'cfg.np.where("im0" == ' + str(c) + ', cfg.np.nan, '
                        end = end + ')'
                    e = e + '"im' + str(x) + '"' + end
                    argumentList.append(e)
                    variableList.append(varList)
                # create virtual raster
                vrtCheck = cfg.utls.createTempVirtualRaster(bList, bandNumberList, 'Yes', 'Yes', 0, 'No', 'Yes')
                # open input with GDAL
                rD = cfg.gdalSCP.Open(vrtCheck, cfg.gdalSCP.GA_ReadOnly)
                # output rasters
                cfg.utls.createRasterFromReference(rD, 1, outputList, cfg.NoDataVal, 'GTiff', cfg.rasterDataType, 0, None, compress = 'Yes', compressFormat = 'LZW')
                rD = None
                # process (reuses `o` for the calculator's return value;
                # the output directory is no longer needed at this point)
                o = cfg.utls.multiProcessRaster(rasterPath = vrtCheck, functionBand = 'No', functionRaster = cfg.utls.calculateRaster, outputRasterList = outputList, nodataValue = nD, functionBandArgument = argumentList, functionVariable = variableList, progressMessage = cfg.QtWidgetsSCP.QApplication.translate('semiautomaticclassificationplugin', 'Mask'), outputNoDataValue = nD, compress = cfg.rasterCompression, compressFormat = 'LZW', parallel = cfg.parallelRaster, skipSingleBand = 'Yes')
                cfg.cnvs.setRenderFlag(False)
                for rOut in outputList:
                    if cfg.osSCP.path.isfile(rOut):
                        # add raster to layers
                        cfg.utls.addRasterLayer(rOut)
                cfg.cnvs.setRenderFlag(True)
                cfg.uiUtls.updateBar(100)
                if batch == 'No':
                    cfg.utls.finishSound()
                    cfg.utls.sendSMTPMessage(None, str(__name__))
                    cfg.uiUtls.removeProgressBar()
| gpl-3.0 |
tejasnikumbh/Algorithms | genericCode/printingAndParsingFunctions/parsingFunctions.py | 1 | 1552 | '''
Usage: To read from files pass in the file stream as follows -
E.g -> f = open('filename.txt')
parseInt(f)
To read from standard input stream, use these functions as follows
E.g -> s = sys.stdin
parseInt(s)
'''
'''
Parses a grid from the passed in stream. Can be used to parse the
grid from standard input (by passing in sys.stdin) as well as from
a text file (by passing in f, where f = open('somename.txt'))
'''
def parseGrid(stream, r, c):
    """Parse an r-row character grid from stream.

    Each of the next r lines is read, stripped of trailing whitespace,
    and exploded into a list of single characters.  The column count c
    is accepted for interface compatibility but is not consulted.
    """
    return [list(stream.readline().rstrip()) for _ in range(r)]
'''
Reads in an integer from stream passed in as the parameter. Simple
parsing function that can read from files as well as standard input
'''
def parseInt(stream):
    """Read one line from stream and return it as an int."""
    line = stream.readline()
    return int(line.rstrip())
'''
Reads in an array of integers from stream passed in as parameter.This
is a simple parsing function that can read from files as well as
standard input
'''
def parseIntArr(stream):
    """Read one whitespace-separated line of integers into a list."""
    tokens = stream.readline().rstrip().split()
    return list(map(int, tokens))
'''
Reads in a string from stream passed in as the parameter. Simple
parsing function that can read from files as well as standard input
'''
def parseString(stream):
    """Read one line from the stream, with trailing whitespace removed."""
    raw = stream.readline()
    return raw.rstrip()
'''
Reads a string array from the stream passed in as parameter. Simple parse
function that can read from files as well as standard input
'''
def parseStringArr(stream):
    """
    Read one line from the stream and return its whitespace-separated
    tokens as a list of strings.
    """
    # split() already yields strings, so the original's str(x)
    # conversion of every token was redundant.
    return stream.readline().rstrip().split()
| bsd-2-clause |
insanemal/wolfram_alpha | wa.py | 1 | 1761 | #!/usr/bin/python2
import requests
import os
import getpass
import argparse
from os.path import expanduser
def main():
home = expanduser("~")
config = os.path.join(home,'.wa_api_key')
parser = argparse.ArgumentParser()
parser.add_argument('-s','--short', action="store_true",help="Return just the answer.")
parser.add_argument('-l','--long', action="store_false",help="Default. Return full text")
parser.add_argument("query", help="Wolfram Alpha query")
args = parser.parse_args()
if os.path.exists(config):
api_key = open(config,"r")
key = api_key.readline().strip()
api_key.close()
else:
print "API Key not found. Please enter API key"
key= getpass.getpass()
api_key = open(config,"w")
api_key.write(key)
api_key.close()
query = args.query
r = requests.get("http://api.wolframalpha.com/v2/query?input={}&appid={}&format=plaintext&output=json".format(query,key))
j = r.json()['queryresult']
if j['success']:
if args.short:
print_short(j)
else:
print_long(j)
else:
print 'Query failed. Check spelling/formating'
def print_long(j):
for field in j['pods']:
if 'title' in field:
print '\x1b[1;34;40m'+field['title']+'\x1b[0m'
for subpod in field['subpods']:
if 'plaintext' in subpod:
print subpod['plaintext']
def print_short(j):
for field in j['pods']:
if 'title' in field:
if field['title'] == 'Result':
for subpod in field['subpods']:
if 'plaintext' in subpod:
print subpod['plaintext']
# Script entry point: run only when executed directly, not when imported.
if __name__ == '__main__':
    main()
| mit |
bwrsandman/OpenUpgrade | openerp/http.py | 10 | 60930 | # -*- coding: utf-8 -*-
#----------------------------------------------------------
# OpenERP HTTP layer
#----------------------------------------------------------
import ast
import collections
import contextlib
import datetime
import errno
import functools
import getpass
import inspect
import logging
import mimetypes
import os
import pprint
import random
import re
import sys
import tempfile
import threading
import time
import traceback
import urlparse
import warnings
from zlib import adler32
import babel.core
import psycopg2
import simplejson
import werkzeug.contrib.sessions
import werkzeug.datastructures
import werkzeug.exceptions
import werkzeug.local
import werkzeug.routing
import werkzeug.wrappers
import werkzeug.wsgi
from werkzeug.wsgi import wrap_file
try:
import psutil
except ImportError:
psutil = None
import openerp
from openerp import SUPERUSER_ID
from openerp.service import security, model as service_model
from openerp.tools.func import lazy_property
from openerp.tools import ustr
_logger = logging.getLogger(__name__)
rpc_request = logging.getLogger(__name__ + '.rpc.request')
rpc_response = logging.getLogger(__name__ + '.rpc.response')
# 1 week cache for statics as advised by Google Page Speed
STATIC_CACHE = 60 * 60 * 24 * 7
#----------------------------------------------------------
# RequestHandler
#----------------------------------------------------------
# Thread local global request object
_request_stack = werkzeug.local.LocalStack()
request = _request_stack()
"""
A global proxy that always redirect to the current request object.
"""
def replace_request_password(args):
    """Return *args* as a tuple with the 3rd element (the password, by
    RPC convention) replaced by '*' so logs can be forwarded safely."""
    if len(args) <= 2:
        return tuple(args)
    masked = list(args)
    masked[2] = '*'
    return tuple(masked)
# don't trigger debugger for those exceptions, they carry user-facing warnings
# and indications, they're not necessarily indicative of anything being
# *broken*
# Checked by dispatch_rpc and WebRequest._handle_exception before invoking
# the post-mortem debugger.
NO_POSTMORTEM = (openerp.osv.orm.except_orm,
                 openerp.exceptions.AccessError,
                 openerp.exceptions.AccessDenied,
                 openerp.exceptions.Warning,
                 openerp.exceptions.RedirectWarning)
def dispatch_rpc(service_name, method, params):
    """ Handle a RPC call.

    This is pure Python code, the actual marshalling (from/to XML-RPC) is done
    in a upper layer.

    :param service_name: one of ``common``, ``db``, ``object``, ``report``,
        or any key registered in ``openerp.service.wsgi_server.rpc_handlers``
    :param method: name of the method to invoke on that service
    :param params: positional arguments forwarded to the service dispatcher
    :returns: whatever the service dispatcher returns
    """
    try:
        rpc_request_flag = rpc_request.isEnabledFor(logging.DEBUG)
        rpc_response_flag = rpc_response.isEnabledFor(logging.DEBUG)
        if rpc_request_flag or rpc_response_flag:
            start_time = time.time()
            start_rss, start_vms = 0, 0
            if psutil:
                start_rss, start_vms = psutil.Process(os.getpid()).get_memory_info()
            # NOTE(review): `rpc_request` is a logger object and therefore
            # always truthy, so this condition effectively only tests
            # rpc_response_flag -- presumably rpc_request_flag was intended.
            # Confirm against upstream before changing.
            if rpc_request and rpc_response_flag:
                openerp.netsvc.log(rpc_request, logging.DEBUG, '%s.%s' % (service_name, method), replace_request_password(params))

        threading.current_thread().uid = None
        threading.current_thread().dbname = None
        # route the call to the matching service dispatcher
        if service_name == 'common':
            dispatch = openerp.service.common.dispatch
        elif service_name == 'db':
            dispatch = openerp.service.db.dispatch
        elif service_name == 'object':
            dispatch = openerp.service.model.dispatch
        elif service_name == 'report':
            dispatch = openerp.service.report.dispatch
        else:
            dispatch = openerp.service.wsgi_server.rpc_handlers.get(service_name)
        result = dispatch(method, params)

        if rpc_request_flag or rpc_response_flag:
            end_time = time.time()
            end_rss, end_vms = 0, 0
            if psutil:
                end_rss, end_vms = psutil.Process(os.getpid()).get_memory_info()
            logline = '%s.%s time:%.3fs mem: %sk -> %sk (diff: %sk)' % (service_name, method, end_time - start_time, start_vms / 1024, end_vms / 1024, (end_vms - start_vms)/1024)
            if rpc_response_flag:
                openerp.netsvc.log(rpc_response, logging.DEBUG, logline, result)
            else:
                openerp.netsvc.log(rpc_request, logging.DEBUG, logline, replace_request_password(params), depth=1)

        return result
    except NO_POSTMORTEM:
        # user-facing warnings: re-raise without post-mortem debugging
        raise
    except openerp.exceptions.DeferredException, e:
        _logger.exception(openerp.tools.exception_to_unicode(e))
        openerp.tools.debugger.post_mortem(openerp.tools.config, e.traceback)
        raise
    except Exception, e:
        _logger.exception(openerp.tools.exception_to_unicode(e))
        openerp.tools.debugger.post_mortem(openerp.tools.config, sys.exc_info())
        raise
def local_redirect(path, query=None, keep_hash=False, forward_debug=True, code=303):
    """Build a redirect response to a local path, optionally appending
    query parameters and preserving the URL fragment client-side."""
    params = query or {}
    if forward_debug and request and request.debug:
        # propagate debug mode to the redirected page
        params['debug'] = None
    target = path
    if params:
        target += '?' + werkzeug.url_encode(params)
    if keep_hash:
        return redirect_with_hash(target, code)
    return werkzeug.utils.redirect(target, code)
def redirect_with_hash(url, code=303):
    # Most IE and Safari versions decided not to preserve location.hash upon
    # redirect. And even if IE10 pretends to support it, it still fails
    # inexplicably in case of multiple redirects (and we do have some).
    # See extensive test page at http://greenbytes.de/tech/tc/httpredirects/
    browser = request.httprequest.user_agent.browser
    if browser not in ('firefox',):
        # fall back to a tiny HTML page that re-applies the hash client-side
        return "<html><head><script>window.location = '%s' + location.hash;</script></head></html>" % url
    return werkzeug.utils.redirect(url, code)
class WebRequest(object):
    """ Parent class for all Odoo Web request types, mostly deals with
    initialization and setup of the request object (the dispatching itself has
    to be handled by the subclasses)

    :param httprequest: a wrapped werkzeug Request object
    :type httprequest: :class:`werkzeug.wrappers.BaseRequest`

    .. attribute:: httprequest

        the original :class:`werkzeug.wrappers.Request` object provided to the
        request

    .. attribute:: params

        :class:`~collections.Mapping` of request parameters, not generally
        useful as they're provided directly to the handler method as keyword
        arguments
    """
    def __init__(self, httprequest):
        self.httprequest = httprequest
        self.httpresponse = None
        # NOTE(review): this instance attribute shadows the lazy_property
        # `httpsession` defined at the bottom of the class -- confirm which
        # one is intended to win.
        self.httpsession = httprequest.session
        self.disable_db = False
        self.uid = None
        self.endpoint = None
        self.auth_method = None
        self._cr = None

        # prevents transaction commit, use when you catch an exception during handling
        self._failed = None

        # set db/uid trackers - they're cleaned up at the WSGI
        # dispatching phase in openerp.service.wsgi_server.application
        if self.db:
            threading.current_thread().dbname = self.db
        if self.session.uid:
            threading.current_thread().uid = self.session.uid

    @lazy_property
    def env(self):
        """
        The :class:`~openerp.api.Environment` bound to current request.

        Raises a :class:`RuntimeError` if the current requests is not bound
        to a database.
        """
        if not self.db:
            # BUGFIX: the exception must be raised, not returned -- the
            # previous `return RuntimeError(...)` handed callers an exception
            # instance instead of raising, contradicting the docstring.
            raise RuntimeError('request not bound to a database')
        return openerp.api.Environment(self.cr, self.uid, self.context)

    @lazy_property
    def context(self):
        """
        :class:`~collections.Mapping` of context values for the current
        request
        """
        return dict(self.session.context)

    @lazy_property
    def lang(self):
        self.session._fix_lang(self.context)
        return self.context["lang"]

    @lazy_property
    def session(self):
        """
        a :class:`OpenERPSession` holding the HTTP session data for the
        current http session
        """
        return self.httprequest.session

    @property
    def cr(self):
        """
        :class:`~openerp.sql_db.Cursor` initialized for the current method
        call.

        Accessing the cursor when the current request uses the ``none``
        authentication will raise an exception.
        """
        # can not be a lazy_property because manual rollback in _call_function
        # if already set (?)
        if not self.db:
            # BUGFIX: raise (not return), matching the documented behaviour.
            raise RuntimeError('request not bound to a database')
        if not self._cr:
            self._cr = self.registry.cursor()
        return self._cr

    def __enter__(self):
        _request_stack.push(self)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        _request_stack.pop()

        if self._cr:
            # commit only on a fully successful request
            if exc_type is None and not self._failed:
                self._cr.commit()
            self._cr.close()
        # just to be sure no one tries to re-use the request
        self.disable_db = True
        self.uid = None

    def set_handler(self, endpoint, arguments, auth):
        # is this needed ?
        arguments = dict((k, v) for k, v in arguments.iteritems()
                         if not k.startswith("_ignored_"))

        endpoint.arguments = arguments
        self.endpoint = endpoint
        self.auth_method = auth

    def _handle_exception(self, exception):
        """Called within an except block to allow converting exceptions
        to arbitrary responses. Anything returned (except None) will
        be used as response."""
        self._failed = exception  # prevent tx commit
        if not isinstance(exception, NO_POSTMORTEM) \
                and not isinstance(exception, werkzeug.exceptions.HTTPException):
            openerp.tools.debugger.post_mortem(
                openerp.tools.config, sys.exc_info())
        raise

    def _call_function(self, *args, **kwargs):
        request = self
        if self.endpoint.routing['type'] != self._request_type:
            msg = "%s, %s: Function declared as capable of handling request of type '%s' but called with a request of type '%s'"
            params = (self.endpoint.original, self.httprequest.path, self.endpoint.routing['type'], self._request_type)
            _logger.error(msg, *params)
            raise werkzeug.exceptions.BadRequest(msg % params)

        kwargs.update(self.endpoint.arguments)

        # Backward for 7.0
        if self.endpoint.first_arg_is_req:
            args = (request,) + args

        # Correct exception handling and concurency retry
        @service_model.check
        def checked_call(___dbname, *a, **kw):
            # The decorator can call us more than once if there is an database error. In this
            # case, the request cursor is unusable. Rollback transaction to create a new one.
            if self._cr:
                self._cr.rollback()
            return self.endpoint(*a, **kw)

        if self.db:
            return checked_call(self.db, *args, **kwargs)
        return self.endpoint(*args, **kwargs)

    @property
    def debug(self):
        """ Indicates whether the current request is in "debug" mode
        """
        return 'debug' in self.httprequest.args

    @contextlib.contextmanager
    def registry_cr(self):
        warnings.warn('please use request.registry and request.cr directly', DeprecationWarning)
        yield (self.registry, self.cr)

    @lazy_property
    def session_id(self):
        """
        opaque identifier for the :class:`OpenERPSession` instance of
        the current request

        .. deprecated:: 8.0

            Use the ``sid`` attribute on :attr:`.session`
        """
        return self.session.sid

    @property
    def registry(self):
        """
        The registry to the database linked to this request. Can be ``None``
        if the current request uses the ``none`` authentication.

        .. deprecated:: 8.0

            use :attr:`.env`
        """
        return openerp.modules.registry.RegistryManager.get(self.db) if self.db else None

    @property
    def db(self):
        """
        The database linked to this request. Can be ``None``
        if the current request uses the ``none`` authentication.
        """
        return self.session.db if not self.disable_db else None

    @lazy_property
    def httpsession(self):
        """ HTTP session data

        .. deprecated:: 8.0

            Use :attr:`.session` instead.
        """
        return self.session
def route(route=None, **kw):
    """
    Decorator marking the decorated method as being a handler for
    requests. The method must be part of a subclass of ``Controller``.

    :param route: string or array. The route part that will determine which
                  http requests will match the decorated method. Can be a
                  single string or an array of strings. See werkzeug's routing
                  documentation for the format of route expression (
                  http://werkzeug.pocoo.org/docs/routing/ ).
    :param type: The type of request, can be ``'http'`` or ``'json'``.
    :param auth: The type of authentication method, can on of the following:

                 * ``user``: The user must be authenticated and the current request
                   will perform using the rights of the user.
                 * ``admin``: The user may not be authenticated and the current request
                   will perform using the admin user.
                 * ``none``: The method is always active, even if there is no
                   database. Mainly used by the framework and authentication
                   modules. There request code will not have any facilities to access
                   the database nor have any configuration indicating the current
                   database nor the current user.
    :param methods: A sequence of http methods this route applies to. If not
                    specified, all methods are allowed.
    :param cors: The Access-Control-Allow-Origin cors directive value.
    """
    routing = kw.copy()
    assert not 'type' in routing or routing['type'] in ("http", "json")
    def decorator(f):
        if route:
            # normalize to a list of route strings
            if isinstance(route, list):
                routes = route
            else:
                routes = [route]
            routing['routes'] = routes
        @functools.wraps(f)
        def response_wrap(*args, **kw):
            # Normalize whatever the handler returned into a Response object
            # (json handlers build their own response and pass through).
            response = f(*args, **kw)
            if isinstance(response, Response) or f.routing_type == 'json':
                return response

            if isinstance(response, basestring):
                return Response(response)

            if isinstance(response, werkzeug.exceptions.HTTPException):
                response = response.get_response()
            if isinstance(response, werkzeug.wrappers.BaseResponse):
                response = Response.force_type(response)
                response.set_default()
                return response

            _logger.warn("<function %s.%s> returns an invalid response type for an http request" % (f.__module__, f.__name__))
            return response
        # expose routing metadata and the undecorated function for
        # ControllerType / routing_map introspection
        response_wrap.routing = routing
        response_wrap.original_func = f
        return response_wrap
    return decorator
class JsonRequest(WebRequest):
    """ Request handler for `JSON-RPC 2
    <http://www.jsonrpc.org/specification>`_ over HTTP

    * ``method`` is ignored
    * ``params`` must be a JSON object (not an array) and is passed as keyword
      arguments to the handler method
    * the handler method's result is returned as JSON-RPC ``result`` and
      wrapped in the `JSON-RPC Response
      <http://www.jsonrpc.org/specification#response_object>`_

    Successful request::

      --> {"jsonrpc": "2.0",
           "method": "call",
           "params": {"context": {},
                      "arg1": "val1" },
           "id": null}

      <-- {"jsonrpc": "2.0",
           "result": { "res1": "val1" },
           "id": null}

    Request producing a error::

      --> {"jsonrpc": "2.0",
           "method": "call",
           "params": {"context": {},
                      "arg1": "val1" },
           "id": null}

      <-- {"jsonrpc": "2.0",
           "error": {"code": 1,
                     "message": "End user error message.",
                     "data": {"code": "codestring",
                              "debug": "traceback" } },
           "id": null}
    """
    _request_type = "json"

    def __init__(self, *args):
        super(JsonRequest, self).__init__(*args)

        self.jsonp_handler = None

        args = self.httprequest.args
        jsonp = args.get('jsonp')
        self.jsonp = jsonp
        request = None
        request_id = args.get('id')

        if jsonp and self.httprequest.method == 'POST':
            # jsonp 2 steps step1 POST: save call
            def handler():
                self.session['jsonp_request_%s' % (request_id,)] = self.httprequest.form['r']
                self.session.modified = True
                headers=[('Content-Type', 'text/plain; charset=utf-8')]
                r = werkzeug.wrappers.Response(request_id, headers=headers)
                return r
            self.jsonp_handler = handler
            return
        elif jsonp and args.get('r'):
            # jsonp method GET
            request = args.get('r')
        elif jsonp and request_id:
            # jsonp 2 steps step2 GET: run and return result
            request = self.session.pop('jsonp_request_%s' % (request_id,), '{}')
        else:
            # regular jsonrpc2
            request = self.httprequest.stream.read()

        # Read POST content or POST Form Data named "request"
        try:
            self.jsonrequest = simplejson.loads(request)
        except simplejson.JSONDecodeError:
            msg = 'Invalid JSON data: %r' % (request,)
            _logger.error('%s: %s', self.httprequest.path, msg)
            raise werkzeug.exceptions.BadRequest(msg)

        # handler kwargs come from the JSON-RPC "params" object; "context"
        # is extracted separately and overrides the session context
        self.params = dict(self.jsonrequest.get("params", {}))
        self.context = self.params.pop('context', dict(self.session.context))

    def _json_response(self, result=None, error=None):
        # Build the JSON-RPC 2.0 envelope around a result or an error.
        response = {
            'jsonrpc': '2.0',
            'id': self.jsonrequest.get('id')
        }
        if error is not None:
            response['error'] = error
        if result is not None:
            response['result'] = result

        if self.jsonp:
            # If we use jsonp, that's mean we are called from another host
            # Some browser (IE and Safari) do no allow third party cookies
            # We need then to manage http sessions manually.
            response['session_id'] = self.session_id
            mime = 'application/javascript'
            body = "%s(%s);" % (self.jsonp, simplejson.dumps(response),)
        else:
            mime = 'application/json'
            body = simplejson.dumps(response)

        return Response(
            body, headers=[('Content-Type', mime),
                           ('Content-Length', len(body))])

    def _handle_exception(self, exception):
        """Called within an except block to allow converting exceptions
        to arbitrary responses. Anything returned (except None) will
        be used as response."""
        try:
            return super(JsonRequest, self)._handle_exception(exception)
        except Exception:
            if not isinstance(exception, (openerp.exceptions.Warning, SessionExpiredException)):
                _logger.exception("Exception during JSON request handling.")
            error = {
                'code': 200,
                'message': "Odoo Server Error",
                'data': serialize_exception(exception)
            }
            if isinstance(exception, AuthenticationError):
                error['code'] = 100
                error['message'] = "Odoo Session Invalid"
            if isinstance(exception, SessionExpiredException):
                error['code'] = 100
                error['message'] = "Odoo Session Expired"
            return self._json_response(error=error)

    def dispatch(self):
        if self.jsonp_handler:
            return self.jsonp_handler()
        try:
            rpc_request_flag = rpc_request.isEnabledFor(logging.DEBUG)
            rpc_response_flag = rpc_response.isEnabledFor(logging.DEBUG)
            if rpc_request_flag or rpc_response_flag:
                endpoint = self.endpoint.method.__name__
                model = self.params.get('model')
                method = self.params.get('method')
                args = self.params.get('args', [])

                start_time = time.time()
                _, start_vms = 0, 0
                if psutil:
                    _, start_vms = psutil.Process(os.getpid()).get_memory_info()
                # NOTE(review): `rpc_request` is a logger (always truthy), so
                # only rpc_response_flag is effectively tested here -- same
                # pattern as dispatch_rpc; confirm against upstream.
                if rpc_request and rpc_response_flag:
                    rpc_request.debug('%s: %s %s, %s',
                        endpoint, model, method, pprint.pformat(args))

            result = self._call_function(**self.params)

            if rpc_request_flag or rpc_response_flag:
                end_time = time.time()
                _, end_vms = 0, 0
                if psutil:
                    _, end_vms = psutil.Process(os.getpid()).get_memory_info()
                logline = '%s: %s %s: time:%.3fs mem: %sk -> %sk (diff: %sk)' % (
                    endpoint, model, method, end_time - start_time, start_vms / 1024, end_vms / 1024, (end_vms - start_vms)/1024)
                if rpc_response_flag:
                    rpc_response.debug('%s, %s', logline, pprint.pformat(result))
                else:
                    rpc_request.debug(logline)

            return self._json_response(result)
        except Exception, e:
            return self._handle_exception(e)
def serialize_exception(e):
    """Build a JSON-serializable description of an exception: qualified
    type name, formatted traceback, message and arguments."""
    module = type(e).__module__
    qualified = module + "." + type(e).__name__ if module else type(e).__name__
    tmp = {
        "name": qualified,
        "debug": traceback.format_exc(),
        "message": ustr(e),
        "arguments": to_jsonable(e.args),
    }
    # tag known OpenERP exception families for the client side
    if isinstance(e, openerp.osv.osv.except_osv):
        tmp["exception_type"] = "except_osv"
    elif isinstance(e, openerp.exceptions.Warning):
        tmp["exception_type"] = "warning"
    elif isinstance(e, openerp.exceptions.AccessError):
        tmp["exception_type"] = "access_error"
    elif isinstance(e, openerp.exceptions.AccessDenied):
        tmp["exception_type"] = "access_denied"
    return tmp
def to_jsonable(o):
    """Recursively convert *o* into JSON-serializable primitives,
    falling back to a unicode string for unknown types."""
    primitives = (str, unicode, int, long, bool, float)
    if o is None or isinstance(o, primitives):
        return o
    if isinstance(o, (list, tuple)):
        return [to_jsonable(item) for item in o]
    if isinstance(o, dict):
        converted = {}
        for key, value in o.items():
            converted[u"%s" % key] = to_jsonable(value)
        return converted
    return ustr(o)
def jsonrequest(f):
    """
    .. deprecated:: 8.0

        Use the :func:`~openerp.http.route` decorator instead.
    """
    # "index" maps to the controller root; anything else keeps its own name
    base = "" if f.__name__ == "index" else f.__name__.lstrip('/')
    return route([base, base + "/<path:_ignored_path>"], type="json", auth="user", combine=True)(f)
class HttpRequest(WebRequest):
    """ Handler for the ``http`` request type.

    matched routing parameters, query string parameters, form_ parameters
    and files are passed to the handler method as keyword arguments.

    In case of name conflict, routing parameters have priority.

    The handler method's result can be:

    * a falsy value, in which case the HTTP response will be an
      `HTTP 204`_ (No Content)
    * a werkzeug Response object, which is returned as-is
    * a ``str`` or ``unicode``, will be wrapped in a Response object and
      interpreted as HTML

    .. _form: http://www.w3.org/TR/html401/interact/forms.html#h-17.13.4.2
    .. _HTTP 204: http://tools.ietf.org/html/rfc7231#section-6.3.5
    """
    _request_type = "http"

    def __init__(self, *args):
        super(HttpRequest, self).__init__(*args)
        # merge query string, form data and uploaded files into one mapping
        params = self.httprequest.args.to_dict()
        params.update(self.httprequest.form.to_dict())
        params.update(self.httprequest.files.to_dict())
        params.pop('session_id', None)
        self.params = params

    def _handle_exception(self, exception):
        """Called within an except block to allow converting exceptions
        to arbitrary responses. Anything returned (except None) will
        be used as response."""
        try:
            return super(HttpRequest, self)._handle_exception(exception)
        except SessionExpiredException:
            # send the user back to the login page, preserving the target URL
            if not request.params.get('noredirect'):
                query = werkzeug.urls.url_encode({
                    'redirect': request.httprequest.url,
                })
                return werkzeug.utils.redirect('/web/login?%s' % query)
        except werkzeug.exceptions.HTTPException, e:
            return e

    def dispatch(self):
        if request.httprequest.method == 'OPTIONS' and request.endpoint and request.endpoint.routing.get('cors'):
            # CORS preflight request: answer directly, without the handler
            headers = {
                'Access-Control-Max-Age': 60 * 60 * 24,
                'Access-Control-Allow-Headers': 'Origin, X-Requested-With, Content-Type, Accept'
            }
            return Response(status=200, headers=headers)

        r = self._call_function(**self.params)
        if not r:
            r = Response(status=204)  # no content
        return r

    def make_response(self, data, headers=None, cookies=None):
        """ Helper for non-HTML responses, or HTML responses with custom
        response headers or cookies.

        While handlers can just return the HTML markup of a page they want to
        send as a string if non-HTML data is returned they need to create a
        complete response object, or the returned data will not be correctly
        interpreted by the clients.

        :param basestring data: response body
        :param headers: HTTP headers to set on the response
        :type headers: ``[(name, value)]``
        :param collections.Mapping cookies: cookies to set on the client
        """
        response = Response(data, headers=headers)
        if cookies:
            for k, v in cookies.iteritems():
                response.set_cookie(k, v)
        return response

    def render(self, template, qcontext=None, lazy=True, **kw):
        """ Lazy render of a QWeb template.

        The actual rendering of the given template will occur at then end of
        the dispatching. Meanwhile, the template and/or qcontext can be
        altered or even replaced by a static response.

        :param basestring template: template to render
        :param dict qcontext: Rendering context to use
        :param bool lazy: whether the template rendering should be deferred
                          until the last possible moment
        :param kw: forwarded to werkzeug's Response object
        """
        response = Response(template=template, qcontext=qcontext, **kw)
        if not lazy:
            return response.render()
        return response

    def not_found(self, description=None):
        """ Shortcut for a `HTTP 404
        <http://tools.ietf.org/html/rfc7231#section-6.5.4>`_ (Not Found)
        response
        """
        return werkzeug.exceptions.NotFound(description)
def httprequest(f):
    """
    .. deprecated:: 8.0

        Use the :func:`~openerp.http.route` decorator instead.
    """
    # "index" maps to the controller root; anything else keeps its own name
    base = "" if f.__name__ == "index" else f.__name__.lstrip('/')
    return route([base, base + "/<path:_ignored_path>"], type="http", auth="user", combine=True)(f)
#----------------------------------------------------------
# Controller and route registration
#----------------------------------------------------------
# presumably populated by the addon-loading machinery elsewhere -- not
# referenced in this module; confirm against the module loader.
addons_module = {}
addons_manifest = {}
# module name -> list of ("dotted.class.name", class) pairs, filled in by
# the ControllerType metaclass and consumed by routing_map()
controllers_per_module = collections.defaultdict(list)
class ControllerType(type):
    """Metaclass of Controller: propagates routing metadata from overridden
    handler methods and records controller classes per addon module in
    ``controllers_per_module``."""

    def __init__(cls, name, bases, attrs):
        super(ControllerType, cls).__init__(name, bases, attrs)

        # flag old-style methods with req as first argument
        for k, v in attrs.items():
            if inspect.isfunction(v) and hasattr(v, 'original_func'):
                # Set routing type on original functions
                routing_type = v.routing.get('type')
                parent = [claz for claz in bases if isinstance(claz, ControllerType) and hasattr(claz, k)]
                parent_routing_type = getattr(parent[0], k).original_func.routing_type if parent else routing_type or 'http'
                if routing_type is not None and routing_type is not parent_routing_type:
                    # a subclass may not change the request type of a route
                    routing_type = parent_routing_type
                    _logger.warn("Subclass re-defines <function %s.%s.%s> with different type than original."
                                 " Will use original type: %r" % (cls.__module__, cls.__name__, k, parent_routing_type))
                v.original_func.routing_type = routing_type or parent_routing_type

                spec = inspect.getargspec(v.original_func)
                first_arg = spec.args[1] if len(spec.args) >= 2 else None
                if first_arg in ["req", "request"]:
                    v._first_arg_is_req = True

        # store the controller in the controllers list
        name_class = ("%s.%s" % (cls.__module__, cls.__name__), cls)
        class_path = name_class[0].split(".")
        if not class_path[:2] == ["openerp", "addons"]:
            module = ""
        else:
            # we want to know all modules that have controllers
            module = class_path[2]
        # but we only store controllers directly inheriting from Controller
        if not "Controller" in globals() or not Controller in bases:
            return
        controllers_per_module[module].append(name_class)
class Controller(object):
    """Base class for web controllers; direct subclasses are registered
    per addon module by the ControllerType metaclass."""
    __metaclass__ = ControllerType
class EndPoint(object):
    """Bind a routed handler method to its routing metadata; calling the
    EndPoint forwards to the handler."""

    def __init__(self, method, routing):
        self.method = method
        # the undecorated function, when the handler was wrapped by route()
        self.original = getattr(method, 'original_func', method)
        self.routing = routing
        self.arguments = {}

    @property
    def first_arg_is_req(self):
        # Backward for 7.0
        return getattr(self.method, '_first_arg_is_req', False)

    def __call__(self, *args, **kw):
        return self.method(*args, **kw)
def routing_map(modules, nodb_only, converters=None):
    """Build the werkzeug routing Map for the controllers of the given
    addon modules.

    :param modules: iterable of addon module names to include
    :param nodb_only: if truthy, only register routes whose auth is "none"
        (usable without a database)
    :param converters: optional custom werkzeug URL converters
    :returns: a :class:`werkzeug.routing.Map`
    """
    routing_map = werkzeug.routing.Map(strict_slashes=False, converters=converters)

    def get_subclasses(klass):
        # collect the most-derived subclasses of klass that belong to one
        # of the requested addon modules
        def valid(c):
            return c.__module__.startswith('openerp.addons.') and c.__module__.split(".")[2] in modules
        subclasses = klass.__subclasses__()
        result = []
        for subclass in subclasses:
            if valid(subclass):
                result.extend(get_subclasses(subclass))
        if not result and valid(klass):
            result = [klass]
        return result

    # order-preserving de-duplication by object identity
    uniq = lambda it: collections.OrderedDict((id(x), x) for x in it).values()

    for module in modules:
        if module not in controllers_per_module:
            continue

        for _, cls in controllers_per_module[module]:
            subclasses = uniq(c for c in get_subclasses(cls) if c is not cls)
            if subclasses:
                # synthesize a class combining all registered extensions
                name = "%s (extended by %s)" % (cls.__name__, ', '.join(sub.__name__ for sub in subclasses))
                cls = type(name, tuple(reversed(subclasses)), {})

            o = cls()
            members = inspect.getmembers(o, inspect.ismethod)
            for _, mv in members:
                if hasattr(mv, 'routing'):
                    routing = dict(type='http', auth='user', methods=None, routes=None)
                    methods_done = list()
                    # update routing attributes from subclasses(auth, methods...)
                    for claz in reversed(mv.im_class.mro()):
                        fn = getattr(claz, mv.func_name, None)
                        if fn and hasattr(fn, 'routing') and fn not in methods_done:
                            methods_done.append(fn)
                            routing.update(fn.routing)
                    if not nodb_only or routing['auth'] == "none":
                        assert routing['routes'], "Method %r has not route defined" % mv
                        endpoint = EndPoint(mv, routing)
                        for url in routing['routes']:
                            if routing.get("combine", False):
                                # deprecated v7 declaration
                                url = o._cp_path.rstrip('/') + '/' + url.lstrip('/')
                                if url.endswith("/") and len(url) > 1:
                                    url = url[: -1]

                            xtra_keys = 'defaults subdomain build_only strict_slashes redirect_to alias host'.split()
                            kw = {k: routing[k] for k in xtra_keys if k in routing}
                            routing_map.add(werkzeug.routing.Rule(url, endpoint=endpoint, methods=routing['methods'], **kw))
    return routing_map
#----------------------------------------------------------
# HTTP Sessions
#----------------------------------------------------------
class AuthenticationError(Exception):
    # Mapped to JSON-RPC error code 100 / "Odoo Session Invalid" by
    # JsonRequest._handle_exception.
    pass

class SessionExpiredException(Exception):
    # Raised by OpenERPSession.check_security when credentials are no longer
    # valid; mapped to "Odoo Session Expired" / a login redirect.
    pass
class Service(object):
    """
    .. deprecated:: 8.0

        Use :func:`dispatch_rpc` instead.
    """
    def __init__(self, session, service_name):
        self.session = session
        self.service_name = service_name

    def __getattr__(self, method):
        # Any attribute access resolves to an RPC proxy for that method name.
        def proxy_method(*args):
            return dispatch_rpc(self.service_name, method, args)
        return proxy_method
class Model(object):
    """
    .. deprecated:: 8.0

        Use the registry and cursor in :data:`request` instead.
    """
    def __init__(self, session, model):
        self.session = session
        self.model = model
        self.proxy = self.session.proxy('object')

    def __getattr__(self, method):
        # every attribute access becomes a lazily-built RPC-style proxy
        self.session.assert_valid()
        def proxy(*args, **kw):
            # Can't provide any retro-compatibility for this case, so we check it and raise an Exception
            # to tell the programmer to adapt his code
            if not request.db or not request.uid or self.session.db != request.db \
                    or self.session.uid != request.uid:
                raise Exception("Trying to use Model with badly configured database or user.")

            # private model methods are never exposed
            if method.startswith('_'):
                raise Exception("Access denied")
            mod = request.registry[self.model]
            meth = getattr(mod, method)
            # make sure to instantiate an environment
            cr = request.env.cr
            result = meth(cr, request.uid, *args, **kw)
            # reorder read: results come back in the order requested in args[0]
            if method == "read":
                if isinstance(result, list) and len(result) > 0 and "id" in result[0]:
                    index = {}
                    for r in result:
                        index[r['id']] = r
                    result = [index[x] for x in args[0] if x in index]
            return result
        return proxy
class OpenERPSession(werkzeug.contrib.sessions.Session):
    """Werkzeug session specialized for OpenERP authentication state.

    Session values double as attributes: reading an unknown attribute
    returns the corresponding session value (or ``None``), and once
    ``__init__`` has completed, assigning to a name that is not a real
    instance attribute stores it as a session value so it is persisted
    by the session store.
    """
    def __init__(self, *args, **kwargs):
        self.inited = False
        self.modified = False
        self.rotate = False
        super(OpenERPSession, self).__init__(*args, **kwargs)
        self.inited = True
        self._default_values()
        self.modified = False
    def __getattr__(self, attr):
        # Only reached when normal attribute lookup fails: fall back to
        # the session dict, defaulting to None for unknown keys.
        return self.get(attr, None)
    def __setattr__(self, k, v):
        if getattr(self, "inited", False):
            try:
                object.__getattribute__(self, k)
            except AttributeError:
                # Was a bare ``except:``; only a *missing* attribute should
                # divert the assignment into the session dict, other errors
                # must propagate.
                return self.__setitem__(k, v)
        object.__setattr__(self, k, v)
    def authenticate(self, db, login=None, password=None, uid=None):
        """
        Authenticate the current user with the given db, login and
        password. If successful, store the authentication parameters in the
        current session and request.
        :param uid: If not None, that user id will be used instead the login
                    to authenticate the user.
        """
        if uid is None:
            wsgienv = request.httprequest.environ
            env = dict(
                base_location=request.httprequest.url_root.rstrip('/'),
                HTTP_HOST=wsgienv['HTTP_HOST'],
                REMOTE_ADDR=wsgienv['REMOTE_ADDR'],
            )
            uid = dispatch_rpc('common', 'authenticate', [db, login, password, env])
        else:
            security.check(db, uid, password)
        # Force a fresh session id on next save (mitigates session fixation).
        self.rotate = True
        self.db = db
        self.uid = uid
        self.login = login
        self.password = password
        request.uid = uid
        request.disable_db = False
        if uid: self.get_context()
        return uid
    def check_security(self):
        """
        Check the current authentication parameters to know if those are still
        valid. This method should be called at each request. If the
        authentication fails, a :exc:`SessionExpiredException` is raised.
        """
        if not self.db or not self.uid:
            raise SessionExpiredException("Session expired")
        security.check(self.db, self.uid, self.password)
    def logout(self, keep_db=False):
        # Iterate over a copy of the keys: we delete entries while looping.
        for k in list(self.keys()):
            if not (keep_db and k == 'db'):
                del self[k]
        self._default_values()
        self.rotate = True
    def _default_values(self):
        # Guarantee the keys the rest of the stack relies on always exist.
        self.setdefault("db", None)
        self.setdefault("uid", None)
        self.setdefault("login", None)
        self.setdefault("password", None)
        self.setdefault("context", {})
    def get_context(self):
        """
        Re-initializes the current user's session context (based on his
        preferences) by calling res.users.get_context() with the old context.
        :returns: the new context
        """
        assert self.uid, "The user needs to be logged-in to initialize his context"
        self.context = request.registry.get('res.users').context_get(request.cr, request.uid) or {}
        self.context['uid'] = self.uid
        self._fix_lang(self.context)
        return self.context
    def _fix_lang(self, context):
        """ OpenERP provides languages which may not make sense and/or may not
        be understood by the web client's libraries.
        Fix those here.
        :param dict context: context to fix
        """
        lang = context.get('lang')
        # inane OpenERP locale
        if lang == 'ar_AR':
            lang = 'ar'
        # lang to lang_REGION (datejs only handles lang_REGION, no bare langs)
        if lang in babel.core.LOCALE_ALIASES:
            lang = babel.core.LOCALE_ALIASES[lang]
        context['lang'] = lang or 'en_US'
    # Deprecated to be removed in 9
    """
    Damn properties for retro-compatibility. All of that is deprecated,
    all of that.
    """
    @property
    def _db(self):
        return self.db
    @_db.setter
    def _db(self, value):
        self.db = value
    @property
    def _uid(self):
        return self.uid
    @_uid.setter
    def _uid(self, value):
        self.uid = value
    @property
    def _login(self):
        return self.login
    @_login.setter
    def _login(self, value):
        self.login = value
    @property
    def _password(self):
        return self.password
    @_password.setter
    def _password(self, value):
        self.password = value
    def send(self, service_name, method, *args):
        """
        .. deprecated:: 8.0
        Use :func:`dispatch_rpc` instead.
        """
        return dispatch_rpc(service_name, method, args)
    def proxy(self, service):
        """
        .. deprecated:: 8.0
        Use :func:`dispatch_rpc` instead.
        """
        return Service(self, service)
    def assert_valid(self, force=False):
        """
        .. deprecated:: 8.0
        Use :meth:`check_security` instead.
        Ensures this session is valid (logged into the openerp server)
        """
        if self.uid and not force:
            return
        # TODO use authenticate instead of login
        self.uid = self.proxy("common").login(self.db, self.login, self.password)
        if not self.uid:
            raise AuthenticationError("Authentication failure")
    def ensure_valid(self):
        """
        .. deprecated:: 8.0
        Use :meth:`check_security` instead.
        """
        if self.uid:
            try:
                self.assert_valid(True)
            except Exception:
                self.uid = None
    def execute(self, model, func, *l, **d):
        """
        .. deprecated:: 8.0
        Use the registry and cursor in :data:`request` instead.
        """
        model = self.model(model)
        r = getattr(model, func)(*l, **d)
        return r
    def exec_workflow(self, model, id, signal):
        """
        .. deprecated:: 8.0
        Use the registry and cursor in :data:`request` instead.
        """
        self.assert_valid()
        r = self.proxy('object').exec_workflow(self.db, self.uid, self.password, model, signal, id)
        return r
    def model(self, model):
        """
        .. deprecated:: 8.0
        Use the registry and cursor in :data:`request` instead.
        Get an RPC proxy for the object ``model``, bound to this session.
        :param model: an OpenERP model name
        :type model: str
        :rtype: a model object
        """
        if not self.db:
            raise SessionExpiredException("Session expired")
        return Model(self, model)
    def save_action(self, action):
        """
        This method store an action object in the session and returns an integer
        identifying that action. The method get_action() can be used to get
        back the action.
        :param the_action: The action to save in the session.
        :type the_action: anything
        :return: A key identifying the saved action.
        :rtype: integer
        """
        saved_actions = self.setdefault('saved_actions', {"next": 1, "actions": {}})
        # we don't allow more than 10 stored actions
        if len(saved_actions["actions"]) >= 10:
            # evict the entry with the smallest (i.e. oldest) key
            del saved_actions["actions"][min(saved_actions["actions"])]
        key = saved_actions["next"]
        saved_actions["actions"][key] = action
        saved_actions["next"] = key + 1
        self.modified = True
        return key
    def get_action(self, key):
        """
        Gets back a previously saved action. This method can return None if the action
        was saved since too much time (this case should be handled in a smart way).
        :param key: The key given by save_action()
        :type key: integer
        :return: The saved action or None.
        :rtype: anything
        """
        saved_actions = self.get('saved_actions', {})
        return saved_actions.get("actions", {}).get(key)
def session_gc(session_store):
    """Probabilistically purge stale session files from ``session_store``.

    Runs the sweep on roughly 0.1% of calls so the cost is amortized over
    requests; files untouched for more than a week are removed. Unreadable
    or concurrently-deleted files are silently skipped.
    """
    if random.random() >= 0.001:
        return
    # we keep session one week
    cutoff = time.time() - 7 * 24 * 60 * 60
    for entry in os.listdir(session_store.path):
        full_path = os.path.join(session_store.path, entry)
        try:
            if os.path.getmtime(full_path) < cutoff:
                os.unlink(full_path)
        except OSError:
            # file vanished or is unreadable: best-effort cleanup only
            pass
#----------------------------------------------------------
# WSGI Layer
#----------------------------------------------------------
# Add potentially missing (older ubuntu) font mime types
# (registered globally so static font files are served with correct
# Content-Type headers by the static-file middleware)
mimetypes.add_type('application/font-woff', '.woff')
mimetypes.add_type('application/vnd.ms-fontobject', '.eot')
mimetypes.add_type('application/x-font-ttf', '.ttf')
class Response(werkzeug.wrappers.Response):
    """ Response object passed through controller route chain.
    In addition to the :class:`werkzeug.wrappers.Response` parameters, this
    class's constructor can take the following additional parameters
    for QWeb Lazy Rendering.
    :param basestring template: template to render
    :param dict qcontext: Rendering context to use
    :param int uid: User id to use for the ir.ui.view render call,
                    ``None`` to use the request's user (the default)
    these attributes are available as parameters on the Response object and
    can be altered at any time before rendering
    Also exposes all the attributes and methods of
    :class:`werkzeug.wrappers.Response`.
    """
    default_mimetype = 'text/html'
    def __init__(self, *args, **kw):
        # Pop the QWeb-specific keyword arguments before delegating the
        # remainder to the werkzeug Response constructor.
        template = kw.pop('template', None)
        qcontext = kw.pop('qcontext', None)
        uid = kw.pop('uid', None)
        super(Response, self).__init__(*args, **kw)
        self.set_default(template, qcontext, uid)
    def set_default(self, template=None, qcontext=None, uid=None):
        # (Re)initialize the lazy-rendering attributes; also applies CORS
        # headers when the matched endpoint declares a 'cors' routing key.
        self.template = template
        self.qcontext = qcontext or dict()
        self.uid = uid
        # Support for Cross-Origin Resource Sharing
        if request.endpoint and 'cors' in request.endpoint.routing:
            self.headers.set('Access-Control-Allow-Origin', request.endpoint.routing['cors'])
            methods = 'GET, POST'
            if request.endpoint.routing['type'] == 'json':
                # JSON-RPC endpoints are POST-only
                methods = 'POST'
            elif request.endpoint.routing.get('methods'):
                methods = ', '.join(request.endpoint.routing['methods'])
            self.headers.set('Access-Control-Allow-Methods', methods)
    @property
    def is_qweb(self):
        # True while the response still carries an unrendered template
        return self.template is not None
    def render(self):
        """ Renders the Response's template, returns the result
        """
        view_obj = request.registry["ir.ui.view"]
        # fall back to the request's user, then to the superuser
        uid = self.uid or request.uid or openerp.SUPERUSER_ID
        return view_obj.render(
            request.cr, uid, self.template, self.qcontext,
            context=request.context)
    def flatten(self):
        """ Forces the rendering of the response's template, sets the result
        as response body and unsets :attr:`.template`
        """
        self.response.append(self.render())
        self.template = None
class DisableCacheMiddleware(object):
    """WSGI middleware filtering cache-related response headers.

    ``Last-Modified`` is always dropped. When the referer's query string
    mentions ``debug``, all caching headers are stripped as well and a
    ``Cache-Control: no-cache`` header is injected so assets are always
    re-fetched during development.
    """
    def __init__(self, app):
        self.app = app

    def __call__(self, environ, start_response):
        def start_wrapped(status, headers):
            referer = environ.get('HTTP_REFERER', '')
            parsed = urlparse.urlparse(referer)
            is_debug = parsed.query.count('debug') >= 1
            blacklist = ['Last-Modified']
            if is_debug:
                kept = [('Cache-Control', 'no-cache')]
                blacklist += ['Expires', 'Etag', 'Cache-Control']
            else:
                kept = []
            kept.extend((name, value) for name, value in headers
                        if name not in blacklist)
            start_response(status, kept)
        return self.app(environ, start_wrapped)
class Root(object):
    """Root WSGI application for the OpenERP Web Client.
    """
    def __init__(self):
        # addons are loaded lazily, on the first request (see __call__)
        self._loaded = False
    @lazy_property
    def session_store(self):
        # Setup http sessions
        path = openerp.tools.config.session_dir
        _logger.debug('HTTP sessions stored in: %s', path)
        return werkzeug.contrib.sessions.FilesystemSessionStore(path, session_class=OpenERPSession)
    @lazy_property
    def nodb_routing_map(self):
        # routing map used for requests served without a database selected
        _logger.info("Generating nondb routing")
        return routing_map([''] + openerp.conf.server_wide_modules, True)
    def __call__(self, environ, start_response):
        """ Handle a WSGI request
        """
        if not self._loaded:
            self._loaded = True
            self.load_addons()
        return self.dispatch(environ, start_response)
    def load_addons(self):
        """ Load all addons from addons path containing static files and
        controllers and configure them. """
        # TODO should we move this to ir.http so that only configured modules are served ?
        statics = {}
        for addons_path in openerp.modules.module.ad_paths:
            for module in sorted(os.listdir(str(addons_path))):
                if module not in addons_module:
                    manifest_path = os.path.join(addons_path, module, '__openerp__.py')
                    path_static = os.path.join(addons_path, module, 'static')
                    if os.path.isfile(manifest_path) and os.path.isdir(path_static):
                        # NOTE(review): the manifest file handle is never
                        # closed explicitly; relies on GC to release it
                        manifest = ast.literal_eval(open(manifest_path).read())
                        manifest['addons_path'] = addons_path
                        _logger.debug("Loading %s", module)
                        if 'openerp.addons' in sys.modules:
                            m = __import__('openerp.addons.' + module)
                        else:
                            m = None
                        addons_module[module] = m
                        addons_manifest[module] = manifest
                        statics['/%s/static' % module] = path_static
        if statics:
            _logger.info("HTTP Configuring static files")
        # serve each module's /<module>/static directory straight from disk,
        # with caches disabled in debug mode
        app = werkzeug.wsgi.SharedDataMiddleware(self.dispatch, statics, cache_timeout=STATIC_CACHE)
        self.dispatch = DisableCacheMiddleware(app)
    def setup_session(self, httprequest):
        # recover or create session
        session_gc(self.session_store)
        # session id lookup order: URL parameter, custom header, then cookie;
        # only the cookie counts as a non-explicit session (see get_response)
        sid = httprequest.args.get('session_id')
        explicit_session = True
        if not sid:
            sid = httprequest.headers.get("X-Openerp-Session-Id")
        if not sid:
            sid = httprequest.cookies.get('session_id')
            explicit_session = False
        if sid is None:
            httprequest.session = self.session_store.new()
        else:
            httprequest.session = self.session_store.get(sid)
        return explicit_session
    def setup_db(self, httprequest):
        db = httprequest.session.db
        # Check if session.db is legit
        if db:
            if db not in db_filter([db], httprequest=httprequest):
                _logger.warn("Logged into database '%s', but dbfilter "
                             "rejects it; logging session out.", db)
                httprequest.session.logout()
                db = None
        if not db:
            httprequest.session.db = db_monodb(httprequest)
    def setup_lang(self, httprequest):
        # seed the session language from the Accept-Language header,
        # normalized to OpenERP's lang_REGION convention
        if not "lang" in httprequest.session.context:
            lang = httprequest.accept_languages.best or "en_US"
            lang = babel.core.LOCALE_ALIASES.get(lang, lang).replace('-', '_')
            httprequest.session.context["lang"] = lang
    def get_request(self, httprequest):
        # deduce type of request
        if httprequest.args.get('jsonp'):
            return JsonRequest(httprequest)
        if httprequest.mimetype in ("application/json", "application/json-rpc"):
            return JsonRequest(httprequest)
        else:
            return HttpRequest(httprequest)
    def get_response(self, httprequest, result, explicit_session):
        # render any pending QWeb template before shipping the response
        if isinstance(result, Response) and result.is_qweb:
            try:
                result.flatten()
            except(Exception), e:
                if request.db:
                    result = request.registry['ir.http']._handle_exception(e)
                else:
                    raise
        if isinstance(result, basestring):
            response = Response(result, mimetype='text/html')
        else:
            response = result
        if httprequest.session.should_save:
            if httprequest.session.rotate:
                # issue a brand-new session id (e.g. right after login)
                self.session_store.delete(httprequest.session)
                httprequest.session.sid = self.session_store.generate_key()
                httprequest.session.modified = True
            self.session_store.save(httprequest.session)
        # We must not set the cookie if the session id was specified using a http header or a GET parameter.
        # There are two reasons to this:
        # - When using one of those two means we consider that we are overriding the cookie, which means creating a new
        #   session on top of an already existing session and we don't want to create a mess with the 'normal' session
        #   (the one using the cookie). That is a special feature of the Session Javascript class.
        # - It could allow session fixation attacks.
        if not explicit_session and hasattr(response, 'set_cookie'):
            response.set_cookie('session_id', httprequest.session.sid, max_age=90 * 24 * 60 * 60)
        return response
    def dispatch(self, environ, start_response):
        """
        Performs the actual WSGI dispatching for the application.
        """
        try:
            httprequest = werkzeug.wrappers.Request(environ)
            httprequest.app = self
            explicit_session = self.setup_session(httprequest)
            self.setup_db(httprequest)
            self.setup_lang(httprequest)
            request = self.get_request(httprequest)
            def _dispatch_nodb():
                # fallback dispatching on the database-less routing map
                try:
                    func, arguments = self.nodb_routing_map.bind_to_environ(request.httprequest.environ).match()
                except werkzeug.exceptions.HTTPException, e:
                    return request._handle_exception(e)
                request.set_handler(func, arguments, "none")
                result = request.dispatch()
                return result
            with request:
                db = request.session.db
                if db:
                    openerp.modules.registry.RegistryManager.check_registry_signaling(db)
                    try:
                        with openerp.tools.mute_logger('openerp.sql_db'):
                            ir_http = request.registry['ir.http']
                    except (AttributeError, psycopg2.OperationalError):
                        # psycopg2 error or attribute error while constructing
                        # the registry. That means the database probably does
                        # not exists anymore or the code doesnt match the db.
                        # Log the user out and fall back to nodb
                        request.session.logout()
                        result = _dispatch_nodb()
                    else:
                        result = ir_http._dispatch()
                        openerp.modules.registry.RegistryManager.signal_caches_change(db)
                else:
                    result = _dispatch_nodb()
                response = self.get_response(httprequest, result, explicit_session)
            return response(environ, start_response)
        except werkzeug.exceptions.HTTPException, e:
            # let werkzeug HTTP errors render themselves as WSGI responses
            return e(environ, start_response)
    def get_db_router(self, db):
        # per-database routing map built by ir.http; nodb map otherwise
        if not db:
            return self.nodb_routing_map
        return request.registry['ir.http'].routing_map()
def db_list(force=False, httprequest=None):
    """Return the databases visible to the current request.

    The raw list obtained from the ``db`` service is narrowed down by the
    configured ``dbfilter`` via :func:`db_filter`.
    """
    available = dispatch_rpc("db", "list", [force])
    return db_filter(available, httprequest=httprequest)
def db_filter(dbs, httprequest=None):
    """Filter ``dbs`` against the configured ``dbfilter`` pattern.

    The pattern may reference ``%h`` (full request host, port stripped) and
    ``%d`` (first domain component, skipping a leading ``www``); it is
    matched as a regular expression against each database name.
    """
    httprequest = httprequest or request.httprequest
    host = httprequest.environ.get('HTTP_HOST', '').split(':')[0]
    domain, _, remainder = host.partition('.')
    if domain == "www" and remainder:
        domain = remainder.partition('.')[0]
    pattern = openerp.tools.config['dbfilter'].replace('%h', host).replace('%d', domain)
    return [name for name in dbs if re.match(pattern, name)]
def db_monodb(httprequest=None):
    """
    Magic function to find the current database.
    Implementation details:
    * Magic
    * More magic
    Returns ``None`` if the magic is not magic enough.
    """
    httprequest = httprequest or request.httprequest
    candidates = db_list(True, httprequest)
    # prefer the database already bound to the session, if still visible
    session_db = httprequest.session.db
    if session_db in candidates:
        return session_db
    # unambiguous single-database setup
    return candidates[0] if len(candidates) == 1 else None
def send_file(filepath_or_fp, mimetype=None, as_attachment=False, filename=None, mtime=None,
              add_etags=True, cache_timeout=STATIC_CACHE, conditional=True):
    """This is a modified version of Flask's send_file()
    Sends the contents of a file to the client. This will use the
    most efficient method available and configured. By default it will
    try to use the WSGI server's file_wrapper support.
    By default it will try to guess the mimetype for you, but you can
    also explicitly provide one. For extra security you probably want
    to send certain files as attachment (HTML for instance). The mimetype
    guessing requires a `filename` or an `attachment_filename` to be
    provided.
    Please never pass filenames to this function from user sources without
    checking them first.
    :param filepath_or_fp: the filename of the file to send.
                        Alternatively a file object might be provided
                        in which case `X-Sendfile` might not work and
                        fall back to the traditional method. Make sure
                        that the file pointer is positioned at the start
                        of data to send before calling :func:`send_file`.
    :param mimetype: the mimetype of the file if provided, otherwise
                     auto detection happens.
    :param as_attachment: set to `True` if you want to send this file with
                          a ``Content-Disposition: attachment`` header.
    :param filename: the filename for the attachment if it differs from the file's filename or
                     if using file object without 'name' attribute (eg: E-tags with StringIO).
    :param mtime: last modification time to use for contitional response.
    :param add_etags: set to `False` to disable attaching of etags.
    :param conditional: set to `False` to disable conditional responses.
    :param cache_timeout: the timeout in seconds for the headers.
    """
    if isinstance(filepath_or_fp, (str, unicode)):
        if not filename:
            filename = os.path.basename(filepath_or_fp)
        # NOTE(review): the handle is handed over to wrap_file below and is
        # expected to be closed by the WSGI file wrapper, not here
        file = open(filepath_or_fp, 'rb')
        if not mtime:
            mtime = os.path.getmtime(filepath_or_fp)
    else:
        file = filepath_or_fp
        if not filename:
            filename = getattr(file, 'name', None)
    # measure the payload size by seeking to the end, then rewind
    file.seek(0, 2)
    size = file.tell()
    file.seek(0)
    if mimetype is None and filename:
        mimetype = mimetypes.guess_type(filename)[0]
    if mimetype is None:
        mimetype = 'application/octet-stream'
    headers = werkzeug.datastructures.Headers()
    if as_attachment:
        if filename is None:
            raise TypeError('filename unavailable, required for sending as attachment')
        headers.add('Content-Disposition', 'attachment', filename=filename)
        headers['Content-Length'] = size
    data = wrap_file(request.httprequest.environ, file)
    rv = Response(data, mimetype=mimetype, headers=headers,
                  direct_passthrough=True)
    # server-format datetime strings are accepted for mtime; anything
    # unparseable silently disables Last-Modified/ETag handling
    if isinstance(mtime, str):
        try:
            server_format = openerp.tools.misc.DEFAULT_SERVER_DATETIME_FORMAT
            mtime = datetime.datetime.strptime(mtime.split('.')[0], server_format)
        except Exception:
            mtime = None
    if mtime is not None:
        rv.last_modified = mtime
    rv.cache_control.public = True
    if cache_timeout:
        rv.cache_control.max_age = cache_timeout
        rv.expires = int(time.time() + cache_timeout)
    if add_etags and filename and mtime:
        # cheap content fingerprint: mtime + size + adler32 of the name
        rv.set_etag('odoo-%s-%s-%s' % (
            mtime,
            size,
            adler32(
                filename.encode('utf-8') if isinstance(filename, unicode)
                else filename
            ) & 0xffffffff
        ))
        if conditional:
            rv = rv.make_conditional(request.httprequest)
            # make sure we don't send x-sendfile for servers that
            # ignore the 304 status code for x-sendfile.
            if rv.status_code == 304:
                rv.headers.pop('x-sendfile', None)
    return rv
#----------------------------------------------------------
# RPC controller
#----------------------------------------------------------
class CommonController(Controller):
    """Database-agnostic RPC endpoints, available without authentication."""
    @route('/jsonrpc', type='json', auth="none")
    def jsonrpc(self, service, method, args):
        """ Method used by client APIs to contact OpenERP. """
        return dispatch_rpc(service, method, args)
    @route('/gen_session_id', type='json', auth="none")
    def gen_session_id(self):
        # Hand out a fresh session id without setting any cookie; the caller
        # is expected to pass it back explicitly (header or GET parameter).
        nsession = root.session_store.new()
        return nsession.sid
# register main wsgi handler
# ``root`` is the module-level singleton WSGI application for the web client
root = Root()
openerp.service.wsgi_server.register_wsgi_handler(root)
# vim:et:ts=4:sw=4:
| agpl-3.0 |
tomtor/QGIS | tests/src/python/test_qgslayermetadata.py | 33 | 18809 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsLayerMetadata.
Run with: ctest -V -R PyQgsLayerMetadata
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nyall Dawson'
__date__ = '11/04/2017'
__copyright__ = 'Copyright 2017, The QGIS Project'
import qgis # NOQA
from qgis.PyQt.QtXml import QDomDocument
from qgis.core import (QgsLayerMetadata,
QgsCoordinateReferenceSystem,
QgsVectorLayer,
QgsNativeMetadataValidator,
QgsBox3d,
QgsDateTimeRange)
from qgis.PyQt.QtCore import (QDate,
QTime,
QDateTime)
from qgis.testing import start_app, unittest
# initialize the QGIS application environment once for the whole test module
start_app()
class TestQgsLayerMetadata(unittest.TestCase):
    def testGettersSetters(self):
        """Exercise each simple getter/setter pair on QgsLayerMetadata."""
        m = QgsLayerMetadata()
        m.setIdentifier('identifier')
        self.assertEqual(m.identifier(), 'identifier')
        m.setParentIdentifier('parent identifier')
        self.assertEqual(m.parentIdentifier(), 'parent identifier')
        m.setLanguage('en-us')
        self.assertEqual(m.language(), 'en-us')
        m.setType('type')
        self.assertEqual(m.type(), 'type')
        m.setTitle('title')
        self.assertEqual(m.title(), 'title')
        m.setCategories(['category'])
        self.assertEqual(m.categories(), ['category'])
        m.setAbstract('abstract')
        self.assertEqual(m.abstract(), 'abstract')
        m.setFees('fees')
        self.assertEqual(m.fees(), 'fees')
        # setConstraints replaces the whole list, addConstraint appends
        m.setConstraints([QgsLayerMetadata.Constraint('constraint a'), QgsLayerMetadata.Constraint('constraint b')])
        m.addConstraint(QgsLayerMetadata.Constraint('constraint c'))
        self.assertEqual(m.constraints()[0].constraint, 'constraint a')
        self.assertEqual(m.constraints()[1].constraint, 'constraint b')
        self.assertEqual(m.constraints()[2].constraint, 'constraint c')
        m.setRights(['right a', 'right b'])
        self.assertEqual(m.rights(), ['right a', 'right b'])
        m.setLicenses(['l a', 'l b'])
        self.assertEqual(m.licenses(), ['l a', 'l b'])
        # setHistory overwrites the previous history, addHistoryItem appends
        m.setHistory(['loaded into QGIS'])
        self.assertEqual(m.history(), ['loaded into QGIS'])
        m.setHistory(['accidentally deleted some features'])
        self.assertEqual(m.history(), ['accidentally deleted some features'])
        m.addHistoryItem('panicked and deleted more')
        self.assertEqual(m.history(), ['accidentally deleted some features', 'panicked and deleted more'])
        m.setEncoding('encoding')
        self.assertEqual(m.encoding(), 'encoding')
        m.setCrs(QgsCoordinateReferenceSystem.fromEpsgId(3111))
        self.assertEqual(m.crs().authid(), 'EPSG:3111')
    def testEquality(self):
        """Check equality operators on SpatialExtent, Extent and Constraint."""
        # spatial extent
        extent = QgsLayerMetadata.SpatialExtent()
        extent.extentCrs = QgsCoordinateReferenceSystem.fromEpsgId(3111)
        extent.bounds = QgsBox3d(5.0, 6.0, 7.0, 11.0, 13.0, 15.0)
        extent2 = QgsLayerMetadata.SpatialExtent()
        extent2.extentCrs = QgsCoordinateReferenceSystem.fromEpsgId(3111)
        extent2.bounds = QgsBox3d(5.0, 6.0, 7.0, 11.0, 13.0, 15.0)
        self.assertEqual(extent, extent2)
        # a different CRS must break equality
        extent2.extentCrs = QgsCoordinateReferenceSystem.fromEpsgId(3113)
        self.assertNotEqual(extent, extent2)
        # different bounds must break equality too
        extent2.extentCrs = QgsCoordinateReferenceSystem.fromEpsgId(3111)
        extent2.bounds = QgsBox3d(5.0, 6.0, 7.0, 11.0, 13.0, 16.0)
        self.assertNotEqual(extent, extent2)
        # extent
        extent = QgsLayerMetadata.Extent()
        extent1 = QgsLayerMetadata.SpatialExtent()
        extent1.extentCrs = QgsCoordinateReferenceSystem.fromEpsgId(3111)
        extent1.bounds = QgsBox3d(5.0, 6.0, 7.0, 11.0, 13.0, 15.0)
        extent2 = QgsLayerMetadata.SpatialExtent()
        extent2.extentCrs = QgsCoordinateReferenceSystem.fromEpsgId(3113)
        extent2.bounds = QgsBox3d(5.0, 6.0, 7.0, 11.0, 13.0, 16.0)
        extent.setSpatialExtents([extent1, extent2])
        dates = [
            QgsDateTimeRange(
                QDateTime(QDate(2001, 12, 17), QTime(9, 30, 47)),
                QDateTime(QDate(2001, 12, 17), QTime(9, 30, 47))),
            QgsDateTimeRange(
                QDateTime(QDate(2010, 12, 17), QTime(9, 30, 47)),
                QDateTime(QDate(2020, 12, 17), QTime(9, 30, 47)))
        ]
        extent.setTemporalExtents(dates)
        # a copy compares equal until either component is changed
        extent_copy = QgsLayerMetadata.Extent(extent)
        self.assertEqual(extent, extent_copy)
        extent_copy.setTemporalExtents([
            QgsDateTimeRange(
                QDateTime(QDate(2001, 12, 17), QTime(9, 30, 47)),
                QDateTime(QDate(2001, 12, 17), QTime(9, 30, 47))),
            QgsDateTimeRange(
                QDateTime(QDate(2010, 12, 17), QTime(9, 30, 48)),
                QDateTime(QDate(2020, 12, 17), QTime(9, 30, 49)))
        ])
        self.assertNotEqual(extent, extent_copy)
        # changed spatial extents must also break equality of copies
        extent_copy = QgsLayerMetadata.Extent(extent)
        extent3 = QgsLayerMetadata.SpatialExtent()
        extent3.extentCrs = QgsCoordinateReferenceSystem.fromEpsgId(3113)
        extent3.bounds = QgsBox3d(5.0, 6.0, 7.0, 11.0, 13.0, 19.0)
        extent_copy.setSpatialExtents([extent1, extent3])
        self.assertNotEqual(extent, extent_copy)
        # constraints compare on both text and type
        constraint = QgsLayerMetadata.Constraint('c', 'type1')
        self.assertEqual(constraint, QgsLayerMetadata.Constraint('c', 'type1'))
        self.assertNotEqual(constraint, QgsLayerMetadata.Constraint('c2', 'type1'))
        self.assertNotEqual(constraint, QgsLayerMetadata.Constraint('c', 'type2'))
def testExtent(self):
e = QgsLayerMetadata.Extent()
se = QgsLayerMetadata.SpatialExtent()
se.extentCrs = QgsCoordinateReferenceSystem.fromEpsgId(3111)
se.bounds = QgsBox3d(1, 2, 3, 4, 5, 6)
e.setSpatialExtents([se])
e.setTemporalExtents([QgsDateTimeRange(QDateTime(QDate(2017, 1, 3), QTime(11, 34, 56)), QDateTime(QDate(2018, 1, 3), QTime(12, 35, 57)))])
m = QgsLayerMetadata()
m.setExtent(e)
extents = m.extent().spatialExtents()
self.assertEqual(extents[0].extentCrs.authid(), 'EPSG:3111')
self.assertEqual(extents[0].bounds.xMinimum(), 1.0)
self.assertEqual(extents[0].bounds.yMinimum(), 2.0)
self.assertEqual(extents[0].bounds.zMinimum(), 3.0)
self.assertEqual(extents[0].bounds.xMaximum(), 4.0)
self.assertEqual(extents[0].bounds.yMaximum(), 5.0)
self.assertEqual(extents[0].bounds.zMaximum(), 6.0)
self.assertEqual(m.extent().temporalExtents()[0].begin(), QDateTime(QDate(2017, 1, 3), QTime(11, 34, 56)))
self.assertEqual(m.extent().temporalExtents()[0].end(), QDateTime(QDate(2018, 1, 3), QTime(12, 35, 57)))
    def createTestMetadata(self):
        """
        Returns a standard metadata which can be tested with checkExpectedMetadata
        """
        m = QgsLayerMetadata()
        # scalar identification fields
        m.setIdentifier('1234')
        m.setParentIdentifier('xyz')
        m.setLanguage('en-CA')
        m.setType('dataset')
        m.setTitle('roads')
        m.setAbstract('my roads')
        m.setFees('None')
        m.setConstraints([QgsLayerMetadata.Constraint('None', 'access')])
        m.setRights(['Copyright foo 2017'])
        m.setLicenses(['WTFPL'])
        m.setHistory(['history a', 'history b'])
        m.setKeywords({
            'GEMET': ['kw1', 'kw2'],
            'gmd:topicCategory': ['natural'],
        })
        m.setEncoding('utf-8')
        m.setCrs(QgsCoordinateReferenceSystem.fromOgcWmsCrs('EPSG:4326'))
        # one world-covering spatial extent plus two temporal ranges
        # (an instant and a ten-year span)
        e = QgsLayerMetadata.Extent()
        se = QgsLayerMetadata.SpatialExtent()
        se.extentCrs = QgsCoordinateReferenceSystem.fromOgcWmsCrs('EPSG:4326')
        se.bounds = QgsBox3d(-180, -90, 0, 180, 90, 0)
        e.setSpatialExtents([se])
        dates = [
            QgsDateTimeRange(
                QDateTime(QDate(2001, 12, 17), QTime(9, 30, 47)),
                QDateTime(QDate(2001, 12, 17), QTime(9, 30, 47))),
            QgsDateTimeRange(
                QDateTime(QDate(2010, 12, 17), QTime(9, 30, 47)),
                QDateTime(QDate(2020, 12, 17), QTime(9, 30, 47)))
        ]
        e.setTemporalExtents(dates)
        m.setExtent(e)
        # a single contact carrying one postal address
        c = QgsLayerMetadata.Contact()
        c.name = 'John Smith'
        c.organization = 'ACME'
        c.position = 'staff'
        c.voice = '1500 515 555'
        c.fax = 'xx.xxx.xxx.xxxx'
        c.email = 'foo@example.org'
        c.role = 'pointOfContact'
        address = QgsLayerMetadata.Address()
        address.type = 'postal'
        address.address = '123 Main Street'
        address.city = 'anycity'
        address.administrativeArea = 'anyprovince'
        address.postalCode = '90210'
        address.country = 'Canada'
        c.addresses = [address]
        m.setContacts([c])
        # three typed links: WMS, WFS and a direct-download archive
        l = QgsLayerMetadata.Link()
        l.name = 'geonode:roads'
        l.type = 'OGC:WMS'
        l.description = 'my GeoNode road layer'
        l.url = 'http://example.org/wms'
        l2 = QgsLayerMetadata.Link()
        l2.name = 'geonode:roads'
        l2.type = 'OGC:WFS'
        l2.description = 'my GeoNode road layer'
        l2.url = 'http://example.org/wfs'
        l3 = QgsLayerMetadata.Link()
        l3.name = 'roads'
        l3.type = 'WWW:LINK'
        l3.description = 'full dataset download'
        l3.url = 'http://example.org/roads.tgz'
        l3.format = 'ESRI Shapefile'
        l3.mimeType = 'application/gzip'
        l3.size = '283676'
        m.setLinks([l, l2, l3])
        return m
    def checkExpectedMetadata(self, m):
        """
        Checks that a metadata object matches that returned by createTestMetadata
        """
        # scalar identification fields
        self.assertEqual(m.identifier(), '1234')
        self.assertEqual(m.parentIdentifier(), 'xyz')
        self.assertEqual(m.language(), 'en-CA')
        self.assertEqual(m.type(), 'dataset')
        self.assertEqual(m.title(), 'roads')
        self.assertEqual(m.abstract(), 'my roads')
        self.assertEqual(m.fees(), 'None')
        self.assertEqual(m.constraints()[0].constraint, 'None')
        self.assertEqual(m.constraints()[0].type, 'access')
        self.assertEqual(m.rights(), ['Copyright foo 2017'])
        self.assertEqual(m.licenses(), ['WTFPL'])
        self.assertEqual(m.history(), ['history a', 'history b'])
        self.assertEqual(m.encoding(), 'utf-8')
        self.assertEqual(
            m.keywords(),
            {'GEMET': ['kw1', 'kw2'], 'gmd:topicCategory': ['natural']})
        self.assertEqual(m.crs().authid(), 'EPSG:4326')
        # spatial and temporal extents
        extent = m.extent().spatialExtents()[0]
        self.assertEqual(extent.extentCrs.authid(), 'EPSG:4326')
        self.assertEqual(extent.bounds.xMinimum(), -180.0)
        self.assertEqual(extent.bounds.yMinimum(), -90.0)
        self.assertEqual(extent.bounds.xMaximum(), 180.0)
        self.assertEqual(extent.bounds.yMaximum(), 90.0)
        self.assertEqual(m.extent().temporalExtents()[0].begin(), QDateTime(QDate(2001, 12, 17), QTime(9, 30, 47)))
        self.assertTrue(m.extent().temporalExtents()[0].isInstant())
        self.assertFalse(m.extent().temporalExtents()[1].isInstant())
        self.assertEqual(m.extent().temporalExtents()[1].end(), QDateTime(QDate(2020, 12, 17), QTime(9, 30, 47)))
        # contact and its postal address
        self.assertEqual(m.contacts()[0].name, 'John Smith')
        self.assertEqual(m.contacts()[0].organization, 'ACME')
        self.assertEqual(m.contacts()[0].position, 'staff')
        self.assertEqual(m.contacts()[0].voice, '1500 515 555')
        self.assertEqual(m.contacts()[0].fax, 'xx.xxx.xxx.xxxx')
        self.assertEqual(m.contacts()[0].email, 'foo@example.org')
        self.assertEqual(m.contacts()[0].role, 'pointOfContact')
        self.assertEqual(m.contacts()[0].addresses[0].type, 'postal')
        self.assertEqual(m.contacts()[0].addresses[0].address, '123 Main Street')
        self.assertEqual(m.contacts()[0].addresses[0].city, 'anycity')
        self.assertEqual(m.contacts()[0].addresses[0].administrativeArea, 'anyprovince')
        self.assertEqual(m.contacts()[0].addresses[0].postalCode, '90210')
        self.assertEqual(m.contacts()[0].addresses[0].country, 'Canada')
        # the three typed links (WMS, WFS, download)
        self.assertEqual(m.links()[0].name, 'geonode:roads')
        self.assertEqual(m.links()[0].type, 'OGC:WMS')
        self.assertEqual(m.links()[0].description, 'my GeoNode road layer')
        self.assertEqual(m.links()[0].url, 'http://example.org/wms')
        self.assertEqual(m.links()[1].name, 'geonode:roads')
        self.assertEqual(m.links()[1].type, 'OGC:WFS')
        self.assertEqual(m.links()[1].description, 'my GeoNode road layer')
        self.assertEqual(m.links()[1].url, 'http://example.org/wfs')
        self.assertEqual(m.links()[2].name, 'roads')
        self.assertEqual(m.links()[2].type, 'WWW:LINK')
        self.assertEqual(m.links()[2].description, 'full dataset download')
        self.assertEqual(m.links()[2].url, 'http://example.org/roads.tgz')
        self.assertEqual(m.links()[2].format, 'ESRI Shapefile')
        self.assertEqual(m.links()[2].mimeType, 'application/gzip')
        self.assertEqual(m.links()[2].size, '283676')
def testStandard(self):
m = self.createTestMetadata()
self.checkExpectedMetadata(m)
def testSaveReadFromLayer(self):
"""
Test saving and reading metadata from a layer
"""
vl = QgsVectorLayer('Point', 'test', 'memory')
self.assertTrue(vl.isValid())
# save metadata to layer
m = self.createTestMetadata()
m.saveToLayer(vl)
# read back from layer and check result
m2 = QgsLayerMetadata()
m2.readFromLayer(vl)
self.checkExpectedMetadata(m2)
def testSaveReadFromXml(self):
"""
Test saving and reading metadata from a XML.
"""
# save metadata to XML
m = self.createTestMetadata()
doc = QDomDocument("testdoc")
elem = doc.createElement("metadata")
self.assertTrue(m.writeMetadataXml(elem, doc))
# read back from XML and check result
m2 = QgsLayerMetadata()
m2.readMetadataXml(elem)
self.checkExpectedMetadata(m2)
def testValidateNative(self): # spellok
"""
Test validating metadata against QGIS native schema
"""
m = self.createTestMetadata()
v = QgsNativeMetadataValidator()
res, list = v.validate(m)
self.assertTrue(res)
self.assertFalse(list)
# corrupt metadata piece by piece...
m = self.createTestMetadata()
m.setIdentifier('')
res, list = v.validate(m)
self.assertFalse(res)
self.assertEqual(list[0].section, 'identifier')
m = self.createTestMetadata()
m.setLanguage('')
res, list = v.validate(m)
self.assertFalse(res)
self.assertEqual(list[0].section, 'language')
m = self.createTestMetadata()
m.setType('')
res, list = v.validate(m)
self.assertFalse(res)
self.assertEqual(list[0].section, 'type')
m = self.createTestMetadata()
m.setTitle('')
res, list = v.validate(m)
self.assertFalse(res)
self.assertEqual(list[0].section, 'title')
m = self.createTestMetadata()
m.setAbstract('')
res, list = v.validate(m)
self.assertFalse(res)
self.assertEqual(list[0].section, 'abstract')
m = self.createTestMetadata()
m.setLicenses([])
res, list = v.validate(m)
self.assertFalse(res)
self.assertEqual(list[0].section, 'license')
m = self.createTestMetadata()
m.setCrs(QgsCoordinateReferenceSystem())
res, list = v.validate(m)
self.assertFalse(res)
self.assertEqual(list[0].section, 'crs')
m = self.createTestMetadata()
m.setContacts([])
res, list = v.validate(m)
self.assertFalse(res)
self.assertEqual(list[0].section, 'contacts')
m = self.createTestMetadata()
m.setLinks([])
res, list = v.validate(m)
self.assertFalse(res)
self.assertEqual(list[0].section, 'links')
m = self.createTestMetadata()
m.setKeywords({'': ['kw1', 'kw2']})
res, list = v.validate(m)
self.assertFalse(res)
self.assertEqual(list[0].section, 'keywords')
self.assertEqual(list[0].identifier, 0)
m = self.createTestMetadata()
m.setKeywords({'AA': []})
res, list = v.validate(m)
self.assertFalse(res)
self.assertEqual(list[0].section, 'keywords')
self.assertEqual(list[0].identifier, 0)
m = self.createTestMetadata()
e = m.extent()
se = e.spatialExtents()[0]
se.extentCrs = QgsCoordinateReferenceSystem()
e.setSpatialExtents([se])
m.setExtent(e)
res, list = v.validate(m)
self.assertFalse(res)
self.assertEqual(list[0].section, 'extent')
self.assertEqual(list[0].identifier, 0)
m = self.createTestMetadata()
e = m.extent()
se = e.spatialExtents()[0]
se.bounds = QgsBox3d(1, 1, 0, 1, 2)
e.setSpatialExtents([se])
m.setExtent(e)
res, list = v.validate(m)
self.assertFalse(res)
self.assertEqual(list[0].section, 'extent')
self.assertEqual(list[0].identifier, 0)
m = self.createTestMetadata()
c = m.contacts()[0]
c.name = ''
m.setContacts([c])
res, list = v.validate(m)
self.assertFalse(res)
self.assertEqual(list[0].section, 'contacts')
self.assertEqual(list[0].identifier, 0)
m = self.createTestMetadata()
l = m.links()[0]
l.name = ''
m.setLinks([l])
res, list = v.validate(m)
self.assertFalse(res)
self.assertEqual(list[0].section, 'links')
self.assertEqual(list[0].identifier, 0)
m = self.createTestMetadata()
l = m.links()[0]
l.type = ''
m.setLinks([l])
res, list = v.validate(m)
self.assertFalse(res)
self.assertEqual(list[0].section, 'links')
self.assertEqual(list[0].identifier, 0)
m = self.createTestMetadata()
l = m.links()[0]
l.url = ''
m.setLinks([l])
res, list = v.validate(m)
self.assertFalse(res)
self.assertEqual(list[0].section, 'links')
self.assertEqual(list[0].identifier, 0)
if __name__ == '__main__':
    # Run all tests in this module when executed directly.
    unittest.main()
| gpl-2.0 |
ldtri0209/robotframework | src/robot/reporting/jsbuildingcontext.py | 5 | 3312 | # Copyright 2008-2014 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from contextlib import contextmanager
import os.path
from robot.output.loggerhelper import LEVELS
from robot.utils import (html_escape, html_format, get_link_path,
timestamp_to_secs)
from .stringcache import StringCache
class JsBuildingContext(object):
    """Mutable state shared while building the JS log/report data model.

    Holds the active string cache(s), the millisecond base used for
    relative timestamps, collected split-log results, the minimum message
    level seen, and registered message link targets.
    """

    def __init__(self, log_path=None, split_log=False, prune_input=False):
        # log_path can be a custom object in unit tests
        if isinstance(log_path, basestring):
            self._log_dir = os.path.dirname(log_path)
        else:
            self._log_dir = None
        self._split_log = split_log
        self._prune_input = prune_input
        self._strings = self._top_level_strings = StringCache()
        self.basemillis = None
        self.split_results = []
        self.min_level = 'NONE'
        self._msg_links = {}

    def string(self, string, escape=True):
        """Add a string to the active cache, HTML-escaping it by default."""
        if escape and string:
            text = string if isinstance(string, unicode) else unicode(string)
            return self._strings.add(html_escape(text))
        return self._strings.add(string)

    def html(self, string):
        """HTML-format the given string and cache it without re-escaping."""
        return self.string(html_format(string), escape=False)

    def relative_source(self, source):
        """Cache the path of an existing source file relative to the log dir."""
        if self._log_dir and source and os.path.exists(source):
            rel_source = get_link_path(source, self._log_dir)
        else:
            rel_source = ''
        return self.string(rel_source)

    def timestamp(self, time):
        """Return the timestamp as millis relative to the first one seen."""
        if not time:
            return None
        # Must use `long` due to http://ironpython.codeplex.com/workitem/31549
        millis = long(round(timestamp_to_secs(time) * 1000))
        if self.basemillis is None:
            self.basemillis = millis
        return millis - self.basemillis

    def message_level(self, level):
        """Track the lowest message level seen so far."""
        if LEVELS[level] < LEVELS[self.min_level]:
            self.min_level = level

    def create_link_target(self, msg):
        """Register a message so later messages can link back to its parent."""
        target = self._top_level_strings.add(msg.parent.id)
        self._msg_links[self._link_key(msg)] = target

    def link(self, msg):
        """Return the registered link target for the message, or None."""
        return self._msg_links.get(self._link_key(msg))

    def _link_key(self, msg):
        # Messages are keyed by content, level and timestamp.
        return (msg.message, msg.level, msg.timestamp)

    @property
    def strings(self):
        return self._strings.dump()

    def start_splitting_if_needed(self, split=False):
        """Switch to a fresh string cache when log splitting is active."""
        if not (self._split_log and split):
            return False
        self._strings = StringCache()
        return True

    def end_splitting(self, model):
        """Store a finished split and restore the top-level string cache."""
        self.split_results.append((model, self.strings))
        self._strings = self._top_level_strings
        return len(self.split_results)

    @contextmanager
    def prune_input(self, *items):
        yield
        if not self._prune_input:
            return
        for item in items:
            item.clear()
| apache-2.0 |
StealthMicro/OctoPi-Makerbot | env/Lib/site-packages/pip/_vendor/html5lib/treewalkers/genshistream.py | 1730 | 2278 | from __future__ import absolute_import, division, unicode_literals
from genshi.core import QName
from genshi.core import START, END, XML_NAMESPACE, DOCTYPE, TEXT
from genshi.core import START_NS, END_NS, START_CDATA, END_CDATA, PI, COMMENT
from . import _base
from ..constants import voidElements, namespaces
class TreeWalker(_base.TreeWalker):
    """html5lib tree walker over a genshi markup event stream."""

    def __iter__(self):
        # Buffer one event so tokens() can peek at its successor.
        pending = None
        for event in self.tree:
            if pending is not None:
                for token in self.tokens(pending, event):
                    yield token
            pending = event

        # Flush the final buffered event (with no follower).
        if pending is not None:
            for token in self.tokens(pending, None):
                yield token

    def tokens(self, event, next):
        """Translate a single genshi event into html5lib tokens."""
        kind, data, _ = event
        if kind == START:
            tag, attribs = data
            name = tag.localname
            namespace = tag.namespace
            converted_attribs = {}
            for attr_key, attr_value in attribs:
                if isinstance(attr_key, QName):
                    key = (attr_key.namespace, attr_key.localname)
                else:
                    key = (None, attr_key)
                converted_attribs[key] = attr_value

            if namespace == namespaces["html"] and name in voidElements:
                # Void element: emit an empty tag; flag it as having
                # (bogus) content unless the very next event closes it.
                has_content = (not next or next[0] != END
                               or next[1] != tag)
                for token in self.emptyTag(namespace, name,
                                           converted_attribs, has_content):
                    yield token
            else:
                yield self.startTag(namespace, name, converted_attribs)

        elif kind == END:
            if data.localname not in voidElements:
                yield self.endTag(data.namespace, data.localname)

        elif kind == COMMENT:
            yield self.comment(data)

        elif kind == TEXT:
            for token in self.text(data):
                yield token

        elif kind == DOCTYPE:
            yield self.doctype(*data)

        elif kind in (XML_NAMESPACE, DOCTYPE, START_NS, END_NS,
                      START_CDATA, END_CDATA, PI):
            # Structural/namespace events carry no tree content.
            pass

        else:
            yield self.unknown(kind)
| agpl-3.0 |
thanatoskira/cansina | core/inspector.py | 2 | 2268 | import sys
import random
import requests
import hashlib
# Browser-like User-Agent header sent with every probe request.
USER_AGENT = "Mozilla/5.0 (Windows; U; MSIE 10.0; Windows NT 9.0; es-ES)"
user_agent = {"user-agent": USER_AGENT}
class Inspector:
    """Examine how the target behaves when a deliberately nonexistent page
    is requested, so callers can recognise soft-404 responses later on."""

    # Classification codes returned by check_this()
    TEST404_OK = 0       # server returns a genuine 404
    TEST404_MD5 = 1      # error page identified by its content md5
    TEST404_STRING = 2   # (reserved) error page identified by a string
    TEST404_URL = 3      # server redirects to an error URL
    TEST404_NONE = 4     # behaviour could not be determined

    def __init__(self, target):
        self.target = target

    def _give_it_a_try(self):
        """Request a random resource under the target and summarise the
        response.

        Returns a dict with the HTTP status code (as str), response size,
        md5 of the body, the body itself and, if a redirect happened, the
        final URL under 'location'.
        """
        # Build a 42-char random lowercase path. Seed once (not once per
        # character, as before) and use an inclusive-exclusive upper bound
        # of 123 so 'z' (122) can actually appear -- randrange excludes
        # its stop value, so the original 122 silently dropped 'z'.
        random.seed()
        resource = "".join(chr(random.randrange(97, 123)) for _ in range(42))
        target = self.target + resource
        print("Checking with %s" % target)
        page = requests.get(target, headers=user_agent)
        content = page.content
        # Hash the raw body directly: "".join(content) was a pointless
        # copy on Python 2 and breaks on Python 3 where content is bytes.
        result = {'code': str(page.status_code),
                  'size': len(content),
                  'md5': hashlib.md5(content).hexdigest(),
                  'content': content,
                  'location': None}
        if len(page.history) >= 1:
            # A redirect occurred; record where we ended up.
            result['location'] = page.url
        return result

    def check_this(self):
        """Probe once and classify the target's not-found behaviour.

        Returns a (token, classification) pair where the classification is
        one of the TEST404_* constants and token is the datum needed to
        recognise the error page ('' when none is needed).
        """
        first_result = self._give_it_a_try()
        if first_result['code'] == '404':
            print("Got a nice 404, problems not expected")
            # A real 404: nothing special to remember.
            return '', Inspector.TEST404_OK
        if first_result['code'] == '302' or first_result['location']:
            # Soft-404 via redirect: remember the error URL.
            return first_result['location'], Inspector.TEST404_URL
        # Otherwise fingerprint the error page by its content hash.
        # (The original also had an unreachable TEST404_NONE return here.)
        return first_result['md5'], Inspector.TEST404_MD5
if __name__ == '__main__':
    # Probe the target given on the command line and print the verdict.
    # print() as a function works on both Python 2 and 3; the previous
    # `print i.check_this()` statement was Python 2 only.
    i = Inspector(sys.argv[1])
    print(i.check_this())
| gpl-3.0 |
ndingwall/scikit-learn | sklearn/cluster/_optics.py | 6 | 37726 | # -*- coding: utf-8 -*-
"""Ordering Points To Identify the Clustering Structure (OPTICS)
These routines execute the OPTICS algorithm, and implement various
cluster extraction methods of the ordered list.
Authors: Shane Grigsby <refuge@rocktalus.com>
Adrin Jalali <adrinjalali@gmail.com>
Erich Schubert <erich@debian.org>
Hanmin Qin <qinhanmin2005@sina.com>
License: BSD 3 clause
"""
import warnings
import numpy as np
from ..utils import gen_batches, get_chunk_n_rows
from ..utils.validation import _deprecate_positional_args
from ..neighbors import NearestNeighbors
from ..base import BaseEstimator, ClusterMixin
from ..metrics import pairwise_distances
class OPTICS(ClusterMixin, BaseEstimator):
    """Estimate clustering structure from vector array.

    OPTICS (Ordering Points To Identify the Clustering Structure), closely
    related to DBSCAN, finds core sample of high density and expands clusters
    from them [1]_. Unlike DBSCAN, keeps cluster hierarchy for a variable
    neighborhood radius. Better suited for usage on large datasets than the
    current sklearn implementation of DBSCAN.

    Clusters are then extracted using a DBSCAN-like method
    (cluster_method = 'dbscan') or an automatic
    technique proposed in [1]_ (cluster_method = 'xi').

    This implementation deviates from the original OPTICS by first performing
    k-nearest-neighborhood searches on all points to identify core sizes, then
    computing only the distances to unprocessed points when constructing the
    cluster order. Note that we do not employ a heap to manage the expansion
    candidates, so the time complexity will be O(n^2).

    Read more in the :ref:`User Guide <optics>`.

    Parameters
    ----------
    min_samples : int > 1 or float between 0 and 1, default=5
        The number of samples in a neighborhood for a point to be considered as
        a core point. Also, up and down steep regions can't have more than
        ``min_samples`` consecutive non-steep points. Expressed as an absolute
        number or a fraction of the number of samples (rounded to be at least
        2).

    max_eps : float, default=np.inf
        The maximum distance between two samples for one to be considered as
        in the neighborhood of the other. Default value of ``np.inf`` will
        identify clusters across all scales; reducing ``max_eps`` will result
        in shorter run times.

    metric : str or callable, default='minkowski'
        Metric to use for distance computation. Any metric from scikit-learn
        or scipy.spatial.distance can be used.

        If metric is a callable function, it is called on each
        pair of instances (rows) and the resulting value recorded. The callable
        should take two arrays as input and return one value indicating the
        distance between them. This works for Scipy's metrics, but is less
        efficient than passing the metric name as a string. If metric is
        "precomputed", X is assumed to be a distance matrix and must be square.

        Valid values for metric are:

        - from scikit-learn: ['cityblock', 'cosine', 'euclidean', 'l1', 'l2',
          'manhattan']

        - from scipy.spatial.distance: ['braycurtis', 'canberra', 'chebyshev',
          'correlation', 'dice', 'hamming', 'jaccard', 'kulsinski',
          'mahalanobis', 'minkowski', 'rogerstanimoto', 'russellrao',
          'seuclidean', 'sokalmichener', 'sokalsneath', 'sqeuclidean',
          'yule']

        See the documentation for scipy.spatial.distance for details on these
        metrics.

    p : int, default=2
        Parameter for the Minkowski metric from
        :class:`~sklearn.metrics.pairwise_distances`. When p = 1, this is
        equivalent to using manhattan_distance (l1), and euclidean_distance
        (l2) for p = 2. For arbitrary p, minkowski_distance (l_p) is used.

    metric_params : dict, default=None
        Additional keyword arguments for the metric function.

    cluster_method : str, default='xi'
        The extraction method used to extract clusters using the calculated
        reachability and ordering. Possible values are "xi" and "dbscan".

    eps : float, default=None
        The maximum distance between two samples for one to be considered as
        in the neighborhood of the other. By default it assumes the same value
        as ``max_eps``.
        Used only when ``cluster_method='dbscan'``.

    xi : float between 0 and 1, default=0.05
        Determines the minimum steepness on the reachability plot that
        constitutes a cluster boundary. For example, an upwards point in the
        reachability plot is defined by the ratio from one point to its
        successor being at most 1-xi.
        Used only when ``cluster_method='xi'``.

    predecessor_correction : bool, default=True
        Correct clusters according to the predecessors calculated by OPTICS
        [2]_. This parameter has minimal effect on most datasets.
        Used only when ``cluster_method='xi'``.

    min_cluster_size : int > 1 or float between 0 and 1, default=None
        Minimum number of samples in an OPTICS cluster, expressed as an
        absolute number or a fraction of the number of samples (rounded to be
        at least 2). If ``None``, the value of ``min_samples`` is used instead.
        Used only when ``cluster_method='xi'``.

    algorithm : {'auto', 'ball_tree', 'kd_tree', 'brute'}, default='auto'
        Algorithm used to compute the nearest neighbors:

        - 'ball_tree' will use :class:`BallTree`
        - 'kd_tree' will use :class:`KDTree`
        - 'brute' will use a brute-force search.
        - 'auto' will attempt to decide the most appropriate algorithm
          based on the values passed to :meth:`fit` method. (default)

        Note: fitting on sparse input will override the setting of
        this parameter, using brute force.

    leaf_size : int, default=30
        Leaf size passed to :class:`BallTree` or :class:`KDTree`. This can
        affect the speed of the construction and query, as well as the memory
        required to store the tree. The optimal value depends on the
        nature of the problem.

    n_jobs : int, default=None
        The number of parallel jobs to run for neighbors search.
        ``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
        ``-1`` means using all processors. See :term:`Glossary <n_jobs>`
        for more details.

    Attributes
    ----------
    labels_ : ndarray of shape (n_samples,)
        Cluster labels for each point in the dataset given to fit().
        Noisy samples and points which are not included in a leaf cluster
        of ``cluster_hierarchy_`` are labeled as -1.

    reachability_ : ndarray of shape (n_samples,)
        Reachability distances per sample, indexed by object order. Use
        ``clust.reachability_[clust.ordering_]`` to access in cluster order.

    ordering_ : ndarray of shape (n_samples,)
        The cluster ordered list of sample indices.

    core_distances_ : ndarray of shape (n_samples,)
        Distance at which each sample becomes a core point, indexed by object
        order. Points which will never be core have a distance of inf. Use
        ``clust.core_distances_[clust.ordering_]`` to access in cluster order.

    predecessor_ : ndarray of shape (n_samples,)
        Point that a sample was reached from, indexed by object order.
        Seed points have a predecessor of -1.

    cluster_hierarchy_ : ndarray of shape (n_clusters, 2)
        The list of clusters in the form of ``[start, end]`` in each row, with
        all indices inclusive. The clusters are ordered according to
        ``(end, -start)`` (ascending) so that larger clusters encompassing
        smaller clusters come after those smaller ones. Since ``labels_`` does
        not reflect the hierarchy, usually
        ``len(cluster_hierarchy_) > np.unique(optics.labels_)``. Please also
        note that these indices are of the ``ordering_``, i.e.
        ``X[ordering_][start:end + 1]`` form a cluster.
        Only available when ``cluster_method='xi'``.

    See Also
    --------
    DBSCAN : A similar clustering for a specified neighborhood radius (eps).
        Our implementation is optimized for runtime.

    References
    ----------
    .. [1] Ankerst, Mihael, Markus M. Breunig, Hans-Peter Kriegel,
       and Jörg Sander. "OPTICS: ordering points to identify the clustering
       structure." ACM SIGMOD Record 28, no. 2 (1999): 49-60.

    .. [2] Schubert, Erich, Michael Gertz.
       "Improving the Cluster Structure Extracted from OPTICS Plots." Proc. of
       the Conference "Lernen, Wissen, Daten, Analysen" (LWDA) (2018): 318-329.

    Examples
    --------
    >>> from sklearn.cluster import OPTICS
    >>> import numpy as np
    >>> X = np.array([[1, 2], [2, 5], [3, 6],
    ...               [8, 7], [8, 8], [7, 3]])
    >>> clustering = OPTICS(min_samples=2).fit(X)
    >>> clustering.labels_
    array([0, 0, 0, 1, 1, 1])
    """
    @_deprecate_positional_args
    def __init__(self, *, min_samples=5, max_eps=np.inf, metric='minkowski',
                 p=2, metric_params=None, cluster_method='xi', eps=None,
                 xi=0.05, predecessor_correction=True, min_cluster_size=None,
                 algorithm='auto', leaf_size=30, n_jobs=None):
        # Parameters are stored verbatim; all validation happens in fit()
        # (standard scikit-learn estimator convention).
        self.max_eps = max_eps
        self.min_samples = min_samples
        self.min_cluster_size = min_cluster_size
        self.algorithm = algorithm
        self.metric = metric
        self.metric_params = metric_params
        self.p = p
        self.leaf_size = leaf_size
        self.cluster_method = cluster_method
        self.eps = eps
        self.xi = xi
        self.predecessor_correction = predecessor_correction
        self.n_jobs = n_jobs

    def fit(self, X, y=None):
        """Perform OPTICS clustering.

        Extracts an ordered list of points and reachability distances, and
        performs initial clustering using ``max_eps`` distance specified at
        OPTICS object instantiation.

        Parameters
        ----------
        X : ndarray of shape (n_samples, n_features), or \
                (n_samples, n_samples) if metric=’precomputed’
            A feature array, or array of distances between samples if
            metric='precomputed'.

        y : ignored
            Ignored.

        Returns
        -------
        self : instance of OPTICS
            The instance.
        """
        # Coerce input to a float array (also for metric='precomputed').
        X = self._validate_data(X, dtype=float)

        if self.cluster_method not in ['dbscan', 'xi']:
            raise ValueError("cluster_method should be one of"
                             " 'dbscan' or 'xi' but is %s" %
                             self.cluster_method)

        # Build the reachability graph once; both extraction methods below
        # only re-use these arrays.
        (self.ordering_, self.core_distances_, self.reachability_,
         self.predecessor_) = compute_optics_graph(
             X=X, min_samples=self.min_samples, algorithm=self.algorithm,
             leaf_size=self.leaf_size, metric=self.metric,
             metric_params=self.metric_params, p=self.p, n_jobs=self.n_jobs,
             max_eps=self.max_eps)

        # Extract clusters from the calculated orders and reachability
        if self.cluster_method == 'xi':
            labels_, clusters_ = cluster_optics_xi(
                reachability=self.reachability_,
                predecessor=self.predecessor_,
                ordering=self.ordering_,
                min_samples=self.min_samples,
                min_cluster_size=self.min_cluster_size,
                xi=self.xi,
                predecessor_correction=self.predecessor_correction)
            self.cluster_hierarchy_ = clusters_
        elif self.cluster_method == 'dbscan':
            # eps defaults to max_eps and may never exceed it.
            if self.eps is None:
                eps = self.max_eps
            else:
                eps = self.eps

            if eps > self.max_eps:
                raise ValueError('Specify an epsilon smaller than %s. Got %s.'
                                 % (self.max_eps, eps))

            labels_ = cluster_optics_dbscan(
                reachability=self.reachability_,
                core_distances=self.core_distances_,
                ordering=self.ordering_, eps=eps)

        self.labels_ = labels_
        return self
def _validate_size(size, n_samples, param_name):
if size <= 0 or (size !=
int(size)
and size > 1):
raise ValueError('%s must be a positive integer '
'or a float between 0 and 1. Got %r' %
(param_name, size))
elif size > n_samples:
raise ValueError('%s must be no greater than the'
' number of samples (%d). Got %d' %
(param_name, n_samples, size))
# OPTICS helper functions
def _compute_core_distances_(X, neighbors, min_samples, working_memory):
    """Compute the distance to the ``min_samples``-th nearest neighbor.

    Equivalent to ``neighbors.kneighbors(X, min_samples)[0][:, -1]`` but
    processed in memory-bounded chunks.

    Parameters
    ----------
    X : array-like of shape (n_samples, n_features)
        The data.

    neighbors : NearestNeighbors instance
        The fitted nearest neighbors estimator.

    min_samples : int
        Number of neighbors defining a core point.

    working_memory : int, default=None
        The sought maximum memory for temporary distance matrix chunks.
        When None (default), the value of
        ``sklearn.get_config()['working_memory']`` is used.

    Returns
    -------
    core_distances : ndarray of shape (n_samples,)
        Distance at which each sample becomes a core point.
        Points which will never be core have a distance of inf.
    """
    n_samples = X.shape[0]
    core_distances = np.full(n_samples, np.nan)

    # Chunk size chosen so each kneighbors distance block stays within
    # the working-memory budget (16 bytes per neighbor entry).
    chunk_n_rows = get_chunk_n_rows(row_bytes=16 * min_samples,
                                    max_n_rows=n_samples,
                                    working_memory=working_memory)
    for batch in gen_batches(n_samples, chunk_n_rows):
        # Last column of the sorted distances is the k-th neighbor.
        core_distances[batch] = neighbors.kneighbors(
            X[batch], min_samples)[0][:, -1]
    return core_distances
@_deprecate_positional_args
def compute_optics_graph(X, *, min_samples, max_eps, metric, p, metric_params,
                         algorithm, leaf_size, n_jobs):
    """Computes the OPTICS reachability graph.

    Read more in the :ref:`User Guide <optics>`.

    Parameters
    ----------
    X : ndarray of shape (n_samples, n_features), or \
            (n_samples, n_samples) if metric=’precomputed’.
        A feature array, or array of distances between samples if
        metric='precomputed'

    min_samples : int > 1 or float between 0 and 1
        The number of samples in a neighborhood for a point to be considered
        as a core point. Expressed as an absolute number or a fraction of the
        number of samples (rounded to be at least 2).

    max_eps : float, default=np.inf
        The maximum distance between two samples for one to be considered as
        in the neighborhood of the other. Default value of ``np.inf`` will
        identify clusters across all scales; reducing ``max_eps`` will result
        in shorter run times.

    metric : str or callable, default='minkowski'
        Metric to use for distance computation. Any metric from scikit-learn
        or scipy.spatial.distance can be used.

        If metric is a callable function, it is called on each
        pair of instances (rows) and the resulting value recorded. The callable
        should take two arrays as input and return one value indicating the
        distance between them. This works for Scipy's metrics, but is less
        efficient than passing the metric name as a string. If metric is
        "precomputed", X is assumed to be a distance matrix and must be square.

        Valid values for metric are:

        - from scikit-learn: ['cityblock', 'cosine', 'euclidean', 'l1', 'l2',
          'manhattan']

        - from scipy.spatial.distance: ['braycurtis', 'canberra', 'chebyshev',
          'correlation', 'dice', 'hamming', 'jaccard', 'kulsinski',
          'mahalanobis', 'minkowski', 'rogerstanimoto', 'russellrao',
          'seuclidean', 'sokalmichener', 'sokalsneath', 'sqeuclidean',
          'yule']

        See the documentation for scipy.spatial.distance for details on these
        metrics.

    p : int, default=2
        Parameter for the Minkowski metric from
        :class:`~sklearn.metrics.pairwise_distances`. When p = 1, this is
        equivalent to using manhattan_distance (l1), and euclidean_distance
        (l2) for p = 2. For arbitrary p, minkowski_distance (l_p) is used.

    metric_params : dict, default=None
        Additional keyword arguments for the metric function.

    algorithm : {'auto', 'ball_tree', 'kd_tree', 'brute'}, default='auto'
        Algorithm used to compute the nearest neighbors:

        - 'ball_tree' will use :class:`BallTree`
        - 'kd_tree' will use :class:`KDTree`
        - 'brute' will use a brute-force search.
        - 'auto' will attempt to decide the most appropriate algorithm
          based on the values passed to :meth:`fit` method. (default)

        Note: fitting on sparse input will override the setting of
        this parameter, using brute force.

    leaf_size : int, default=30
        Leaf size passed to :class:`BallTree` or :class:`KDTree`. This can
        affect the speed of the construction and query, as well as the memory
        required to store the tree. The optimal value depends on the
        nature of the problem.

    n_jobs : int, default=None
        The number of parallel jobs to run for neighbors search.
        ``None`` means 1 unless in a :obj:`joblib.parallel_backend` context.
        ``-1`` means using all processors. See :term:`Glossary <n_jobs>`
        for more details.

    Returns
    -------
    ordering_ : array of shape (n_samples,)
        The cluster ordered list of sample indices.

    core_distances_ : array of shape (n_samples,)
        Distance at which each sample becomes a core point, indexed by object
        order. Points which will never be core have a distance of inf. Use
        ``clust.core_distances_[clust.ordering_]`` to access in cluster order.

    reachability_ : array of shape (n_samples,)
        Reachability distances per sample, indexed by object order. Use
        ``clust.reachability_[clust.ordering_]`` to access in cluster order.

    predecessor_ : array of shape (n_samples,)
        Point that a sample was reached from, indexed by object order.
        Seed points have a predecessor of -1.

    References
    ----------
    .. [1] Ankerst, Mihael, Markus M. Breunig, Hans-Peter Kriegel,
       and Jörg Sander. "OPTICS: ordering points to identify the clustering
       structure." ACM SIGMOD Record 28, no. 2 (1999): 49-60.
    """
    n_samples = X.shape[0]
    _validate_size(min_samples, n_samples, 'min_samples')
    if min_samples <= 1:
        # Fractional min_samples is interpreted relative to n_samples,
        # rounded to at least 2.
        min_samples = max(2, int(min_samples * n_samples))

    # Start all points as 'unprocessed' ##
    reachability_ = np.empty(n_samples)
    reachability_.fill(np.inf)
    predecessor_ = np.empty(n_samples, dtype=int)
    predecessor_.fill(-1)

    nbrs = NearestNeighbors(n_neighbors=min_samples,
                            algorithm=algorithm,
                            leaf_size=leaf_size,
                            metric=metric,
                            metric_params=metric_params,
                            p=p,
                            n_jobs=n_jobs)

    nbrs.fit(X)
    # Here we first do a kNN query for each point, this differs from
    # the original OPTICS that only used epsilon range queries.
    # TODO: handle working_memory somehow?
    core_distances_ = _compute_core_distances_(X=X, neighbors=nbrs,
                                               min_samples=min_samples,
                                               working_memory=None)
    # OPTICS puts an upper limit on these, use inf for undefined.
    core_distances_[core_distances_ > max_eps] = np.inf

    # Main OPTICS loop. Not parallelizable. The order that entries are
    # written to the 'ordering_' list is important!
    # Note that this implementation is O(n^2) theoretically, but
    # supposedly with very low constant factors.
    processed = np.zeros(X.shape[0], dtype=bool)
    ordering = np.zeros(X.shape[0], dtype=int)
    for ordering_idx in range(X.shape[0]):
        # Choose next based on smallest reachability distance
        # (And prefer smaller ids on ties, possibly np.inf!)
        # Linear scan over the unprocessed points (no heap is used).
        index = np.where(processed == 0)[0]
        point = index[np.argmin(reachability_[index])]

        processed[point] = True
        ordering[ordering_idx] = point
        # Only points that are core within max_eps (finite core distance)
        # expand the reachability frontier.
        if core_distances_[point] != np.inf:
            _set_reach_dist(core_distances_=core_distances_,
                            reachability_=reachability_,
                            predecessor_=predecessor_,
                            point_index=point,
                            processed=processed, X=X, nbrs=nbrs,
                            metric=metric, metric_params=metric_params,
                            p=p, max_eps=max_eps)
    if np.all(np.isinf(reachability_)):
        warnings.warn("All reachability values are inf. Set a larger"
                      " max_eps or all data will be considered outliers.",
                      UserWarning)
    return ordering, core_distances_, reachability_, predecessor_
def _set_reach_dist(core_distances_, reachability_, predecessor_,
                    point_index, processed, X, nbrs, metric, metric_params,
                    p, max_eps):
    """Update reachability/predecessor of the unprocessed neighbors of one
    point, in place."""
    current = X[point_index:point_index + 1]
    # Assume that radius_neighbors is faster without distances
    # and we don't need all distances, nevertheless, this means
    # we may be doing some work twice.
    neighbor_idx = nbrs.radius_neighbors(current, radius=max_eps,
                                         return_distance=False)[0]

    # Restrict to neighbors that have not been processed yet.
    unproc = neighbor_idx[~processed[neighbor_idx]]
    if not unproc.size:
        # Every neighbor of the current point is already processed.
        return

    # Only compute distances to unprocessed neighbors:
    if metric == 'precomputed':
        dists = X[point_index, unproc]
    else:
        _params = dict(metric_params) if metric_params is not None else dict()
        if metric == 'minkowski' and 'p' not in _params:
            # the same logic as neighbors, p is ignored if explicitly set
            # in the dict params
            _params['p'] = p
        dists = pairwise_distances(current, X[unproc],
                                   metric=metric, n_jobs=None,
                                   **_params).ravel()

    # Reachability is the max of the distance and the core distance;
    # keep only entries that improve on the current reachability.
    rdists = np.maximum(dists, core_distances_[point_index])
    improved = rdists < reachability_[unproc]
    reachability_[unproc[improved]] = rdists[improved]
    predecessor_[unproc[improved]] = point_index
@_deprecate_positional_args
def cluster_optics_dbscan(*, reachability, core_distances, ordering, eps):
    """Performs DBSCAN extraction for an arbitrary epsilon.

    Extracting the clusters runs in linear time. The resulting ``labels_``
    are close to a :class:`~sklearn.cluster.DBSCAN` with similar settings
    and ``eps``, only if ``eps`` is close to ``max_eps``.

    Parameters
    ----------
    reachability : array of shape (n_samples,)
        Reachability distances calculated by OPTICS (``reachability_``).

    core_distances : array of shape (n_samples,)
        Distances at which points become core (``core_distances_``).

    ordering : array of shape (n_samples,)
        OPTICS ordered point indices (``ordering_``).

    eps : float
        DBSCAN ``eps`` parameter. Must be set to < ``max_eps``. Results
        will be close to DBSCAN algorithm if ``eps`` and ``max_eps`` are
        close to one another.

    Returns
    -------
    labels_ : array of shape (n_samples,)
        The estimated labels.
    """
    labels = np.zeros(len(core_distances), dtype=int)

    unreachable = reachability > eps  # reachability jump above eps
    core = core_distances <= eps      # point is core at this eps
    # Walking the OPTICS order, a new cluster begins whenever a core point
    # is not reachable from its predecessor; cumsum numbers the clusters.
    starts_cluster = unreachable[ordering] & core[ordering]
    labels[ordering] = np.cumsum(starts_cluster) - 1
    # Non-core points that are also unreachable are noise.
    labels[unreachable & ~core] = -1
    return labels
def cluster_optics_xi(*, reachability, predecessor, ordering, min_samples,
                      min_cluster_size=None, xi=0.05,
                      predecessor_correction=True):
    """Automatically extract clusters according to the Xi-steep method.

    Parameters
    ----------
    reachability : ndarray of shape (n_samples,)
        Reachability distances calculated by OPTICS (`reachability_`).

    predecessor : ndarray of shape (n_samples,)
        Predecessors calculated by OPTICS.

    ordering : ndarray of shape (n_samples,)
        OPTICS ordered point indices (`ordering_`).

    min_samples : int > 1 or float between 0 and 1
        The same as the min_samples given to OPTICS. Up and down steep
        regions can't have more then ``min_samples`` consecutive non-steep
        points. Expressed as an absolute number or a fraction of the number
        of samples (rounded to be at least 2).

    min_cluster_size : int > 1 or float between 0 and 1, default=None
        Minimum number of samples in an OPTICS cluster, expressed as an
        absolute number or a fraction of the number of samples (rounded to
        be at least 2). If ``None``, the value of ``min_samples`` is used.

    xi : float between 0 and 1, default=0.05
        Determines the minimum steepness on the reachability plot that
        constitutes a cluster boundary. For example, an upwards point in
        the reachability plot is defined by the ratio from one point to its
        successor being at most 1-xi.

    predecessor_correction : bool, default=True
        Correct clusters based on the calculated predecessors.

    Returns
    -------
    labels : ndarray of shape (n_samples,)
        The labels assigned to samples. Points which are not included
        in any cluster are labeled as -1.

    clusters : ndarray of shape (n_clusters, 2)
        The list of clusters as inclusive ``[start, end]`` index pairs into
        ``ordering``, sorted by ``(end, -start)`` (ascending) so that larger
        clusters encompassing smaller ones come after the clusters nested
        inside them. Since ``labels`` does not reflect the hierarchy,
        usually ``len(clusters) > np.unique(labels)``.
    """
    n_samples = len(reachability)

    def as_absolute(size):
        # Fractional sizes become absolute counts of at least 2.
        return size if size > 1 else max(2, int(size * n_samples))

    _validate_size(min_samples, n_samples, 'min_samples')
    min_samples = as_absolute(min_samples)

    if min_cluster_size is None:
        min_cluster_size = min_samples
    _validate_size(min_cluster_size, n_samples, 'min_cluster_size')
    min_cluster_size = as_absolute(min_cluster_size)

    # Work on the reachability plot (values in OPTICS order), then map the
    # extracted intervals back to per-sample labels.
    clusters = _xi_cluster(reachability[ordering], predecessor[ordering],
                           ordering, xi,
                           min_samples, min_cluster_size,
                           predecessor_correction)
    labels = _extract_xi_labels(ordering, clusters)
    return labels, clusters
def _extend_region(steep_point, xward_point, start, min_samples):
"""Extend the area until it's maximal.
It's the same function for both upward and downward reagions, depending on
the given input parameters. Assuming:
- steep_{upward/downward}: bool array indicating whether a point is a
steep {upward/downward};
- upward/downward: bool array indicating whether a point is
upward/downward;
To extend an upward reagion, ``steep_point=steep_upward`` and
``xward_point=downward`` are expected, and to extend a downward region,
``steep_point=steep_downward`` and ``xward_point=upward``.
Parameters
----------
steep_point : ndarray of shape (n_samples,), dtype=bool
True if the point is steep downward (upward).
xward_point : ndarray of shape (n_samples,), dtype=bool
True if the point is an upward (respectively downward) point.
start : int
The start of the xward region.
min_samples : int
The same as the min_samples given to OPTICS. Up and down steep
regions can't have more then ``min_samples`` consecutive non-steep
points.
Returns
-------
index : int
The current index iterating over all the samples, i.e. where we are up
to in our search.
end : int
The end of the region, which can be behind the index. The region
includes the ``end`` index.
"""
n_samples = len(steep_point)
non_xward_points = 0
index = start
end = start
# find a maximal area
while index < n_samples:
if steep_point[index]:
non_xward_points = 0
end = index
elif not xward_point[index]:
# it's not a steep point, but still goes up.
non_xward_points += 1
# region should include no more than min_samples consecutive
# non steep xward points.
if non_xward_points > min_samples:
break
else:
return end
index += 1
return end
def _update_filter_sdas(sdas, mib, xi_complement, reachability_plot):
"""Update steep down areas (SDAs) using the new maximum in between (mib)
value, and the given complement of xi, i.e. ``1 - xi``.
"""
if np.isinf(mib):
return []
res = [sda for sda in sdas
if mib <= reachability_plot[sda['start']] * xi_complement]
for sda in res:
sda['mib'] = max(sda['mib'], mib)
return res
def _correct_predecessor(reachability_plot, predecessor_plot, ordering, s, e):
"""Correct for predecessors.
Applies Algorithm 2 of [1]_.
Input parameters are ordered by the computer OPTICS ordering.
.. [1] Schubert, Erich, Michael Gertz.
"Improving the Cluster Structure Extracted from OPTICS Plots." Proc. of
the Conference "Lernen, Wissen, Daten, Analysen" (LWDA) (2018): 318-329.
"""
while s < e:
if reachability_plot[s] > reachability_plot[e]:
return s, e
p_e = ordering[predecessor_plot[e]]
for i in range(s, e):
if p_e == ordering[i]:
return s, e
e -= 1
return None, None
def _xi_cluster(reachability_plot, predecessor_plot, ordering, xi, min_samples,
                min_cluster_size, predecessor_correction):
    """Automatically extract clusters according to the Xi-steep method.
    This is roughly an implementation of Figure 19 of the OPTICS paper.
    Parameters
    ----------
    reachability_plot : array-like of shape (n_samples,)
        The reachability plot, i.e. reachability ordered according to
        the calculated ordering, all computed by OPTICS.
    predecessor_plot : array-like of shape (n_samples,)
        Predecessors ordered according to the calculated ordering.
    ordering : array-like of shape (n_samples,)
        The ordering of points calculated by OPTICS.
    xi : float, between 0 and 1
        Determines the minimum steepness on the reachability plot that
        constitutes a cluster boundary. For example, an upwards point in the
        reachability plot is defined by the ratio from one point to its
        successor being at most 1-xi.
    min_samples : int > 1
        The same as the min_samples given to OPTICS. Up and down steep regions
        can't have more than ``min_samples`` consecutive non-steep points.
    min_cluster_size : int > 1
        Minimum number of samples in an OPTICS cluster.
    predecessor_correction : bool
        Correct clusters based on the calculated predecessors
        (Schubert & Gertz, LWDA 2018).
    Returns
    -------
    clusters : ndarray of shape (n_clusters, 2)
        The list of clusters in the form of [start, end] in each row, with all
        indices inclusive. The clusters are ordered in a way that larger
        clusters encompassing smaller clusters come after those smaller
        clusters.
    """
    # Our implementation adds an inf to the end of reachability plot;
    # this helps to find potential clusters at the end of the
    # reachability plot even if there's no upward region at the end of it.
    reachability_plot = np.hstack((reachability_plot, np.inf))
    xi_complement = 1 - xi
    sdas = []  # steep down areas, introduced in section 4.3.2 of the paper
    clusters = []
    index = 0
    mib = 0.  # maximum in between, section 4.3.2
    # Our implementation corrects a mistake in the original
    # paper, i.e., in Definition 9 steep downward point,
    # r(p) * (1 - x1) <= r(p + 1) should be
    # r(p) * (1 - x1) >= r(p + 1)
    with np.errstate(invalid='ignore'):
        # ratio > 1 means the plot goes down at this point, < 1 means up.
        ratio = reachability_plot[:-1] / reachability_plot[1:]
        steep_upward = ratio <= xi_complement
        steep_downward = ratio >= 1 / xi_complement
        downward = ratio > 1
        upward = ratio < 1
    # the following loop is almost exactly as Figure 19 of the paper.
    # it jumps over the areas which are not either steep down or up areas
    for steep_index in iter(np.flatnonzero(steep_upward | steep_downward)):
        # just continue if steep_index has been a part of a discovered xward
        # area.
        if steep_index < index:
            continue
        # track the highest reachability seen since the last steep area
        mib = max(mib, np.max(reachability_plot[index:steep_index + 1]))
        # steep downward areas
        if steep_downward[steep_index]:
            sdas = _update_filter_sdas(sdas, mib, xi_complement,
                                       reachability_plot)
            D_start = steep_index
            D_end = _extend_region(steep_downward, upward,
                                   D_start, min_samples)
            D = {'start': D_start, 'end': D_end, 'mib': 0.}
            sdas.append(D)
            index = D_end + 1
            mib = reachability_plot[index]
        # steep upward areas
        else:
            sdas = _update_filter_sdas(sdas, mib, xi_complement,
                                       reachability_plot)
            U_start = steep_index
            U_end = _extend_region(steep_upward, downward, U_start,
                                   min_samples)
            index = U_end + 1
            mib = reachability_plot[index]
            # try to pair the upward area with each surviving steep down
            # area to form candidate clusters (Definition 11 of the paper)
            U_clusters = []
            for D in sdas:
                c_start = D['start']
                c_end = U_end
                # line (**), sc2*
                if reachability_plot[c_end + 1] * xi_complement < D['mib']:
                    continue
                # Definition 11: criterion 4
                D_max = reachability_plot[D['start']]
                if D_max * xi_complement >= reachability_plot[c_end + 1]:
                    # Find the first index from the left side which is almost
                    # at the same level as the end of the detected cluster.
                    while (reachability_plot[c_start + 1] >
                           reachability_plot[c_end + 1]
                           and c_start < D['end']):
                        c_start += 1
                elif reachability_plot[c_end + 1] * xi_complement >= D_max:
                    # Find the first index from the right side which is almost
                    # at the same level as the beginning of the detected
                    # cluster.
                    # Our implementation corrects a mistake in the original
                    # paper, i.e., in Definition 11 4c, r(x) < r(sD) should be
                    # r(x) > r(sD).
                    while (reachability_plot[c_end - 1] > D_max
                           and c_end > U_start):
                        c_end -= 1
                # predecessor correction
                if predecessor_correction:
                    c_start, c_end = _correct_predecessor(reachability_plot,
                                                          predecessor_plot,
                                                          ordering,
                                                          c_start,
                                                          c_end)
                if c_start is None:
                    continue
                # Definition 11: criterion 3.a
                if c_end - c_start + 1 < min_cluster_size:
                    continue
                # Definition 11: criterion 1
                if c_start > D['end']:
                    continue
                # Definition 11: criterion 2
                if c_end < U_start:
                    continue
                U_clusters.append((c_start, c_end))
            # add smaller clusters first.
            U_clusters.reverse()
            clusters.extend(U_clusters)
    return np.array(clusters)
def _extract_xi_labels(ordering, clusters):
"""Extracts the labels from the clusters returned by `_xi_cluster`.
We rely on the fact that clusters are stored
with the smaller clusters coming before the larger ones.
Parameters
----------
ordering : array-like of shape (n_samples,)
The ordering of points calculated by OPTICS
clusters : array-like of shape (n_clusters, 2)
List of clusters i.e. (start, end) tuples,
as returned by `_xi_cluster`.
Returns
-------
labels : ndarray of shape (n_samples,)
"""
labels = np.full(len(ordering), -1, dtype=int)
label = 0
for c in clusters:
if not np.any(labels[c[0]:(c[1] + 1)] != -1):
labels[c[0]:(c[1] + 1)] = label
label += 1
labels[ordering] = labels.copy()
return labels
# Copyright (C) 2005, 2006 Martin von Löwis
# Licensed to PSF under a Contributor Agreement.
# The bdist_wininst command proper
# based on bdist_wininst
"""
Implements the bdist_msi command.
"""
import sys, os
from distutils.core import Command
from distutils.dir_util import remove_tree
from distutils.sysconfig import get_python_version
from distutils.version import StrictVersion
from distutils.errors import DistutilsOptionError
from distutils.util import get_platform
from distutils import log
import msilib
from msilib import schema, sequence, text
from msilib import Directory, Feature, Dialog, add_data
class PyDialog(Dialog):
    """Dialog with a fixed layout.

    Controls sit at the top, a ruler line is drawn above a row of
    back/next/cancel pushbuttons, and a bitmap may optionally occupy the
    left-hand side.
    """
    def __init__(self, *args, **kw):
        """PyDialog(database, name, x, y, w, h, attributes, title, first,
        default, cancel, bitmap=true)"""
        Dialog.__init__(self, *args)
        ruler = self.h - 36
        bmwidth = 152 * ruler / 328
        #if kw.get("bitmap", True):
        #    self.bitmap("Bitmap", 0, 0, bmwidth, ruler, "PythonWin")
        self.line("BottomLine", 0, ruler, self.w, 0)
    def title(self, title):
        "Set the title text of the dialog at the top."
        # name, x, y, w, h, flags=Visible|Enabled|Transparent|NoPrefix,
        # text, in VerdanaBold10
        self.text("Title", 15, 10, 320, 60, 0x30003,
                  r"{\VerdanaBold10}%s" % title)
    def back(self, title, next, name="Back", active=1):
        """Add a back button with the given title and tab-next control,
        optionally initially disabled. Returns the button so that events
        can be associated with it."""
        flags = 3 if active else 1  # Visible|Enabled vs. Visible only
        return self.pushbutton(name, 180, self.h - 27, 56, 17, flags,
                               title, next)
    def cancel(self, title, next, name="Cancel", active=1):
        """Add a cancel button; same conventions as ``back``."""
        flags = 3 if active else 1  # Visible|Enabled vs. Visible only
        return self.pushbutton(name, 304, self.h - 27, 56, 17, flags,
                               title, next)
    def next(self, title, next, name="Next", active=1):
        """Add a Next button; same conventions as ``back``."""
        flags = 3 if active else 1  # Visible|Enabled vs. Visible only
        return self.pushbutton(name, 236, self.h - 27, 56, 17, flags,
                               title, next)
    def xbutton(self, name, title, next, xpos):
        """Add a button at fraction ``xpos`` of the dialog width, aligned
        vertically with the standard button row. Returns the button so that
        events can be associated with it."""
        return self.pushbutton(name, int(self.w * xpos - 28), self.h - 27,
                               56, 17, 3, title, next)
class bdist_msi(Command):
    # distutils command: package the distribution as a Microsoft Installer
    # (.msi) database, with one installable feature per target Python
    # version plus one "other location" feature (see other_version below).
    description = "create a Microsoft Installer (.msi) binary distribution"
    # (option-name, short-flag, help-text) triples, standard distutils form.
    user_options = [('bdist-dir=', None,
                     "temporary directory for creating the distribution"),
                    ('plat-name=', 'p',
                     "platform name to embed in generated filenames "
                     "(default: %s)" % get_platform()),
                    ('keep-temp', 'k',
                     "keep the pseudo-installation tree around after " +
                     "creating the distribution archive"),
                    ('target-version=', None,
                     "require a specific python version" +
                     " on the target system"),
                    ('no-target-compile', 'c',
                     "do not compile .py to .pyc on the target system"),
                    ('no-target-optimize', 'o',
                     "do not compile .py to .pyo (optimized)"
                     "on the target system"),
                    ('dist-dir=', 'd',
                     "directory to put final built distributions in"),
                    ('skip-build', None,
                     "skip rebuilding everything (for testing/debugging)"),
                    ('install-script=', None,
                     "basename of installation script to be run after"
                     "installation or before deinstallation"),
                    ('pre-install-script=', None,
                     "Fully qualified filename of a script to be run before "
                     "any files are installed. This script need not be in the "
                     "distribution"),
                   ]
    boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
                       'skip-build']
    # Every CPython version an installer may target; used to name the MSI
    # features and TARGETDIR properties.
    all_versions = ['2.0', '2.1', '2.2', '2.3', '2.4',
                    '2.5', '2.6', '2.7', '2.8', '2.9',
                    '3.0', '3.1', '3.2', '3.3', '3.4',
                    '3.5', '3.6', '3.7', '3.8', '3.9']
    # Sentinel "version" for the "Python from another location" feature.
    other_version = 'X'
def initialize_options(self):
self.bdist_dir = None
self.plat_name = None
self.keep_temp = 0
self.no_target_compile = 0
self.no_target_optimize = 0
self.target_version = None
self.dist_dir = None
self.skip_build = None
self.install_script = None
self.pre_install_script = None
self.versions = None
    def finalize_options(self):
        """Fill in defaults and validate the option combination."""
        # Inherit skip_build from the umbrella 'bdist' command when unset.
        self.set_undefined_options('bdist', ('skip_build', 'skip_build'))
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'msi')
        short_version = get_python_version()
        # Extension modules are version-specific, so default the target
        # version to the running interpreter's.
        if (not self.target_version) and self.distribution.has_ext_modules():
            self.target_version = short_version
        if self.target_version:
            self.versions = [self.target_version]
            if not self.skip_build and self.distribution.has_ext_modules()\
               and self.target_version != short_version:
                # We cannot (re)build extension modules for a different
                # Python version than the one running this command.
                raise DistutilsOptionError(
                      "target version can only be %s, or the '--skip-build'"
                      " option must be specified" % (short_version,))
        else:
            # Pure-Python package: offer every known version in the installer.
            self.versions = list(self.all_versions)
        self.set_undefined_options('bdist',
                                   ('dist_dir', 'dist_dir'),
                                   ('plat_name', 'plat_name'),
                                   )
        if self.pre_install_script:
            raise DistutilsOptionError(
                  "the pre-install-script feature is not yet implemented")
        if self.install_script:
            # The install script must be one of the distribution's scripts;
            # for/else raises when no match is found.
            for script in self.distribution.scripts:
                if self.install_script == os.path.basename(script):
                    break
            else:
                raise DistutilsOptionError(
                      "install_script '%s' not found in scripts"
                      % self.install_script)
        # Filled in later by add_files() once the file's MSI key is known.
        self.install_script_key = None
    def run(self):
        """Build the project, install it into bdist_dir, and package that
        tree into an .msi database in dist_dir."""
        if not self.skip_build:
            self.run_command('build')
        install = self.reinitialize_command('install', reinit_subcommands=1)
        install.prefix = self.bdist_dir
        install.skip_build = self.skip_build
        install.warn_dir = 0
        install_lib = self.reinitialize_command('install_lib')
        # we do not want to include pyc or pyo files
        install_lib.compile = 0
        install_lib.optimize = 0
        if self.distribution.has_ext_modules():
            # If we are building an installer for a Python version other
            # than the one we are currently running, then we need to ensure
            # our build_lib reflects the other Python version rather than ours.
            # Note that for target_version!=sys.version, we must have skipped the
            # build step, so there is no issue with enforcing the build of this
            # version.
            target_version = self.target_version
            if not target_version:
                assert self.skip_build, "Should have already checked this"
                target_version = '%d.%d' % sys.version_info[:2]
            plat_specifier = ".%s-%s" % (self.plat_name, target_version)
            build = self.get_finalized_command('build')
            build.build_lib = os.path.join(build.build_base,
                                           'lib' + plat_specifier)
        log.info("installing to %s", self.bdist_dir)
        install.ensure_finalized()
        # avoid warning of 'install_lib' about installing
        # into a directory not in sys.path
        sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
        install.run()
        del sys.path[0]
        self.mkpath(self.dist_dir)
        fullname = self.distribution.get_fullname()
        installer_name = self.get_installer_filename(fullname)
        installer_name = os.path.abspath(installer_name)
        # Overwrite any stale installer from a previous build.
        if os.path.exists(installer_name): os.unlink(installer_name)
        metadata = self.distribution.metadata
        author = metadata.author
        if not author:
            author = metadata.maintainer
        if not author:
            author = "UNKNOWN"
        version = metadata.get_version()
        # ProductVersion must be strictly numeric
        # XXX need to deal with prerelease versions
        sversion = "%d.%d.%d" % StrictVersion(version).version
        # Prefix ProductName with Python x.y, so that
        # it sorts together with the other Python packages
        # in Add-Remove-Programs (APR)
        fullname = self.distribution.get_fullname()
        if self.target_version:
            product_name = "Python %s %s" % (self.target_version, fullname)
        else:
            product_name = "Python %s" % (fullname)
        self.db = msilib.init_database(installer_name, schema,
                product_name, msilib.gen_uuid(),
                sversion, author)
        msilib.add_tables(self.db, sequence)
        props = [('DistVersion', version)]
        email = metadata.author_email or metadata.maintainer_email
        if email:
            props.append(("ARPCONTACT", email))
        if metadata.url:
            props.append(("ARPURLINFOABOUT", metadata.url))
        if props:
            add_data(self.db, 'Property', props)
        # Populate the MSI tables: Python discovery, files, scripts, UI.
        self.add_find_python()
        self.add_files()
        self.add_scripts()
        self.add_ui()
        self.db.Commit()
        # Register the produced artifact with distutils (e.g. for upload).
        if hasattr(self.distribution, 'dist_files'):
            tup = 'bdist_msi', self.target_version or 'any', fullname
            self.distribution.dist_files.append(tup)
        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)
    def add_files(self):
        """Populate the MSI Directory/Feature/File tables from bdist_dir.

        One feature is created per supported Python version plus one for an
        alternate location; each file's payload is stored once in the CAB
        and re-used for the other features via the DuplicateFile table.
        """
        db = self.db
        cab = msilib.CAB("distfiles")
        rootdir = os.path.abspath(self.bdist_dir)
        root = Directory(db, cab, None, rootdir, "TARGETDIR", "SourceDir")
        f = Feature(db, "Python", "Python", "Everything",
                    0, 1, directory="TARGETDIR")
        items = [(f, root, '')]
        # One (feature, directory) pair per target version; the sentinel
        # other_version feature is hidden one level deeper (level 2).
        for version in self.versions + [self.other_version]:
            target = "TARGETDIR" + version
            name = default = "Python" + version
            desc = "Everything"
            if version is self.other_version:
                title = "Python from another location"
                level = 2
            else:
                title = "Python %s from registry" % version
                level = 1
            f = Feature(db, name, title, desc, 1, level, directory=target)
            dir = Directory(db, cab, root, rootdir, target, default)
            items.append((f, dir, version))
        db.Commit()
        # Walk the installed tree once per feature; `seen` maps an absolute
        # path to its File-table key so later features reference the same
        # payload instead of storing it again.
        seen = {}
        for feature, dir, version in items:
            todo = [dir]
            while todo:
                dir = todo.pop()
                for file in os.listdir(dir.absolute):
                    afile = os.path.join(dir.absolute, file)
                    if os.path.isdir(afile):
                        short = "%s|%s" % (dir.make_short(file), file)
                        default = file + version
                        newdir = Directory(db, cab, dir, file, default, short)
                        todo.append(newdir)
                    else:
                        if not dir.component:
                            dir.start_component(dir.logical, feature, 0)
                        if afile not in seen:
                            key = seen[afile] = dir.add_file(file)
                            if file==self.install_script:
                                if self.install_script_key:
                                    raise DistutilsOptionError(
                                        "Multiple files with name %s" % file)
                                # Remember the [#filekey] reference for
                                # add_scripts().
                                self.install_script_key = '[#%s]' % key
                        else:
                            key = seen[afile]
                            add_data(self.db, "DuplicateFile",
                                [(key + version, dir.component, key, None, dir.logical)])
        db.Commit()
        cab.commit(db)
    def add_find_python(self):
        """Adds code to the installer to compute the location of Python.
        Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the
        registry for each version of Python.
        Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined,
        else from PYTHON.MACHINE.X.Y.
        Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe"""
        # Sequence numbers for the custom actions; four per version, and the
        # assert below keeps them under the next fixed sequence entry.
        start = 402
        for ver in self.versions:
            install_path = r"SOFTWARE\Python\PythonCore\%s\InstallPath" % ver
            machine_reg = "python.machine." + ver
            user_reg = "python.user." + ver
            machine_prop = "PYTHON.MACHINE." + ver
            user_prop = "PYTHON.USER." + ver
            machine_action = "PythonFromMachine" + ver
            user_action = "PythonFromUser" + ver
            exe_action = "PythonExe" + ver
            target_dir_prop = "TARGETDIR" + ver
            exe_prop = "PYTHON" + ver
            if msilib.Win64:
                # type: msidbLocatorTypeRawValue + msidbLocatorType64bit
                Type = 2+16
            else:
                Type = 2
            # Registry searches: root 2 = HKLM, root 1 = HKCU (per the
            # RegLocator table; see the Windows Installer docs).
            add_data(self.db, "RegLocator",
                    [(machine_reg, 2, install_path, None, Type),
                     (user_reg, 1, install_path, None, Type)])
            add_data(self.db, "AppSearch",
                    [(machine_prop, machine_reg),
                     (user_prop, user_reg)])
            # Custom action type 51+256: set-property actions — NOTE(review):
            # the +256 modifier's exact meaning should be checked against the
            # Windows Installer CustomAction documentation.
            add_data(self.db, "CustomAction",
                    [(machine_action, 51+256, target_dir_prop, "[" + machine_prop + "]"),
                     (user_action, 51+256, target_dir_prop, "[" + user_prop + "]"),
                     (exe_action, 51+256, exe_prop, "[" + target_dir_prop + "]\\python.exe"),
                    ])
            # Machine-wide value is applied first; the per-user action runs
            # after it and therefore wins when both are defined.
            add_data(self.db, "InstallExecuteSequence",
                    [(machine_action, machine_prop, start),
                     (user_action, user_prop, start + 1),
                     (exe_action, None, start + 2),
                    ])
            add_data(self.db, "InstallUISequence",
                    [(machine_action, machine_prop, start),
                     (user_action, user_prop, start + 1),
                     (exe_action, None, start + 2),
                    ])
            # Disable the version's feature when no installation was found.
            add_data(self.db, "Condition",
                    [("Python" + ver, 0, "NOT TARGETDIR" + ver)])
            start += 4
            assert start < 500
    def add_scripts(self):
        """Register the install script (and the not-yet-supported
        pre-install script) as custom actions in the MSI database."""
        if self.install_script:
            start = 6800
            # One custom action per target version, run only when that
            # version's feature is selected for local install.
            for ver in self.versions + [self.other_version]:
                install_action = "install_script." + ver
                exe_prop = "PYTHON" + ver
                add_data(self.db, "CustomAction",
                        [(install_action, 50, exe_prop, self.install_script_key)])
                add_data(self.db, "InstallExecuteSequence",
                        [(install_action, "&Python%s=3" % ver, start)])
                start += 1
        # XXX pre-install scripts are currently refused in finalize_options()
        # but if this feature is completed, it will also need to add
        # entries for each version as the above code does
        if self.pre_install_script:
            scriptfn = os.path.join(self.bdist_dir, "preinstall.bat")
            f = open(scriptfn, "w")
            # The batch file will be executed with [PYTHON], so that %1
            # is the path to the Python interpreter; %0 will be the path
            # of the batch file.
            # rem ="""
            # %1 %0
            # exit
            # """
            # <actual script>
            f.write('rem ="""\n%1 %0\nexit\n"""\n')
            f.write(open(self.pre_install_script).read())
            f.close()
            add_data(self.db, "Binary",
                [("PreInstall", msilib.Binary(scriptfn))
                 ])
            add_data(self.db, "CustomAction",
                [("PreInstall", 2, "PreInstall", None)
                 ])
            add_data(self.db, "InstallExecuteSequence",
                    [("PreInstall", "NOT Installed", 450)])
def add_ui(self):
db = self.db
x = y = 50
w = 370
h = 300
title = "[ProductName] Setup"
# see "Dialog Style Bits"
modal = 3 # visible | modal
modeless = 1 # visible
track_disk_space = 32
# UI customization properties
add_data(db, "Property",
# See "DefaultUIFont Property"
[("DefaultUIFont", "DlgFont8"),
# See "ErrorDialog Style Bit"
("ErrorDialog", "ErrorDlg"),
("Progress1", "Install"), # modified in maintenance type dlg
("Progress2", "installs"),
("MaintenanceForm_Action", "Repair"),
# possible values: ALL, JUSTME
("WhichUsers", "ALL")
])
# Fonts, see "TextStyle Table"
add_data(db, "TextStyle",
[("DlgFont8", "Tahoma", 9, None, 0),
("DlgFontBold8", "Tahoma", 8, None, 1), #bold
("VerdanaBold10", "Verdana", 10, None, 1),
("VerdanaRed9", "Verdana", 9, 255, 0),
])
# UI Sequences, see "InstallUISequence Table", "Using a Sequence Table"
# Numbers indicate sequence; see sequence.py for how these action integrate
add_data(db, "InstallUISequence",
[("PrepareDlg", "Not Privileged or Windows9x or Installed", 140),
("WhichUsersDlg", "Privileged and not Windows9x and not Installed", 141),
# In the user interface, assume all-users installation if privileged.
("SelectFeaturesDlg", "Not Installed", 1230),
# XXX no support for resume installations yet
#("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240),
("MaintenanceTypeDlg", "Installed AND NOT RESUME AND NOT Preselected", 1250),
("ProgressDlg", None, 1280)])
add_data(db, 'ActionText', text.ActionText)
add_data(db, 'UIText', text.UIText)
#####################################################################
# Standard dialogs: FatalError, UserExit, ExitDialog
fatal=PyDialog(db, "FatalError", x, y, w, h, modal, title,
"Finish", "Finish", "Finish")
fatal.title("[ProductName] Installer ended prematurely")
fatal.back("< Back", "Finish", active = 0)
fatal.cancel("Cancel", "Back", active = 0)
fatal.text("Description1", 15, 70, 320, 80, 0x30003,
"[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.")
fatal.text("Description2", 15, 155, 320, 20, 0x30003,
"Click the Finish button to exit the Installer.")
c=fatal.next("Finish", "Cancel", name="Finish")
c.event("EndDialog", "Exit")
user_exit=PyDialog(db, "UserExit", x, y, w, h, modal, title,
"Finish", "Finish", "Finish")
user_exit.title("[ProductName] Installer was interrupted")
user_exit.back("< Back", "Finish", active = 0)
user_exit.cancel("Cancel", "Back", active = 0)
user_exit.text("Description1", 15, 70, 320, 80, 0x30003,
"[ProductName] setup was interrupted. Your system has not been modified. "
"To install this program at a later time, please run the installation again.")
user_exit.text("Description2", 15, 155, 320, 20, 0x30003,
"Click the Finish button to exit the Installer.")
c = user_exit.next("Finish", "Cancel", name="Finish")
c.event("EndDialog", "Exit")
exit_dialog = PyDialog(db, "ExitDialog", x, y, w, h, modal, title,
"Finish", "Finish", "Finish")
exit_dialog.title("Completing the [ProductName] Installer")
exit_dialog.back("< Back", "Finish", active = 0)
exit_dialog.cancel("Cancel", "Back", active = 0)
exit_dialog.text("Description", 15, 235, 320, 20, 0x30003,
"Click the Finish button to exit the Installer.")
c = exit_dialog.next("Finish", "Cancel", name="Finish")
c.event("EndDialog", "Return")
#####################################################################
# Required dialog: FilesInUse, ErrorDlg
inuse = PyDialog(db, "FilesInUse",
x, y, w, h,
19, # KeepModeless|Modal|Visible
title,
"Retry", "Retry", "Retry", bitmap=False)
inuse.text("Title", 15, 6, 200, 15, 0x30003,
r"{\DlgFontBold8}Files in Use")
inuse.text("Description", 20, 23, 280, 20, 0x30003,
"Some files that need to be updated are currently in use.")
inuse.text("Text", 20, 55, 330, 50, 3,
"The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.")
inuse.control("List", "ListBox", 20, 107, 330, 130, 7, "FileInUseProcess",
None, None, None)
c=inuse.back("Exit", "Ignore", name="Exit")
c.event("EndDialog", "Exit")
c=inuse.next("Ignore", "Retry", name="Ignore")
c.event("EndDialog", "Ignore")
c=inuse.cancel("Retry", "Exit", name="Retry")
c.event("EndDialog","Retry")
# See "Error Dialog". See "ICE20" for the required names of the controls.
error = Dialog(db, "ErrorDlg",
50, 10, 330, 101,
65543, # Error|Minimize|Modal|Visible
title,
"ErrorText", None, None)
error.text("ErrorText", 50,9,280,48,3, "")
#error.control("ErrorIcon", "Icon", 15, 9, 24, 24, 5242881, None, "py.ico", None, None)
error.pushbutton("N",120,72,81,21,3,"No",None).event("EndDialog","ErrorNo")
error.pushbutton("Y",240,72,81,21,3,"Yes",None).event("EndDialog","ErrorYes")
error.pushbutton("A",0,72,81,21,3,"Abort",None).event("EndDialog","ErrorAbort")
error.pushbutton("C",42,72,81,21,3,"Cancel",None).event("EndDialog","ErrorCancel")
error.pushbutton("I",81,72,81,21,3,"Ignore",None).event("EndDialog","ErrorIgnore")
error.pushbutton("O",159,72,81,21,3,"Ok",None).event("EndDialog","ErrorOk")
error.pushbutton("R",198,72,81,21,3,"Retry",None).event("EndDialog","ErrorRetry")
#####################################################################
# Global "Query Cancel" dialog
cancel = Dialog(db, "CancelDlg", 50, 10, 260, 85, 3, title,
"No", "No", "No")
cancel.text("Text", 48, 15, 194, 30, 3,
"Are you sure you want to cancel [ProductName] installation?")
#cancel.control("Icon", "Icon", 15, 15, 24, 24, 5242881, None,
# "py.ico", None, None)
c=cancel.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No")
c.event("EndDialog", "Exit")
c=cancel.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes")
c.event("EndDialog", "Return")
#####################################################################
# Global "Wait for costing" dialog
costing = Dialog(db, "WaitForCostingDlg", 50, 10, 260, 85, modal, title,
"Return", "Return", "Return")
costing.text("Text", 48, 15, 194, 30, 3,
"Please wait while the installer finishes determining your disk space requirements.")
c = costing.pushbutton("Return", 102, 57, 56, 17, 3, "Return", None)
c.event("EndDialog", "Exit")
#####################################################################
# Preparation dialog: no user input except cancellation
prep = PyDialog(db, "PrepareDlg", x, y, w, h, modeless, title,
"Cancel", "Cancel", "Cancel")
prep.text("Description", 15, 70, 320, 40, 0x30003,
"Please wait while the Installer prepares to guide you through the installation.")
prep.title("Welcome to the [ProductName] Installer")
c=prep.text("ActionText", 15, 110, 320, 20, 0x30003, "Pondering...")
c.mapping("ActionText", "Text")
c=prep.text("ActionData", 15, 135, 320, 30, 0x30003, None)
c.mapping("ActionData", "Text")
prep.back("Back", None, active=0)
prep.next("Next", None, active=0)
c=prep.cancel("Cancel", None)
c.event("SpawnDialog", "CancelDlg")
#####################################################################
# Feature (Python directory) selection
seldlg = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal, title,
"Next", "Next", "Cancel")
seldlg.title("Select Python Installations")
seldlg.text("Hint", 15, 30, 300, 20, 3,
"Select the Python locations where %s should be installed."
% self.distribution.get_fullname())
seldlg.back("< Back", None, active=0)
c = seldlg.next("Next >", "Cancel")
order = 1
c.event("[TARGETDIR]", "[SourceDir]", ordering=order)
for version in self.versions + [self.other_version]:
order += 1
c.event("[TARGETDIR]", "[TARGETDIR%s]" % version,
"FEATURE_SELECTED AND &Python%s=3" % version,
ordering=order)
c.event("SpawnWaitDialog", "WaitForCostingDlg", ordering=order + 1)
c.event("EndDialog", "Return", ordering=order + 2)
c = seldlg.cancel("Cancel", "Features")
c.event("SpawnDialog", "CancelDlg")
c = seldlg.control("Features", "SelectionTree", 15, 60, 300, 120, 3,
"FEATURE", None, "PathEdit", None)
c.event("[FEATURE_SELECTED]", "1")
ver = self.other_version
install_other_cond = "FEATURE_SELECTED AND &Python%s=3" % ver
dont_install_other_cond = "FEATURE_SELECTED AND &Python%s<>3" % ver
c = seldlg.text("Other", 15, 200, 300, 15, 3,
"Provide an alternate Python location")
c.condition("Enable", install_other_cond)
c.condition("Show", install_other_cond)
c.condition("Disable", dont_install_other_cond)
c.condition("Hide", dont_install_other_cond)
c = seldlg.control("PathEdit", "PathEdit", 15, 215, 300, 16, 1,
"TARGETDIR" + ver, None, "Next", None)
c.condition("Enable", install_other_cond)
c.condition("Show", install_other_cond)
c.condition("Disable", dont_install_other_cond)
c.condition("Hide", dont_install_other_cond)
#####################################################################
# Disk cost
cost = PyDialog(db, "DiskCostDlg", x, y, w, h, modal, title,
"OK", "OK", "OK", bitmap=False)
cost.text("Title", 15, 6, 200, 15, 0x30003,
r"{\DlgFontBold8}Disk Space Requirements")
cost.text("Description", 20, 20, 280, 20, 0x30003,
"The disk space required for the installation of the selected features.")
cost.text("Text", 20, 53, 330, 60, 3,
"The highlighted volumes (if any) do not have enough disk space "
"available for the currently selected features. You can either "
"remove some files from the highlighted volumes, or choose to "
"install less features onto local drive(s), or select different "
"destination drive(s).")
cost.control("VolumeList", "VolumeCostList", 20, 100, 330, 150, 393223,
None, "{120}{70}{70}{70}{70}", None, None)
cost.xbutton("OK", "Ok", None, 0.5).event("EndDialog", "Return")
#####################################################################
# WhichUsers Dialog. Only available on NT, and for privileged users.
# This must be run before FindRelatedProducts, because that will
# take into account whether the previous installation was per-user
# or per-machine. We currently don't support going back to this
# dialog after "Next" was selected; to support this, we would need to
# find how to reset the ALLUSERS property, and how to re-run
# FindRelatedProducts.
# On Windows9x, the ALLUSERS property is ignored on the command line
# and in the Property table, but installer fails according to the documentation
# if a dialog attempts to set ALLUSERS.
whichusers = PyDialog(db, "WhichUsersDlg", x, y, w, h, modal, title,
"AdminInstall", "Next", "Cancel")
whichusers.title("Select whether to install [ProductName] for all users of this computer.")
# A radio group with two options: allusers, justme
g = whichusers.radiogroup("AdminInstall", 15, 60, 260, 50, 3,
"WhichUsers", "", "Next")
g.add("ALL", 0, 5, 150, 20, "Install for all users")
g.add("JUSTME", 0, 25, 150, 20, "Install just for me")
whichusers.back("Back", None, active=0)
c = whichusers.next("Next >", "Cancel")
c.event("[ALLUSERS]", "1", 'WhichUsers="ALL"', 1)
c.event("EndDialog", "Return", ordering = 2)
c = whichusers.cancel("Cancel", "AdminInstall")
c.event("SpawnDialog", "CancelDlg")
#####################################################################
# Installation Progress dialog (modeless)
progress = PyDialog(db, "ProgressDlg", x, y, w, h, modeless, title,
"Cancel", "Cancel", "Cancel", bitmap=False)
progress.text("Title", 20, 15, 200, 15, 0x30003,
r"{\DlgFontBold8}[Progress1] [ProductName]")
progress.text("Text", 35, 65, 300, 30, 3,
"Please wait while the Installer [Progress2] [ProductName]. "
"This may take several minutes.")
progress.text("StatusLabel", 35, 100, 35, 20, 3, "Status:")
c=progress.text("ActionText", 70, 100, w-70, 20, 3, "Pondering...")
c.mapping("ActionText", "Text")
#c=progress.text("ActionData", 35, 140, 300, 20, 3, None)
#c.mapping("ActionData", "Text")
c=progress.control("ProgressBar", "ProgressBar", 35, 120, 300, 10, 65537,
None, "Progress done", None, None)
c.mapping("SetProgress", "Progress")
progress.back("< Back", "Next", active=False)
progress.next("Next >", "Cancel", active=False)
progress.cancel("Cancel", "Back").event("SpawnDialog", "CancelDlg")
###################################################################
# Maintenance type: repair/uninstall
maint = PyDialog(db, "MaintenanceTypeDlg", x, y, w, h, modal, title,
"Next", "Next", "Cancel")
maint.title("Welcome to the [ProductName] Setup Wizard")
maint.text("BodyText", 15, 63, 330, 42, 3,
"Select whether you want to repair or remove [ProductName].")
g=maint.radiogroup("RepairRadioGroup", 15, 108, 330, 60, 3,
"MaintenanceForm_Action", "", "Next")
#g.add("Change", 0, 0, 200, 17, "&Change [ProductName]")
g.add("Repair", 0, 18, 200, 17, "&Repair [ProductName]")
g.add("Remove", 0, 36, 200, 17, "Re&move [ProductName]")
maint.back("< Back", None, active=False)
c=maint.next("Finish", "Cancel")
# Change installation: Change progress dialog to "Change", then ask
# for feature selection
#c.event("[Progress1]", "Change", 'MaintenanceForm_Action="Change"', 1)
#c.event("[Progress2]", "changes", 'MaintenanceForm_Action="Change"', 2)
# Reinstall: Change progress dialog to "Repair", then invoke reinstall
# Also set list of reinstalled features to "ALL"
c.event("[REINSTALL]", "ALL", 'MaintenanceForm_Action="Repair"', 5)
c.event("[Progress1]", "Repairing", 'MaintenanceForm_Action="Repair"', 6)
c.event("[Progress2]", "repairs", 'MaintenanceForm_Action="Repair"', 7)
c.event("Reinstall", "ALL", 'MaintenanceForm_Action="Repair"', 8)
# Uninstall: Change progress to "Remove", then invoke uninstall
# Also set list of removed features to "ALL"
c.event("[REMOVE]", "ALL", 'MaintenanceForm_Action="Remove"', 11)
c.event("[Progress1]", "Removing", 'MaintenanceForm_Action="Remove"', 12)
c.event("[Progress2]", "removes", 'MaintenanceForm_Action="Remove"', 13)
c.event("Remove", "ALL", 'MaintenanceForm_Action="Remove"', 14)
# Close dialog when maintenance action scheduled
c.event("EndDialog", "Return", 'MaintenanceForm_Action<>"Change"', 20)
#c.event("NewDialog", "SelectFeaturesDlg", 'MaintenanceForm_Action="Change"', 21)
maint.cancel("Cancel", "RepairRadioGroup").event("SpawnDialog", "CancelDlg")
def get_installer_filename(self, fullname):
# Factored out to allow overriding in subclasses
if self.target_version:
base_name = "%s.%s-py%s.msi" % (fullname, self.plat_name,
self.target_version)
else:
base_name = "%s.%s.msi" % (fullname, self.plat_name)
installer_name = os.path.join(self.dist_dir, base_name)
return installer_name
| apache-2.0 |
howthebodyworks/pelican-plugins | optimize_images/optimize_images.py | 8 | 1807 | # -*- coding: utf-8 -*-
"""
Optimized images (jpg and png)
Assumes that jpegtran and optipng are installed on path.
http://jpegclub.org/jpegtran/
http://optipng.sourceforge.net/
Copyright (c) 2012 Irfan Ahmad (http://i.com.pk)
"""
import logging
import os
from subprocess import call
from pelican import signals
logger = logging.getLogger(__name__)

# Display command output on DEBUG and TRACE
SHOW_OUTPUT = logger.getEffectiveLevel() <= logging.DEBUG

# Maps image extension -> (command template, silent flag, verbose flag).
# The template is filled in via str.format() with `filename` and `flags`;
# the placeholders below were corrupted to a literal "(unknown)", which
# meant the file path was never substituted into the command.
COMMANDS = {
    # '.ext': ('command {flags} {filename}', 'silent_flag', 'verbose_flag')
    '.svg': ('svgo {flags} --input="{filename}" --output="{filename}"', '--quiet', ''),
    '.jpg': ('jpegtran {flags} -copy none -optimize -outfile "{filename}" "{filename}"', '', '-v'),
    '.png': ('optipng {flags} "{filename}"', '--quiet', ''),
}
def optimize_images(pelican):
    """Optimize every known image type under the generated output tree.

    :param pelican: The Pelican instance; its ``OUTPUT_PATH`` setting is
        the root of the walk.
    """
    for dirpath, _, filenames in os.walk(pelican.settings['OUTPUT_PATH']):
        for name in filenames:
            # Membership test on the dict itself -- `.keys()` is redundant.
            if os.path.splitext(name)[1] in COMMANDS:
                optimize(dirpath, name)
def optimize(dirpath, filename):
    """Optimize a single image in place with the external tool registered
    for its extension in COMMANDS.

    The caller is expected to have checked that the extension is present
    in COMMANDS; an unknown extension raises KeyError.

    :param dirpath: Directory containing the file to be optimized
    :param filename: Name of the file to be optimized
    """
    filepath = os.path.join(dirpath, filename)
    logger.info('optimizing %s', filepath)
    ext = os.path.splitext(filename)[1]
    command, silent, verbose = COMMANDS[ext]
    flags = verbose if SHOW_OUTPUT else silent
    command = command.format(filename=filepath, flags=flags)
    # NOTE(review): shell=True with an interpolated path is vulnerable to
    # shell metacharacters/quotes in file names; consider passing an
    # argument list with shell=False instead.
    call(command, shell=True)
def register():
    # Pelican plugin entry point: run the optimizer once the whole site
    # has been written to OUTPUT_PATH.
    signals.finalized.connect(optimize_images)
| agpl-3.0 |
nicoboss/Floatmotion | pygame/surfarray.py | 1 | 14109 | ## pygame - Python Game Library
## Copyright (C) 2007 Marcus von Appen
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Library General Public
## License as published by the Free Software Foundation; either
## version 2 of the License, or (at your option) any later version.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Library General Public License for more details.
##
## You should have received a copy of the GNU Library General Public
## License along with this library; if not, write to the Free
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##
## Marcus von Appen
## mva@sysfault.org
"""pygame module for accessing surface pixel data using array interfaces
Functions to convert pixel data between pygame Surfaces and arrays. This
module will only be functional when pygame can use the external Numpy or
Numeric packages.
Every pixel is stored as a single integer value to represent the red,
green, and blue colors. The 8bit images use a value that looks into a
colormap. Pixels with higher depth use a bit packing process to place
three or four values into a single number.
The arrays are indexed by the X axis first, followed by the Y
axis. Arrays that treat the pixels as a single integer are referred to
as 2D arrays. This module can also separate the red, green, and blue
color values into separate indices. These types of arrays are referred
to as 3D arrays, and the last index is 0 for red, 1 for green, and 2 for
blue.
Supported array types are
numpy
numeric (deprecated; will be removed in Pygame 1.9.3.)
The default will be numpy, if installed. Otherwise, Numeric will be set
as default if installed, and a deprecation warning will be issued. If
neither numpy nor Numeric are installed, the module will raise an
ImportError.
The array type to use can be changed at runtime using the use_arraytype()
method, which requires one of the above types as string.
Note: numpy and Numeric are not completely compatible. Certain array
manipulations, which work for one type, might behave differently or even
completely break for the other.
Additionally, in contrast to Numeric, numpy does use unsigned 16-bit
integers. Images with 16-bit data will be treated as unsigned
integers. Numeric instead uses signed integers for the representation,
which is important to keep in mind, if you use the module's functions
and wonder about the values.
"""
import pygame
import imp
import warnings
# Global array type setting. See use_arraytype().
__arraytype = None
# Try to import the necessary modules.
try:
import pygame._numpysurfarray as numpysf
__hasnumpy = True
__arraytype = "numpy"
except ImportError:
__hasnumpy = False
try:
if not __hasnumpy:
import pygame._numericsurfarray as numericsf
__arraytype = "numeric"
warnings.warn(warnings.DeprecationWarning(
"Numeric support to be removed in Pygame 1.9.3"))
else:
f, p, d = imp.find_module('Numeric')
f.close()
__hasnumeric = True
except ImportError:
__hasnumeric = False
if not __hasnumpy and not __hasnumeric:
raise ImportError("no module named numpy or Numeric found")
from pygame.pixelcopy import array_to_surface
def blit_array(surface, array):
    """pygame.surfarray.blit_array(Surface, array): return None

    Copy array values directly onto an existing Surface.

    The array must match the Surface dimensions and replaces every pixel
    value; this is faster than converting the array to a new Surface and
    blitting.  The Surface is locked while the values are copied.
    """
    # Delegate to the C-accelerated implementation in pygame.pixelcopy.
    return array_to_surface(surface, array)
def array2d(surface):
    """pygame.surfarray.array2d (Surface): return array

    Copy the surface pixels into a new 2d integer array.

    Works for any pixel format; the surface bit depth controls the
    integer size.  The Surface is temporarily locked during the copy.
    """
    if __arraytype == "numpy":
        return numpysf.array2d(surface)
    if __arraytype == "numeric":
        return numericsf.array2d(surface)
    raise NotImplementedError("surface arrays are not supported")
def pixels2d(surface):
    """pygame.surfarray.pixels2d (Surface): return array

    Return a 2d array that directly references the surface pixels.

    Changes to the array change the Surface; no data is copied.  24-bit
    Surfaces cannot be referenced.  The Surface stays locked for the
    lifetime of the returned array.
    """
    if __arraytype == "numpy":
        return numpysf.pixels2d(surface)
    if __arraytype == "numeric":
        return numericsf.pixels2d(surface)
    raise NotImplementedError("surface arrays are not supported")
def array3d(surface):
    """pygame.surfarray.array3d (Surface): return array

    Copy the surface pixels into a new 3d array (last axis is R, G, B).

    Works for any pixel format; the Surface is temporarily locked during
    the copy.
    """
    if __arraytype == "numpy":
        return numpysf.array3d(surface)
    if __arraytype == "numeric":
        return numericsf.array3d(surface)
    raise NotImplementedError("surface arrays are not supported")
def pixels3d(surface):
    """pygame.surfarray.pixels3d (Surface): return array

    Return a 3d array that directly references the surface pixels.

    Changes to the array change the Surface; no data is copied.  Only
    24-bit and 32-bit Surfaces can be referenced.  The Surface stays
    locked for the lifetime of the returned array.
    """
    if __arraytype == "numpy":
        return numpysf.pixels3d(surface)
    if __arraytype == "numeric":
        return numericsf.pixels3d(surface)
    raise NotImplementedError("surface arrays are not supported")
def array_alpha(surface):
    """pygame.surfarray.array_alpha (Surface): return array

    Copy the per-pixel alpha values into a new 2d array.

    Works for any Surface format; Surfaces without a pixel alpha yield a
    fully opaque array.  The Surface is temporarily locked during the
    copy.
    """
    if __arraytype == "numpy":
        return numpysf.array_alpha(surface)
    if __arraytype == "numeric":
        return numericsf.array_alpha(surface)
    raise NotImplementedError("surface arrays are not supported")
def pixels_alpha(surface):
    """pygame.surfarray.pixels_alpha (Surface): return array

    Return a 2d array that directly references the alpha channel.

    Changes to the array change the Surface; no data is copied.  Only
    32-bit Surfaces with a per-pixel alpha can be referenced, and the
    Surface stays locked for the lifetime of the returned array.
    """
    if __arraytype == "numpy":
        return numpysf.pixels_alpha(surface)
    if __arraytype == "numeric":
        return numericsf.pixels_alpha(surface)
    raise NotImplementedError("surface arrays are not supported")
def pixels_red(surface):
    """pygame.surfarray.pixels_red (Surface): return array

    Return a 2d array that directly references the red channel of a
    24-bit or 32-bit Surface.  Changes to the array change the Surface,
    which stays locked for the lifetime of the array.
    """
    # Per-channel views are only implemented by the numpy backend.
    if __arraytype != "numpy":
        raise NotImplementedError("surface arrays are not supported")
    return numpysf.pixels_red(surface)
def pixels_green(surface):
    """pygame.surfarray.pixels_green (Surface): return array

    Return a 2d array that directly references the green channel of a
    24-bit or 32-bit Surface.  Changes to the array change the Surface,
    which stays locked for the lifetime of the array.
    """
    # Per-channel views are only implemented by the numpy backend.
    if __arraytype != "numpy":
        raise NotImplementedError("surface arrays are not supported")
    return numpysf.pixels_green(surface)
def pixels_blue(surface):
    """pygame.surfarray.pixels_blue (Surface): return array

    Return a 2d array that directly references the blue channel of a
    24-bit or 32-bit Surface.  Changes to the array change the Surface,
    which stays locked for the lifetime of the array.
    """
    # Per-channel views are only implemented by the numpy backend.
    if __arraytype != "numpy":
        raise NotImplementedError("surface arrays are not supported")
    return numpysf.pixels_blue(surface)
def array_colorkey(surface):
    """pygame.surfarray.array_colorkey (Surface): return array

    Copy colorkey transparency into a new 2d array.

    Each entry is fully transparent where the pixel matches the
    Surface's colorkey and fully opaque otherwise; a Surface without a
    colorkey yields a solid opaque array.  Works for any format; the
    Surface is temporarily locked during the copy.
    """
    if __arraytype == "numpy":
        return numpysf.array_colorkey(surface)
    if __arraytype == "numeric":
        return numericsf.array_colorkey(surface)
    raise NotImplementedError("surface arrays are not supported")
def make_surface(array):
    """pygame.surfarray.make_surface (array): return Surface

    Build a new Surface whose format best matches the given 2d or 3d
    integer array, copying the array data into it.
    """
    if __arraytype == "numpy":
        return numpysf.make_surface(array)
    if __arraytype == "numeric":
        return numericsf.make_surface(array)
    raise NotImplementedError("surface arrays are not supported")
def map_array(surface, array):
    """pygame.surfarray.map_array (Surface, array3d): return array2d

    Convert a 3d (per-channel) array into a 2d (packed pixel) array,
    using the given Surface's pixel format.  Palette formats are not
    supported.
    """
    if __arraytype == "numpy":
        return numpysf.map_array(surface, array)
    if __arraytype == "numeric":
        return numericsf.map_array(surface, array)
    raise NotImplementedError("surface arrays are not supported")
def use_arraytype(arraytype):
    """pygame.surfarray.use_arraytype (arraytype): return None

    Select the array backend used by this module's functions.

    Supported values (case-insensitive):

        numpy
        numeric (deprecated; to be removed in Pygame 1.9.3)

    Raises ValueError when the requested backend is unknown or not
    installed.
    """
    global __arraytype
    global numericsf

    requested = arraytype.lower()
    if requested == "numpy":
        if not __hasnumpy:
            raise ValueError("numpy arrays are not available")
        __arraytype = requested
    elif requested == "numeric":
        if not __hasnumeric:
            raise ValueError("Numeric arrays are not available")
        # Bind the Numeric backend module at module scope on demand.
        import pygame._numericsurfarray as numericsf
        __arraytype = requested
    else:
        raise ValueError("invalid array type")
def get_arraytype ():
    """pygame.surfarray.get_arraytype (): return str

    Gets the currently active array type.

    Returns the currently active array type. This will be a value of the
    get_arraytypes() tuple and indicates which type of array module is
    used for the array creation.
    """
    # Module global chosen at import time and changed via use_arraytype().
    return __arraytype
def get_arraytypes():
    """pygame.surfarray.get_arraytypes (): return tuple

    Return the installed array backends as a tuple of strings suitable
    for use_arraytype(), or None when neither numpy nor Numeric is
    available.
    """
    available = []
    # Keep the historical ordering: "numeric" first, then "numpy".
    if __hasnumeric:
        available.append("numeric")
    if __hasnumpy:
        available.append("numpy")
    return tuple(available) if available else None
| agpl-3.0 |
Star2Billing/newfies-dialer | newfies/dialer_gateway/models.py | 4 | 5851 | #
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2015 Star2Billing S.L.
#
# The primary maintainer of this project is
# Arezqui Belaid <info@star2billing.com>
#
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django_lets_go.intermediate_model_base_class import Model
from dialer_gateway.constants import GATEWAY_STATUS
class Gateway(Model):
    """This defines the trunk to deliver the Voip Calls.

    Each of the Gateways are routes that support different protocols and
    sets of rules to alter the dialed number.

    **Attributes**:

        * ``name`` - Gateway name.
        * ``description`` - Description about the Gateway.
        * ``addprefix`` - Add prefix.
        * ``removeprefix`` - Remove prefix.
        * ``gateways`` - "user/,user/", # Gateway string to try dialing \
        separated by comma. First in the list will be tried first
        * ``gateway_codecs`` - "'PCMA,PCMU','PCMA,PCMU'", \
        # Codec string as needed by FS for each gateway separated by comma
        * ``gateway_timeouts`` - "10,10", \
        # Seconds to timeout in string for each gateway separated by comma
        * ``gateway_retries`` - "2,1", \
        # Retry String for Gateways separated by comma, \
        on how many times each gateway should be retried
        * ``originate_dial_string`` - originate_dial_string
        * ``secondused`` -
        * ``failover`` -
        * ``addparameter`` -
        * ``count_call`` -
        * ``count_in_use`` -
        * ``maximum_call`` -
        * ``status`` - Gateway status

    **Name of DB table**: dialer_gateway
    """
    name = models.CharField(unique=True, max_length=255,
                            verbose_name=_('name'), help_text=_("gateway name"))
    status = models.IntegerField(choices=list(GATEWAY_STATUS),
                                 default=GATEWAY_STATUS.ACTIVE,
                                 verbose_name=_("gateway status"), blank=True, null=True)
    description = models.TextField(verbose_name=_('description'), blank=True,
                                   help_text=_("gateway provider notes"))
    addprefix = models.CharField(verbose_name=_('add prefix'),
                                 max_length=60, blank=True)
    removeprefix = models.CharField(verbose_name=_('remove prefix'),
                                    max_length=60, blank=True)
    gateways = models.CharField(max_length=500, verbose_name=_("gateways"),
                                help_text=_('Gateway string to dial, ie "sofia/gateway/myprovider/"'))
    gateway_codecs = models.CharField(max_length=500, blank=True,
                                      verbose_name=_("gateway codecs"),
                                      help_text=_('codec string as needed by FS, ie "PCMA,PCMU"'))
    gateway_timeouts = models.CharField(max_length=500, blank=True,
                                        verbose_name=_("gateway timeouts"),
                                        help_text=_('timeout in seconds, ie "10"'))
    gateway_retries = models.CharField(max_length=500, blank=True, verbose_name=_("gateway retries"),
                                       help_text=_('"2,1", # retry String for Gateways separated by comma, on how many times each gateway should be retried'))
    originate_dial_string = models.CharField(max_length=500, blank=True, verbose_name=_("originate dial string"),
                                             help_text=_('add channels variables : http://wiki.freeswitch.org/wiki/Channel_Variables, ie: bridge_early_media=true,hangup_after_bridge=true'))
    secondused = models.IntegerField(null=True, blank=True,
                                     verbose_name=_("second used"))
    created_date = models.DateTimeField(auto_now_add=True,
                                        verbose_name=_('date'))
    updated_date = models.DateTimeField(auto_now=True)
    # Self-referential FK so a gateway can name a backup gateway.
    # NOTE(review): related_name contains a space, which is not a valid
    # Python identifier for the reverse accessor -- confirm against the
    # Django version in use before changing.
    failover = models.ForeignKey('self', null=True, blank=True,
                                 related_name="Failover Gateway", help_text=_("select gateway"))
    addparameter = models.CharField(verbose_name=_('add parameter'),
                                    max_length=360, blank=True)
    count_call = models.IntegerField(null=True, blank=True,
                                     verbose_name=_("call count"))
    count_in_use = models.IntegerField(null=True, blank=True,
                                       verbose_name=_("count in use"))
    maximum_call = models.IntegerField(verbose_name=_('max concurrent calls'),
                                       null=True, blank=True)
    # gatewaygroup = models.ManyToManyField(GatewayGroup)

    class Meta:
        db_table = u'dialer_gateway'
        verbose_name = _("dialer gateway")
        verbose_name_plural = _("dialer gateways")

    def set_name(self, name):
        # Simple mutator kept for API compatibility; callers could assign
        # the attribute directly.
        self.name = name

    def __unicode__(self):
        # Python 2 display name (used by the Django admin).
        return u"%s" % self.name

    # def prepare_phonenumber(self):
    #     return True
"""
class GatewayGroup(Model):
name = models.CharField(max_length=90)
description = models.TextField(null=True, blank=True,
help_text=_("Short description \
about the Gateway Group"))
created_date = models.DateTimeField(auto_now_add=True, verbose_name='Date')
updated_date = models.DateTimeField(auto_now=True)
class Meta:
db_table = u'dialer_gateway_group'
verbose_name = _("Dialer Gateway Group")
verbose_name_plural = _("Dialer Gateway Groups")
def __unicode__(self):
return u"%s" % self.name
"""
| mpl-2.0 |
cgstudiomap/cgstudiomap | main/eggs/simplejson-3.8.1-py2.7-linux-x86_64.egg/simplejson/tests/test_recursion.py | 149 | 1679 | from unittest import TestCase
import simplejson as json
class JSONTestObject:
    # Sentinel: the tests pass the class object itself to the encoder and
    # teach RecursiveJSONEncoder how to serialize it.
    pass
class RecursiveJSONEncoder(json.JSONEncoder):
    """Encoder whose default() can be switched into infinite recursion.

    With ``recurse`` False the JSONTestObject sentinel is serialized as a
    plain string; with ``recurse`` True it returns a list containing the
    sentinel itself, forcing the encoder to recurse endlessly.
    """
    recurse = False

    def default(self, o):
        if o is JSONTestObject:
            if self.recurse:
                return [JSONTestObject]
            return 'JSONTestObject'
        # Bug fix: the base-class hook is an ordinary method, so ``self``
        # must be passed explicitly; the original ``default(o)`` call
        # raised TypeError (missing argument) instead of the intended
        # "is not JSON serializable" error from JSONEncoder.default.
        return json.JSONEncoder.default(self, o)
class TestRecursion(TestCase):
    """Verify that the encoder detects circular structures and that its
    recursion markers are cleared after a successful dump."""

    def test_listrecursion(self):
        # Direct cycle: a list containing itself must be rejected.
        x = []
        x.append(x)
        try:
            json.dumps(x)
        except ValueError:
            pass
        else:
            self.fail("didn't raise ValueError on list recursion")
        # Indirect cycle: x -> y -> x.
        x = []
        y = [x]
        x.append(y)
        try:
            json.dumps(x)
        except ValueError:
            pass
        else:
            self.fail("didn't raise ValueError on alternating list recursion")
        # Shared (diamond) references are NOT a cycle and must encode.
        y = []
        x = [y, y]
        # ensure that the marker is cleared
        json.dumps(x)

    def test_dictrecursion(self):
        # A dict referencing itself must be rejected.
        x = {}
        x["test"] = x
        try:
            json.dumps(x)
        except ValueError:
            pass
        else:
            self.fail("didn't raise ValueError on dict recursion")
        # Two keys sharing one value is fine.
        x = {}
        y = {"a": x, "b": x}
        # ensure that the marker is cleared
        json.dumps(y)

    def test_defaultrecursion(self):
        # default() returning a string terminates normally...
        enc = RecursiveJSONEncoder()
        self.assertEqual(enc.encode(JSONTestObject), '"JSONTestObject"')
        # ...but returning the sentinel again must be caught as recursion.
        enc.recurse = True
        try:
            enc.encode(JSONTestObject)
        except ValueError:
            pass
        else:
            self.fail("didn't raise ValueError on default recursion")
| agpl-3.0 |
ruslanloman/nova | nova/tests/unit/api/openstack/compute/contrib/test_floating_ips_bulk.py | 21 | 8799 | # Copyright 2012 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import netaddr
from oslo_config import cfg
import webob
from nova.api.openstack.compute.contrib import floating_ips_bulk as fipbulk_v2
from nova.api.openstack.compute.plugins.v3 import floating_ips_bulk as\
fipbulk_v21
from nova import context
from nova import exception
from nova import objects
from nova import test
from nova.tests.unit.api.openstack import fakes
CONF = cfg.CONF
class FloatingIPBulkV21(test.TestCase):
    """Tests for the v2.1 os-floating-ips-bulk API extension.

    The ``_test_*`` helpers take the request as a parameter so the v2
    subclass below can re-run them with an admin-context request.
    """

    # Overridden by FloatingIPBulkV2 to target the legacy controller and
    # its different bad-request exception type.
    floating_ips_bulk = fipbulk_v21
    bad_request = exception.ValidationError

    def setUp(self):
        super(FloatingIPBulkV21, self).setUp()
        self.context = context.get_admin_context()
        self.controller = self.floating_ips_bulk.FloatingIPBulkController()
        self.req = fakes.HTTPRequest.blank('')

    def _setup_floating_ips(self, ip_range):
        # Create the range through the API and verify the echoed response.
        body = {'floating_ips_bulk_create': {'ip_range': ip_range}}
        res_dict = self.controller.create(self.req, body=body)
        response = {"floating_ips_bulk_create": {
            'ip_range': ip_range,
            'pool': CONF.default_floating_pool,
            'interface': CONF.public_interface}}
        self.assertEqual(res_dict, response)

    def test_create_ips(self):
        ip_range = '192.168.1.0/28'
        self._setup_floating_ips(ip_range)

    def test_create_ips_pool(self):
        # An explicit pool name overrides the configured default pool.
        ip_range = '10.0.1.0/29'
        pool = 'a new pool'
        body = {'floating_ips_bulk_create':
                {'ip_range': ip_range,
                 'pool': pool}}
        res_dict = self.controller.create(self.req, body=body)
        response = {"floating_ips_bulk_create": {
            'ip_range': ip_range,
            'pool': pool,
            'interface': CONF.public_interface}}
        self.assertEqual(res_dict, response)

    def test_list_ips(self):
        self._test_list_ips(self.req)

    def _test_list_ips(self, req):
        ip_range = '192.168.1.1/28'
        self._setup_floating_ips(ip_range)
        res_dict = self.controller.index(req)
        # Every host address of the CIDR should be listed, unassociated.
        ip_info = [{'address': str(ip_addr),
                    'pool': CONF.default_floating_pool,
                    'interface': CONF.public_interface,
                    'project_id': None,
                    'instance_uuid': None,
                    'fixed_ip': None}
                   for ip_addr in netaddr.IPNetwork(ip_range).iter_hosts()]
        response = {'floating_ip_info': ip_info}
        self.assertEqual(res_dict, response)

    def test_list_ips_associated(self):
        self._test_list_ips_associated(self.req)

    # NOTE: mock.patch appends the mock as the LAST positional argument,
    # so calling self._test_list_ips_associated(req) above supplies
    # ``mock_get`` automatically.
    @mock.patch('nova.objects.FloatingIPList.get_all')
    def _test_list_ips_associated(self, req, mock_get):
        instance_uuid = "fake-uuid"
        fixed_address = "10.0.0.1"
        floating_address = "192.168.0.1"
        fixed_ip = objects.FixedIP(instance_uuid=instance_uuid,
                                   address=fixed_address)
        floating_ip = objects.FloatingIP(address=floating_address,
                                         fixed_ip=fixed_ip,
                                         pool=CONF.default_floating_pool,
                                         interface=CONF.public_interface,
                                         project_id=None)
        floating_list = objects.FloatingIPList(objects=[floating_ip])
        mock_get.return_value = floating_list
        res_dict = self.controller.index(req)
        # An associated IP reports its instance uuid and fixed address.
        ip_info = [{'address': floating_address,
                    'pool': CONF.default_floating_pool,
                    'interface': CONF.public_interface,
                    'project_id': None,
                    'instance_uuid': instance_uuid,
                    'fixed_ip': fixed_address}]
        response = {'floating_ip_info': ip_info}
        self.assertEqual(res_dict, response)

    def test_list_ip_by_host(self):
        self._test_list_ip_by_host(self.req)

    def _test_list_ip_by_host(self, req):
        # No floating IP is tied to host 'host', so show() must 404.
        ip_range = '192.168.1.1/28'
        self._setup_floating_ips(ip_range)
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.controller.show, req, 'host')

    def test_delete_ips(self):
        self._test_delete_ips(self.req)

    def _test_delete_ips(self, req):
        ip_range = '192.168.1.0/29'
        self._setup_floating_ips(ip_range)
        body = {'ip_range': ip_range}
        res_dict = self.controller.update(req, "delete", body=body)
        response = {"floating_ips_bulk_delete": ip_range}
        self.assertEqual(res_dict, response)
        # Check that the IPs are actually deleted
        res_dict = self.controller.index(req)
        response = {'floating_ip_info': []}
        self.assertEqual(res_dict, response)

    def test_create_duplicate_fail(self):
        # The /29 overlaps the already-created /30 -> 409 Conflict.
        ip_range = '192.168.1.0/30'
        self._setup_floating_ips(ip_range)
        ip_range = '192.168.1.0/29'
        body = {'floating_ips_bulk_create': {'ip_range': ip_range}}
        self.assertRaises(webob.exc.HTTPConflict, self.controller.create,
                          self.req, body=body)

    def test_create_bad_cidr_fail(self):
        # netaddr can't handle /32 or 31 cidrs
        ip_range = '192.168.1.1/32'
        body = {'floating_ips_bulk_create': {'ip_range': ip_range}}
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          self.req, body=body)

    def test_create_invalid_cidr_fail(self):
        # Non-CIDR input raises the version-specific bad-request error.
        ip_range = 'not a cidr'
        body = {'floating_ips_bulk_create': {'ip_range': ip_range}}
        self.assertRaises(self.bad_request, self.controller.create,
                          self.req, body=body)
class FloatingIPBulkV2(FloatingIPBulkV21):
    """Re-runs the v2.1 tests against the legacy v2 extension, which
    requires an admin context for listing/deleting and raises
    HTTPBadRequest instead of ValidationError on bad input."""

    floating_ips_bulk = fipbulk_v2
    bad_request = webob.exc.HTTPBadRequest

    def setUp(self):
        super(FloatingIPBulkV2, self).setUp()
        self.non_admin_req = fakes.HTTPRequest.blank('')
        self.admin_req = fakes.HTTPRequest.blank('', use_admin_context=True)

    def test_list_ips_with_non_admin(self):
        # v2 enforces admin context at the controller level.
        ip_range = '192.168.1.1/28'
        self._setup_floating_ips(ip_range)
        self.assertRaises(exception.AdminRequired,
                          self.controller.index, self.non_admin_req)

    def test_list_ip_with_non_admin(self):
        ip_range = '192.168.1.1/28'
        self._setup_floating_ips(ip_range)
        self.assertRaises(exception.AdminRequired, self.controller.show,
                          self.non_admin_req, "host")

    # The inherited scenarios must use the admin request under v2.
    def test_delete_ips(self):
        self._test_delete_ips(self.admin_req)

    def test_list_ip_by_host(self):
        self._test_list_ip_by_host(self.admin_req)

    def test_list_ips_associated(self):
        self._test_list_ips_associated(self.admin_req)

    def test_list_ips(self):
        self._test_list_ips(self.admin_req)
class FloatingIPBulkPolicyEnforcementV21(test.NoDBTestCase):
    """Verify every os-floating-ips-bulk action is guarded by policy."""

    def setUp(self):
        super(FloatingIPBulkPolicyEnforcementV21, self).setUp()
        self.controller = fipbulk_v21.FloatingIPBulkController()
        self.req = fakes.HTTPRequest.blank('')

    def _common_policy_check(self, func, *arg, **kwarg):
        # Install a rule the fake request's project can never satisfy,
        # then assert the call is rejected with the expected message.
        rule_name = "os_compute_api:os-floating-ips-bulk"
        self.policy.set_rules({rule_name: "project:non_fake"})
        exc = self.assertRaises(
            exception.PolicyNotAuthorized, func, *arg, **kwarg)
        self.assertEqual(
            "Policy doesn't allow %s to be performed." % rule_name,
            exc.format_message())

    def test_index_policy_failed(self):
        self._common_policy_check(self.controller.index, self.req)

    def test_show_ip_policy_failed(self):
        self._common_policy_check(self.controller.show, self.req, "host")

    def test_create_policy_failed(self):
        body = {'floating_ips_bulk_create': {'ip_range': '192.168.1.0/28'}}
        self._common_policy_check(self.controller.create, self.req, body=body)

    def test_update_policy_failed(self):
        self._common_policy_check(self.controller.update, self.req,
                                  "delete", body={'ip_range': '192.168.1.0/29'})
| apache-2.0 |
lahwaacz/qutebrowser | qutebrowser/misc/autoupdate.py | 4 | 2855 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2017 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Classes related to auto-updating and getting the latest version."""
import json
from PyQt5.QtCore import pyqtSignal, pyqtSlot, QObject, QUrl
from qutebrowser.misc import httpclient
class PyPIVersionClient(QObject):

    """A client for the PyPI JSON API using HTTPClient.

    Looks up the latest released version of qutebrowser (or another
    package) on PyPI.

    Attributes:
        _client: The HTTPClient used for the request.

    Class attributes:
        API_URL: The base API URL.

    Signals:
        success: Emitted with the newest version string when the lookup
                 succeeded.
        error: Emitted with an error message string when it failed.
    """

    API_URL = 'https://pypi.python.org/pypi/{}/json'
    success = pyqtSignal(str)
    error = pyqtSignal(str)

    def __init__(self, parent=None, client=None):
        super().__init__(parent)
        # Allow injecting a fake client for tests; default to a real one.
        self._client = httpclient.HTTPClient(self) if client is None else client
        self._client.error.connect(self.error)
        self._client.success.connect(self.on_client_success)

    def get_version(self, package='qutebrowser'):
        """Request the newest version of *package*.

        Emits success/error when done.
        """
        self._client.get(QUrl(self.API_URL.format(package)))

    @pyqtSlot(str)
    def on_client_success(self, data):
        """Parse the reply body and emit the version (or an error).

        Args:
            data: A string with the received data.
        """
        try:
            json_data = json.loads(data)
        except ValueError as e:
            self.error.emit("Invalid JSON received in reply: {}!".format(e))
            return
        try:
            self.success.emit(json_data['info']['version'])
        except KeyError as e:
            self.error.emit("Malformed data received in reply "
                            "({!r} not found)!".format(e))
            return
kevinlee12/oppia | core/controllers/moderator.py | 4 | 3003 | # Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Controllers for the moderator page."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
from core.controllers import acl_decorators
from core.controllers import base
from core.domain import activity_domain
from core.domain import activity_services
from core.domain import email_manager
from core.domain import summary_services
import feconf
class ModeratorPage(base.BaseHandler):
    """The moderator page."""

    @acl_decorators.can_access_moderator_page
    def get(self):
        """Handles GET requests by rendering the moderator page shell."""
        self.render_template('moderator-page.mainpage.html')
class FeaturedActivitiesHandler(base.BaseHandler):
    """The moderator page handler for featured activities."""

    GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON

    @acl_decorators.can_access_moderator_page
    def get(self):
        """Handles GET requests.

        Returns the current list of featured activity references as JSON.
        """
        references = activity_services.get_featured_activity_references()
        self.render_json({
            'featured_activity_references': [
                reference.to_dict() for reference in references
            ],
        })

    @acl_decorators.can_access_moderator_page
    def post(self):
        """Handles POST requests.

        Replaces the stored featured activity references, provided every
        referenced activity is public.
        """
        reference_dicts = self.payload.get(
            'featured_activity_reference_dicts')
        references = []
        for reference_dict in reference_dicts:
            references.append(activity_domain.ActivityReference(
                reference_dict['type'], reference_dict['id']))
        try:
            summary_services.require_activities_to_be_public(references)
        except Exception as e:
            raise self.InvalidInputException(e)
        activity_services.update_featured_activity_references(references)
        self.render_json({})
class EmailDraftHandler(base.BaseHandler):
    """Provide default email templates for moderator emails."""

    GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON

    @acl_decorators.can_send_moderator_emails
    def get(self):
        """Handles GET requests.

        Returns a JSON payload containing the default body text for an
        exploration-unpublication email.
        """
        self.render_json({
            'draft_email_body': (
                email_manager.get_moderator_unpublish_exploration_email()),
        })
| apache-2.0 |
kaplun/invenio | modules/bibfield/lib/bibfield_utils.py | 15 | 17601 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2004, 2005, 2006, 2007, 2008, 2010, 2011, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
BibField Utils
Helper classes and functions to work with BibField
"""
__revision__ = "$Id$"
import datetime
import os
import re
import six
from invenio.config import CFG_PYLIBDIR
from invenio.pluginutils import PluginContainer
from invenio.containerutils import SmartDict
from invenio.importutils import try_to_eval
from invenio.bibfield_config_engine import BibFieldParser as FieldParser
# Plugin containers collecting the helper functions shipped under
# invenio/bibfield_functions/: all *.py helpers (used when evaluating
# calculated fields) and the produce_* formatters (used by produce()).
CFG_BIBFIELD_FUNCTIONS = PluginContainer(os.path.join(CFG_PYLIBDIR, 'invenio', 'bibfield_functions', '*.py'))
CFG_BIBFIELD_PRODUCERS = PluginContainer(os.path.join(CFG_PYLIBDIR, 'invenio', 'bibfield_functions', 'produce_*.py'))
class BibFieldException(Exception):
    """
    General exception to use within BibField
    """
    pass
class InvenioBibFieldContinuableError(Exception):
    """BibField continuable error: recorded by check_record() but does not
    abort processing of the record."""
    pass
class InvenioBibFieldError(Exception):
    """BibField fatal error, @see CFG_BIBUPLOAD_BIBFIELD_STOP_ERROR_POLICY"""
class SmartJson(SmartDict):
"""Base class for record Json structure"""
def __init__(self, json):
super(SmartJson, self).__init__(json)
self._dict_bson = SmartDict()
self._validator = None
# if '__meta_metadata__.__additional_info__.model_meta_classes' in self:
# meta_classes = [import_string(str_cls)
# for str_cls in self['__meta_metadata__.__additional_info__.model_meta_classes']]
# self.__class__ = type(self.__class__.__name__,
# [self.__class__] + meta_classes, {})
def __getitem__(self, key):
"""
Uses the load capabilities to output the information stored in the DB.
"""
try:
return self._dict_bson[key]
except KeyError:
#We will try to find the key inside the json dict and load it
pass
main_key = SmartDict.main_key_pattern.sub('', key)
if main_key in self._dict['__meta_metadata__']['__aliases__']:
try:
rest_of_key = SmartDict.main_key_pattern.findall(key)[0]
except IndexError:
rest_of_key = ''
return self[self._dict['__meta_metadata__']['__aliases__'][main_key] + rest_of_key]
try:
if self._dict['__meta_metadata__'][main_key]['type'] == 'calculated':
self._load_precalculated_value(main_key)
else:
self._loads(main_key)
except KeyError:
self._loads(main_key)
return self._dict_bson[key]
def __setitem__(self, key, value):
"""
Uses the dumps capabilities to set the items to store them in the DB
"""
main_key = SmartDict.main_key_pattern.sub('', key)
if main_key in self:
self._dict_bson[key] = value
else:
from invenio.bibfield import CFG_BIBFIELD_READERS as readers
reader = readers['bibfield_%sreader.py' % (self['__meta_metadata__']['__additional_info__']['master_format'], )]()
reader.set(self, main_key)
self._dict_bson[key] = value
self._dumps(main_key)
def __eq__(self, other):
try:
for key in self.keys():
if key in ('__meta_metadata__', ):
pass
if not self.get(k) == other.get(k):
return False
except:
return False
return True
def items(self):
for key in self.keys():
yield (key, self[key])
@property
def fatal_errors(self):
"""@return All the fatal/non-continuable errors that check_record has found"""
return self.get('__meta_metadata__.__errors__', [])
@property
def continuable_errors(self):
"""@return All the continuable errors that check_record has found"""
return self.get('__meta_metadata__.__continuable_errors__', [])
@property
def validation_errors(self):
if self._validator is None:
self.validate()
return self._validator.errors
def check_record(self, reset=True):
"""
Using the checking rules defined inside bibfied configurations files checks
if the record is well build. If not it stores the problems inside
self['__error_messages'] splitting then by continuable errors and fatal/non-continuable
errors
"""
def check_rules(checker_functions, key):
"""docstring for check_rule"""
for checker_function in checker_functions:
if 'all' in checker_function[0] or self['__meta_metadata__.__additional_info__.master_format'] in checker_function[0]:
try:
try_to_eval("%s(self,'%s',%s)" % (checker_function[1], key, checker_function[2]))
except InvenioBibFieldContinuableError, err:
self['__meta_metadata__']['__continuable_errors__'].append('Checking CError - ' + str(err))
except InvenioBibFieldError, err:
self['__meta_metadata__']['__errors__'].append('Checking Error - ' + str(err))
if reset or '__meta_metadata___.__errors__' not in self or '__meta_metadata___.__continuable_error__' not in self:
self['__meta_metadata__']['__errors__'] = []
self['__meta_metadata__']['__continuable_errors__'] = []
for key in self.keys():
try:
check_rules(FieldParser.field_definitions()[key]['checker'], key)
except TypeError:
for kkey in FieldParser.field_definitions()[key]:
check_rules(FieldParser.field_definitions()[kkey]['checker'], kkey)
except KeyError:
continue
def get(self, key, default=None, reset_cache=False):
if reset_cache:
main_key = SmartDict.main_key_pattern.sub('', key)
self._load_precalculated_value(main_key, force=True)
try:
return self[key]
except KeyError:
return default
def get_persistent_identifiers(self):
"""
Using _persistent_identifiers_keys calculated fields gets a subset
of the record containing al persistent indentifiers
"""
return dict((key, self[key]) for key in self.get('persistent_identifiers_keys', reset_cache=True))
# def is_empty(self):
# """
# One record is empty if there is nothing stored inside rec_json or there is
# only '__key'
# """
# if len(self.keys()) == 0 or \
# all(key.startswith('__') for key in self.keys()):
# return True
# return False
def dumps(self):
""" """
for key in self._dict_bson.keys():
if key == '__meta_metadata__':
continue
self._dumps(key)
return self._dict
def loads(self):
""" """
for key in self._dict.keys():
if key == '__meta_metadata__':
continue
self._loads(key)
return self._dict_bson._dict
def produce(self, output, fields=None):
return CFG_BIBFIELD_PRODUCERS['produce_' + output](self, fields=fields)
def validate(self):
def find_schema(json_id):
schema = FieldParser.field_definitions(self['__meta_metadata__']['__additional_info__']['namespace']).get(json_id, {})
if isinstance(schema, list):
for jjson_id in schema:
yield FieldParser.field_definitions(self['__meta_metadata__']['__additional_info__']['namespace']).get(jjson_id, {}).get('schema', {})
raise StopIteration()
yield schema.get('schema', {})
if self._validator is None:
schema = {}
# model_fields = ModelParser.model_definitions(self['__meta_metadata__']['__additional_info__']['namespace']).get(fields, {})
# if model_fields:
# for field in self.document.keys():
# if field not in model_fields:
# model_fields[field] = field
# model_field = [json_id for json_id in model_fields.values()]
# else:
# model_fields = self.document.keys()
model_fields = self.document.keys()
for json_id in model_fields:
for schema in find_schema(json_id):
self.schema.update(schema)
self._validator = Validator(schema=shema)
return self._validator.validate(self)
def _dumps(self, field):
""" """
try:
self._dict[field] = reduce(lambda obj, key: obj[key], \
self._dict['__meta_metadata__'][field]['dumps'], \
FieldParser.field_definitions(self['__meta_metadata__']['__additional_info__']['namespace']))(self._dict_bson[field])
except (KeyError, IndexError):
if self['__meta_metadata__'][field]['memoize'] or \
self['__meta_metadata__'][field]['type'] in ('derived', 'creator', 'UNKNOW'):
self._dict[field] = self._dict_bson[field]
def _loads(self, field):
""" """
try:
self._dict_bson[field] = reduce(lambda obj, key: obj[key], \
self._dict['__meta_metadata__'][field]['loads'], \
FieldParser.field_definition(self['__meta_metadata__']['__additional_info__']['namespace']))(self._dict[field])
except (KeyError, IndexError):
self._dict_bson[field] = self._dict[field]
def _load_precalculated_value(self, field, force=False):
"""
"""
if self._dict['__meta_metadata__'][field]['memoize'] is None:
func = reduce(lambda obj, key: obj[key], \
self._dict['__meta_metadata__'][field]['function'], \
FieldParser.field_definitions())
self._dict_bson[field] = try_to_eval(func, CFG_BIBFIELD_FUNCTIONS, self=self)
else:
live_time = datetime.timedelta(0, self._dict['__meta_metadata__'][field]['memoize'])
timestamp = datetime.datetime.strptime(self._dict['__meta_metadata__'][field]['timestamp'], "%Y-%m-%dT%H:%M:%S")
if datetime.datetime.now() > timestamp + live_time or force:
old_value = self._dict_bson[field]
func = reduce(lambda obj, key: obj[key], \
self._dict['__meta_metadata__'][field]['function'], \
FieldParser.field_definitions(self['__meta_metadata__']['__additional_info__']['namespace']))
self._dict_bson[field] = try_to_eval(func, CFG_BIBFIELD_FUNCTIONS, self=self)
if not old_value == self._dict_bson[field]:
#FIXME: trigger update in DB and fire signal to update others
pass
# Legacy methods, try not to use them as they are already deprecated
def legacy_export_as_marc(self):
"""
It creates a valid marcxml using the legacy rules defined in the config
file
"""
from collections import Iterable
def encode_for_marcxml(value):
from invenio.textutils import encode_for_xml
if isinstance(value, unicode):
value = value.encode('utf8')
return encode_for_xml(str(value))
export = '<record>'
marc_dicts = self.produce('json_for_marc')
for marc_dict in marc_dicts:
content = ''
tag = ''
ind1 = ''
ind2 = ''
for key, value in marc_dict.iteritems():
if isinstance(value, six.string_types) or not isinstance(value, Iterable):
value = [value]
for v in value:
if v is None:
continue
if key.startswith('00') and len(key) == 3:
# Control Field (No indicators no subfields)
export += '<controlfield tag="%s">%s</controlfield>\n' % (key, encode_for_marcxml(v))
elif len(key) == 6:
if not (tag == key[:3] and ind1 == key[3].replace('_', '') and ind2 == key[4].replace('_', '')):
tag = key[:3]
ind1 = key[3].replace('_', '')
ind2 = key[4].replace('_', '')
if content:
export += '<datafield tag="%s" ind1="%s" ind2="%s">%s</datafield>\n' % (tag, ind1, ind2, content)
content = ''
content += '<subfield code="%s">%s</subfield>' % (key[5], encode_for_marcxml(v))
else:
pass
if content:
export += '<datafield tag="%s" ind1="%s" ind2="%s">%s</datafield>\n' % (tag, ind1, ind2, content)
export += '</record>'
return export
def legacy_create_recstruct(self):
"""
It creates the recstruct representation using the legacy rules defined in
the configuration file
#CHECK: it might be a bit overkilling
"""
from invenio.bibrecord import create_record
return create_record(self.legacy_export_as_marc())[0]
# def is_cacheable(self, field):
# """
# Check if a field is inside the __do_not_cache or not
# @return True if it is not in __do_not_cache
# """
# return not get_main_field(field) in self.rec_json['__do_not_cache']
# def update_field_cache(self, field):
# """
# Updates the value of the cache for the given calculated field
# """
# field = get_main_field(field)
# if re.search('^_[a-zA-Z0-9]', field) and not field in self.rec_json['__do_not_cache']:
# self.rec_json[field] = self._recalculate_field_value(field)[field]
#TODO: waiting for a pull request to Cerberus to be merged
from cerberus import Validator as ValidatorBase
from cerberus import ValidationError, SchemaError
from cerberus import errors
class Validator(ValidatorBase):
    """Cerberus Validator subclass used by SmartJson.validate().

    Differs from the stock Validator defaults: unknown fields are
    allowed, and schema rules without a registered handler are tolerated
    (transparent_schema_rules=True).
    """

    def __init__(self, schema=None, transparent_schema_rules=True,
                 ignore_none_values=False, allow_unknown=True):
        super(Validator, self).__init__(schema, transparent_schema_rules,
                                        ignore_none_values, allow_unknown)

    def _validate(self, document, schema=None, update=False):
        """Walk `document`, applying the schema rules to each field.

        Returns True when no errors were collected; details end up in
        self._errors.
        """
        # Reset per-run state before walking the document.
        self._errors = {}
        self.update = update

        if schema is not None:
            self.schema = schema
        elif self.schema is None:
            raise SchemaError(errors.ERROR_SCHEMA_MISSING)
        if not isinstance(self.schema, dict):
            raise SchemaError(errors.ERROR_SCHEMA_FORMAT % str(self.schema))

        if document is None:
            raise ValidationError(errors.ERROR_DOCUMENT_MISSING)
        if not hasattr(document, 'get'):
            raise ValidationError(errors.ERROR_DOCUMENT_FORMAT % str(document))
        self.document = document

        # These rules are handled out of band below, so they are skipped
        # when dispatching to the per-rule _validate_<rule> handlers.
        special_rules = ["required", "nullable", "type"]
        for field, value in self.document.items():
            if self.ignore_none_values and value is None:
                continue

            definition = self.schema.get(field)
            if definition:
                if isinstance(definition, dict):
                    # Nullable fields short-circuit all further checks.
                    if definition.get("nullable", False) == True \
                       and value is None:  # noqa
                        continue

                    # Validate the type first; skip other rules on failure.
                    if 'type' in definition:
                        self._validate_type(definition['type'], field, value)
                        if self.errors:
                            continue

                    definition_rules = [rule for rule in definition.keys()
                                        if rule not in special_rules]
                    for rule in definition_rules:
                        # Dispatch to _validate_<rule>() when it exists.
                        validatorname = "_validate_" + rule.replace(" ", "_")
                        validator = getattr(self, validatorname, None)
                        if validator:
                            validator(definition[rule], field, value)
                        elif not self.transparent_schema_rules:
                            raise SchemaError(errors.ERROR_UNKNOWN_RULE %
                                              (rule, field))
                else:
                    raise SchemaError(errors.ERROR_DEFINITION_FORMAT % field)

            else:
                if not self.allow_unknown:
                    self._error(field, errors.ERROR_UNKNOWN_FIELD)

        if not self.update:
            self._validate_required_fields()

        return len(self._errors) == 0
| gpl-2.0 |
MattDevo/edk2 | AppPkg/Applications/Python/Python-2.7.2/Demo/metaclasses/Synch.py | 6 | 8194 | """Synchronization metaclass.
This metaclass makes it possible to declare synchronized methods.
"""
import thread
# First we need to define a reentrant lock.
# This is generally useful and should probably be in a standard Python
# library module. For now, we in-line it.
class Lock:

    """Reentrant lock.

    This is a mutex-like object which can be acquired by the same
    thread more than once. It keeps a reference count of the number
    of times it has been acquired by the same thread. Each acquire()
    call must be matched by a release() call and only the last
    release() call actually releases the lock for acquisition by
    another thread.

    The implementation uses two locks internally:

    __mutex is a short term lock used to protect the instance variables
    __wait is the lock for which other threads wait

    A thread intending to acquire both locks should acquire __wait
    first.

    The implementation uses two other instance variables, protected by
    locking __mutex:

    __tid is the thread ID of the thread that currently has the lock
    __count is the number of times the current thread has acquired it

    When the lock is released, __tid is None and __count is zero.
    """

    def __init__(self):
        """Constructor. Initialize all instance variables."""
        self.__mutex = thread.allocate_lock()
        self.__wait = thread.allocate_lock()
        self.__tid = None
        self.__count = 0

    def acquire(self, flag=1):
        """Acquire the lock.

        If the optional flag argument is false, returns immediately
        when it cannot acquire the __wait lock without blocking (it
        may still block for a little while in order to acquire the
        __mutex lock).

        The return value is only relevant when the flag argument is
        false; it is 1 if the lock is acquired, 0 if not.
        """
        # Fast path: the calling thread already owns the lock, so only
        # the recursion count needs to be bumped.
        self.__mutex.acquire()
        try:
            if self.__tid == thread.get_ident():
                self.__count = self.__count + 1
                return 1
        finally:
            self.__mutex.release()
        # Slow path: block on (or, when flag is false, poll) __wait.
        locked = self.__wait.acquire(flag)
        if not flag and not locked:
            return 0
        try:
            self.__mutex.acquire()
            # We just won __wait, so the lock must be unowned.
            assert self.__tid == None
            assert self.__count == 0
            self.__tid = thread.get_ident()
            self.__count = 1
            return 1
        finally:
            self.__mutex.release()

    def release(self):
        """Release the lock.

        If this thread doesn't currently have the lock, an assertion
        error is raised.

        Only allow another thread to acquire the lock when the count
        reaches zero after decrementing it.
        """
        self.__mutex.acquire()
        try:
            assert self.__tid == thread.get_ident()
            assert self.__count > 0
            self.__count = self.__count - 1
            if self.__count == 0:
                # Last matching release: hand __wait over to waiters.
                self.__tid = None
                self.__wait.release()
        finally:
            self.__mutex.release()
def _testLock():
    """Smoke-test Lock: reentrancy within one thread plus contention
    between several spawned threads.

    Every completed call appends to `done`; the main thread spins until
    the 9 expected entries (2 + 2 + 3 + 1 + 1) have arrived.
    """
    done = []

    def f2(lock, done=done):
        lock.acquire()
        print "f2 running in thread %d\n" % thread.get_ident(),
        lock.release()
        done.append(1)

    def f1(lock, f2=f2, done=done):
        # Acquires the lock, then calls f2 while still holding it —
        # this only works because Lock is reentrant.
        lock.acquire()
        print "f1 running in thread %d\n" % thread.get_ident(),
        try:
            f2(lock)
        finally:
            lock.release()
        done.append(1)

    lock = Lock()
    lock.acquire()
    f1(lock)                                # Adds 2 to done
    lock.release()
    lock.acquire()
    # The spawned threads block until the main thread releases below.
    thread.start_new_thread(f1, (lock,))    # Adds 2
    thread.start_new_thread(f1, (lock, f1)) # Adds 3
    thread.start_new_thread(f2, (lock,))    # Adds 1
    thread.start_new_thread(f2, (lock,))    # Adds 1
    lock.release()
    import time
    while len(done) < 9:
        print len(done)
        time.sleep(0.001)
    print len(done)
# Now, the Locking metaclass is a piece of cake.
# As an example feature, methods whose name begins with exactly one
# underscore are not synchronized.
from Meta import MetaClass, MetaHelper, MetaMethodWrapper
class LockingMethodWrapper(MetaMethodWrapper):
    """Method wrapper that serializes calls through the instance's
    reentrant __lock__.

    Methods whose name begins with exactly one underscore (semi-private
    helpers) are exempt and run unsynchronized.
    """

    def __call__(self, *args, **kw):
        # FIX: the original tested `self.__name__[1:] != '_'`, which only
        # excluded the literal name '__' and therefore left every dunder
        # method (e.g. __init__) unsynchronized too, contradicting the
        # documented "exactly one underscore" rule; [1:2] checks only the
        # second character.
        if self.__name__[:1] == '_' and self.__name__[1:2] != '_':
            return apply(self.func, (self.inst,) + args, kw)
        self.inst.__lock__.acquire()
        try:
            return apply(self.func, (self.inst,) + args, kw)
        finally:
            self.inst.__lock__.release()
class LockingHelper(MetaHelper):
    # Every method lookup goes through the synchronizing wrapper.
    __methodwrapper__ = LockingMethodWrapper

    def __helperinit__(self, formalclass):
        """Create the per-instance reentrant lock used by the wrappers."""
        MetaHelper.__helperinit__(self, formalclass)
        self.__lock__ = Lock()
class LockingMetaClass(MetaClass):
    """Metaclass whose instances get synchronized (locked) methods."""
    __helper__ = LockingHelper

# Base class users inherit from to opt in to method synchronization.
Locking = LockingMetaClass('Locking', (), {})
def _test():
    """Demo: a producer and a consumer thread sharing a growable ring
    buffer whose thread safety comes entirely from the Locking base
    class."""
    # For kicks, take away the Locking base class and see it die
    class Buffer(Locking):
        def __init__(self, initialsize):
            assert initialsize > 0
            self.size = initialsize
            self.buffer = [None]*self.size
            # first == last means the ring buffer is empty.
            self.first = self.last = 0

        def put(self, item):
            # Do we need to grow the buffer?
            if (self.last+1) % self.size != self.first:
                # Insert the new item
                self.buffer[self.last] = item
                self.last = (self.last+1) % self.size
                return
            # Double the buffer size
            # First normalize it so that first==0 and last==size-1
            print "buffer =", self.buffer
            print "first = %d, last = %d, size = %d" % (
                self.first, self.last, self.size)
            if self.first <= self.last:
                temp = self.buffer[self.first:self.last]
            else:
                temp = self.buffer[self.first:] + self.buffer[:self.last]
            print "temp =", temp
            self.buffer = temp + [None]*(self.size+1)
            self.first = 0
            self.last = self.size-1
            self.size = self.size*2
            print "Buffer size doubled to", self.size
            print "new buffer =", self.buffer
            print "first = %d, last = %d, size = %d" % (
                self.first, self.last, self.size)
            self.put(item)  # Recursive call to test the locking

        def get(self):
            # Is the buffer empty?
            if self.first == self.last:
                raise EOFError  # Avoid defining a new exception
            item = self.buffer[self.first]
            self.first = (self.first+1) % self.size
            return item

    def producer(buffer, wait, n=1000):
        import time
        i = 0
        while i < n:
            print "put", i
            buffer.put(i)
            i = i+1
        print "Producer: done producing", n, "items"
        wait.release()

    def consumer(buffer, wait, n=1000):
        import time
        i = 0
        # Exponential backoff while the buffer is empty; reset to the
        # minimum after each successful get().
        tout = 0.001
        while i < n:
            try:
                x = buffer.get()
                if x != i:
                    raise AssertionError, \
                          "get() returned %s, expected %s" % (x, i)
                print "got", i
                i = i+1
                tout = 0.001
            except EOFError:
                time.sleep(tout)
                tout = tout*2
        print "Consumer: done consuming", n, "items"
        wait.release()

    # pwait/cwait start acquired; each worker releases its own when done,
    # so the main thread's second acquire() acts as a join.
    pwait = thread.allocate_lock()
    pwait.acquire()
    cwait = thread.allocate_lock()
    cwait.acquire()
    buffer = Buffer(1)
    n = 1000
    thread.start_new_thread(consumer, (buffer, cwait, n))
    thread.start_new_thread(producer, (buffer, pwait, n))
    pwait.acquire()
    print "Producer done"
    cwait.acquire()
    print "All done"
    print "buffer size ==", len(buffer.buffer)
| bsd-2-clause |
oandrew/home-assistant | homeassistant/components/sensor/dweet.py | 23 | 3540 | """
Support for showing values from Dweet.io.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.dweet/
"""
import json
import logging
from datetime import timedelta
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_NAME, CONF_VALUE_TEMPLATE, STATE_UNKNOWN, CONF_UNIT_OF_MEASUREMENT)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
REQUIREMENTS = ['dweepy==0.2.0']

_LOGGER = logging.getLogger(__name__)

CONF_DEVICE = 'device'
DEFAULT_NAME = 'Dweet.io Sensor'

# Minimum interval between REST polls; enforced via @Throttle on
# DweetData.update().
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)

# `device` and `value_template` are mandatory; name and unit are optional.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_DEVICE): cv.string,
    vol.Required(CONF_VALUE_TEMPLATE): cv.template,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
})
# pylint: disable=unused-variable, too-many-function-args
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the Dweet sensor.

    Verifies that the configured thing exists on Dweet.io and that the
    value template matches its latest payload before adding the entity.
    """
    import dweepy

    device = config.get(CONF_DEVICE)
    name = config.get(CONF_NAME)
    unit = config.get(CONF_UNIT_OF_MEASUREMENT)
    value_template = config.get(CONF_VALUE_TEMPLATE)
    value_template.hass = hass

    try:
        latest = dweepy.get_latest_dweet_for(device)
        content = json.dumps(latest[0]['content'])
    except dweepy.DweepyError:
        _LOGGER.error("Device/thing '%s' could not be found", device)
        return False

    if value_template.render_with_possible_json_value(content) == '':
        _LOGGER.error("'%s' was not found", value_template)
        return False

    dweet = DweetData(device)
    add_devices([DweetSensor(hass, dweet, name, value_template, unit)])
class DweetSensor(Entity):
    """Representation of a Dweet sensor."""

    def __init__(self, hass, dweet, name, value_template, unit_of_measurement):
        """Initialize the sensor."""
        self.hass = hass
        self.dweet = dweet
        self._name = name
        self._value_template = value_template
        self._state = STATE_UNKNOWN
        self._unit_of_measurement = unit_of_measurement
        self.update()

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def unit_of_measurement(self):
        """Return the unit the value is expressed in."""
        return self._unit_of_measurement

    @property
    def state(self):
        """Return the state."""
        data = self.dweet.data
        if data is None:
            return STATE_UNKNOWN
        # Render the configured template against the latest dweet payload.
        content = json.dumps(data[0]['content'])
        return self._value_template.render_with_possible_json_value(content)

    def update(self):
        """Get the latest data from REST API."""
        self.dweet.update()
class DweetData(object):
    """The class for handling the data retrieval."""

    def __init__(self, device):
        """Initialize the sensor."""
        self._device = device
        # Latest raw dweet payload; None until the first successful poll
        # and reset to None after a failed one.
        self.data = None

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """Get the latest data from Dweet.io."""
        import dweepy

        try:
            self.data = dweepy.get_latest_dweet_for(self._device)
        except dweepy.DweepyError:
            _LOGGER.error("Device '%s' could not be found", self._device)
            self.data = None
| mit |
nanditav/15712-TensorFlow | tensorflow/python/framework/common_shapes_test.py | 60 | 2551 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for common shapes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import common_shapes
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.platform import googletest
class CommonShapesTest(test_util.TensorFlowTestCase):
  """Tests for common_shapes.broadcast_shape."""

  def testBroadcast_one_dimension(self):
    s1 = tensor_shape.vector(5)
    s2 = tensor_shape.vector(7)

    unknown = tensor_shape.unknown_shape()
    scalar = tensor_shape.scalar()
    expanded_scalar = tensor_shape.TensorShape([1])

    # Tensors with same shape should have the same broadcast result.
    self.assertEqual(s1, common_shapes.broadcast_shape(s1, s1))
    self.assertEqual(s2, common_shapes.broadcast_shape(s2, s2))
    self.assertEqual(unknown, common_shapes.broadcast_shape(unknown, unknown))
    self.assertEqual(scalar, common_shapes.broadcast_shape(scalar, scalar))
    self.assertEqual(expanded_scalar, common_shapes.broadcast_shape(
        expanded_scalar, expanded_scalar))

    # [] acts like an identity.
    self.assertEqual(s1, common_shapes.broadcast_shape(s1, scalar))
    self.assertEqual(s2, common_shapes.broadcast_shape(s2, scalar))
    self.assertEqual(s1, common_shapes.broadcast_shape(s1, expanded_scalar))
    self.assertEqual(s2, common_shapes.broadcast_shape(s2, expanded_scalar))

    self.assertEqual(unknown, common_shapes.broadcast_shape(s1, unknown))
    self.assertEqual(unknown, common_shapes.broadcast_shape(s2, unknown))

    self.assertEqual(expanded_scalar, common_shapes.broadcast_shape(
        scalar, expanded_scalar))

    # Incompatible dimensions must fail in both argument orders.
    # FIX: both calls previously shared a single assertRaises block, so
    # the second call was never executed (the first one already raised).
    with self.assertRaises(ValueError):
      common_shapes.broadcast_shape(s1, s2)
    with self.assertRaises(ValueError):
      common_shapes.broadcast_shape(s2, s1)
if __name__ == "__main__":
googletest.main()
| apache-2.0 |
boto/requestbuilder | requestbuilder/auth/aws.py | 1 | 25216 | # Copyright (c) 2012-2015, Eucalyptus Systems, Inc.
#
# Permission to use, copy, modify, and/or distribute this software for
# any purpose with or without fee is hereby granted, provided that the
# above copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import absolute_import
import base64
import calendar
import datetime
import email.utils
import hashlib
import hmac
import os
import re
import tempfile
import time
import warnings
import six
import six.moves.urllib_parse as urlparse
from requestbuilder import Arg
from requestbuilder.auth import BaseAuth
from requestbuilder.exceptions import AuthError
# Timestamp formats used when signing requests: full ISO 8601 and the
# "basic" (separator-free) variant.
ISO8601 = '%Y-%m-%dT%H:%M:%SZ'
ISO8601_BASIC = '%Y%m%dT%H%M%SZ'
class HmacKeyAuth(BaseAuth):
'''
Basis for AWS HMAC-based authentication
'''
ARGS = [Arg('-I', '--access-key-id', dest='key_id', metavar='KEY_ID'),
Arg('-S', '--secret-key', dest='secret_key', metavar='KEY'),
Arg('--security-token', dest='security_token', metavar='TOKEN')]
@classmethod
def from_other(cls, other, **kwargs):
    """Build a configured auth instance, filling any setting not given
    in **kwargs from another requestbuilder object's args."""
    kwargs.setdefault('loglevel', other.log.level)
    for argname in ('key_id', 'secret_key', 'security_token',
                    'credential_expiration'):
        kwargs.setdefault(argname, other.args.get(argname))
    new = cls(other.config, **kwargs)
    new.configure()
    return new
def configure(self):
    """Gather credentials and fail fast when they are unusable.

    Populates self.args from the available sources, then raises
    AuthError if the access key ID or secret key is still missing, or
    if a supplied credential_expiration timestamp lies in the past.
    """
    self.__populate_auth_args()
    if not self.args.get('key_id'):
        raise AuthError('missing access key ID; please supply one with -I')
    if not self.args.get('secret_key'):
        raise AuthError('missing secret key; please supply one with -S')
    if self.args.get('credential_expiration'):
        expiration = None
        # Accept timestamps both with and without fractional seconds.
        for fmt in ('%Y-%m-%dT%H:%M:%S.%fZ', '%Y-%m-%dT%H:%M:%SZ'):
            try:
                expiration = datetime.datetime.strptime(
                    self.args['credential_expiration'], fmt)
                break
            except ValueError:
                continue
        else:
            # An unparseable expiration is logged but not fatal.
            self.log.warn(
                'failed to parse credential expiration time '
                '\'{0}\'; proceeding without validation'
                .format(self.args['credential_expiration']))
        if expiration and expiration < datetime.datetime.utcnow():
            raise AuthError('credentials have expired')
def configure_from_aws_credential_file(self):
if 'AWS_CREDENTIAL_FILE' in os.environ:
path = os.getenv('AWS_CREDENTIAL_FILE')
path = os.path.expandvars(path)
path = os.path.expanduser(path)
with open(path) as credfile:
for line in credfile:
line = line.split('#', 1)[0]
if '=' in line:
(key, val) = line.split('=', 1)
if (key.strip() == 'AWSAccessKeyId' and
not self.args.get('key_id')):
# There's probably a better way to do this, but it
# seems to work for me. Patches are welcome. :)
self.args['key_id'] = val.strip()
elif (key.strip() == 'AWSSecretKey' and
not self.args.get('secret_key')):
self.args['secret_key'] = val.strip()
return path
def __populate_auth_args(self):
"""
Try to get auth info from each source in turn until one provides
both a key ID and a secret key. After each time a source fails
to provide enough info we wipe self.args out so we don't wind up
mixing info from multiple sources.
"""
# self.args gets highest precedence
if self.args.get('key_id') and not self.args.get('secret_key'):
# __reset_unless_ready will wipe out key_id and result in
# the wrong error message
raise AuthError('missing secret key; please supply one with -S')
if self.args.get('secret_key') and not self.args.get('key_id'):
# If only one is supplied at the command line we should
# immediately blow up
raise AuthError('missing access key ID; please supply one with -I')
if self.__reset_unless_ready():
self.log.debug('using auth info provided directly')
return
# Environment comes next
self.args['key_id'] = (os.getenv('AWS_ACCESS_KEY_ID') or
os.getenv('AWS_ACCESS_KEY'))
self.args['secret_key'] = (os.getenv('AWS_SECRET_ACCESS_KEY') or
os.getenv('AWS_SECRET_KEY'))
self.args['security_token'] = os.getenv('AWS_SECURITY_TOKEN')
self.args['credential_expiration'] = \
os.getenv('AWS_CREDENTIAL_EXPIRATION')
if self.__reset_unless_ready():
self.log.debug('using auth info from environment')
return
# See if an AWS credential file was given in the environment
aws_credfile_path = self.configure_from_aws_credential_file()
if aws_credfile_path and self.__reset_unless_ready():
self.log.debug('using auth info from AWS credential file %s',
aws_credfile_path)
return
# Try the config file
self.args['key_id'] = self.config.get_user_option('key-id')
self.args['secret_key'] = self.config.get_user_option('secret-key',
redact=True)
if self.__reset_unless_ready():
self.log.debug('using auth info from configuration')
return
def __reset_unless_ready(self):
"""
If both an access key ID and a secret key are set in self.args
return True. Otherwise, clear auth info from self.args and
return False.
"""
if self.args.get('key_id') and self.args.get('secret_key'):
return True
for arg in ('key_id', 'secret_key', 'security_token',
'credential_expiration'):
self.args[arg] = None
return False
class HmacV1Auth(HmacKeyAuth):
    '''
    S3 REST authentication

    http://docs.aws.amazon.com/AmazonS3/latest/dev/RESTAuthentication.html
    '''

    # This list comes from the CanonicalizedResource section of the above page
    HASHED_PARAMS = set((
        'acl', 'lifecycle', 'location', 'logging', 'notification',
        'partNumber', 'policy', 'requestPayment', 'torrent', 'uploadId',
        'uploads', 'versionId', 'versioning', 'versions', 'website'))

    def apply_to_request(self, req, service):
        """Sign *req* in place, adding an Authorization header to it."""
        self._update_request_before_signing(req)
        c_headers = self.get_canonicalized_headers(req)
        c_resource = self.get_canonicalized_resource(req, service)
        to_sign = self._get_string_to_sign(req, c_headers, c_resource)
        self.log.debug('string to sign: %s', repr(to_sign))
        signature = self.sign_string(to_sign.encode('utf-8'))
        self.log.debug('b64-encoded signature: %s', signature)
        self._apply_signature(req, signature)
        return req

    def apply_to_request_params(self, req, service, expiration_datetime):
        """Deprecated pre-signed URL support; use QueryHmacV1Auth instead."""
        # This does not implement security tokens.
        msg = ('S3RestAuth.apply_to_request_params is deprecated; use '
               'requestbuilder.auth.aws.QueryHmacV1Auth instead')
        self.log.warn(msg)
        warnings.warn(msg, DeprecationWarning)
        for param in ('AWSAccessKeyId', 'Expires', 'Signature'):
            req.params.pop(param, None)
        expiration = calendar.timegm(expiration_datetime.utctimetuple())
        delta_t = expiration_datetime - datetime.datetime.utcnow()
        delta_t_sec = ((delta_t.microseconds +
                        (delta_t.seconds + delta_t.days * 24 * 3600) * 10**6)
                       / 10**6)
        self.log.debug('expiration: %i (%f seconds from now)',
                       expiration, delta_t_sec)
        c_headers = self.get_canonicalized_headers(req)
        self.log.debug('canonicalized headers: %s', repr(c_headers))
        c_resource = self.get_canonicalized_resource(req, service)
        self.log.debug('canonicalized resource: %s', repr(c_resource))
        to_sign = '\n'.join((req.method,
                             req.headers.get('Content-MD5', ''),
                             req.headers.get('Content-Type', ''),
                             six.text_type(expiration),
                             c_headers + c_resource))
        self.log.debug('string to sign: %s', repr(to_sign))
        signature = self.sign_string(to_sign.encode('utf-8'))
        self.log.debug('b64-encoded signature: %s', signature)
        req.params['AWSAccessKeyId'] = self.args['key_id']
        req.params['Expires'] = six.text_type(expiration)
        req.params['Signature'] = signature
        if self.args.get('security_token'):
            # This is a guess.  I have no evidence that this actually works.
            req.params['SecurityToken'] = self.args['security_token']

    def _update_request_before_signing(self, req):
        """Add the Date, Host, and security token headers used in signing."""
        if not req.headers:
            req.headers = {}
        req.headers['Date'] = email.utils.formatdate()
        req.headers['Host'] = urlparse.urlparse(req.url).netloc
        if self.args.get('security_token'):
            req.headers['x-amz-security-token'] = self.args['security_token']
        req.headers.pop('Signature', None)

    def _get_string_to_sign(self, req, c_headers, c_resource):
        """Build the newline-delimited string that gets signed."""
        return '\n'.join((req.method.upper(),
                          req.headers.get('Content-MD5', ''),
                          req.headers.get('Content-Type', ''),
                          req.headers.get('Date'),
                          c_headers + c_resource))

    def _apply_signature(self, req, signature):
        """Attach the signature to the request's Authorization header."""
        req.headers['Authorization'] = 'AWS {0}:{1}'.format(
            self.args['key_id'], signature)

    def get_canonicalized_resource(self, req, service):
        """Return the request's CanonicalizedResource string."""
        # /bucket/keyname
        parsed_req_path = urlparse.urlparse(req.url).path
        assert service.endpoint is not None
        parsed_svc_path = urlparse.urlparse(service.endpoint).path
        # IMPORTANT: this only supports path-style requests
        assert parsed_req_path.startswith(parsed_svc_path)
        resource = parsed_req_path[len(parsed_svc_path):]
        if parsed_svc_path.endswith('/'):
            # The leading / got stripped off
            resource = '/' + resource
        if not resource:
            # This resource does not address a bucket
            resource = '/'
        # Now append sub-resources, a.k.a. query string parameters
        if getattr(req, 'params', None):
            # A regular Request
            params = req.params
        else:
            # A PreparedRequest
            params = _get_params_from_url(req.url)
        if params:
            subresources = []
            # six.iteritems works on python 2 and 3; dict.iteritems is
            # python 2 only
            for key, val in sorted(six.iteritems(params)):
                if key in self.HASHED_PARAMS:
                    if val is None:
                        subresources.append(key)
                    else:
                        subresources.append(key + '=' + val)
            if subresources:
                resource += '?' + '&'.join(subresources)
        self.log.debug('canonicalized resource: %s', repr(resource))
        return resource

    def get_canonicalized_headers(self, req):
        """Return the request's CanonicalizedAmzHeaders string."""
        headers_dict = {}
        for key, val in six.iteritems(req.headers):
            if key.lower().startswith('x-amz-'):
                headers_dict.setdefault(key.lower(), [])
                headers_dict[key.lower()].append(' '.join(val.split()))
        headers_strs = []
        for key, vals in sorted(six.iteritems(headers_dict)):
            headers_strs.append('{0}:{1}'.format(key, ','.join(vals)))
        if headers_strs:
            c_headers = '\n'.join(headers_strs) + '\n'
        else:
            c_headers = ''
        self.log.debug('canonicalized headers: %s', repr(c_headers))
        return c_headers

    def sign_string(self, to_sign):
        """Return the base64-encoded HMAC-SHA1 of *to_sign*."""
        req_hmac = hmac.new(self.args['secret_key'], digestmod=hashlib.sha1)
        req_hmac.update(to_sign)
        return base64.b64encode(req_hmac.digest())
class QueryHmacV1Auth(HmacV1Auth):
    """Pre-signed (query string) variant of S3 REST authentication."""

    DEFAULT_TIMEOUT = 600  # 10 minutes

    def _update_request_before_signing(self, req):
        """Add the access key ID and expiration time to the query string."""
        # Bug fix: self.args.get('timeout') may be None, and int(None)
        # raises TypeError before the ``or`` fallback can apply.
        timeout = int(self.args.get('timeout') or 0) or self.DEFAULT_TIMEOUT
        assert timeout > 0
        params = _get_params_from_url(req.url)
        params['AWSAccessKeyId'] = self.args['key_id']
        params['Expires'] = int(time.time() + timeout)
        # Needed for retries so old signatures don't get signed again
        params.pop('Signature', None)
        req.prepare_url(_remove_params_from_url(req.url), params)

    def _get_string_to_sign(self, req, c_headers, c_resource):
        """Build the string to sign, using Expires in place of Date."""
        params = _get_params_from_url(req.url)
        return '\n'.join((req.method.upper(),
                          req.headers.get('Content-MD5', ''),
                          req.headers.get('Content-Type', ''),
                          params['Expires'],
                          c_headers + c_resource))

    def _apply_signature(self, req, signature):
        """Attach the signature to the request's query string."""
        req.prepare_url(req.url, {'Signature': signature})
class QueryHmacV2Auth(HmacKeyAuth):
    '''
    AWS signature version 2

    http://docs.aws.amazon.com/general/latest/gr/signature-version-2.html
    '''

    def apply_to_request(self, req, service):
        """Sign *req* by adding sigv2 query (or POST form) parameters."""
        parsed = urlparse.urlparse(req.url)
        if req.method == 'POST':
            # This is probably going to break when given multipart data.
            params = urlparse.parse_qs(req.body or '', keep_blank_values=True)
        else:
            params = urlparse.parse_qs(parsed.query, keep_blank_values=True)
        # parse_qs maps each key to a list; keep only the first value.
        # six.iteritems works on python 2 and 3; dict.iteritems is
        # python 2 only.
        params = dict((key, vals[0]) for key, vals in six.iteritems(params))
        params['AWSAccessKeyId'] = self.args['key_id']
        params['SignatureVersion'] = 2
        params['SignatureMethod'] = 'HmacSHA256'
        params['Timestamp'] = time.strftime(ISO8601, time.gmtime())
        if self.args.get('security_token'):
            params['SecurityToken'] = self.args['security_token']
        # Needed for retries so old signatures aren't included in to_sign
        params.pop('Signature', None)
        to_sign = '{method}\n{host}\n{path}\n'.format(
            method=req.method, host=parsed.netloc.lower(),
            path=(parsed.path or '/'))
        quoted_params = []
        for key in sorted(params):
            val = six.text_type(params[key])
            quoted_params.append(urlparse.quote(key, safe='') + '=' +
                                 urlparse.quote(val, safe='-_~'))
        query_string = '&'.join(quoted_params)
        to_sign += query_string
        # Redact passwords
        redacted_to_sign = re.sub('assword=[^&]*', 'assword=<redacted>',
                                  to_sign)
        self.log.debug('string to sign: %s', repr(redacted_to_sign))
        signature = self.sign_string(to_sign)
        self.log.debug('b64-encoded signature: %s', signature)
        params['Signature'] = signature
        if req.method == 'POST':
            req.prepare_body(params, {})
        else:
            req.prepare_url(_remove_params_from_url(req.url), params)
        return req

    def sign_string(self, to_sign):
        """Return the base64-encoded HMAC-SHA256 of *to_sign*."""
        req_hmac = hmac.new(self.args['secret_key'], digestmod=hashlib.sha256)
        req_hmac.update(to_sign)
        return base64.b64encode(req_hmac.digest())
class HmacV4Auth(HmacKeyAuth):
    """
    AWS signature version 4

    http://docs.aws.amazon.com/general/latest/gr/signature-version-4.html
    """

    def apply_to_request(self, req, service):
        """Sign *req* with sigv4 and add an Authorization header to it."""
        if not service.NAME:
            self.log.critical('service class %s must have a NAME attribute '
                              'to use sigv4', service.__class__.__name__)
            raise AuthError('BUG: service class {0} does not have a name'
                            .format(service.__class__.__name__))
        payload_hash = self._hash_payload(req)  # large files will be slow here
        now = time.time()
        date_header = time.strftime(ISO8601_BASIC, time.gmtime(now))
        scope = self._build_scope(service, now)
        credential = '/'.join((self.args['key_id'],) + scope)
        self._update_request_before_signing(req, credential, payload_hash,
                                            date_header)
        c_uri = self._get_canonical_uri(req)
        c_query = self._get_canonical_query(req)
        c_headers = self._get_canonical_headers(req)
        s_headers = self._get_signed_headers(req)
        c_request = '\n'.join((req.method.upper(), c_uri, c_query, c_headers,
                               '', s_headers, payload_hash))
        self.log.debug('canonical request: %s', repr(c_request))
        to_sign = '\n'.join(('AWS4-HMAC-SHA256', date_header, '/'.join(scope),
                             hashlib.sha256(c_request).hexdigest()))
        # Redact passwords
        redacted_to_sign = re.sub('assword=[^&]*', 'assword=<redacted>',
                                  to_sign)
        self.log.debug('string to sign: %s', repr(redacted_to_sign))
        # Bug fix: AWS's sigv4 key derivation chains one HMAC per scope
        # component (date, region, service, "aws4_request"), each keyed
        # with the previous digest.  The previous code fed all of the
        # scope components into a single HMAC, which computes
        # HMAC(key, date||region||service||terminator) instead and thus
        # yields signatures the service will reject.
        derived_key = 'AWS4{0}'.format(self.args['secret_key'])
        for chunk in scope:
            derived_key = hmac.new(derived_key, chunk,
                                   digestmod=hashlib.sha256).digest()
        derived_hmac = hmac.new(derived_key, digestmod=hashlib.sha256)
        derived_hmac.update(to_sign)
        signature = derived_hmac.hexdigest()
        self.log.debug('signature: %s', signature)
        self._apply_signature(req, credential, signature)
        return req

    def _update_request_before_signing(self, req, credential, payload_sha256,
                                       date_header):
        """Add the Host, content hash, and date headers used in signing."""
        parsed = urlparse.urlparse(req.url)
        req.headers['Host'] = parsed.netloc
        req.headers.pop('Authorization', None)
        req.headers['X-Amz-Content-SHA256'] = payload_sha256
        req.headers['X-Amz-Date'] = date_header
        if self.args.get('security_token'):
            req.headers['X-Amz-Security-Token'] = self.args['security_token']

    def _apply_signature(self, req, credential, signature):
        """Attach the credential scope and signature to Authorization."""
        auth_header = ', '.join((
            'AWS4-HMAC-SHA256 Credential={0}'.format(credential),
            'SignedHeaders={0}'.format(self._get_signed_headers(req)),
            'Signature={0}'.format(signature)))
        req.headers['Authorization'] = auth_header

    def _build_scope(self, service, timestamp):
        """Return the (date, region, service, terminator) scope tuple."""
        if service.region_name:
            region = service.region_name
        elif os.getenv('AWS_AUTH_REGION'):
            region = os.getenv('AWS_AUTH_REGION')
        else:
            self.log.error('a region name is required to use sigv4')
            raise AuthError(
                "region name is required; either use a config file "
                "to supply the service's URL or set AWS_AUTH_REGION "
                "in the environment")
        scope = (time.strftime('%Y%m%d', time.gmtime(timestamp)),
                 region, service.NAME, 'aws4_request')
        self.log.debug('scope: %s', '/'.join(scope))
        return scope

    def _get_canonical_uri(self, req):
        """Return the canonicalized, percent-encoded request path."""
        path = urlparse.urlsplit(req.url).path or '/'
        # TODO: Normalize stuff like ".."
        c_uri = urlparse.quote(path, safe='/~')
        self.log.debug('canonical URI: %s', c_uri)
        return c_uri

    def _get_canonical_query(self, req):
        """Return the sorted, percent-encoded canonical query string."""
        req_params = urlparse.parse_qsl(urlparse.urlparse(req.url).query,
                                        keep_blank_values=True)
        params = []
        for key, val in sorted(req_params or []):
            params.append('='.join((urlparse.quote(key, safe='~-_.'),
                                    urlparse.quote(val, safe='~-_.'))))
        c_params = '&'.join(params)
        self.log.debug('canonical query: %s', c_params)
        return c_params

    def _get_normalized_headers(self, req):
        """Return signable headers with lowercased names, trimmed values."""
        # This doesn't currently support multi-value headers.
        headers = {}
        # six.iteritems works on python 2 and 3; dict.iteritems is
        # python 2 only
        for key, val in six.iteritems(req.headers):
            if key.lower() not in ('connection', 'user-agent'):
                # Reverse proxies like to rewrite Connection headers.
                # Ignoring User-Agent lets us generate storable query URLs
                headers[key.lower().strip()] = val.strip()
        return headers

    def _get_canonical_headers(self, req):
        """Return the sorted, canonical 'name:value' header lines."""
        headers = []
        normalized_headers = self._get_normalized_headers(req)
        for key, val in sorted(normalized_headers.items()):
            headers.append(':'.join((key, val)))
        self.log.debug('canonical headers: %s', str(headers))
        return '\n'.join(headers)

    def _get_signed_headers(self, req):
        """Return the semicolon-delimited list of signed header names."""
        normalized_headers = self._get_normalized_headers(req)
        s_headers = ';'.join(sorted(normalized_headers))
        self.log.debug('signed headers: %s', s_headers)
        return s_headers

    def _hash_payload(self, req):
        """Return the hex SHA256 digest of the request body.

        Seekable bodies are hashed in place and rewound; non-seekable
        streams are spooled to a temporary file so they can be re-read
        when the request is actually sent.
        """
        if self.args.get('payload_hash'):
            return self.args['payload_hash']
        digest = hashlib.sha256()
        if not req.body:
            pass
        elif hasattr(req.body, 'seek'):
            # Bug fix: this previously read req.data.tell(), but every
            # other reference in this method uses req.body.
            body_position = req.body.tell()
            self.log.debug('payload hashing starting')
            while True:
                chunk = req.body.read(16384)
                if not chunk:
                    break
                digest.update(chunk)
            req.body.seek(body_position)
            self.log.debug('payload hashing done')
        elif hasattr(req.body, 'read'):
            self.log.debug('payload spooling/hashing starting')
            # 10M happens to be the size of a bundle part, the thing we upload
            # most frequently.
            spool = tempfile.SpooledTemporaryFile(max_size=(10 * 1024 * 1024))
            while True:
                chunk = req.body.read(16384)
                if not chunk:
                    break
                digest.update(chunk)
                spool.write(chunk)
            self.log.debug('payload spooling/hashing done')
            spool.seek(0)
            self.log.info('re-pointing request body at spooled payload')
            req.body = spool
            # Should we close the original req.body here?
        else:
            digest.update(req.body)
        self.log.debug('payload hash: %s', digest.hexdigest())
        return digest.hexdigest()
class QueryHmacV4Auth(HmacV4Auth):
    """Pre-signed (query string) variant of AWS signature version 4."""

    def _update_request_before_signing(self, req, credential, payload_sha256,
                                       date_header):
        # We don't do anything with payload_sha256.  Is that bad?
        content_type = req.headers.get('Content-Type', '')
        if req.method.upper() == 'POST' and 'form-urlencoded' in content_type:
            self.log.warn('Query string authentication and POST form data '
                          'are generally mutually exclusive; GET is '
                          'recommended instead')
        req.headers['Host'] = urlparse.urlparse(req.url).netloc
        req.headers.pop('Authorization', None)
        params = {'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
                  'X-Amz-Credential': credential,
                  'X-Amz-Date': date_header,
                  'X-Amz-SignedHeaders': self._get_signed_headers(req)}
        # Optional args map straight onto query parameters.
        for arg_name, param_name in (
                ('timeout', 'X-Amz-Expires'),
                ('security_token', 'X-Amz-Security-Token')):
            if self.args.get(arg_name):
                params[param_name] = self.args[arg_name]
        req.prepare_url(req.url, params)

    def _apply_signature(self, req, credential, signature):
        # The signature itself travels in the query string as well.
        req.prepare_url(req.url, {'X-Amz-Signature': signature})
def _get_params_from_url(url):
"""
Given a URL, return a dict of parameters and their values. If a
parameter appears more than once all but the first value will be lost.
"""
parsed = urlparse.urlparse(url)
params = urlparse.parse_qs(parsed.query, keep_blank_values=True)
return dict((key, vals[0]) for key, vals in params.iteritems())
def _remove_params_from_url(url):
"""
Return a copy of a URL with its parameters, fragments, and query
string removed.
"""
parsed = urlparse.urlparse(url)
return urlparse.urlunparse((parsed[0], parsed[1], parsed[2], '', '', ''))
| isc |
thefinn93/CouchPotatoServer | libs/sqlalchemy/dialects/postgresql/pypostgresql.py | 12 | 2156 | # postgresql/pypostgresql.py
# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Support for the PostgreSQL database via py-postgresql.
Connecting
----------
URLs are of the form ``postgresql+pypostgresql://user:password@host:port/dbname[?key=value&key=value...]``.
"""
from sqlalchemy import util
from sqlalchemy import types as sqltypes
from sqlalchemy.dialects.postgresql.base import PGDialect, PGExecutionContext
from sqlalchemy import processors
class PGNumeric(sqltypes.Numeric):
    """Numeric type for py-postgresql, which binds numerics as strings."""

    def bind_processor(self, dialect):
        # py-postgresql expects numeric bind values to arrive as strings.
        return processors.to_str

    def result_processor(self, dialect, coltype):
        # Decimal results come back usable as-is; otherwise coerce to float.
        return None if self.asdecimal else processors.to_float
class PGExecutionContext_pypostgresql(PGExecutionContext):
    # No driver-specific execution behavior is currently needed; this
    # subclass exists so the dialect can grow py-postgresql hooks later.
    pass
class PGDialect_pypostgresql(PGDialect):
    """PostgreSQL dialect for the pure-Python py-postgresql DBAPI."""
    driver = 'pypostgresql'
    # py-postgresql is natively unicode-aware, so no encoding shims needed
    supports_unicode_statements = True
    supports_unicode_binds = True
    description_encoding = None
    default_paramstyle = 'pyformat'
    # requires trunk version to support sane rowcounts
    # TODO: use dbapi version information to set this flag appropriately
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = False
    execution_ctx_cls = PGExecutionContext_pypostgresql
    colspecs = util.update_copy(
        PGDialect.colspecs,
        {
            sqltypes.Numeric : PGNumeric,
            sqltypes.Float: sqltypes.Float,  # prevents PGNumeric from being used
        }
    )
    @classmethod
    def dbapi(cls):
        """Return the py-postgresql DBAPI 2.0 compatibility module."""
        from postgresql.driver import dbapi20
        return dbapi20
    def create_connect_args(self, url):
        """Translate a SQLAlchemy URL into py-postgresql connect args."""
        opts = url.translate_connect_args(username='user')
        if 'port' in opts:
            opts['port'] = int(opts['port'])
        else:
            # default PostgreSQL port
            opts['port'] = 5432
        opts.update(url.query)
        return ([], opts)
    def is_disconnect(self, e, connection, cursor):
        """Return True if *e* indicates the connection was dropped."""
        return "connection is closed" in str(e)
# Entry point looked up by SQLAlchemy's dialect registry
dialect = PGDialect_pypostgresql
| gpl-3.0 |
jawilson/home-assistant | homeassistant/components/media_player/sonos.py | 5 | 13962 | """
Support to interface with Sonos players (via SoCo).
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.sonos/
"""
import datetime
import logging
from os import path
import socket
import voluptuous as vol
from homeassistant.components.media_player import (
ATTR_MEDIA_ENQUEUE, DOMAIN, MEDIA_TYPE_MUSIC, SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE, SUPPORT_PLAY_MEDIA, SUPPORT_PREVIOUS_TRACK, SUPPORT_SEEK,
SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET, SUPPORT_CLEAR_PLAYLIST,
SUPPORT_SELECT_SOURCE, MediaPlayerDevice)
from homeassistant.const import (
STATE_IDLE, STATE_PAUSED, STATE_PLAYING, STATE_UNKNOWN, STATE_OFF,
ATTR_ENTITY_ID)
from homeassistant.config import load_yaml_config_file
import homeassistant.helpers.config_validation as cv
# Library dependency installed on demand by Home Assistant
REQUIREMENTS = ['SoCo==0.11.1']
_LOGGER = logging.getLogger(__name__)
# The soco library is excessively chatty when it comes to logging and
# causes a LOT of spam in the logs due to making a http connection to each
# speaker every 10 seconds. Quiet it down a bit to just actual problems.
_SOCO_LOGGER = logging.getLogger('soco')
_SOCO_LOGGER.setLevel(logging.ERROR)
_REQUESTS_LOGGER = logging.getLogger('requests')
_REQUESTS_LOGGER.setLevel(logging.ERROR)
# Bitmask of media player features this platform implements
SUPPORT_SONOS = SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE |\
    SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | SUPPORT_PLAY_MEDIA |\
    SUPPORT_SEEK | SUPPORT_CLEAR_PLAYLIST | SUPPORT_SELECT_SOURCE
# Names of the custom services registered by register_services()
SERVICE_GROUP_PLAYERS = 'sonos_group_players'
SERVICE_UNJOIN = 'sonos_unjoin'
SERVICE_SNAPSHOT = 'sonos_snapshot'
SERVICE_RESTORE = 'sonos_restore'
# Input source labels exposed through source/source_list
SUPPORT_SOURCE_LINEIN = 'Line-in'
SUPPORT_SOURCE_TV = 'TV'
SUPPORT_SOURCE_RADIO = 'Radio'
# All custom services accept an optional list of target entity IDs
SONOS_SCHEMA = vol.Schema({
    ATTR_ENTITY_ID: cv.entity_ids,
})
# List of devices that have been registered
DEVICES = []
# pylint: disable=unused-argument, too-many-locals
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the Sonos platform."""
    import soco
    global DEVICES
    if discovery_info:
        # Discovery hands us a single speaker IP address at a time.
        player = soco.SoCo(discovery_info)
        if player.is_visible:
            device = SonosDevice(hass, player)
            add_devices([device])
            # Register the custom services only once, with the first device.
            if not DEVICES:
                register_services(hass)
            DEVICES.append(device)
            return True
        return False
    players = None
    hosts = config.get('hosts', None)
    if hosts:
        # Support retro compatibility with comma separated list of hosts
        # from config
        hosts = hosts.split(',') if isinstance(hosts, str) else hosts
        players = []
        for host in hosts:
            players.append(soco.SoCo(socket.gethostbyname(host)))
    if not players:
        # Fall back to network discovery when no hosts were configured.
        players = soco.discover(interface_addr=config.get('interface_addr',
                                                          None))
    if not players:
        _LOGGER.warning('No Sonos speakers found.')
        return False
    DEVICES = [SonosDevice(hass, p) for p in players]
    add_devices(DEVICES)
    register_services(hass)
    _LOGGER.info('Added %s Sonos speakers', len(players))
    return True
def register_services(hass):
    """Register all services for sonos devices."""
    descriptions = load_yaml_config_file(
        path.join(path.dirname(__file__), 'services.yaml'))
    # All four services share the same schema and description source,
    # so register them table-driven instead of one call at a time.
    handlers = (
        (SERVICE_GROUP_PLAYERS, _group_players_service),
        (SERVICE_UNJOIN, _unjoin_service),
        (SERVICE_SNAPSHOT, _snapshot_service),
        (SERVICE_RESTORE, _restore_service),
    )
    for service_name, handler in handlers:
        hass.services.register(DOMAIN, service_name, handler,
                               descriptions.get(service_name),
                               schema=SONOS_SCHEMA)
def _apply_service(service, service_func, *service_func_args):
    """Internal func for applying a service."""
    entity_ids = service.data.get('entity_id')
    # With no entity_id filter the service applies to every known device.
    if entity_ids:
        targets = [dev for dev in DEVICES if dev.entity_id in entity_ids]
    else:
        targets = DEVICES
    for dev in targets:
        service_func(dev, *service_func_args)
        dev.update_ha_state(True)
# Service handlers: each dispatches the matching SonosDevice method
# through _apply_service so entity_id filtering is applied uniformly.
def _group_players_service(service):
    """Group media players, use player as coordinator."""
    _apply_service(service, SonosDevice.group_players)
def _unjoin_service(service):
    """Unjoin the player from a group."""
    _apply_service(service, SonosDevice.unjoin)
def _snapshot_service(service):
    """Take a snapshot."""
    _apply_service(service, SonosDevice.snapshot)
def _restore_service(service):
    """Restore a snapshot."""
    _apply_service(service, SonosDevice.restore)
def only_if_coordinator(func):
    """Decorator for coordinator.

    If used as decorator, avoid calling the decorated method if player is not
    a coordinator. If not, a grouped speaker (not in coordinator role) will
    throw soco.exceptions.SoCoSlaveException.

    Also, partially catch exceptions like:

    soco.exceptions.SoCoUPnPException: UPnP Error 701 received:
    Transition not available from <player ip address>
    """
    # Local import matches this module's style of deferring imports.
    import functools

    # functools.wraps preserves the wrapped method's __name__/__doc__,
    # which the error messages below and service introspection rely on.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        """Decorator wrapper."""
        if args[0].is_coordinator:
            from soco.exceptions import SoCoUPnPException
            try:
                # Bug fix: propagate the wrapped method's return value
                # instead of silently discarding it.
                return func(*args, **kwargs)
            except SoCoUPnPException:
                _LOGGER.error('command "%s" for Sonos device "%s" '
                              'not available in this mode',
                              func.__name__, args[0].name)
        else:
            _LOGGER.debug('Ignore command "%s" for Sonos device "%s" (%s)',
                          func.__name__, args[0].name, 'not coordinator')
    return wrapper
# pylint: disable=too-many-instance-attributes, too-many-public-methods
# pylint: disable=abstract-method
class SonosDevice(MediaPlayerDevice):
    """Representation of a Sonos device."""

    # pylint: disable=too-many-arguments
    def __init__(self, hass, player):
        """Initialize the Sonos device."""
        from soco.snapshot import Snapshot
        self.hass = hass
        self.volume_increment = 5
        self._player = player
        self._name = None
        # update() populates self._status and self._trackinfo as well.
        self.update()
        self.soco_snapshot = Snapshot(self._player)

    @property
    def should_poll(self):
        """Polling needed."""
        return True

    def update_sonos(self, now):
        """Update state, called by track_utc_time_change."""
        self.update_ha_state(True)

    @property
    def name(self):
        """Return the name of the device."""
        return self._name

    @property
    def state(self):
        """Return the state of the device."""
        if self._status == 'PAUSED_PLAYBACK':
            return STATE_PAUSED
        if self._status == 'PLAYING':
            return STATE_PLAYING
        if self._status == 'STOPPED':
            return STATE_IDLE
        return STATE_UNKNOWN

    @property
    def is_coordinator(self):
        """Return true if player is a coordinator."""
        return self._player.is_coordinator

    def update(self):
        """Retrieve latest state."""
        # Stereo pairs report "(L)"/"(R)" suffixes; strip them so both
        # speakers show the shared zone name.
        self._name = self._player.get_speaker_info()['zone_name'].replace(
            ' (R)', '').replace(' (L)', '')
        if self.available:
            self._status = self._player.get_current_transport_info().get(
                'current_transport_state')
            self._trackinfo = self._player.get_current_track_info()
        else:
            self._status = STATE_OFF
            self._trackinfo = {}

    @property
    def volume_level(self):
        """Volume level of the media player (0..1)."""
        return self._player.volume / 100.0

    @property
    def is_volume_muted(self):
        """Return true if volume is muted."""
        return self._player.mute

    @property
    def media_content_id(self):
        """Content ID of current playing media."""
        return self._trackinfo.get('title', None)

    @property
    def media_content_type(self):
        """Content type of current playing media."""
        return MEDIA_TYPE_MUSIC

    @property
    def media_duration(self):
        """Duration of current playing media in seconds."""
        dur = self._trackinfo.get('duration', '0:00')
        # If the speaker is playing from the "line-in" source, getting
        # track metadata can return NOT_IMPLEMENTED, which breaks the
        # volume logic below
        if dur == 'NOT_IMPLEMENTED':
            return None
        # Convert "[h:]m:ss" strings into a total number of seconds.
        return sum(60 ** x[0] * int(x[1]) for x in
                   enumerate(reversed(dur.split(':'))))

    @property
    def media_image_url(self):
        """Image url of current playing media."""
        if 'album_art' in self._trackinfo:
            return self._trackinfo['album_art']

    @property
    def media_title(self):
        """Title of current playing media."""
        if self._player.is_playing_line_in:
            return SUPPORT_SOURCE_LINEIN
        if self._player.is_playing_tv:
            return SUPPORT_SOURCE_TV
        if 'artist' in self._trackinfo and 'title' in self._trackinfo:
            return '{artist} - {title}'.format(
                artist=self._trackinfo['artist'],
                title=self._trackinfo['title']
            )
        # Bug fix: this previously tested ``'title' in self._status``, a
        # substring check against the transport-state string, so a plain
        # track title (no artist metadata) was almost never returned.
        if 'title' in self._trackinfo:
            return self._trackinfo['title']

    @property
    def supported_media_commands(self):
        """Flag of media commands that are supported."""
        return SUPPORT_SONOS

    def volume_up(self):
        """Volume up media player."""
        self._player.volume += self.volume_increment

    def volume_down(self):
        """Volume down media player."""
        self._player.volume -= self.volume_increment

    def set_volume_level(self, volume):
        """Set volume level, range 0..1."""
        self._player.volume = str(int(volume * 100))

    def mute_volume(self, mute):
        """Mute (true) or unmute (false) media player."""
        self._player.mute = mute

    def select_source(self, source):
        """Select input source."""
        if source == SUPPORT_SOURCE_LINEIN:
            self._player.switch_to_line_in()
        elif source == SUPPORT_SOURCE_TV:
            self._player.switch_to_tv()

    @property
    def source_list(self):
        """List of available input sources."""
        source = []
        # generate list of supported device
        source.append(SUPPORT_SOURCE_LINEIN)
        source.append(SUPPORT_SOURCE_TV)
        source.append(SUPPORT_SOURCE_RADIO)
        return source

    @property
    def source(self):
        """Name of the current input source."""
        if self._player.is_playing_line_in:
            return SUPPORT_SOURCE_LINEIN
        if self._player.is_playing_tv:
            return SUPPORT_SOURCE_TV
        if self._player.is_playing_radio:
            return SUPPORT_SOURCE_RADIO
        return None

    @only_if_coordinator
    def turn_off(self):
        """Turn off media player."""
        self._player.pause()

    @only_if_coordinator
    def media_play(self):
        """Send play command."""
        self._player.play()

    @only_if_coordinator
    def media_pause(self):
        """Send pause command."""
        self._player.pause()

    @only_if_coordinator
    def media_next_track(self):
        """Send next track command."""
        self._player.next()

    @only_if_coordinator
    def media_previous_track(self):
        """Send previous track command."""
        self._player.previous()

    @only_if_coordinator
    def media_seek(self, position):
        """Send seek command."""
        self._player.seek(str(datetime.timedelta(seconds=int(position))))

    @only_if_coordinator
    def clear_playlist(self):
        """Clear players playlist."""
        self._player.clear_queue()

    @only_if_coordinator
    def turn_on(self):
        """Turn the media player on."""
        self._player.play()

    @only_if_coordinator
    def play_media(self, media_type, media_id, **kwargs):
        """
        Send the play_media command to the media player.

        If ATTR_MEDIA_ENQUEUE is True, add `media_id` to the queue.
        """
        if kwargs.get(ATTR_MEDIA_ENQUEUE):
            from soco.exceptions import SoCoUPnPException
            try:
                self._player.add_uri_to_queue(media_id)
            except SoCoUPnPException:
                _LOGGER.error('Error parsing media uri "%s", '
                              "please check it's a valid media resource "
                              'supported by Sonos', media_id)
        else:
            self._player.play_uri(media_id)

    @only_if_coordinator
    def group_players(self):
        """Group all players under this coordinator."""
        self._player.partymode()

    @only_if_coordinator
    def unjoin(self):
        """Unjoin the player from a group."""
        self._player.unjoin()

    @only_if_coordinator
    def snapshot(self):
        """Snapshot the player."""
        self.soco_snapshot.snapshot()

    @only_if_coordinator
    def restore(self):
        """Restore snapshot for the player."""
        self.soco_snapshot.restore(True)

    @property
    def available(self):
        """Return True if player is reachable, False otherwise."""
        try:
            sock = socket.create_connection(
                address=(self._player.ip_address, 1443),
                timeout=3)
            sock.close()
            return True
        except socket.error:
            return False
| mit |
40223125/w16btest1 | static/Brython3.1.1-20150328-091302/Lib/site-packages/highlight.py | 617 | 2518 | import keyword
import _jsre as re
from browser import html
# Characters that may start or continue a Python identifier
letters = 'abcdefghijklmnopqrstuvwxyz'
letters += letters.upper()+'_'
# Characters that may continue (but not start) an identifier
digits = '0123456789'
# Python 2 builtin callables; highlighted the same way as keywords
builtin_funcs = ("abs|divmod|input|open|staticmethod|all|enumerate|int|ord|str|any|" +
    "eval|isinstance|pow|sum|basestring|execfile|issubclass|print|super|" +
    "binfile|iter|property|tuple|bool|filter|len|range|type|bytearray|" +
    "float|list|raw_input|unichr|callable|format|locals|reduce|unicode|" +
    "chr|frozenset|long|reload|vars|classmethod|getattr|map|repr|xrange|" +
    "cmp|globals|max|reversed|zip|compile|hasattr|memoryview|round|" +
    "__import__|complex|hash|min|set|apply|delattr|help|next|setattr|" +
    "buffer|dict|hex|object|slice|coerce|dir|id|oct|sorted|intern")
# Anchored alternation patterns matched against each complete identifier
kw_pattern = '^('+'|'.join(keyword.kwlist)+')$'
bf_pattern = '^('+builtin_funcs+')$'
def highlight(txt, string_color="blue", comment_color="green",
        keyword_color="purple"):
    """Return an html.PRE element containing *txt* with Python syntax
    coloring applied through styled SPAN elements.

    Strings, comments, and keywords/builtins are colored with
    *string_color*, *comment_color*, and *keyword_color* respectively.

    Note: the final line of this function was garbled in the previous
    revision (dataset extraction artifact); it has been restored to a
    plain ``return res``.
    """
    res = html.PRE()
    i = 0
    name = ''
    while i < len(txt):
        car = txt[i]
        if car in ["'", '"']:
            # String literal: scan ahead for the matching quote.
            k = i + 1
            while k < len(txt):
                if txt[k] == car:
                    # Count the backslashes immediately preceding the
                    # quote; an even number means it is not escaped.
                    nb_as = 0
                    j = k - 1
                    while True:
                        if txt[j] == '\\':
                            nb_as += 1
                            j -= 1
                        else:
                            break
                    if nb_as % 2 == 0:
                        res <= html.SPAN(txt[i:k + 1],
                                         style=dict(color=string_color))
                        i = k
                        break
                k += 1
        elif car == '#':  # comment: runs to the end of the line
            end = txt.find('\n', i)
            if end == -1:
                res <= html.SPAN(txt[i:], style=dict(color=comment_color))
                break
            else:
                res <= html.SPAN(txt[i:end], style=dict(color=comment_color))
                i = end - 1
        elif car in letters:
            name += car
        elif car in digits and name:
            # Digits may continue an identifier but never start one.
            name += car
        else:
            # Any other character terminates the identifier in progress.
            if name:
                if re.search(kw_pattern, name):
                    res <= html.SPAN(name, style=dict(color=keyword_color))
                elif re.search(bf_pattern, name):
                    res <= html.SPAN(name, style=dict(color=keyword_color))
                else:
                    res <= name
                name = ''
            res <= car
        i += 1
    # Flush any identifier left pending at the end of the text.
    res <= name
    return res
AlexBoogaard/Sick-Beard-Torrent-Edition | lib/hachoir_metadata/metadata_item.py | 90 | 4916 | from lib.hachoir_core.tools import makeUnicode, normalizeNewline
from lib.hachoir_core.error import HACHOIR_ERRORS
from lib.hachoir_metadata import config
from lib.hachoir_metadata.setter import normalizeString
# Inclusive bounds for Data.priority (asserted in Data.__init__).
MIN_PRIORITY = 100
MAX_PRIORITY = 999
# Quality presets on a 0.0 (fastest) .. 1.0 (best) scale; not consumed in
# this chunk — presumably used by parsers elsewhere in hachoir (confirm).
QUALITY_FASTEST = 0.0
QUALITY_FAST = 0.25
QUALITY_NORMAL = 0.5
QUALITY_GOOD = 0.75
QUALITY_BEST = 1.0
class DataValue:
    """Pair holding a raw metadata value and its human-readable text form."""

    def __init__(self, value, text):
        # Attribute order has no behavioral effect; both are plain storage.
        self.text = text
        self.value = value
class Data:
    """A single metadata entry: a key plus an ordered list of DataValue
    candidates, with optional type checking, filtering and conversion
    applied as values are added.  (Python 2 code: uses ``unicode``,
    ``cmp`` and old-style except syntax.)"""

    def __init__(self, key, priority, description,
            text_handler=None, type=None, filter=None, conversion=None):
        """
        handler is only used if value is not string nor unicode, prototype:
           def handler(value) -> str/unicode
        """
        assert MIN_PRIORITY <= priority <= MAX_PRIORITY
        assert isinstance(description, unicode)
        # Back-reference to the owning Metadata object; assigned externally.
        # Used here only for warning() messages and the conversion callback.
        self.metadata = None
        self.key = key
        self.description = description
        self.values = []
        # Normalize ``type`` to a tuple so it can be fed to isinstance().
        if type and not isinstance(type, (tuple, list)):
            type = (type,)
        self.type = type
        self.text_handler = text_handler
        self.filter = filter
        self.priority = priority
        self.conversion = conversion

    def _createItem(self, value, text=None):
        # Wrap ``value`` in a DataValue, deriving the text form when the
        # caller did not supply one.
        if text is None:
            if isinstance(value, unicode):
                text = value
            elif self.text_handler:
                text = self.text_handler(value)
                assert isinstance(text, unicode)
            else:
                text = makeUnicode(value)
        return DataValue(value, text)

    def add(self, value):
        """Add a candidate value (or a ``(value, text)`` tuple).

        Silently skips None, empty/whitespace strings, duplicates and
        values rejected by the filter; applies the conversion callback and
        type check; truncates over-long strings; and when one string is a
        prefix of another keeps only the longer one."""
        if isinstance(value, tuple):
            if len(value) != 2:
                raise ValueError("Data.add() only accept tuple of 2 elements: (value,text)")
            value, text = value
        else:
            text = None
        # Skip value 'None'
        if value is None:
            return
        if isinstance(value, (str, unicode)):
            value = normalizeString(value)
            if not value:
                return
        # Convert string to Unicode string using charset ISO-8859-1
        if self.conversion:
            try:
                new_value = self.conversion(self.metadata, self.key, value)
            except HACHOIR_ERRORS, err:
                self.metadata.warning("Error during conversion of %r value: %s" % (
                    self.key, err))
                return
            if new_value is None:
                dest_types = " or ".join(str(item.__name__) for item in self.type)
                self.metadata.warning("Unable to convert %s=%r (%s) to %s" % (
                    self.key, value, type(value).__name__, dest_types))
                return
            # The conversion may return a (value, text) pair; an explicit
            # text passed by the caller wins over the converted one.
            if isinstance(new_value, tuple):
                if text:
                    value = new_value[0]
                else:
                    value, text = new_value
            else:
                value = new_value
        elif isinstance(value, str):
            value = unicode(value, "ISO-8859-1")
        if self.type and not isinstance(value, self.type):
            dest_types = " or ".join(str(item.__name__) for item in self.type)
            self.metadata.warning("Key %r: value %r type (%s) is not %s" % (
                self.key, value, type(value).__name__, dest_types))
            return
        # Skip empty strings
        if isinstance(value, unicode):
            value = normalizeNewline(value)
            # Truncate over-long text, marking the cut with "(...)".
            if config.MAX_STR_LENGTH \
            and config.MAX_STR_LENGTH < len(value):
                value = value[:config.MAX_STR_LENGTH] + "(...)"
        # Skip duplicates
        if value in self:
            return
        # Use filter
        if self.filter and not self.filter(value):
            self.metadata.warning("Skip value %s=%r (filter)" % (self.key, value))
            return
        # For string, if you have "verlongtext" and "verylo",
        # keep the longer value
        if isinstance(value, unicode):
            for index, item in enumerate(self.values):
                item = item.value
                if not isinstance(item, unicode):
                    continue
                if value.startswith(item):
                    # Find longer value, replace the old one
                    self.values[index] = self._createItem(value, text)
                    return
                if item.startswith(value):
                    # Find truncated value, skip it
                    return
        # Add new value
        self.values.append(self._createItem(value, text))

    def __len__(self):
        # Number of stored candidate values.
        return len(self.values)

    def __getitem__(self, index):
        return self.values[index]

    def __contains__(self, value):
        # Membership is tested against the raw .value of each DataValue.
        for item in self.values:
            if value == item.value:
                return True
        return False

    def __cmp__(self, other):
        # Python 2 only: Data objects order by their priority.
        return cmp(self.priority, other.priority)
| gpl-3.0 |
nopjmp/SickRage | lib/sqlalchemy/orm/interfaces.py | 77 | 19010 | # orm/interfaces.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
Contains various base classes used throughout the ORM.
Defines the now deprecated ORM extension classes as well
as ORM internals.
Other than the deprecated extensions, this module and the
classes within should be considered mostly private.
"""
from __future__ import absolute_import
from .. import exc as sa_exc, util, inspect
from ..sql import operators
from collections import deque
from .base import ONETOMANY, MANYTOONE, MANYTOMANY, EXT_CONTINUE, EXT_STOP, NOT_EXTENSION
from .base import _InspectionAttr, _MappedAttribute
from .path_registry import PathRegistry
import collections
# Names exported by ``from ... import *``; includes the (deprecated)
# extension class names alongside the current internals.
__all__ = (
    'AttributeExtension',
    'EXT_CONTINUE',
    'EXT_STOP',
    'ONETOMANY',
    'MANYTOMANY',
    'MANYTOONE',
    'NOT_EXTENSION',
    'LoaderStrategy',
    'MapperExtension',
    'MapperOption',
    'MapperProperty',
    'PropComparator',
    'SessionExtension',
    'StrategizedProperty',
)
class MapperProperty(_MappedAttribute, _InspectionAttr):
    """Manage the relationship of a ``Mapper`` to a single class
    attribute, as well as that attribute as it appears on individual
    instances of the class, including attribute instrumentation,
    attribute access, loading behavior, and dependency calculations.

    The most common occurrences of :class:`.MapperProperty` are the
    mapped :class:`.Column`, which is represented in a mapping as
    an instance of :class:`.ColumnProperty`,
    and a reference to another class produced by :func:`.relationship`,
    represented in the mapping as an instance of
    :class:`.RelationshipProperty`.

    """

    cascade = frozenset()
    """The set of 'cascade' attribute names.

    This collection is checked before the 'cascade_iterator' method is called.

    """

    # Flag presumably consumed by the inspection API declared in .base
    # (_InspectionAttr) — confirm against .base.
    is_property = True

    def setup(self, context, entity, path, adapter, **kwargs):
        """Called by Query for the purposes of constructing a SQL statement.

        Each MapperProperty associated with the target mapper processes the
        statement referenced by the query context, adding columns and/or
        criterion as appropriate.

        """
        pass

    def create_row_processor(self, context, path,
                            mapper, row, adapter):
        """Return a 3-tuple consisting of three row processing functions.

        """
        # Base implementation performs no row processing.
        return None, None, None

    def cascade_iterator(self, type_, state, visited_instances=None,
                            halt_on=None):
        """Iterate through instances related to the given instance for
        a particular 'cascade', starting with this MapperProperty.

        Return an iterator of 3-tuples (instance, mapper, state).

        Note that the 'cascade' collection on this MapperProperty is
        checked first for the given type before cascade_iterator is called.

        See PropertyLoader for the related instance implementation.

        """
        # Base implementation: nothing to cascade.
        return iter(())

    def set_parent(self, parent, init):
        # Associate this property with its owning mapper.
        self.parent = parent

    def instrument_class(self, mapper):  # pragma: no-coverage
        raise NotImplementedError()

    @util.memoized_property
    def info(self):
        """Info dictionary associated with the object, allowing user-defined
        data to be associated with this :class:`.MapperProperty`.

        The dictionary is generated when first accessed.  Alternatively,
        it can be specified as a constructor argument to the
        :func:`.column_property`, :func:`.relationship`, or :func:`.composite`
        functions.

        .. versionadded:: 0.8  Added support for .info to all
           :class:`.MapperProperty` subclasses.

        .. seealso::

            :attr:`.QueryableAttribute.info`

            :attr:`.SchemaItem.info`

        """
        return {}

    # State flags toggled by init() around the do_init() template method.
    _configure_started = False
    _configure_finished = False

    def init(self):
        """Called after all mappers are created to assemble
        relationships between mappers and perform other post-mapper-creation
        initialization steps.

        """
        self._configure_started = True
        self.do_init()
        self._configure_finished = True

    @property
    def class_attribute(self):
        """Return the class-bound descriptor corresponding to this
        :class:`.MapperProperty`.

        This is basically a ``getattr()`` call::

            return getattr(self.parent.class_, self.key)

        I.e. if this :class:`.MapperProperty` were named ``addresses``,
        and the class to which it is mapped is ``User``, this sequence
        is possible::

            >>> from sqlalchemy import inspect
            >>> mapper = inspect(User)
            >>> addresses_property = mapper.attrs.addresses
            >>> addresses_property.class_attribute is User.addresses
            True
            >>> User.addresses.property is addresses_property
            True

        """
        return getattr(self.parent.class_, self.key)

    def do_init(self):
        """Perform subclass-specific initialization post-mapper-creation
        steps.

        This is a template method called by the ``MapperProperty``
        object's init() method.

        """
        pass

    def post_instrument_class(self, mapper):
        """Perform instrumentation adjustments that need to occur
        after init() has completed.

        """
        pass

    def is_primary(self):
        """Return True if this ``MapperProperty``'s mapper is the
        primary mapper for its class.

        This flag is used to indicate that the ``MapperProperty`` can
        define attribute instrumentation for the class at the class
        level (as opposed to the individual instance level).

        """
        return not self.parent.non_primary

    def merge(self, session, source_state, source_dict, dest_state,
                dest_dict, load, _recursive):
        """Merge the attribute represented by this ``MapperProperty``
        from source to destination object"""
        # Base implementation: nothing to merge.
        pass

    def compare(self, operator, value, **kw):
        """Return a compare operation for the columns represented by
        this ``MapperProperty`` to the given value, which may be a
        column value or an instance.  'operator' is an operator from
        the operators module, or from sql.Comparator.

        By default uses the PropComparator attached to this MapperProperty
        under the attribute name "comparator".
        """
        return operator(self.comparator, value)

    def __repr__(self):
        # e.g. "<ColumnProperty at 0x...; addresses>"; ``key`` may not
        # have been assigned yet, hence the getattr default.
        return '<%s at 0x%x; %s>' % (
            self.__class__.__name__,
            id(self), getattr(self, 'key', 'no key'))
class PropComparator(operators.ColumnOperators):
    """Defines boolean, comparison, and other operators for
    :class:`.MapperProperty` objects.

    SQLAlchemy allows for operators to
    be redefined at both the Core and ORM level.  :class:`.PropComparator`
    is the base class of operator redefinition for ORM-level operations,
    including those of :class:`.ColumnProperty`,
    :class:`.RelationshipProperty`, and :class:`.CompositeProperty`.

    .. note:: With the advent of Hybrid properties introduced in SQLAlchemy
       0.7, as well as Core-level operator redefinition in
       SQLAlchemy 0.8, the use case for user-defined :class:`.PropComparator`
       instances is extremely rare.  See :ref:`hybrids_toplevel` as well
       as :ref:`types_operators`.

    User-defined subclasses of :class:`.PropComparator` may be created. The
    built-in Python comparison and math operator methods, such as
    :meth:`.operators.ColumnOperators.__eq__`,
    :meth:`.operators.ColumnOperators.__lt__`, and
    :meth:`.operators.ColumnOperators.__add__`, can be overridden to provide
    new operator behavior. The custom :class:`.PropComparator` is passed to
    the :class:`.MapperProperty` instance via the ``comparator_factory``
    argument. In each case,
    the appropriate subclass of :class:`.PropComparator` should be used::

        # definition of custom PropComparator subclasses

        from sqlalchemy.orm.properties import \\
                                ColumnProperty,\\
                                CompositeProperty,\\
                                RelationshipProperty

        class MyColumnComparator(ColumnProperty.Comparator):
            def __eq__(self, other):
                return self.__clause_element__() == other

        class MyRelationshipComparator(RelationshipProperty.Comparator):
            def any(self, expression):
                "define the 'any' operation"
                # ...

        class MyCompositeComparator(CompositeProperty.Comparator):
            def __gt__(self, other):
                "redefine the 'greater than' operation"

                return sql.and_(*[a>b for a, b in
                                  zip(self.__clause_element__().clauses,
                                      other.__composite_values__())])

        # application of custom PropComparator subclasses

        from sqlalchemy.orm import column_property, relationship, composite
        from sqlalchemy import Column, String

        class SomeMappedClass(Base):
            some_column = column_property(Column("some_column", String),
                                comparator_factory=MyColumnComparator)

            some_relationship = relationship(SomeOtherClass,
                                comparator_factory=MyRelationshipComparator)

            some_composite = composite(
                    Column("a", String), Column("b", String),
                    comparator_factory=MyCompositeComparator
                )

    Note that for column-level operator redefinition, it's usually
    simpler to define the operators at the Core level, using the
    :attr:`.TypeEngine.comparator_factory` attribute.  See
    :ref:`types_operators` for more detail.

    See also:

    :class:`.ColumnProperty.Comparator`

    :class:`.RelationshipProperty.Comparator`

    :class:`.CompositeProperty.Comparator`

    :class:`.ColumnOperators`

    :ref:`types_operators`

    :attr:`.TypeEngine.comparator_factory`

    """

    def __init__(self, prop, parentmapper, adapt_to_entity=None):
        # ``prop`` is exposed under both .prop and .property names.
        self.prop = self.property = prop
        self._parentmapper = parentmapper
        self._adapt_to_entity = adapt_to_entity

    def __clause_element__(self):
        # Subclasses must produce the SQL element for this attribute.
        raise NotImplementedError("%r" % self)

    def _query_clause_element(self):
        """Hook used at query time; base implementation simply returns
        __clause_element__()."""
        return self.__clause_element__()

    def adapt_to_entity(self, adapt_to_entity):
        """Return a copy of this PropComparator which will use the given
        :class:`.AliasedInsp` to produce corresponding expressions.
        """
        return self.__class__(self.prop, self._parentmapper, adapt_to_entity)

    @property
    def adapter(self):
        """Produce a callable that adapts column expressions
        to suit an aliased version of this comparator.

        """
        if self._adapt_to_entity is None:
            return None
        else:
            return self._adapt_to_entity._adapt_element

    @util.memoized_property
    def info(self):
        # Delegates to the underlying MapperProperty's info dictionary.
        return self.property.info

    @staticmethod
    def any_op(a, b, **kwargs):
        # Operator-function form of Comparator.any(), for use with operate().
        return a.any(b, **kwargs)

    @staticmethod
    def has_op(a, b, **kwargs):
        # Operator-function form of Comparator.has().
        return a.has(b, **kwargs)

    @staticmethod
    def of_type_op(a, class_):
        # Operator-function form of Comparator.of_type().
        return a.of_type(class_)

    def of_type(self, class_):
        """Redefine this object in terms of a polymorphic subclass.

        Returns a new PropComparator from which further criterion can be
        evaluated.

        e.g.::

            query.join(Company.employees.of_type(Engineer)).\\
               filter(Engineer.name=='foo')

        :param \class_: a class or mapper indicating that criterion will be
            against this specific subclass.


        """
        return self.operate(PropComparator.of_type_op, class_)

    def any(self, criterion=None, **kwargs):
        """Return true if this collection contains any member that meets the
        given criterion.

        The usual implementation of ``any()`` is
        :meth:`.RelationshipProperty.Comparator.any`.

        :param criterion: an optional ClauseElement formulated against the
          member class' table or attributes.

        :param \**kwargs: key/value pairs corresponding to member class
          attribute names which will be compared via equality to the
          corresponding values.

        """
        return self.operate(PropComparator.any_op, criterion, **kwargs)

    def has(self, criterion=None, **kwargs):
        """Return true if this element references a member which meets the
        given criterion.

        The usual implementation of ``has()`` is
        :meth:`.RelationshipProperty.Comparator.has`.

        :param criterion: an optional ClauseElement formulated against the
          member class' table or attributes.

        :param \**kwargs: key/value pairs corresponding to member class
          attribute names which will be compared via equality to the
          corresponding values.

        """
        return self.operate(PropComparator.has_op, criterion, **kwargs)
class StrategizedProperty(MapperProperty):
    """A MapperProperty which uses selectable strategies to affect
    loading behavior.

    There is a single strategy selected by default.  Alternate
    strategies can be selected at Query time through the usage of
    ``StrategizedOption`` objects via the Query.options() method.

    """

    strategy_wildcard_key = None

    def _get_context_loader(self, context, path):
        """Locate the loader option within ``context`` that applies to this
        property along ``path``, or None when no option was set."""
        load = None

        # use EntityRegistry.__getitem__()->PropRegistry here so
        # that the path is stated in terms of our base
        search_path = dict.__getitem__(path, self)

        # search among: exact match, "attr.*", "default" strategy
        # if any.
        for path_key in (
            search_path._loader_key,
            search_path._wildcard_path_loader_key,
            search_path._default_path_loader_key
        ):
            if path_key in context.attributes:
                load = context.attributes[path_key]
                break

        return load

    def _get_strategy(self, key):
        # Memoize strategy instances per property; each instance is stored
        # under both its lookup key and its class (see _get_strategy_by_cls).
        try:
            return self._strategies[key]
        except KeyError:
            cls = self._strategy_lookup(*key)
            self._strategies[key] = self._strategies[cls] = strategy = cls(self)
            return strategy

    def _get_strategy_by_cls(self, cls):
        # A strategy class's first registered key is its canonical key.
        return self._get_strategy(cls._strategy_keys[0])

    def setup(self, context, entity, path, adapter, **kwargs):
        # Use the per-query loader option's strategy when present, else the
        # property's default strategy.
        loader = self._get_context_loader(context, path)
        if loader and loader.strategy:
            strat = self._get_strategy(loader.strategy)
        else:
            strat = self.strategy
        strat.setup_query(context, entity, path, loader, adapter, **kwargs)

    def create_row_processor(self, context, path, mapper, row, adapter):
        # Same strategy-selection logic as setup(), applied at row time.
        loader = self._get_context_loader(context, path)
        if loader and loader.strategy:
            strat = self._get_strategy(loader.strategy)
        else:
            strat = self.strategy
        return strat.create_row_processor(context, path, loader,
                                            mapper, row, adapter)

    def do_init(self):
        self._strategies = {}
        self.strategy = self._get_strategy_by_cls(self.strategy_class)

    def post_instrument_class(self, mapper):
        if self.is_primary() and \
            not mapper.class_manager._attr_has_impl(self.key):
            self.strategy.init_class_attribute(mapper)

    # Class-level registry: {property class: {strategy key: strategy class}},
    # populated by the strategy_for() decorator.  Distinct from the
    # instance-level ``self._strategies`` cache created in do_init().
    _strategies = collections.defaultdict(dict)

    @classmethod
    def strategy_for(cls, **kw):
        """Class decorator registering the decorated class as the strategy
        implementation for the given keyword criteria."""
        def decorate(dec_cls):
            dec_cls._strategy_keys = []
            key = tuple(sorted(kw.items()))
            cls._strategies[cls][key] = dec_cls
            dec_cls._strategy_keys.append(key)
            return dec_cls
        return decorate

    @classmethod
    def _strategy_lookup(cls, *key):
        # Walk the MRO so a property subclass inherits strategies that were
        # registered against its parent classes.
        for prop_cls in cls.__mro__:
            if prop_cls in cls._strategies:
                strategies = cls._strategies[prop_cls]
                try:
                    return strategies[key]
                except KeyError:
                    pass
        raise Exception("can't locate strategy for %s %s" % (cls, key))
class MapperOption(object):
    """Describe a modification to a Query."""

    # When True, this option should also be carried along to Query objects
    # generated by scalar or object lazy loaders.
    propagate_to_loaders = False

    def process_query(self, query):
        """Apply this option to the given query; no-op in the base class."""
        pass

    def process_query_conditionally(self, query):
        """Apply this option to a query it may not have been created for.

        Used when secondary loaders resend existing options to a new
        Query; the base implementation simply delegates to
        :meth:`process_query`.
        """
        self.process_query(query)
class LoaderStrategy(object):
    """Describe the loading behavior of a StrategizedProperty object.

    A ``LoaderStrategy`` participates in querying at three points:

    * configuring the ``InstrumentedAttribute`` placed on the class,
      e.g. installing class-level callables that fire a select on first
      access (a lazy load);

    * processing the ``QueryContext`` while the statement is constructed,
      where it may add columns or ``LEFT OUTER JOIN`` clauses;

    * producing "row processor" functions at result-fetching time, which
      populate an attribute on a mapped instance.
    """

    def __init__(self, parent):
        self.parent_property = parent
        self.is_class_level = False
        # Cache the owning mapper and the attribute key locally.
        self.parent = parent.parent
        self.key = parent.key

    def init_class_attribute(self, mapper):
        """Hook for class-level attribute setup; base class does nothing."""
        pass

    def setup_query(self, context, entity, path, loadopt, adapter, **kwargs):
        """Hook invoked during statement construction; base class no-op."""
        pass

    def create_row_processor(self, context, path, loadopt, mapper,
                             row, adapter):
        """Return row processing functions which fulfill the contract
        specified by MapperProperty.create_row_processor.

        StrategizedProperty delegates its create_row_processor method
        directly to this method.
        """
        return None, None, None

    def __str__(self):
        return str(self.parent_property)
| gpl-3.0 |
Hakuba/youtube-dl | youtube_dl/extractor/dramafever.py | 8 | 7481 | # encoding: utf-8
from __future__ import unicode_literals
import itertools
from .amp import AMPIE
from ..compat import (
compat_HTTPError,
compat_urllib_parse,
compat_urlparse,
)
from ..utils import (
ExtractorError,
clean_html,
int_or_none,
sanitized_Request,
)
class DramaFeverBaseIE(AMPIE):
    # Shared login and consumer-secret handling for the DramaFever extractors.
    _LOGIN_URL = 'https://www.dramafever.com/accounts/login/'
    _NETRC_MACHINE = 'dramafever'
    # Fallback API secret used if the live one cannot be scraped from main.js.
    _CONSUMER_SECRET = 'DA59dtVXYLxajktV'

    _consumer_secret = None

    def _get_consumer_secret(self):
        """Scrape the API consumer secret from the site's main.js,
        falling back to the hardcoded default on any failure."""
        mainjs = self._download_webpage(
            'http://www.dramafever.com/static/51afe95/df2014/scripts/main.js',
            None, 'Downloading main.js', fatal=False)
        if not mainjs:
            return self._CONSUMER_SECRET
        return self._search_regex(
            r"var\s+cs\s*=\s*'([^']+)'", mainjs,
            'consumer secret', default=self._CONSUMER_SECRET)

    def _real_initialize(self):
        self._login()
        self._consumer_secret = self._get_consumer_secret()

    def _login(self):
        # Log in with configured credentials; silently skip when none are set.
        (username, password) = self._get_login_info()
        if username is None:
            return

        login_form = {
            'username': username,
            'password': password,
        }

        request = sanitized_Request(
            self._LOGIN_URL, compat_urllib_parse.urlencode(login_form).encode('utf-8'))
        response = self._download_webpage(
            request, None, 'Logging in as %s' % username)

        # The presence of a logout link means the login succeeded.
        if all(logout_pattern not in response
               for logout_pattern in ['href="/accounts/logout/"', '>Log out<']):
            error = self._html_search_regex(
                r'(?s)class="hidden-xs prompt"[^>]*>(.+?)<',
                response, 'error message', default=None)
            if error:
                raise ExtractorError('Unable to login: %s' % error, expected=True)
            raise ExtractorError('Unable to log in')
class DramaFeverIE(DramaFeverBaseIE):
    # Extractor for a single DramaFever episode.
    IE_NAME = 'dramafever'
    _VALID_URL = r'https?://(?:www\.)?dramafever\.com/drama/(?P<id>[0-9]+/[0-9]+)(?:/|$)'
    _TESTS = [{
        'url': 'http://www.dramafever.com/drama/4512/1/Cooking_with_Shin/',
        'info_dict': {
            'id': '4512.1',
            'ext': 'mp4',
            'title': 'Cooking with Shin 4512.1',
            'description': 'md5:a8eec7942e1664a6896fcd5e1287bfd0',
            'episode': 'Episode 1',
            'episode_number': 1,
            'thumbnail': 're:^https?://.*\.jpg',
            'timestamp': 1404336058,
            'upload_date': '20140702',
            'duration': 343,
        },
        'params': {
            # m3u8 download
            'skip_download': True,
        },
    }, {
        'url': 'http://www.dramafever.com/drama/4826/4/Mnet_Asian_Music_Awards_2015/?ap=1',
        'info_dict': {
            'id': '4826.4',
            'ext': 'mp4',
            'title': 'Mnet Asian Music Awards 2015 4826.4',
            'description': 'md5:3ff2ee8fedaef86e076791c909cf2e91',
            'episode': 'Mnet Asian Music Awards 2015 - Part 3',
            'episode_number': 4,
            'thumbnail': 're:^https?://.*\.jpg',
            'timestamp': 1450213200,
            'upload_date': '20151215',
            'duration': 5602,
        },
        'params': {
            # m3u8 download
            'skip_download': True,
        },
    }]

    def _real_extract(self, url):
        # URL ids look like "4512/1"; the feed API uses "4512.1".
        video_id = self._match_id(url).replace('/', '.')

        try:
            info = self._extract_feed_info(
                'http://www.dramafever.com/amp/episode/feed.json?guid=%s' % video_id)
        except ExtractorError as e:
            # NOTE(review): every HTTP error is reported as geo-blocking;
            # other HTTP failures would be mislabeled — confirm upstream.
            if isinstance(e.cause, compat_HTTPError):
                raise ExtractorError(
                    'Currently unavailable in your country.', expected=True)
            raise

        series_id, episode_number = video_id.split('.')

        episode_info = self._download_json(
            # We only need a single episode info, so restricting page size to one episode
            # and dealing with page number as with episode number
            r'http://www.dramafever.com/api/4/episode/series/?cs=%s&series_id=%s&page_number=%s&page_size=1'
            % (self._consumer_secret, series_id, episode_number),
            video_id, 'Downloading episode info JSON', fatal=False)
        if episode_info:
            value = episode_info.get('value')
            if isinstance(value, list):
                for v in value:
                    if v.get('type') == 'Episode':
                        subfile = v.get('subfile') or v.get('new_subfile')
                        # The bare '/st/' URL is presumably a placeholder
                        # for "no subtitles" — confirm against the API.
                        if subfile and subfile != 'http://www.dramafever.com/st/':
                            info.setdefault('subtitles', {}).setdefault('English', []).append({
                                'ext': 'srt',
                                'url': subfile,
                            })
                        episode_number = int_or_none(v.get('number'))
                        episode_fallback = 'Episode'
                        if episode_number:
                            episode_fallback += ' %d' % episode_number
                        info['episode'] = v.get('title') or episode_fallback
                        info['episode_number'] = episode_number
                        break

        return info
class DramaFeverSeriesIE(DramaFeverBaseIE):
    # Extractor producing a playlist of all episodes in a series.
    IE_NAME = 'dramafever:series'
    _VALID_URL = r'https?://(?:www\.)?dramafever\.com/drama/(?P<id>[0-9]+)(?:/(?:(?!\d+(?:/|$)).+)?)?$'
    _TESTS = [{
        'url': 'http://www.dramafever.com/drama/4512/Cooking_with_Shin/',
        'info_dict': {
            'id': '4512',
            'title': 'Cooking with Shin',
            'description': 'md5:84a3f26e3cdc3fb7f500211b3593b5c1',
        },
        'playlist_count': 4,
    }, {
        'url': 'http://www.dramafever.com/drama/124/IRIS/',
        'info_dict': {
            'id': '124',
            'title': 'IRIS',
            'description': 'md5:b3a30e587cf20c59bd1c01ec0ee1b862',
        },
        'playlist_count': 20,
    }]

    _PAGE_SIZE = 60  # max is 60 (see http://api.drama9.com/#get--api-4-episode-series-)

    def _real_extract(self, url):
        series_id = self._match_id(url)

        series = self._download_json(
            'http://www.dramafever.com/api/4/series/query/?cs=%s&series_id=%s'
            % (self._consumer_secret, series_id),
            series_id, 'Downloading series JSON')['series'][series_id]

        title = clean_html(series['name'])
        description = clean_html(series.get('description') or series.get('description_short'))

        entries = []
        # Page through the episode listing until the API's reported last page.
        for page_num in itertools.count(1):
            episodes = self._download_json(
                'http://www.dramafever.com/api/4/episode/series/?cs=%s&series_id=%s&page_size=%d&page_number=%d'
                % (self._consumer_secret, series_id, self._PAGE_SIZE, page_num),
                series_id, 'Downloading episodes JSON page #%d' % page_num)
            for episode in episodes.get('value', []):
                episode_url = episode.get('episode_url')
                if not episode_url:
                    continue
                entries.append(self.url_result(
                    compat_urlparse.urljoin(url, episode_url),
                    'DramaFever', episode.get('guid')))
            if page_num == episodes['num_pages']:
                break

        return self.playlist_result(entries, series_id, title, description)
| unlicense |
silenceli/nova | nova/api/openstack/compute/schemas/v3/flavors_extraspecs.py | 109 | 1195 | # Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy

from nova.api.validation import parameter_types

# NOTE(oomichi): The metadata of flavor_extraspecs should accept numbers
# as its values, unlike the generic metadata schema, so the value type is
# widened on a private deep copy (the shared schema must not be mutated).
metadata = copy.deepcopy(parameter_types.metadata)
metadata['patternProperties']['^[a-zA-Z0-9-_:. ]{1,255}$']['type'] = \
    ['string', 'number']

# Request body schema for creating extra specs:
# {"extra_specs": {...}} and nothing else.
create = {
    'type': 'object',
    'properties': {'extra_specs': metadata},
    'required': ['extra_specs'],
    'additionalProperties': False,
}

# Request body schema for updating an extra spec:
# exactly one key/value pair.
update = copy.deepcopy(metadata)
update['minProperties'] = 1
update['maxProperties'] = 1
| apache-2.0 |
CodyKochmann/sync_lab | simple_notepad_server/cherrypy/test/test_json.py | 22 | 3048 | import cherrypy
from cherrypy.test import helper
from cherrypy._cpcompat import json
class JsonTest(helper.CPWebCase):
    # Functional tests for CherryPy's json_in / json_out tools.

    def setup_server():
        class Root(object):
            def plain(self):
                # No JSON tool enabled: plain text passes through unchanged.
                return 'hello'
            plain.exposed = True

            def json_string(self):
                return 'hello'
            json_string.exposed = True
            json_string._cp_config = {'tools.json_out.on': True}

            def json_list(self):
                return ['a', 'b', 42]
            json_list.exposed = True
            json_list._cp_config = {'tools.json_out.on': True}

            def json_dict(self):
                return {'answer': 42}
            json_dict.exposed = True
            json_dict._cp_config = {'tools.json_out.on': True}

            def json_post(self):
                # json_in places the decoded body on cherrypy.request.json.
                if cherrypy.request.json == [13, 'c']:
                    return 'ok'
                else:
                    return 'nok'
            json_post.exposed = True
            json_post._cp_config = {'tools.json_in.on': True}

            def json_cached(self):
                return 'hello there'
            json_cached.exposed = True
            json_cached._cp_config = {
                'tools.json_out.on': True,
                'tools.caching.on': True,
            }

        root = Root()
        cherrypy.tree.mount(root)
    # Pre-decorator style of declaring a static method.
    setup_server = staticmethod(setup_server)

    def test_json_output(self):
        if json is None:
            self.skip("json not found ")
            return

        self.getPage("/plain")
        self.assertBody("hello")

        self.getPage("/json_string")
        self.assertBody('"hello"')

        self.getPage("/json_list")
        self.assertBody('["a", "b", 42]')

        self.getPage("/json_dict")
        self.assertBody('{"answer": 42}')

    def test_json_input(self):
        if json is None:
            self.skip("json not found ")
            return

        # Correct body with the expected content type is accepted.
        body = '[13, "c"]'
        headers = [('Content-Type', 'application/json'),
                   ('Content-Length', str(len(body)))]
        self.getPage("/json_post", method="POST", headers=headers, body=body)
        self.assertBody('ok')

        # Correct body but wrong content type is rejected with 415.
        body = '[13, "c"]'
        headers = [('Content-Type', 'text/plain'),
                   ('Content-Length', str(len(body)))]
        self.getPage("/json_post", method="POST", headers=headers, body=body)
        self.assertStatus(415, 'Expected an application/json content type')

        # Malformed JSON is rejected with 400.
        body = '[13, -]'
        headers = [('Content-Type', 'application/json'),
                   ('Content-Length', str(len(body)))]
        self.getPage("/json_post", method="POST", headers=headers, body=body)
        self.assertStatus(400, 'Invalid JSON document')

    def test_cached(self):
        if json is None:
            self.skip("json not found ")
            return

        self.getPage("/json_cached")
        self.assertStatus(200, '"hello"')

        self.getPage("/json_cached")  # 2'nd time to hit cache
        self.assertStatus(200, '"hello"')
| mit |
sebastic/QGIS | python/ext-libs/jinja2/utils.py | 598 | 16165 | # -*- coding: utf-8 -*-
"""
jinja2.utils
~~~~~~~~~~~~
Utility functions.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import re
import errno
from collections import deque
from jinja2._compat import text_type, string_types, implements_iterator, \
allocate_lock, url_quote
# Splits text on runs of whitespace, keeping the separators (for urlize).
_word_split_re = re.compile(r'(\s+)')

# Captures leading punctuation, the word itself and trailing punctuation so
# urlize can keep surrounding parens/brackets and trailing periods out of
# the URL.  Bug fix: the HTML-escaped forms '&lt;' / '&gt;' had been
# garbled into duplicate plain '<' / '>' entries; restore them so escaped
# angle brackets around links are stripped as well.
_punctuation_re = re.compile(
    '^(?P<lead>(?:%s)*)(?P<middle>.*?)(?P<trail>(?:%s)*)$' % (
        '|'.join(map(re.escape, ('(', '<', '&lt;'))),
        '|'.join(map(re.escape, ('.', ',', ')', '>', '\n', '&gt;')))
    )
)
# Loose check for "looks like an email address" (used by urlize).
_simple_email_re = re.compile(r'^\S+@[a-zA-Z0-9._-]+\.[a-zA-Z0-9._-]+$')
# Matches HTML comments and tags (for the striptags filter).
_striptags_re = re.compile(r'(<!--.*?-->|<[^>]*>)')
# Matches a single HTML entity like '&amp;'.
_entity_re = re.compile(r'&([^;]+);')
_letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
_digits = '0123456789'

# special singleton representing missing values for the runtime
missing = type('MissingType', (), {'__repr__': lambda x: 'missing'})()

# internal code
internal_code = set()

concat = u''.join
def contextfunction(f):
    """Decorator marking *f* as a "context callable".

    When a template calls such a function, the active :class:`Context` is
    passed as the first argument, giving the function access to context
    data and functions.  For example, a function returning a sorted list
    of the current template's exported variables::

        @contextfunction
        def get_exported_names(context):
            return sorted(context.exported_vars)
    """
    setattr(f, 'contextfunction', True)
    return f
def evalcontextfunction(f):
    """Decorator marking *f* as an "eval context callable".

    Like :func:`contextfunction`, except that the first argument passed
    at call time is the evaluation context object rather than the
    template context.  For details on the eval context see
    :ref:`eval-context`.

    .. versionadded:: 2.4
    """
    setattr(f, 'evalcontextfunction', True)
    return f
def environmentfunction(f):
    """Decorator marking *f* as an "environment callable".

    Works exactly like :func:`contextfunction` except that the first
    argument passed at call time is the active :class:`Environment`
    instead of the context.
    """
    setattr(f, 'environmentfunction', True)
    return f
def internalcode(f):
    """Register *f*'s code object as Jinja-internal and return *f*."""
    code = f.__code__
    internal_code.add(code)
    return f
def is_undefined(obj):
    """Check whether *obj* is an :class:`Undefined` instance.

    Nothing more than an instance check, but it reads nicer.  Useful for
    custom filters or tests that want to react to undefined variables,
    e.g. a custom default filter::

        def default(var, default=''):
            if is_undefined(var):
                return default
            return var
    """
    from jinja2.runtime import Undefined
    return isinstance(obj, Undefined)
def consume(iterable):
    """Exhaust *iterable*, discarding every item."""
    # deque with maxlen=0 drains an iterator at C speed, keeping nothing.
    deque(iterable, maxlen=0)
def clear_caches():
    """Jinja2 keeps internal caches for environments and lexers.  These are
    used so that Jinja2 doesn't have to recreate environments and lexers all
    the time.  Normally you don't have to care about that but if you are
    measuring memory consumption you may want to clean the caches.
    """
    # Local imports — presumably to avoid import cycles at module load
    # time; confirm against jinja2.environment / jinja2.lexer.
    from jinja2.environment import _spontaneous_environments
    from jinja2.lexer import _lexer_cache
    _spontaneous_environments.clear()
    _lexer_cache.clear()
def import_string(import_name, silent=False):
    """Import an object based on a string.

    Useful when import paths serve as endpoints or similar.  The path may
    use dotted notation (``xml.sax.saxutils.escape``) or a colon as the
    object delimiter (``xml.sax.saxutils:escape``).

    When *silent* is true the function returns ``None`` instead of
    raising if the import fails.

    :return: imported object
    """
    try:
        if ':' in import_name:
            module, obj = import_name.split(':', 1)
        elif '.' in import_name:
            # everything before the final dot is the module path
            module, _, obj = import_name.rpartition('.')
        else:
            return __import__(import_name)
        return getattr(__import__(module, None, None, [obj]), obj)
    except (ImportError, AttributeError):
        if not silent:
            raise
def open_if_exists(filename, mode='rb'):
    """Open *filename* and return the file object, or `None` when the
    file does not exist.
    """
    try:
        fp = open(filename, mode)
    except IOError as e:
        # A missing file (ENOENT) or a directory path (EISDIR) simply
        # yields None; any other I/O failure is a real error.
        if e.errno in (errno.ENOENT, errno.EISDIR):
            return None
        raise
    return fp
def object_type_repr(obj):
    """Return a short description of *obj*'s type.

    A couple of recognized singletons (`None` and `Ellipsis`) are named
    directly instead of through their type.
    """
    if obj is None:
        return 'None'
    if obj is Ellipsis:
        return 'Ellipsis'
    cls = obj.__class__
    # Builtins live in '__builtin__' on 2.x and 'builtins' on 3.x;
    # don't qualify those with the module name.
    if cls.__module__ in ('__builtin__', 'builtins'):
        return '%s object' % cls.__name__
    return '%s.%s object' % (cls.__module__, cls.__name__)
def pformat(obj, verbose=False):
    """Prettyprint *obj*.

    Uses the third-party `pretty` library when it is importable and the
    builtin :mod:`pprint` module otherwise.
    """
    try:
        from pretty import pretty
        return pretty(obj, verbose=verbose)
    except ImportError:
        from pprint import pformat as fallback
        return fallback(obj)
def urlize(text, trim_url_limit=None, nofollow=False):
    """Converts any URLs in text into clickable links.

    Works on http://, https:// and www. links, bare domains ending in
    .org/.net/.com, and e-mail addresses.  Links can have trailing
    punctuation (periods, commas, close-parens) and leading punctuation
    (opening parens) and it'll still do the right thing -- the
    punctuation is kept outside the generated anchor.

    If trim_url_limit is not None, the URLs in link *text* (not the
    href) will be limited to trim_url_limit characters.

    If nofollow is True, the URLs in link text will get a
    rel="nofollow" attribute.
    """
    # Truncate the display text of a URL to ``limit`` characters,
    # appending '...' when something was cut off (and/or idiom predates
    # the conditional expression).
    trim_url = lambda x, limit=trim_url_limit: limit is not None \
                         and (x[:limit] + (len(x) >=limit and '...'
                         or '')) or x
    # Escape first, then split on whitespace-ish boundaries so each
    # candidate word can be examined independently.
    words = _word_split_re.split(text_type(escape(text)))
    nofollow_attr = nofollow and ' rel="nofollow"' or ''
    for i, word in enumerate(words):
        match = _punctuation_re.match(word)
        if match:
            # lead/trail hold surrounding punctuation; middle is the
            # potential link target.
            lead, middle, trail = match.groups()
            # 'www.' prefix or a scheme-less word ending in a known TLD:
            # linkify with an http:// href.
            if middle.startswith('www.') or (
                '@' not in middle and
                not middle.startswith('http://') and
                not middle.startswith('https://') and
                len(middle) > 0 and
                middle[0] in _letters + _digits and (
                    middle.endswith('.org') or
                    middle.endswith('.net') or
                    middle.endswith('.com')
                )):
                middle = '<a href="http://%s"%s>%s</a>' % (middle,
                    nofollow_attr, trim_url(middle))
            # Explicit http(s) URLs keep their own scheme in the href.
            if middle.startswith('http://') or \
               middle.startswith('https://'):
                middle = '<a href="%s"%s>%s</a>' % (middle,
                    nofollow_attr, trim_url(middle))
            # Plain e-mail address (contains '@', no scheme/colon).
            if '@' in middle and not middle.startswith('www.') and \
               not ':' in middle and _simple_email_re.match(middle):
                middle = '<a href="mailto:%s">%s</a>' % (middle, middle)
            if lead + middle + trail != word:
                words[i] = lead + middle + trail
    return u''.join(words)
def generate_lorem_ipsum(n=5, html=True, min=20, max=100):
    """Generate some lorem ipsum for the template.

    :param n: number of paragraphs to generate.
    :param html: when True, each paragraph is escaped, wrapped in
                 ``<p>`` tags and the result returned as Markup;
                 otherwise paragraphs are joined with blank lines as
                 plain unicode.
    :param min: minimum number of words per paragraph.
    :param max: maximum number of words per paragraph (exclusive).
    """
    from jinja2.constants import LOREM_IPSUM_WORDS
    from random import choice, randrange
    words = LOREM_IPSUM_WORDS.split()
    result = []

    for _ in range(n):
        next_capitalized = True
        last_comma = last_fullstop = 0
        word = None
        last = None
        p = []

        # each paragraph contains 20 to 100 words (by default).
        for idx, _ in enumerate(range(randrange(min, max))):
            # pick a word that differs from the previous one
            while True:
                word = choice(words)
                if word != last:
                    last = word
                    break
            if next_capitalized:
                word = word.capitalize()
                next_capitalized = False
            # add commas at randomized intervals
            if idx - randrange(3, 8) > last_comma:
                last_comma = idx
                last_fullstop += 2
                word += ','
            # add end of sentences at randomized (longer) intervals
            if idx - randrange(10, 20) > last_fullstop:
                last_comma = last_fullstop = idx
                word += '.'
                next_capitalized = True
            p.append(word)

        # ensure that the paragraph ends with a dot.
        p = u' '.join(p)
        if p.endswith(','):
            p = p[:-1] + '.'
        elif not p.endswith('.'):
            p += '.'
        result.append(p)

    if not html:
        return u'\n\n'.join(result)
    return Markup(u'\n'.join(u'<p>%s</p>' % escape(x) for x in result))
def unicode_urlencode(obj, charset='utf-8'):
    """URL-quote a single bytestring or unicode string with the given
    charset, applying the quoting rules that are correct under all
    supported Python versions.

    Non-string objects are converted to their unicode representation
    first.
    """
    value = obj
    if not isinstance(value, string_types):
        value = text_type(value)
    if isinstance(value, text_type):
        # encode unicode text before quoting
        value = value.encode(charset)
    return text_type(url_quote(value))
class LRUCache(object):
    """A simple LRU Cache implementation."""

    # this is fast for small capacities (something below 1000) but doesn't
    # scale.  But as long as it's only used as storage for templates this
    # won't do any harm.

    def __init__(self, capacity):
        # capacity: maximum number of entries held before the least
        # recently used one is evicted on insert.
        self.capacity = capacity
        # key -> value store; usage order is tracked separately in
        # self._queue (most recently used keys at the right end).
        self._mapping = {}
        self._queue = deque()
        self._postinit()

    def _postinit(self):
        # alias all queue methods for faster lookup
        self._popleft = self._queue.popleft
        self._pop = self._queue.pop
        self._remove = self._queue.remove
        # lock serializing mutations; re-created here after unpickling
        # since locks (and the bound-method aliases) cannot be pickled.
        self._wlock = allocate_lock()
        self._append = self._queue.append

    def __getstate__(self):
        # Only the real state is serialized; _postinit() rebuilds the
        # method aliases and the lock in __setstate__.
        return {
            'capacity': self.capacity,
            '_mapping': self._mapping,
            '_queue': self._queue
        }

    def __setstate__(self, d):
        self.__dict__.update(d)
        self._postinit()

    def __getnewargs__(self):
        return (self.capacity,)

    def copy(self):
        """Return a shallow copy of the instance."""
        rv = self.__class__(self.capacity)
        rv._mapping.update(self._mapping)
        rv._queue = deque(self._queue)
        return rv

    def get(self, key, default=None):
        """Return an item from the cache dict or `default`"""
        try:
            return self[key]
        except KeyError:
            return default

    def setdefault(self, key, default=None):
        """Set `default` if the key is not in the cache otherwise
        leave unchanged. Return the value of this key.
        """
        self._wlock.acquire()
        try:
            try:
                return self[key]
            except KeyError:
                self[key] = default
                return default
        finally:
            self._wlock.release()

    def clear(self):
        """Clear the cache."""
        self._wlock.acquire()
        try:
            self._mapping.clear()
            self._queue.clear()
        finally:
            self._wlock.release()

    def __contains__(self, key):
        """Check if a key exists in this cache."""
        return key in self._mapping

    def __len__(self):
        """Return the current size of the cache."""
        return len(self._mapping)

    def __repr__(self):
        return '<%s %r>' % (
            self.__class__.__name__,
            self._mapping
        )

    def __getitem__(self, key):
        """Get an item from the cache. Moves the item up so that it has the
        highest priority then.

        Raise a `KeyError` if it does not exist.
        """
        self._wlock.acquire()
        try:
            rv = self._mapping[key]
            # Promote the key to "most recently used" unless it is
            # already at the end of the queue.
            if self._queue[-1] != key:
                try:
                    self._remove(key)
                except ValueError:
                    # if something removed the key from the container
                    # when we read, ignore the ValueError that we would
                    # get otherwise.
                    pass
                self._append(key)
            return rv
        finally:
            self._wlock.release()

    def __setitem__(self, key, value):
        """Sets the value for an item. Moves the item up so that it
        has the highest priority then.
        """
        self._wlock.acquire()
        try:
            if key in self._mapping:
                self._remove(key)
            elif len(self._mapping) == self.capacity:
                # Cache is full: evict the least recently used entry.
                del self._mapping[self._popleft()]
            self._append(key)
            self._mapping[key] = value
        finally:
            self._wlock.release()

    def __delitem__(self, key):
        """Remove an item from the cache dict.

        Raise a `KeyError` if it does not exist.
        """
        self._wlock.acquire()
        try:
            del self._mapping[key]
            try:
                self._remove(key)
            except ValueError:
                # __getitem__ is not locked, it might happen
                pass
        finally:
            self._wlock.release()

    def items(self):
        """Return a list of items, most recently used first."""
        result = [(key, self._mapping[key]) for key in list(self._queue)]
        result.reverse()
        return result

    def iteritems(self):
        """Iterate over all items."""
        return iter(self.items())

    def values(self):
        """Return a list of all values."""
        return [x[1] for x in self.items()]

    def itervalue(self):
        """Iterate over all values.

        NOTE(review): historical name -- presumably meant ``itervalues``;
        kept as-is for backwards compatibility.
        """
        return iter(self.values())

    def keys(self):
        """Return a list of all keys ordered by most recent usage."""
        return list(self)

    def iterkeys(self):
        """Iterate over all keys in the cache dict, ordered by
        the most recent usage.
        """
        return reversed(tuple(self._queue))

    __iter__ = iterkeys

    def __reversed__(self):
        """Iterate over the values in the cache dict, oldest items
        coming first.
        """
        return iter(tuple(self._queue))

    __copy__ = copy
# Register the LRU cache as a mutable mapping if possible.
# ``MutableMapping`` moved to ``collections.abc`` in Python 3.3 and was
# removed from ``collections`` in Python 3.10, so try the new location
# first and fall back for older interpreters.
try:
    try:
        from collections.abc import MutableMapping
    except ImportError:
        from collections import MutableMapping
    MutableMapping.register(LRUCache)
except ImportError:
    pass
@implements_iterator
class Cycler(object):
    """Cycles endlessly through the items it was constructed with."""

    def __init__(self, *items):
        if not items:
            raise RuntimeError('at least one item has to be provided')
        self.items = items
        self.reset()

    def reset(self):
        """Rewind the cycle to the first item."""
        self.pos = 0

    @property
    def current(self):
        """The item the cycle currently points at."""
        return self.items[self.pos]

    def __next__(self):
        """Return the current item and advance the position by one,
        wrapping around at the end."""
        value = self.current
        self.pos = (self.pos + 1) % len(self.items)
        return value
class Joiner(object):
    """Emits nothing on the first call and the separator on every later
    call -- handy for joining items inside a template loop."""

    def __init__(self, sep=u', '):
        self.sep = sep
        self.used = False

    def __call__(self):
        if self.used:
            return self.sep
        self.used = True
        return u''
# Imported here because that's where it was in the past
from markupsafe import Markup, escape, soft_unicode
| gpl-2.0 |
open-synergy/account-financial-tools | account_reset_chart/__openerp__.py | 13 | 1335 | # -*- coding: utf-8 -*-
##############################################################################
#
# Odoo, an open source suite of business apps
# This module copyright (C) 2014-2015 Therp BV (<http://therp.nl>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    # Human-readable module title shown in the Odoo apps list.
    "name": "Reset a chart of accounts",
    "summary": ("Delete the accounting setup from an otherwise reusable "
                "database"),
    # <odoo series>.<module major>.<feature>.<minor>.<patch>
    "version": "8.0.1.0.0",
    "author": "Therp BV,Odoo Community Association (OCA)",
    "category": 'Accounting & Finance',
    # Modules that must be installed before this one.
    "depends": [
        'account',
    ],
    'license': 'AGPL-3'
}
| agpl-3.0 |
tafia/servo | tests/wpt/web-platform-tests/tools/pywebsocket/src/mod_pywebsocket/standalone.py | 410 | 46785 | #!/usr/bin/env python
#
# Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Standalone WebSocket server.
Use this file to launch pywebsocket without Apache HTTP Server.
BASIC USAGE
===========
Go to the src directory and run
$ python mod_pywebsocket/standalone.py [-p <ws_port>]
[-w <websock_handlers>]
[-d <document_root>]
<ws_port> is the port number to use for ws:// connection.
<document_root> is the path to the root directory of HTML files.
<websock_handlers> is the path to the root directory of WebSocket handlers.
If not specified, <document_root> will be used. See __init__.py (or
run $ pydoc mod_pywebsocket) for how to write WebSocket handlers.
For more detail and other options, run
$ python mod_pywebsocket/standalone.py --help
or see _build_option_parser method below.
For troubleshooting, adding "--log_level debug" might help you.
TRY DEMO
========
Go to the src directory and run standalone.py with -d option to set the
document root to the directory containing example HTMLs and handlers like this:
$ cd src
$ PYTHONPATH=. python mod_pywebsocket/standalone.py -d example
to launch pywebsocket with the sample handler and html on port 80. Open
http://localhost/console.html, click the connect button, type something into
the text box next to the send button and click the send button. If everything
is working, you'll see the message you typed echoed by the server.
USING TLS
=========
To run the standalone server with TLS support, run it with -t, -k, and -c
options. When TLS is enabled, the standalone server accepts only TLS connection.
Note that when ssl module is used and the key/cert location is incorrect,
TLS connection silently fails while pyOpenSSL fails on startup.
Example:
$ PYTHONPATH=. python mod_pywebsocket/standalone.py \
-d example \
-p 10443 \
-t \
-c ../test/cert/cert.pem \
-k ../test/cert/key.pem \
Note that when passing a relative path to -c and -k option, it will be resolved
using the document root directory as the base.
USING CLIENT AUTHENTICATION
===========================
To run the standalone server with TLS client authentication support, run it with
--tls-client-auth and --tls-client-ca options in addition to ones required for
TLS support.
Example:
$ PYTHONPATH=. python mod_pywebsocket/standalone.py -d example -p 10443 -t \
-c ../test/cert/cert.pem -k ../test/cert/key.pem \
--tls-client-auth \
--tls-client-ca=../test/cert/cacert.pem
Note that when passing a relative path to --tls-client-ca option, it will be
resolved using the document root directory as the base.
CONFIGURATION FILE
==================
You can also write a configuration file and use it by specifying the path to
the configuration file by --config option. Please write a configuration file
following the documentation of the Python ConfigParser library. Name of each
entry must be the long version argument name. E.g. to set log level to debug,
add the following line:
log_level=debug
For options which doesn't take value, please add some fake value. E.g. for
--tls option, add the following line:
tls=True
Note that tls will be enabled even if you write tls=False as the value part is
fake.
When both a command line argument and a configuration file entry are set for
the same configuration item, the command line value will override one in the
configuration file.
THREADING
=========
This server is derived from SocketServer.ThreadingMixIn. Hence a thread is
used for each request.
SECURITY WARNING
================
This uses CGIHTTPServer and CGIHTTPServer is not secure.
It may execute arbitrary Python code or external programs. It should not be
used outside a firewall.
"""
import BaseHTTPServer
import CGIHTTPServer
import SimpleHTTPServer
import SocketServer
import ConfigParser
import base64
import httplib
import logging
import logging.handlers
import optparse
import os
import re
import select
import socket
import sys
import threading
import time
from mod_pywebsocket import common
from mod_pywebsocket import dispatch
from mod_pywebsocket import handshake
from mod_pywebsocket import http_header_util
from mod_pywebsocket import memorizingfile
from mod_pywebsocket import util
from mod_pywebsocket.xhr_benchmark_handler import XHRBenchmarkHandler
# Defaults for the rotating log file handler: maximum size of one log
# file and number of rotated backups to keep.
_DEFAULT_LOG_MAX_BYTES = 1024 * 256
_DEFAULT_LOG_BACKUP_COUNT = 5

# Default backlog passed to socket.listen().
_DEFAULT_REQUEST_QUEUE_SIZE = 128

# 1024 is practically large enough to contain WebSocket handshake lines.
_MAX_MEMORIZED_LINES = 1024

# Constants for the --tls_module flag.
_TLS_BY_STANDARD_MODULE = 'ssl'
_TLS_BY_PYOPENSSL = 'pyopenssl'
class _StandaloneConnection(object):
    """Mimic mod_python mp_conn."""

    def __init__(self, request_handler):
        """Construct an instance.

        Args:
            request_handler: A WebSocketRequestHandler instance.
        """
        self._request_handler = request_handler

    def get_local_addr(self):
        """Getter to mimic mp_conn.local_addr."""
        server = self._request_handler.server
        return (server.server_name, server.server_port)
    local_addr = property(get_local_addr)

    def get_remote_addr(self):
        """Getter to mimic mp_conn.remote_addr.

        Setting the property in __init__ won't work because the request
        handler is not initialized yet there."""
        return self._request_handler.client_address
    remote_addr = property(get_remote_addr)

    def write(self, data):
        """Mimic mp_conn.write()."""
        handler = self._request_handler
        return handler.wfile.write(data)

    def read(self, length):
        """Mimic mp_conn.read()."""
        handler = self._request_handler
        return handler.rfile.read(length)

    def get_memorized_lines(self):
        """Return the request lines memorized by the wrapped rfile."""
        return self._request_handler.rfile.get_memorized_lines()
class _StandaloneRequest(object):
    """Mimic mod_python request."""

    def __init__(self, request_handler, use_tls):
        """Construct an instance.

        Args:
            request_handler: A WebSocketRequestHandler instance.
        """
        self._logger = util.get_class_logger(self)

        self._request_handler = request_handler
        self.connection = _StandaloneConnection(request_handler)
        self._use_tls = use_tls
        self.headers_in = request_handler.headers

    def get_uri(self):
        """Getter to mimic request.uri.

        Unlike the uri attribute of a mod_python request object, which
        holds the parsed path portion, this returns the raw data of the
        Request-URI part of the Request-Line.  The behavior is kept for
        compatibility.
        """
        return self._request_handler.path
    uri = property(get_uri)

    def get_unparsed_uri(self):
        """Getter to mimic request.unparsed_uri."""
        return self._request_handler.path
    unparsed_uri = property(get_unparsed_uri)

    def get_method(self):
        """Getter to mimic request.method."""
        return self._request_handler.command
    method = property(get_method)

    def get_protocol(self):
        """Getter to mimic request.protocol."""
        return self._request_handler.request_version
    protocol = property(get_protocol)

    def is_https(self):
        """Mimic request.is_https()."""
        return self._use_tls
def _import_ssl():
global ssl
try:
import ssl
return True
except ImportError:
return False
def _import_pyopenssl():
global OpenSSL
try:
import OpenSSL.SSL
return True
except ImportError:
return False
class _StandaloneSSLConnection(object):
    """A wrapper class for OpenSSL.SSL.Connection to

    - provide makefile method which is not supported by the class
    - tweak shutdown method since OpenSSL.SSL.Connection.shutdown doesn't
      accept the "how" argument.
    - convert SysCallError exceptions that its recv method may raise into a
      return value of '', meaning EOF. We cannot overwrite the recv method on
      self._connection since it's immutable.
    """

    # Attributes implemented by this wrapper itself; every other
    # attribute access is forwarded to the wrapped connection.
    _OVERRIDDEN_ATTRIBUTES = ['_connection', 'makefile', 'shutdown', 'recv']

    def __init__(self, connection):
        # connection: an OpenSSL.SSL.Connection instance to wrap.
        self._connection = connection

    def __getattribute__(self, name):
        # Serve overridden names from this wrapper, delegate the rest
        # to the wrapped connection.
        if name in _StandaloneSSLConnection._OVERRIDDEN_ATTRIBUTES:
            return object.__getattribute__(self, name)
        return self._connection.__getattribute__(name)

    def __setattr__(self, name, value):
        # Mirror __getattribute__: only wrapper-owned names are stored
        # on this object itself.
        if name in _StandaloneSSLConnection._OVERRIDDEN_ATTRIBUTES:
            return object.__setattr__(self, name, value)
        return self._connection.__setattr__(name, value)

    def makefile(self, mode='r', bufsize=-1):
        # OpenSSL.SSL.Connection has no makefile; build a file-like
        # object on top of this wrapper instead.
        return socket._fileobject(self, mode, bufsize)

    def shutdown(self, unused_how):
        # OpenSSL's shutdown takes no "how" argument; drop it.
        self._connection.shutdown()

    def recv(self, bufsize, flags=0):
        if flags != 0:
            raise ValueError('Non-zero flags not allowed')

        try:
            return self._connection.recv(bufsize)
        except OpenSSL.SSL.SysCallError, (err, message):
            if err == -1:
                # Suppress "unexpected EOF" exception. See the OpenSSL document
                # for SSL_get_error.
                return ''
            raise
def _alias_handlers(dispatcher, websock_handlers_map_file):
"""Set aliases specified in websock_handler_map_file in dispatcher.
Args:
dispatcher: dispatch.Dispatcher instance
websock_handler_map_file: alias map file
"""
fp = open(websock_handlers_map_file)
try:
for line in fp:
if line[0] == '#' or line.isspace():
continue
m = re.match('(\S+)\s+(\S+)', line)
if not m:
logging.warning('Wrong format in map file:' + line)
continue
try:
dispatcher.add_resource_path_alias(
m.group(1), m.group(2))
except dispatch.DispatchException, e:
logging.error(str(e))
finally:
fp.close()
class WebSocketServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer):
    """HTTPServer specialized for WebSocket."""

    # Overrides SocketServer.ThreadingMixIn.daemon_threads
    daemon_threads = True
    # Overrides BaseHTTPServer.HTTPServer.allow_reuse_address
    allow_reuse_address = True

    def __init__(self, options):
        """Override SocketServer.TCPServer.__init__ to set SSL enabled
        socket object to self.socket before server_bind and server_activate,
        if necessary.
        """

        # Share a Dispatcher among request handlers to save time for
        # instantiation.  Dispatcher can be shared because it is thread-safe.
        options.dispatcher = dispatch.Dispatcher(
            options.websock_handlers,
            options.scan_dir,
            options.allow_handlers_outside_root_dir)
        if options.websock_handlers_map_file:
            _alias_handlers(options.dispatcher,
                            options.websock_handlers_map_file)
        warnings = options.dispatcher.source_warnings()
        if warnings:
            for warning in warnings:
                logging.warning('Warning in source loading: %s' % warning)

        self._logger = util.get_class_logger(self)

        self.request_queue_size = options.request_queue_size
        self.__ws_is_shut_down = threading.Event()
        self.__ws_serving = False

        # Note: deliberately calls BaseServer.__init__, not
        # HTTPServer.__init__, so that bind/activate can be performed on
        # the (possibly multiple, possibly TLS-wrapped) sockets below.
        SocketServer.BaseServer.__init__(
            self, (options.server_host, options.port), WebSocketRequestHandler)

        # Expose the options object to allow handler objects access it. We name
        # it with websocket_ prefix to avoid conflict.
        self.websocket_server_options = options

        self._create_sockets()
        self.server_bind()
        self.server_activate()

    def _create_sockets(self):
        """Create one listening socket per usable address family, wrapping
        each with ssl when TLS via the standard module is requested."""
        self.server_name, self.server_port = self.server_address
        self._sockets = []
        if not self.server_name:
            # On platforms that doesn't support IPv6, the first bind fails.
            # On platforms that supports IPv6
            # - If it binds both IPv4 and IPv6 on call with AF_INET6, the
            #   first bind succeeds and the second fails (we'll see 'Address
            #   already in use' error).
            # - If it binds only IPv6 on call with AF_INET6, both call are
            #   expected to succeed to listen both protocol.
            addrinfo_array = [
                (socket.AF_INET6, socket.SOCK_STREAM, '', '', ''),
                (socket.AF_INET, socket.SOCK_STREAM, '', '', '')]
        else:
            addrinfo_array = socket.getaddrinfo(self.server_name,
                                                self.server_port,
                                                socket.AF_UNSPEC,
                                                socket.SOCK_STREAM,
                                                socket.IPPROTO_TCP)
        for addrinfo in addrinfo_array:
            self._logger.info('Create socket on: %r', addrinfo)
            family, socktype, proto, canonname, sockaddr = addrinfo
            try:
                socket_ = socket.socket(family, socktype)
            except Exception, e:
                self._logger.info('Skip by failure: %r', e)
                continue
            server_options = self.websocket_server_options
            if server_options.use_tls:
                # For the case of _HAS_OPEN_SSL, we do wrapper setup after
                # accept.
                if server_options.tls_module == _TLS_BY_STANDARD_MODULE:
                    if server_options.tls_client_auth:
                        if server_options.tls_client_cert_optional:
                            client_cert_ = ssl.CERT_OPTIONAL
                        else:
                            client_cert_ = ssl.CERT_REQUIRED
                    else:
                        client_cert_ = ssl.CERT_NONE
                    socket_ = ssl.wrap_socket(socket_,
                        keyfile=server_options.private_key,
                        certfile=server_options.certificate,
                        ssl_version=ssl.PROTOCOL_SSLv23,
                        ca_certs=server_options.tls_client_ca,
                        cert_reqs=client_cert_,
                        do_handshake_on_connect=False)
            self._sockets.append((socket_, addrinfo))

    def server_bind(self):
        """Override SocketServer.TCPServer.server_bind to enable multiple
        sockets bind.
        """

        failed_sockets = []

        for socketinfo in self._sockets:
            socket_, addrinfo = socketinfo
            self._logger.info('Bind on: %r', addrinfo)
            if self.allow_reuse_address:
                socket_.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            try:
                socket_.bind(self.server_address)
            except Exception, e:
                self._logger.info('Skip by failure: %r', e)
                socket_.close()
                failed_sockets.append(socketinfo)
            if self.server_address[1] == 0:
                # The operating system assigns the actual port number for port
                # number 0. This case, the second and later sockets should use
                # the same port number. Also self.server_port is rewritten
                # because it is exported, and will be used by external code.
                self.server_address = (
                    self.server_name, socket_.getsockname()[1])
                self.server_port = self.server_address[1]
                self._logger.info('Port %r is assigned', self.server_port)

        for socketinfo in failed_sockets:
            self._sockets.remove(socketinfo)

    def server_activate(self):
        """Override SocketServer.TCPServer.server_activate to enable multiple
        sockets listen.
        """

        failed_sockets = []

        for socketinfo in self._sockets:
            socket_, addrinfo = socketinfo
            self._logger.info('Listen on: %r', addrinfo)
            try:
                socket_.listen(self.request_queue_size)
            except Exception, e:
                self._logger.info('Skip by failure: %r', e)
                socket_.close()
                failed_sockets.append(socketinfo)

        for socketinfo in failed_sockets:
            self._sockets.remove(socketinfo)

        if len(self._sockets) == 0:
            self._logger.critical(
                'No sockets activated. Use info log level to see the reason.')

    def server_close(self):
        """Override SocketServer.TCPServer.server_close to enable multiple
        sockets close.
        """

        for socketinfo in self._sockets:
            socket_, addrinfo = socketinfo
            self._logger.info('Close on: %r', addrinfo)
            socket_.close()

    def fileno(self):
        """Override SocketServer.TCPServer.fileno."""

        # With multiple listening sockets there is no single meaningful
        # fileno; returning the first one is a best effort only.
        self._logger.critical('Not supported: fileno')
        return self._sockets[0][0].fileno()

    def handle_error(self, request, client_address):
        """Override SocketServer.handle_error."""

        self._logger.error(
            'Exception in processing request from: %r\n%s',
            client_address,
            util.get_stack_trace())
        # Note: client_address is a tuple.

    def get_request(self):
        """Override TCPServer.get_request to wrap OpenSSL.SSL.Connection
        object with _StandaloneSSLConnection to provide makefile method. We
        cannot substitute OpenSSL.SSL.Connection.makefile since it's readonly
        attribute.
        """

        accepted_socket, client_address = self.socket.accept()

        server_options = self.websocket_server_options
        if server_options.use_tls:
            if server_options.tls_module == _TLS_BY_STANDARD_MODULE:
                # The socket was wrapped with do_handshake_on_connect=False
                # in _create_sockets, so the TLS handshake happens here.
                try:
                    accepted_socket.do_handshake()
                except ssl.SSLError, e:
                    self._logger.debug('%r', e)
                    raise

                # Print cipher in use. Handshake is done on accept.
                self._logger.debug('Cipher: %s', accepted_socket.cipher())
                self._logger.debug('Client cert: %r',
                                   accepted_socket.getpeercert())
            elif server_options.tls_module == _TLS_BY_PYOPENSSL:
                # We cannot print the cipher in use. pyOpenSSL doesn't provide
                # any method to fetch that.

                ctx = OpenSSL.SSL.Context(OpenSSL.SSL.SSLv23_METHOD)
                ctx.use_privatekey_file(server_options.private_key)
                ctx.use_certificate_file(server_options.certificate)

                def default_callback(conn, cert, errnum, errdepth, ok):
                    return ok == 1

                # See the OpenSSL document for SSL_CTX_set_verify.
                if server_options.tls_client_auth:
                    verify_mode = OpenSSL.SSL.VERIFY_PEER
                    if not server_options.tls_client_cert_optional:
                        verify_mode |= OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT
                    ctx.set_verify(verify_mode, default_callback)
                    ctx.load_verify_locations(server_options.tls_client_ca,
                                              None)
                else:
                    ctx.set_verify(OpenSSL.SSL.VERIFY_NONE, default_callback)

                accepted_socket = OpenSSL.SSL.Connection(ctx, accepted_socket)
                accepted_socket.set_accept_state()

                # Convert SSL related error into socket.error so that
                # SocketServer ignores them and keeps running.
                #
                # TODO(tyoshino): Convert all kinds of errors.
                try:
                    accepted_socket.do_handshake()
                except OpenSSL.SSL.Error, e:
                    # Set errno part to 1 (SSL_ERROR_SSL) like the ssl module
                    # does.
                    self._logger.debug('%r', e)
                    raise socket.error(1, '%r' % e)
                cert = accepted_socket.get_peer_certificate()
                if cert is not None:
                    self._logger.debug('Client cert subject: %r',
                                       cert.get_subject().get_components())
                accepted_socket = _StandaloneSSLConnection(accepted_socket)
            else:
                raise ValueError('No TLS support module is available')

        return accepted_socket, client_address

    def serve_forever(self, poll_interval=0.5):
        """Override SocketServer.BaseServer.serve_forever."""

        self.__ws_serving = True
        self.__ws_is_shut_down.clear()
        handle_request = self.handle_request
        if hasattr(self, '_handle_request_noblock'):
            handle_request = self._handle_request_noblock
        else:
            self._logger.warning('Fallback to blocking request handler')
        try:
            while self.__ws_serving:
                # select() multiplexes over all listening sockets so one
                # server instance can serve IPv4 and IPv6 at once.
                r, w, e = select.select(
                    [socket_[0] for socket_ in self._sockets],
                    [], [], poll_interval)
                for socket_ in r:
                    self.socket = socket_
                    handle_request()
                self.socket = None
        finally:
            self.__ws_is_shut_down.set()

    def shutdown(self):
        """Override SocketServer.BaseServer.shutdown."""

        self.__ws_serving = False
        self.__ws_is_shut_down.wait()
class WebSocketRequestHandler(CGIHTTPServer.CGIHTTPRequestHandler):
"""CGIHTTPRequestHandler specialized for WebSocket."""
# Use httplib.HTTPMessage instead of mimetools.Message.
MessageClass = httplib.HTTPMessage
protocol_version = "HTTP/1.1"
    def setup(self):
        """Override SocketServer.StreamRequestHandler.setup to wrap rfile
        with MemorizingFile.

        This method will be called by BaseRequestHandler's constructor
        before calling BaseHTTPRequestHandler.handle.
        BaseHTTPRequestHandler.handle will call
        BaseHTTPRequestHandler.handle_one_request and it will call
        WebSocketRequestHandler.parse_request.
        """

        # Call superclass's setup to prepare rfile, wfile, etc. See setup
        # definition on the root class SocketServer.StreamRequestHandler to
        # understand what this does.
        CGIHTTPServer.CGIHTTPRequestHandler.setup(self)

        # Wrap rfile with MemorizingFile so the raw request lines stay
        # retrievable afterwards via get_memorized_lines() (exposed to
        # handlers through _StandaloneConnection).
        self.rfile = memorizingfile.MemorizingFile(
            self.rfile,
            max_memorized_lines=_MAX_MEMORIZED_LINES)
    def __init__(self, request, client_address, server):
        """Pull per-request configuration from the server's WebSocket
        options, then delegate to CGIHTTPRequestHandler which performs
        the actual request handling.
        """

        self._logger = util.get_class_logger(self)

        self._options = server.websocket_server_options

        # Overrides CGIHTTPServerRequestHandler.cgi_directories.
        self.cgi_directories = self._options.cgi_directories
        # Replace CGIHTTPRequestHandler.is_executable method.
        if self._options.is_executable_method is not None:
            self.is_executable = self._options.is_executable_method

        # This actually calls BaseRequestHandler.__init__.
        CGIHTTPServer.CGIHTTPRequestHandler.__init__(
            self, request, client_address, server)
def parse_request(self):
"""Override BaseHTTPServer.BaseHTTPRequestHandler.parse_request.
Return True to continue processing for HTTP(S), False otherwise.
See BaseHTTPRequestHandler.handle_one_request method which calls
this method to understand how the return value will be handled.
"""
# We hook parse_request method, but also call the original
# CGIHTTPRequestHandler.parse_request since when we return False,
# CGIHTTPRequestHandler.handle_one_request continues processing and
# it needs variables set by CGIHTTPRequestHandler.parse_request.
#
# Variables set by this method will be also used by WebSocket request
# handling (self.path, self.command, self.requestline, etc. See also
# how _StandaloneRequest's members are implemented using these
# attributes).
if not CGIHTTPServer.CGIHTTPRequestHandler.parse_request(self):
return False
if self.command == "CONNECT":
self.send_response(200, "Connected")
self.send_header("Connection", "keep-alive")
self.end_headers()
return False
if self._options.use_basic_auth:
auth = self.headers.getheader('Authorization')
if auth != self._options.basic_auth_credential:
self.send_response(401)
self.send_header('WWW-Authenticate',
'Basic realm="Pywebsocket"')
self.end_headers()
self._logger.info('Request basic authentication')
return False
host, port, resource = http_header_util.parse_uri(self.path)
# Special paths for XMLHttpRequest benchmark
xhr_benchmark_helper_prefix = '/073be001e10950692ccbf3a2ad21c245'
if resource == (xhr_benchmark_helper_prefix + '_send'):
xhr_benchmark_handler = XHRBenchmarkHandler(
self.headers, self.rfile, self.wfile)
xhr_benchmark_handler.do_send()
return False
if resource == (xhr_benchmark_helper_prefix + '_receive'):
xhr_benchmark_handler = XHRBenchmarkHandler(
self.headers, self.rfile, self.wfile)
xhr_benchmark_handler.do_receive()
return False
if resource is None:
self._logger.info('Invalid URI: %r', self.path)
self._logger.info('Fallback to CGIHTTPRequestHandler')
return True
server_options = self.server.websocket_server_options
if host is not None:
validation_host = server_options.validation_host
if validation_host is not None and host != validation_host:
self._logger.info('Invalid host: %r (expected: %r)',
host,
validation_host)
self._logger.info('Fallback to CGIHTTPRequestHandler')
return True
if port is not None:
validation_port = server_options.validation_port
if validation_port is not None and port != validation_port:
self._logger.info('Invalid port: %r (expected: %r)',
port,
validation_port)
self._logger.info('Fallback to CGIHTTPRequestHandler')
return True
self.path = resource
request = _StandaloneRequest(self, self._options.use_tls)
try:
# Fallback to default http handler for request paths for which
# we don't have request handlers.
if not self._options.dispatcher.get_handler_suite(self.path):
self._logger.info('No handler for resource: %r',
self.path)
self._logger.info('Fallback to CGIHTTPRequestHandler')
return True
except dispatch.DispatchException, e:
self._logger.info('Dispatch failed for error: %s', e)
self.send_error(e.status)
return False
# If any Exceptions without except clause setup (including
# DispatchException) is raised below this point, it will be caught
# and logged by WebSocketServer.
try:
try:
handshake.do_handshake(
request,
self._options.dispatcher,
allowDraft75=self._options.allow_draft75,
strict=self._options.strict)
except handshake.VersionException, e:
self._logger.info('Handshake failed for version error: %s', e)
self.send_response(common.HTTP_STATUS_BAD_REQUEST)
self.send_header(common.SEC_WEBSOCKET_VERSION_HEADER,
e.supported_versions)
self.end_headers()
return False
except handshake.HandshakeException, e:
# Handshake for ws(s) failed.
self._logger.info('Handshake failed for error: %s', e)
self.send_error(e.status)
return False
request._dispatcher = self._options.dispatcher
self._options.dispatcher.transfer_data(request)
except handshake.AbortedByUserException, e:
self._logger.info('Aborted: %s', e)
return False
def log_request(self, code='-', size='-'):
"""Override BaseHTTPServer.log_request."""
self._logger.info('"%s" %s %s',
self.requestline, str(code), str(size))
def log_error(self, *args):
"""Override BaseHTTPServer.log_error."""
# Despite the name, this method is for warnings than for errors.
# For example, HTTP status code is logged by this method.
self._logger.warning('%s - %s',
self.address_string(),
args[0] % args[1:])
def is_cgi(self):
"""Test whether self.path corresponds to a CGI script.
Add extra check that self.path doesn't contains ..
Also check if the file is a executable file or not.
If the file is not executable, it is handled as static file or dir
rather than a CGI script.
"""
if CGIHTTPServer.CGIHTTPRequestHandler.is_cgi(self):
if '..' in self.path:
return False
# strip query parameter from request path
resource_name = self.path.split('?', 2)[0]
# convert resource_name into real path name in filesystem.
scriptfile = self.translate_path(resource_name)
if not os.path.isfile(scriptfile):
return False
if not self.is_executable(scriptfile):
return False
return True
return False
def _get_logger_from_class(c):
return logging.getLogger('%s.%s' % (c.__module__, c.__name__))
def _configure_logging(options):
    """Configure root logging from command-line options.

    Registers the custom FINE level, sets the root level, installs either a
    rotating file handler or a stderr stream handler with a common format,
    and applies a separate level to the deflate/inflate helper loggers.
    """
    logging.addLevelName(common.LOGLEVEL_FINE, 'FINE')

    root_logger = logging.getLogger()
    root_logger.setLevel(logging.getLevelName(options.log_level.upper()))

    if options.log_file:
        log_handler = logging.handlers.RotatingFileHandler(
            options.log_file, 'a', options.log_max, options.log_count)
    else:
        log_handler = logging.StreamHandler()
    log_handler.setFormatter(logging.Formatter(
        '[%(asctime)s] [%(levelname)s] %(name)s: %(message)s'))
    root_logger.addHandler(log_handler)

    # _Deflater/_Inflater are chatty; they get their own log level.
    deflate_level = logging.getLevelName(options.deflate_log_level.upper())
    for helper_class in (util._Deflater, util._Inflater):
        _get_logger_from_class(helper_class).setLevel(deflate_level)
def _build_option_parser():
    """Build the optparse.OptionParser for all command-line options.

    Returns:
        optparse.OptionParser covering server binding, handler discovery,
        CGI, TLS, basic auth, logging and debugging options. Both
        hyphenated and underscored long spellings are accepted for
        backward compatibility.
    """
    parser = optparse.OptionParser()

    # --- Configuration file and server binding ---
    parser.add_option('--config', dest='config_file', type='string',
                      default=None,
                      help=('Path to configuration file. See the file comment '
                            'at the top of this file for the configuration '
                            'file format'))
    parser.add_option('-H', '--server-host', '--server_host',
                      dest='server_host',
                      default='',
                      help='server hostname to listen to')
    parser.add_option('-V', '--validation-host', '--validation_host',
                      dest='validation_host',
                      default=None,
                      help='server hostname to validate in absolute path.')
    parser.add_option('-p', '--port', dest='port', type='int',
                      default=common.DEFAULT_WEB_SOCKET_PORT,
                      help='port to listen to')
    parser.add_option('-P', '--validation-port', '--validation_port',
                      dest='validation_port', type='int',
                      default=None,
                      help='server port to validate in absolute path.')

    # --- WebSocket handler discovery ---
    parser.add_option('-w', '--websock-handlers', '--websock_handlers',
                      dest='websock_handlers',
                      default='.',
                      help=('The root directory of WebSocket handler files. '
                            'If the path is relative, --document-root is used '
                            'as the base.'))
    parser.add_option('-m', '--websock-handlers-map-file',
                      '--websock_handlers_map_file',
                      dest='websock_handlers_map_file',
                      default=None,
                      help=('WebSocket handlers map file. '
                            'Each line consists of alias_resource_path and '
                            'existing_resource_path, separated by spaces.'))
    parser.add_option('-s', '--scan-dir', '--scan_dir', dest='scan_dir',
                      default=None,
                      help=('Must be a directory under --websock-handlers. '
                            'Only handlers under this directory are scanned '
                            'and registered to the server. '
                            'Useful for saving scan time when the handler '
                            'root directory contains lots of files that are '
                            'not handler file or are handler files but you '
                            'don\'t want them to be registered. '))
    parser.add_option('--allow-handlers-outside-root-dir',
                      '--allow_handlers_outside_root_dir',
                      dest='allow_handlers_outside_root_dir',
                      action='store_true',
                      default=False,
                      help=('Scans WebSocket handlers even if their canonical '
                            'path is not under --websock-handlers.'))

    # --- Static file / CGI serving ---
    parser.add_option('-d', '--document-root', '--document_root',
                      dest='document_root', default='.',
                      help='Document root directory.')
    parser.add_option('-x', '--cgi-paths', '--cgi_paths', dest='cgi_paths',
                      default=None,
                      help=('CGI paths relative to document_root.'
                            'Comma-separated. (e.g -x /cgi,/htbin) '
                            'Files under document_root/cgi_path are handled '
                            'as CGI programs. Must be executable.'))

    # --- TLS ---
    parser.add_option('-t', '--tls', dest='use_tls', action='store_true',
                      default=False, help='use TLS (wss://)')
    parser.add_option('--tls-module', '--tls_module', dest='tls_module',
                      type='choice',
                      choices = [_TLS_BY_STANDARD_MODULE, _TLS_BY_PYOPENSSL],
                      help='Use ssl module if "%s" is specified. '
                      'Use pyOpenSSL module if "%s" is specified' %
                      (_TLS_BY_STANDARD_MODULE, _TLS_BY_PYOPENSSL))
    parser.add_option('-k', '--private-key', '--private_key',
                      dest='private_key',
                      default='', help='TLS private key file.')
    parser.add_option('-c', '--certificate', dest='certificate',
                      default='', help='TLS certificate file.')
    parser.add_option('--tls-client-auth', dest='tls_client_auth',
                      action='store_true', default=False,
                      help='Requests TLS client auth on every connection.')
    parser.add_option('--tls-client-cert-optional',
                      dest='tls_client_cert_optional',
                      action='store_true', default=False,
                      help=('Makes client certificate optional even though '
                            'TLS client auth is enabled.'))
    parser.add_option('--tls-client-ca', dest='tls_client_ca', default='',
                      help=('Specifies a pem file which contains a set of '
                            'concatenated CA certificates which are used to '
                            'validate certificates passed from clients'))

    # --- Authentication ---
    parser.add_option('--basic-auth', dest='use_basic_auth',
                      action='store_true', default=False,
                      help='Requires Basic authentication.')
    parser.add_option('--basic-auth-credential',
                      dest='basic_auth_credential', default='test:test',
                      help='Specifies the credential of basic authentication '
                      'by username:password pair (e.g. test:test).')

    # --- Logging ---
    parser.add_option('-l', '--log-file', '--log_file', dest='log_file',
                      default='', help='Log file.')
    # Custom log level:
    # - FINE: Prints status of each frame processing step
    parser.add_option('--log-level', '--log_level', type='choice',
                      dest='log_level', default='warn',
                      choices=['fine',
                               'debug', 'info', 'warning', 'warn', 'error',
                               'critical'],
                      help='Log level.')
    parser.add_option('--deflate-log-level', '--deflate_log_level',
                      type='choice',
                      dest='deflate_log_level', default='warn',
                      choices=['debug', 'info', 'warning', 'warn', 'error',
                               'critical'],
                      help='Log level for _Deflater and _Inflater.')
    parser.add_option('--thread-monitor-interval-in-sec',
                      '--thread_monitor_interval_in_sec',
                      dest='thread_monitor_interval_in_sec',
                      type='int', default=-1,
                      help=('If positive integer is specified, run a thread '
                            'monitor to show the status of server threads '
                            'periodically in the specified inteval in '
                            'second. If non-positive integer is specified, '
                            'disable the thread monitor.'))
    parser.add_option('--log-max', '--log_max', dest='log_max', type='int',
                      default=_DEFAULT_LOG_MAX_BYTES,
                      help='Log maximum bytes')
    parser.add_option('--log-count', '--log_count', dest='log_count',
                      type='int', default=_DEFAULT_LOG_BACKUP_COUNT,
                      help='Log backup count')

    # --- Obsolete / misc ---
    parser.add_option('--allow-draft75', dest='allow_draft75',
                      action='store_true', default=False,
                      help='Obsolete option. Ignored.')
    parser.add_option('--strict', dest='strict', action='store_true',
                      default=False, help='Obsolete option. Ignored.')
    parser.add_option('-q', '--queue', dest='request_queue_size', type='int',
                      default=_DEFAULT_REQUEST_QUEUE_SIZE,
                      help='request queue size')
    return parser
class ThreadMonitor(threading.Thread):
    """Daemon thread that periodically logs the names of all live threads.

    Used for debugging; the interval between reports is fixed at
    construction time.
    """
    daemon = True

    def __init__(self, interval_in_sec):
        threading.Thread.__init__(self, name='ThreadMonitor')

        self._logger = util.get_class_logger(self)
        self._interval_in_sec = interval_in_sec

    def run(self):
        # Loop forever; being a daemon thread, this does not block shutdown.
        while True:
            active_thread_names = [
                thread.name for thread in threading.enumerate()]
            self._logger.info(
                "%d active threads: %s",
                threading.active_count(),
                ', '.join(active_thread_names))
            time.sleep(self._interval_in_sec)
def _parse_args_and_config(args):
    """Parse command-line options, merging in a configuration file if given.

    Options read from the [pywebsocket] section of --config are converted
    into synthetic '--name value' arguments and prepended to the original
    command line, so explicit command-line arguments (parsed later by
    optparse) take precedence over configuration file values.

    Returns a (options, args) pair as produced by OptionParser.parse_args.
    Exits the process with status 1 on unrecognized positional arguments
    or an unreadable configuration file.
    """
    parser = _build_option_parser()
    # First, parse options without configuration file.
    temporary_options, temporary_args = parser.parse_args(args=args)
    if temporary_args:
        logging.critical(
            'Unrecognized positional arguments: %r', temporary_args)
        sys.exit(1)
    if temporary_options.config_file:
        try:
            config_fp = open(temporary_options.config_file, 'r')
        except IOError, e:
            logging.critical(
                'Failed to open configuration file %r: %r',
                temporary_options.config_file,
                e)
            sys.exit(1)
        config_parser = ConfigParser.SafeConfigParser()
        config_parser.readfp(config_fp)
        config_fp.close()
        # Turn each 'name = value' entry into a '--name value' pair.
        args_from_config = []
        for name, value in config_parser.items('pywebsocket'):
            args_from_config.append('--' + name)
            args_from_config.append(value)
        if args is None:
            args = args_from_config
        else:
            # Config-derived arguments first: command-line ones win.
            args = args_from_config + args
        return parser.parse_args(args=args)
    else:
        return temporary_options, temporary_args
def _main(args=None):
    """You can call this function from your own program, but please note that
    this function has some side-effects that might affect your program. For
    example, util.wrap_popen3_for_win use in this method replaces implementation
    of os.popen3.

    Parses options (and optional config file), configures logging, validates
    the CGI/TLS/auth option combinations, then starts WebSocketServer and
    serves forever. Exits with status 1 on invalid option combinations or
    server startup failure.
    """
    options, args = _parse_args_and_config(args=args)
    # Serve files relative to the document root.
    os.chdir(options.document_root)
    _configure_logging(options)
    if options.allow_draft75:
        logging.warning('--allow_draft75 option is obsolete.')
    if options.strict:
        logging.warning('--strict option is obsolete.')
    # TODO(tyoshino): Clean up initialization of CGI related values. Move some
    # of code here to WebSocketRequestHandler class if it's better.
    options.cgi_directories = []
    options.is_executable_method = None
    if options.cgi_paths:
        options.cgi_directories = options.cgi_paths.split(',')
        if sys.platform in ('cygwin', 'win32'):
            cygwin_path = None
            # For Win32 Python, it is expected that CYGWIN_PATH
            # is set to a directory of cygwin binaries.
            # For example, websocket_server.py in Chromium sets CYGWIN_PATH to
            # full path of third_party/cygwin/bin.
            if 'CYGWIN_PATH' in os.environ:
                cygwin_path = os.environ['CYGWIN_PATH']
            util.wrap_popen3_for_win(cygwin_path)
            def __check_script(scriptpath):
                return util.get_script_interp(scriptpath, cygwin_path)
            options.is_executable_method = __check_script
    # Resolve which TLS implementation to use and validate TLS options.
    if options.use_tls:
        if options.tls_module is None:
            # Auto-detect: prefer the standard ssl module over pyOpenSSL.
            if _import_ssl():
                options.tls_module = _TLS_BY_STANDARD_MODULE
                logging.debug('Using ssl module')
            elif _import_pyopenssl():
                options.tls_module = _TLS_BY_PYOPENSSL
                logging.debug('Using pyOpenSSL module')
            else:
                logging.critical(
                        'TLS support requires ssl or pyOpenSSL module.')
                sys.exit(1)
        elif options.tls_module == _TLS_BY_STANDARD_MODULE:
            if not _import_ssl():
                logging.critical('ssl module is not available')
                sys.exit(1)
        elif options.tls_module == _TLS_BY_PYOPENSSL:
            if not _import_pyopenssl():
                logging.critical('pyOpenSSL module is not available')
                sys.exit(1)
        else:
            logging.critical('Invalid --tls-module option: %r',
                             options.tls_module)
            sys.exit(1)
        if not options.private_key or not options.certificate:
            logging.critical(
                    'To use TLS, specify private_key and certificate.')
            sys.exit(1)
        if (options.tls_client_cert_optional and
            not options.tls_client_auth):
            logging.critical('Client authentication must be enabled to '
                             'specify tls_client_cert_optional')
            sys.exit(1)
    else:
        # Reject TLS-only options when TLS itself is disabled.
        if options.tls_module is not None:
            logging.critical('Use --tls-module option only together with '
                             '--use-tls option.')
            sys.exit(1)
        if options.tls_client_auth:
            logging.critical('TLS must be enabled for client authentication.')
            sys.exit(1)
        if options.tls_client_cert_optional:
            logging.critical('TLS must be enabled for client authentication.')
            sys.exit(1)
    if not options.scan_dir:
        options.scan_dir = options.websock_handlers
    if options.use_basic_auth:
        # Pre-encode the expected Authorization header value once.
        options.basic_auth_credential = 'Basic ' + base64.b64encode(
            options.basic_auth_credential)
    try:
        if options.thread_monitor_interval_in_sec > 0:
            # Run a thread monitor to show the status of server threads for
            # debugging.
            ThreadMonitor(options.thread_monitor_interval_in_sec).start()
        server = WebSocketServer(options)
        server.serve_forever()
    except Exception, e:
        logging.critical('mod_pywebsocket: %s' % e)
        logging.critical('mod_pywebsocket: %s' % util.get_stack_trace())
        sys.exit(1)
# Script entry point: run the server with the command-line arguments
# (excluding the program name).
if __name__ == '__main__':
    _main(sys.argv[1:])
# vi:sts=4 sw=4 et
| mpl-2.0 |
Lujeni/ansible | lib/ansible/modules/windows/win_msg.py | 52 | 3109 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Jon Hawkesworth (@jhawkesworth) <figs@unity.demon.co.uk>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
# Module metadata and documentation consumed by ansible-doc; the actual
# implementation lives in the .ps1 file of the same name.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = r'''
---
module: win_msg
version_added: "2.3"
short_description: Sends a message to logged in users on Windows hosts
description:
    - Wraps the msg.exe command in order to send messages to Windows hosts.
options:
  to:
    description:
      - Who to send the message to. Can be a username, sessionname or sessionid.
    type: str
    default: '*'
  display_seconds:
    description:
      - How long to wait for receiver to acknowledge message, in seconds.
    type: int
    default: 10
  wait:
    description:
      - Whether to wait for users to respond. Module will only wait for the number of seconds specified in display_seconds or 10 seconds if not specified.
        However, if I(wait) is C(yes), the message is sent to each logged on user in turn, waiting for the user to either press 'ok' or for
        the timeout to elapse before moving on to the next user.
    type: bool
    default: 'no'
  msg:
    description:
      - The text of the message to be displayed.
      - The message must be less than 256 characters.
    type: str
    default: Hello world!
notes:
   - This module must run on a windows host, so ensure your play targets windows
     hosts, or delegates to a windows host.
   - Messages are only sent to the local host where the module is run.
   - The module does not support sending to users listed in a file.
   - Setting wait to C(yes) can result in long run times on systems with many logged in users.
seealso:
- module: win_say
- module: win_toast
author:
- Jon Hawkesworth (@jhawkesworth)
'''

EXAMPLES = r'''
- name: Warn logged in users of impending upgrade
  win_msg:
    display_seconds: 60
    msg: Automated upgrade about to start. Please save your work and log off before {{ deployment_start_time }}
'''

RETURN = r'''
msg:
    description: Text of the message that was sent.
    returned: changed
    type: str
    sample: Automated upgrade about to start. Please save your work and log off before 22 July 2016 18:00:00
display_seconds:
    description: Value of display_seconds module parameter.
    returned: success
    type: str
    sample: 10
rc:
    description: The return code of the API call.
    returned: always
    type: int
    sample: 0
runtime_seconds:
    description: How long the module took to run on the remote windows host.
    returned: success
    type: str
    sample: 22 July 2016 17:45:51
sent_localtime:
    description: local time from windows host when the message was sent.
    returned: success
    type: str
    sample: 22 July 2016 17:45:51
wait:
    description: Value of wait module parameter.
    returned: success
    type: bool
    sample: false
'''
| gpl-3.0 |
GheRivero/ansible | test/units/template/test_vars.py | 84 | 2886 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import MagicMock
from ansible.template.vars import AnsibleJ2Vars
class TestVars(unittest.TestCase):
    """Check that AnsibleJ2Vars converts to a plain dict under both the
    Jinja2 2.8 (dict(*args, **kwargs)) and 2.9 (dict(mapping)) call styles."""

    def setUp(self):
        self.mock_templar = MagicMock(name='mock_templar')

    def test(self):
        # Smoke test: construction and repr with no templar/globals.
        print(AnsibleJ2Vars(None, None))

    def test_globals_empty_2_8(self):
        converted = self._dict_jinja28(AnsibleJ2Vars(self.mock_templar, {}))
        self.assertIsInstance(converted, dict)

    def test_globals_empty_2_9(self):
        converted = self._dict_jinja29(AnsibleJ2Vars(self.mock_templar, {}))
        self.assertIsInstance(converted, dict)

    def _assert_globals(self, converted):
        self.assertIsInstance(converted, dict)
        self.assertIn('foo', converted)
        self.assertEqual(converted['foo'], 'bar')

    def test_globals_2_8(self):
        sample = AnsibleJ2Vars(self.mock_templar, {'foo': 'bar', 'blip': [1, 2, 3]})
        self._assert_globals(self._dict_jinja28(sample))

    def test_globals_2_9(self):
        sample = AnsibleJ2Vars(self.mock_templar, {'foo': 'bar', 'blip': [1, 2, 3]})
        self._assert_globals(self._dict_jinja29(sample))

    def _dicts(self, ajvars):
        # Debug helper: convert with both call styles and show the results.
        print(ajvars)
        converted28 = self._dict_jinja28(ajvars)
        converted29 = self._dict_jinja29(ajvars)
        print('res28: %s' % converted28)
        print('res29: %s' % converted29)
        return (converted28, converted29)

    def _dict_jinja28(self, *args, **kwargs):
        # Jinja2 <2.9 passed the vars object through dict(*args, **kwargs).
        return dict(*args, **kwargs)

    def _dict_jinja29(self, the_vars):
        # Jinja2 >=2.9 converts via the mapping protocol.
        return dict(the_vars)
| gpl-3.0 |
lnielsen/invenio | invenio/modules/formatter/template_context_functions/tfn_get_fulltext_snippets.py | 4 | 2108 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Template context function to get fulltext snippets via Solr."""
from invenio.config import CFG_WEBSEARCH_FULLTEXT_SNIPPETS
from invenio.ext.logging import register_exception
from invenio.modules.formatter.utils import get_pdf_snippets
from invenio.legacy.search_engine import get_fulltext_terms_from_search_pattern
from invenio.modules.search.cache import get_pattern_from_cache
def template_context_function(id_bibrec, pattern, qid, current_user):
    """Return an HTML fulltext snippet for a record matching a search pattern.

    @param id_bibrec ID of record
    @param pattern search pattern; when empty it is recovered from the
        query cache via C{qid}
    @param qid query id
    @param current_user user object
    @return HTML containing snippet, '' when snippets are disabled, the
        pattern has no fulltext terms or extraction fails, None when a
        required input is missing
    """
    if not pattern:
        pattern = get_pattern_from_cache(qid)
    if id_bibrec and pattern and current_user:
        # Requires search in fulltext field
        if CFG_WEBSEARCH_FULLTEXT_SNIPPETS and 'fulltext:' in pattern:
            terms = get_fulltext_terms_from_search_pattern(pattern)
            if terms:
                snippets = ''
                try:
                    snippets = get_pdf_snippets(
                        id_bibrec, terms, current_user).decode('utf8')
                    if snippets:
                        return ' ... ' + snippets + ' ... '
                # Snippet extraction is best-effort: log and fall through to
                # an empty result. Was a bare "except:", which also swallowed
                # SystemExit/KeyboardInterrupt; narrowed to Exception.
                except Exception:
                    register_exception()
                return ''
            else:
                return ''
    else:
        return None
| gpl-2.0 |
unseenlaser/python-for-android | python3-alpha/python3-src/Lib/encodings/iso8859_6.py | 272 | 10833 | """ Python Character Mapping Codec iso8859_6 generated from 'MAPPINGS/ISO8859/8859-6.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless codec converting via the module's ISO 8859-6 charmap tables."""

    def encode(self,input,errors='strict'):
        # Returns (encoded_bytes, length_consumed).
        return codecs.charmap_encode(input,errors,encoding_table)

    def decode(self,input,errors='strict'):
        # Returns (decoded_text, length_consumed).
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder; charmap encoding is stateless, so each chunk is
    encoded independently of previous calls."""

    def encode(self, input, final=False):
        # [0] keeps the bytes and drops the length-consumed element.
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder; charmap decoding is stateless, so each chunk is
    decoded independently of previous calls."""

    def decode(self, input, final=False):
        # [0] keeps the text and drops the length-consumed element.
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    """Stream writer for ISO 8859-6; all behaviour is inherited."""
    pass
class StreamReader(Codec,codecs.StreamReader):
    """Stream reader for ISO 8859-6; all behaviour is inherited."""
    pass
### encodings module API
def getregentry():
    """Return the codecs.CodecInfo registration entry for this codec.

    Called by the encodings package search function when 'iso8859-6' (or
    an alias) is looked up via codecs.lookup().
    """
    return codecs.CodecInfo(
        name='iso8859-6',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
# 256-entry tuple mapping each byte value 0x00-0xFF to its Unicode
# character. Bytes with no assignment in ISO 8859-6 map to U+FFFE and
# therefore fail to decode under the 'strict' error handler.
decoding_table = (
    '\x00'     #  0x00 -> NULL
    '\x01'     #  0x01 -> START OF HEADING
    '\x02'     #  0x02 -> START OF TEXT
    '\x03'     #  0x03 -> END OF TEXT
    '\x04'     #  0x04 -> END OF TRANSMISSION
    '\x05'     #  0x05 -> ENQUIRY
    '\x06'     #  0x06 -> ACKNOWLEDGE
    '\x07'     #  0x07 -> BELL
    '\x08'     #  0x08 -> BACKSPACE
    '\t'       #  0x09 -> HORIZONTAL TABULATION
    '\n'       #  0x0A -> LINE FEED
    '\x0b'     #  0x0B -> VERTICAL TABULATION
    '\x0c'     #  0x0C -> FORM FEED
    '\r'       #  0x0D -> CARRIAGE RETURN
    '\x0e'     #  0x0E -> SHIFT OUT
    '\x0f'     #  0x0F -> SHIFT IN
    '\x10'     #  0x10 -> DATA LINK ESCAPE
    '\x11'     #  0x11 -> DEVICE CONTROL ONE
    '\x12'     #  0x12 -> DEVICE CONTROL TWO
    '\x13'     #  0x13 -> DEVICE CONTROL THREE
    '\x14'     #  0x14 -> DEVICE CONTROL FOUR
    '\x15'     #  0x15 -> NEGATIVE ACKNOWLEDGE
    '\x16'     #  0x16 -> SYNCHRONOUS IDLE
    '\x17'     #  0x17 -> END OF TRANSMISSION BLOCK
    '\x18'     #  0x18 -> CANCEL
    '\x19'     #  0x19 -> END OF MEDIUM
    '\x1a'     #  0x1A -> SUBSTITUTE
    '\x1b'     #  0x1B -> ESCAPE
    '\x1c'     #  0x1C -> FILE SEPARATOR
    '\x1d'     #  0x1D -> GROUP SEPARATOR
    '\x1e'     #  0x1E -> RECORD SEPARATOR
    '\x1f'     #  0x1F -> UNIT SEPARATOR
    ' '        #  0x20 -> SPACE
    '!'        #  0x21 -> EXCLAMATION MARK
    '"'        #  0x22 -> QUOTATION MARK
    '#'        #  0x23 -> NUMBER SIGN
    '$'        #  0x24 -> DOLLAR SIGN
    '%'        #  0x25 -> PERCENT SIGN
    '&'        #  0x26 -> AMPERSAND
    "'"        #  0x27 -> APOSTROPHE
    '('        #  0x28 -> LEFT PARENTHESIS
    ')'        #  0x29 -> RIGHT PARENTHESIS
    '*'        #  0x2A -> ASTERISK
    '+'        #  0x2B -> PLUS SIGN
    ','        #  0x2C -> COMMA
    '-'        #  0x2D -> HYPHEN-MINUS
    '.'        #  0x2E -> FULL STOP
    '/'        #  0x2F -> SOLIDUS
    '0'        #  0x30 -> DIGIT ZERO
    '1'        #  0x31 -> DIGIT ONE
    '2'        #  0x32 -> DIGIT TWO
    '3'        #  0x33 -> DIGIT THREE
    '4'        #  0x34 -> DIGIT FOUR
    '5'        #  0x35 -> DIGIT FIVE
    '6'        #  0x36 -> DIGIT SIX
    '7'        #  0x37 -> DIGIT SEVEN
    '8'        #  0x38 -> DIGIT EIGHT
    '9'        #  0x39 -> DIGIT NINE
    ':'        #  0x3A -> COLON
    ';'        #  0x3B -> SEMICOLON
    '<'        #  0x3C -> LESS-THAN SIGN
    '='        #  0x3D -> EQUALS SIGN
    '>'        #  0x3E -> GREATER-THAN SIGN
    '?'        #  0x3F -> QUESTION MARK
    '@'        #  0x40 -> COMMERCIAL AT
    'A'        #  0x41 -> LATIN CAPITAL LETTER A
    'B'        #  0x42 -> LATIN CAPITAL LETTER B
    'C'        #  0x43 -> LATIN CAPITAL LETTER C
    'D'        #  0x44 -> LATIN CAPITAL LETTER D
    'E'        #  0x45 -> LATIN CAPITAL LETTER E
    'F'        #  0x46 -> LATIN CAPITAL LETTER F
    'G'        #  0x47 -> LATIN CAPITAL LETTER G
    'H'        #  0x48 -> LATIN CAPITAL LETTER H
    'I'        #  0x49 -> LATIN CAPITAL LETTER I
    'J'        #  0x4A -> LATIN CAPITAL LETTER J
    'K'        #  0x4B -> LATIN CAPITAL LETTER K
    'L'        #  0x4C -> LATIN CAPITAL LETTER L
    'M'        #  0x4D -> LATIN CAPITAL LETTER M
    'N'        #  0x4E -> LATIN CAPITAL LETTER N
    'O'        #  0x4F -> LATIN CAPITAL LETTER O
    'P'        #  0x50 -> LATIN CAPITAL LETTER P
    'Q'        #  0x51 -> LATIN CAPITAL LETTER Q
    'R'        #  0x52 -> LATIN CAPITAL LETTER R
    'S'        #  0x53 -> LATIN CAPITAL LETTER S
    'T'        #  0x54 -> LATIN CAPITAL LETTER T
    'U'        #  0x55 -> LATIN CAPITAL LETTER U
    'V'        #  0x56 -> LATIN CAPITAL LETTER V
    'W'        #  0x57 -> LATIN CAPITAL LETTER W
    'X'        #  0x58 -> LATIN CAPITAL LETTER X
    'Y'        #  0x59 -> LATIN CAPITAL LETTER Y
    'Z'        #  0x5A -> LATIN CAPITAL LETTER Z
    '['        #  0x5B -> LEFT SQUARE BRACKET
    '\\'       #  0x5C -> REVERSE SOLIDUS
    ']'        #  0x5D -> RIGHT SQUARE BRACKET
    '^'        #  0x5E -> CIRCUMFLEX ACCENT
    '_'        #  0x5F -> LOW LINE
    '`'        #  0x60 -> GRAVE ACCENT
    'a'        #  0x61 -> LATIN SMALL LETTER A
    'b'        #  0x62 -> LATIN SMALL LETTER B
    'c'        #  0x63 -> LATIN SMALL LETTER C
    'd'        #  0x64 -> LATIN SMALL LETTER D
    'e'        #  0x65 -> LATIN SMALL LETTER E
    'f'        #  0x66 -> LATIN SMALL LETTER F
    'g'        #  0x67 -> LATIN SMALL LETTER G
    'h'        #  0x68 -> LATIN SMALL LETTER H
    'i'        #  0x69 -> LATIN SMALL LETTER I
    'j'        #  0x6A -> LATIN SMALL LETTER J
    'k'        #  0x6B -> LATIN SMALL LETTER K
    'l'        #  0x6C -> LATIN SMALL LETTER L
    'm'        #  0x6D -> LATIN SMALL LETTER M
    'n'        #  0x6E -> LATIN SMALL LETTER N
    'o'        #  0x6F -> LATIN SMALL LETTER O
    'p'        #  0x70 -> LATIN SMALL LETTER P
    'q'        #  0x71 -> LATIN SMALL LETTER Q
    'r'        #  0x72 -> LATIN SMALL LETTER R
    's'        #  0x73 -> LATIN SMALL LETTER S
    't'        #  0x74 -> LATIN SMALL LETTER T
    'u'        #  0x75 -> LATIN SMALL LETTER U
    'v'        #  0x76 -> LATIN SMALL LETTER V
    'w'        #  0x77 -> LATIN SMALL LETTER W
    'x'        #  0x78 -> LATIN SMALL LETTER X
    'y'        #  0x79 -> LATIN SMALL LETTER Y
    'z'        #  0x7A -> LATIN SMALL LETTER Z
    '{'        #  0x7B -> LEFT CURLY BRACKET
    '|'        #  0x7C -> VERTICAL LINE
    '}'        #  0x7D -> RIGHT CURLY BRACKET
    '~'        #  0x7E -> TILDE
    '\x7f'     #  0x7F -> DELETE
    '\x80'     #  0x80 -> <control>
    '\x81'     #  0x81 -> <control>
    '\x82'     #  0x82 -> <control>
    '\x83'     #  0x83 -> <control>
    '\x84'     #  0x84 -> <control>
    '\x85'     #  0x85 -> <control>
    '\x86'     #  0x86 -> <control>
    '\x87'     #  0x87 -> <control>
    '\x88'     #  0x88 -> <control>
    '\x89'     #  0x89 -> <control>
    '\x8a'     #  0x8A -> <control>
    '\x8b'     #  0x8B -> <control>
    '\x8c'     #  0x8C -> <control>
    '\x8d'     #  0x8D -> <control>
    '\x8e'     #  0x8E -> <control>
    '\x8f'     #  0x8F -> <control>
    '\x90'     #  0x90 -> <control>
    '\x91'     #  0x91 -> <control>
    '\x92'     #  0x92 -> <control>
    '\x93'     #  0x93 -> <control>
    '\x94'     #  0x94 -> <control>
    '\x95'     #  0x95 -> <control>
    '\x96'     #  0x96 -> <control>
    '\x97'     #  0x97 -> <control>
    '\x98'     #  0x98 -> <control>
    '\x99'     #  0x99 -> <control>
    '\x9a'     #  0x9A -> <control>
    '\x9b'     #  0x9B -> <control>
    '\x9c'     #  0x9C -> <control>
    '\x9d'     #  0x9D -> <control>
    '\x9e'     #  0x9E -> <control>
    '\x9f'     #  0x9F -> <control>
    '\xa0'     #  0xA0 -> NO-BREAK SPACE
    # 0xA1-0xA3 -> UNDEFINED
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\xa4'     #  0xA4 -> CURRENCY SIGN
    # 0xA5-0xAB -> UNDEFINED
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\u060c'   #  0xAC -> ARABIC COMMA
    '\xad'     #  0xAD -> SOFT HYPHEN
    # 0xAE-0xBA -> UNDEFINED
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\u061b'   #  0xBB -> ARABIC SEMICOLON
    # 0xBC-0xBE -> UNDEFINED
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\u061f'   #  0xBF -> ARABIC QUESTION MARK
    # 0xC0 -> UNDEFINED
    '\ufffe'
    '\u0621'   #  0xC1 -> ARABIC LETTER HAMZA
    '\u0622'   #  0xC2 -> ARABIC LETTER ALEF WITH MADDA ABOVE
    '\u0623'   #  0xC3 -> ARABIC LETTER ALEF WITH HAMZA ABOVE
    '\u0624'   #  0xC4 -> ARABIC LETTER WAW WITH HAMZA ABOVE
    '\u0625'   #  0xC5 -> ARABIC LETTER ALEF WITH HAMZA BELOW
    '\u0626'   #  0xC6 -> ARABIC LETTER YEH WITH HAMZA ABOVE
    '\u0627'   #  0xC7 -> ARABIC LETTER ALEF
    '\u0628'   #  0xC8 -> ARABIC LETTER BEH
    '\u0629'   #  0xC9 -> ARABIC LETTER TEH MARBUTA
    '\u062a'   #  0xCA -> ARABIC LETTER TEH
    '\u062b'   #  0xCB -> ARABIC LETTER THEH
    '\u062c'   #  0xCC -> ARABIC LETTER JEEM
    '\u062d'   #  0xCD -> ARABIC LETTER HAH
    '\u062e'   #  0xCE -> ARABIC LETTER KHAH
    '\u062f'   #  0xCF -> ARABIC LETTER DAL
    '\u0630'   #  0xD0 -> ARABIC LETTER THAL
    '\u0631'   #  0xD1 -> ARABIC LETTER REH
    '\u0632'   #  0xD2 -> ARABIC LETTER ZAIN
    '\u0633'   #  0xD3 -> ARABIC LETTER SEEN
    '\u0634'   #  0xD4 -> ARABIC LETTER SHEEN
    '\u0635'   #  0xD5 -> ARABIC LETTER SAD
    '\u0636'   #  0xD6 -> ARABIC LETTER DAD
    '\u0637'   #  0xD7 -> ARABIC LETTER TAH
    '\u0638'   #  0xD8 -> ARABIC LETTER ZAH
    '\u0639'   #  0xD9 -> ARABIC LETTER AIN
    '\u063a'   #  0xDA -> ARABIC LETTER GHAIN
    # 0xDB-0xDF -> UNDEFINED
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\u0640'   #  0xE0 -> ARABIC TATWEEL
    '\u0641'   #  0xE1 -> ARABIC LETTER FEH
    '\u0642'   #  0xE2 -> ARABIC LETTER QAF
    '\u0643'   #  0xE3 -> ARABIC LETTER KAF
    '\u0644'   #  0xE4 -> ARABIC LETTER LAM
    '\u0645'   #  0xE5 -> ARABIC LETTER MEEM
    '\u0646'   #  0xE6 -> ARABIC LETTER NOON
    '\u0647'   #  0xE7 -> ARABIC LETTER HEH
    '\u0648'   #  0xE8 -> ARABIC LETTER WAW
    '\u0649'   #  0xE9 -> ARABIC LETTER ALEF MAKSURA
    '\u064a'   #  0xEA -> ARABIC LETTER YEH
    '\u064b'   #  0xEB -> ARABIC FATHATAN
    '\u064c'   #  0xEC -> ARABIC DAMMATAN
    '\u064d'   #  0xED -> ARABIC KASRATAN
    '\u064e'   #  0xEE -> ARABIC FATHA
    '\u064f'   #  0xEF -> ARABIC DAMMA
    '\u0650'   #  0xF0 -> ARABIC KASRA
    '\u0651'   #  0xF1 -> ARABIC SHADDA
    '\u0652'   #  0xF2 -> ARABIC SUKUN
    # 0xF3-0xFF -> UNDEFINED
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
    '\ufffe'
)
### Encoding table
# Inverse mapping (code point -> byte) built from decoding_table;
# characters whose slot is U+FFFE get no entry, so encoding them fails.
encoding_table=codecs.charmap_build(decoding_table)
| apache-2.0 |
ryepdx/shipping_api_fedex | helpers/fedex/services/rate_service.py | 1 | 5479 | """
Rate Service Module
===================
This package contains classes to request pre-ship rating information and to
determine estimated or courtesy billing quotes. Time in Transit can be
returned with the rates if it is specified in the request.
"""
from datetime import datetime
from .. base_service import FedexBaseService
class FedexRateServiceRequest(FedexBaseService):
    """
    This class allows you to get the shipping charges for a particular address.
    You will need to populate the data structures in self.RequestedShipment,
    then send the request via send_request() (inherited from
    L{FedexBaseService}).
    """

    def __init__(self, config_obj, *args, **kwargs):
        """
        The optional keyword args detailed on L{FedexBaseService}
        apply here as well.

        @type config_obj: L{FedexConfig}
        @param config_obj: A valid FedexConfig object.
        """
        self._config_obj = config_obj

        # Holds version info for the VersionId SOAP object; 'crs' is the
        # FedEx service id for the Rate (courtesy rate quote) service, v8.
        self._version_info = {'service_id': 'crs', 'major': '8',
                             'intermediate': '0', 'minor': '0'}
        self.RequestedShipment = None
        """@ivar: Holds the RequestedShipment WSDL object."""
        # Call the parent FedexBaseService class for basic setup work
        # (it loads the WSDL and triggers _prepare_wsdl_objects below).
        super(FedexRateServiceRequest, self).__init__(self._config_obj,
                                                'RateService_v8.wsdl',
                                                *args, **kwargs)
        self.ClientDetail.Region = config_obj.express_region_code

    def _prepare_wsdl_objects(self):
        """
        This is the data that will be used to create your shipment. Create
        the data structure and get it ready for the WSDL request.
        """
        # Default behavior is to not request transit information.
        self.ReturnTransitAndCommit = False

        # This is the primary data structure for processShipment requests.
        self.RequestedShipment = self.client.factory.create('RequestedShipment')
        self.RequestedShipment.ShipTimestamp = datetime.now()

        TotalWeight = self.client.factory.create('Weight')
        # Start at nothing.
        TotalWeight.Value = 0.0
        # Default to pounds.
        TotalWeight.Units = 'LB'
        # This is the total weight of the entire shipment. Shipments may
        # contain more than one package.
        self.RequestedShipment.TotalWeight = TotalWeight

        # This is the top level data structure for Shipper information.
        ShipperParty = self.client.factory.create('Party')
        ShipperParty.Address = self.client.factory.create('Address')
        ShipperParty.Contact = self.client.factory.create('Contact')
        # Link the ShipperParty to our master data structure.
        self.RequestedShipment.Shipper = ShipperParty

        # This is the top level data structure for Recipient information.
        RecipientParty = self.client.factory.create('Party')
        RecipientParty.Contact = self.client.factory.create('Contact')
        RecipientParty.Address = self.client.factory.create('Address')
        # Link the RecipientParty object to our master data structure.
        self.RequestedShipment.Recipient = RecipientParty

        Payor = self.client.factory.create('Payor')
        # Grab the account number from the FedexConfig object by default.
        Payor.AccountNumber = self._config_obj.account_number
        # Assume US.
        Payor.CountryCode = 'US'
        # NOTE(review): Payor is populated above but never attached to
        # self.RequestedShipment (e.g. via a ShippingChargesPayment object),
        # so it appears unused unless a caller wires it up — confirm against
        # the RateService_v8 WSDL before relying on it or removing it.

        # ACCOUNT or LIST
        self.RequestedShipment.RateRequestTypes = ['ACCOUNT']

        # Start with no packages, user must add them.
        self.RequestedShipment.PackageCount = 0
        self.RequestedShipment.RequestedPackageLineItems = []

        # This is good to review if you'd like to see what the data structure
        # looks like.
        self.logger.debug(self.RequestedShipment)

    def _assemble_and_send_request(self):
        """
        Fires off the Fedex request.

        @warning: NEVER CALL THIS METHOD DIRECTLY. CALL send_request(),
            WHICH RESIDES ON FedexBaseService AND IS INHERITED.
        """
        # Fire off the query.
        response = self.client.service.getRates(WebAuthenticationDetail=self.WebAuthenticationDetail,
                                        ClientDetail=self.ClientDetail,
                                        TransactionDetail=self.TransactionDetail,
                                        Version=self.VersionId,
                                        RequestedShipment=self.RequestedShipment,
                                        ReturnTransitAndCommit=self.ReturnTransitAndCommit)
        return response

    def add_package(self, package_item):
        """
        Adds a package to the ship request.

        @type package_item: WSDL object, type of RequestedPackageLineItem
            WSDL object.
        @keyword package_item: A RequestedPackageLineItem, created by
            calling create_wsdl_object_of_type('RequestedPackageLineItem') on
            this ShipmentRequest object. See examples/create_shipment.py for
            more details.
        """
        self.RequestedShipment.RequestedPackageLineItems.append(package_item)

        # Keep the shipment-level totals in sync with the package just added.
        package_weight = package_item.Weight.Value
        self.RequestedShipment.TotalWeight.Value += package_weight
        self.RequestedShipment.PackageCount += 1
| agpl-3.0 |
robinro/ansible | lib/ansible/modules/cloud/amazon/cloudfront_facts.py | 53 | 31932 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cloudfront_facts
short_description: Obtain facts about an AWS CloudFront distribution
description:
- Gets information about an AWS CloudFront distribution
requirements:
- boto3 >= 1.0.0
- python >= 2.6
version_added: "2.3"
author: Willem van Ketwich (@wilvk)
options:
distribution_id:
description:
- The id of the CloudFront distribution. Used with I(distribution), I(distribution_config),
I(invalidation), I(streaming_distribution), I(streaming_distribution_config), I(list_invalidations).
required: false
invalidation_id:
description:
- The id of the invalidation to get information about. Used with I(invalidation).
required: false
origin_access_identity_id:
description:
- The id of the cloudfront origin access identity to get information about.
required: false
web_acl_id:
description:
- Used with I(list_distributions_by_web_acl_id).
required: false
domain_name_alias:
description:
- Can be used instead of I(distribution_id) - uses the aliased CNAME for the cloudfront
distribution to get the distribution id where required.
required: false
all_lists:
description:
- Get all cloudfront lists that do not require parameters.
required: false
default: false
origin_access_identity:
description:
- Get information about an origin access identity. Requires I(origin_access_identity_id)
to be specified.
required: false
default: false
origin_access_identity_config:
description:
- Get the configuration information about an origin access identity. Requires
I(origin_access_identity_id) to be specified.
required: false
default: false
distribution:
description:
- Get information about a distribution. Requires I(distribution_id) or I(domain_name_alias)
to be specified.
required: false
default: false
distribution_config:
description:
- Get the configuration information about a distribution. Requires I(distribution_id)
or I(domain_name_alias) to be specified.
required: false
default: false
invalidation:
description:
- Get information about an invalidation. Requires I(invalidation_id) to be specified.
required: false
default: false
streaming_distribution:
description:
- Get information about a specified RTMP distribution. Requires I(distribution_id) or
I(domain_name_alias) to be specified.
required: false
default: false
    streaming_distribution_config:
description:
- Get the configuration information about a specified RTMP distribution.
Requires I(distribution_id) or I(domain_name_alias) to be specified.
required: false
default: false
list_origin_access_identities:
description:
- Get a list of cloudfront origin access identities. Requires I(origin_access_identity_id) to be set.
required: false
default: false
list_distributions:
description:
- Get a list of cloudfront distributions.
required: false
default: false
list_distributions_by_web_acl_id:
description:
- Get a list of distributions using web acl id as a filter. Requires I(web_acl_id) to be set.
required: false
default: false
list_invalidations:
description:
- Get a list of invalidations. Requires I(distribution_id) or I(domain_name_alias) to be specified.
required: false
default: false
list_streaming_distributions:
description:
- Get a list of streaming distributions.
required: false
default: false
summary:
description:
- Returns a summary of all distributions, streaming distributions and origin_access_identities.
This is the default behaviour if no option is selected.
required: false
default: false
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Get a summary of distributions
- cloudfront_facts:
summary: true
# Get information about a distribution
- cloudfront_facts:
distribution: true
distribution_id: my-cloudfront-distribution-id
# Get information about a distribution using the CNAME of the cloudfront distribution.
- cloudfront_facts:
distribution: true
domain_name_alias: www.my-website.com
# Facts are published in ansible_facts['cloudfront'][<distribution_name>]
- debug:
msg: "{{ ansible_facts['cloudfront']['my-cloudfront-distribution-id'] }}"
- debug:
msg: "{{ ansible_facts['cloudfront']['www.my-website.com'] }}"
# Get all information about an invalidation for a distribution.
- cloudfront_facts:
invalidation: true
distribution_id: my-cloudfront-distribution-id
invalidation_id: my-cloudfront-invalidation-id
# Get all information about a cloudfront origin access identity.
- cloudfront_facts:
origin_access_identity: true
origin_access_identity_id: my-cloudfront-origin-access-identity-id
# Get all information about a cloudfront origin access identity.
- cloudfront_facts:
origin_access_identity: true
origin_access_identity_id: my-cloudfront-origin-access-identity-id
# Get all information about lists not requiring parameters (ie. list_origin_access_identities, list_distributions, list_streaming_distributions)
- cloudfront_facts:
all_lists: true
'''
RETURN = '''
origin_access_identity:
description: Describes the origin access identity information. Requires I(origin_access_identity_id) to be set.
returned: only if I(origin_access_identity) is true
type: dict
origin_access_identity_configuration:
description: Describes the origin access identity information configuration information. Requires I(origin_access_identity_id) to be set.
returned: only if I(origin_access_identity_configuration) is true
type: dict
distribution:
description: >
Facts about a cloudfront distribution. Requires I(distribution_id) or I(domain_name_alias)
to be specified. Requires I(origin_access_identity_id) to be set.
returned: only if distribution is true
type: dict
distribution_config:
description: >
Facts about a cloudfront distribution's config. Requires I(distribution_id) or I(domain_name_alias)
to be specified.
returned: only if I(distribution_config) is true
type: dict
invalidation:
description: >
Describes the invalidation information for the distribution. Requires
I(invalidation_id) to be specified and either I(distribution_id) or I(domain_name_alias.)
returned: only if invalidation is true
type: dict
streaming_distribution:
description: >
Describes the streaming information for the distribution. Requires
I(distribution_id) or I(domain_name_alias) to be specified.
returned: only if I(streaming_distribution) is true
type: dict
streaming_distribution_configuration:
description: >
Describes the streaming configuration information for the distribution.
Requires I(distribution_id) or I(domain_name_alias) to be specified.
returned: only if I(streaming_distribution_configuration) is true
type: dict
summary:
description: Gives a summary of distributions, streaming distributions and origin access identities.
returned: as default or if summary is true
type: dict
'''
from ansible.module_utils.ec2 import get_aws_connection_info, ec2_argument_spec, boto3_conn, HAS_BOTO3
from ansible.module_utils.ec2 import boto3_tag_list_to_ansible_dict, camel_dict_to_snake_dict
from ansible.module_utils.basic import AnsibleModule
from functools import partial
import traceback
try:
import botocore
except ImportError:
pass # will be caught by imported HAS_BOTO3
class CloudFrontServiceManager:
    """Thin wrapper around the boto3 CloudFront client.

    Every public method either returns the requested facts or calls
    module.fail_json() on a botocore ClientError, so callers never see
    AWS exceptions directly.
    """

    def __init__(self, module):
        self.module = module

        try:
            region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
            self.client = boto3_conn(module, conn_type='client',
                                     resource='cloudfront', region=region,
                                     endpoint=ec2_url, **aws_connect_kwargs)
        except botocore.exceptions.NoRegionError:
            self.module.fail_json(msg="Region must be specified as a parameter, in AWS_DEFAULT_REGION "
                                      "environment variable or in boto configuration file")
        except botocore.exceptions.ClientError as e:
            self.module.fail_json(msg="Can't establish connection - " + str(e),
                                  exception=traceback.format_exc(),
                                  **camel_dict_to_snake_dict(e.response))

    def get_distribution(self, distribution_id):
        """Return full details (including ETag) of a web distribution."""
        try:
            func = partial(self.client.get_distribution, Id=distribution_id)
            return self.paginated_response(func)
        except botocore.exceptions.ClientError as e:
            self.module.fail_json(msg="Error describing distribution - " + str(e),
                                  exception=traceback.format_exc(),
                                  **camel_dict_to_snake_dict(e.response))

    def get_distribution_config(self, distribution_id):
        """Return only the configuration portion of a web distribution."""
        try:
            func = partial(self.client.get_distribution_config, Id=distribution_id)
            return self.paginated_response(func)
        except botocore.exceptions.ClientError as e:
            # Bug fix: previously called traceback.format_exec(e), which does
            # not exist and raised AttributeError while reporting the error.
            self.module.fail_json(msg="Error describing distribution configuration - " + str(e),
                                  exception=traceback.format_exc(),
                                  **camel_dict_to_snake_dict(e.response))

    def get_origin_access_identity(self, origin_access_identity_id):
        """Return details of a CloudFront origin access identity."""
        try:
            func = partial(self.client.get_cloud_front_origin_access_identity, Id=origin_access_identity_id)
            return self.paginated_response(func)
        except botocore.exceptions.ClientError as e:
            self.module.fail_json(msg="Error describing origin access identity - " + str(e),
                                  exception=traceback.format_exc(),
                                  **camel_dict_to_snake_dict(e.response))

    def get_origin_access_identity_config(self, origin_access_identity_id):
        """Return only the configuration of an origin access identity."""
        try:
            func = partial(self.client.get_cloud_front_origin_access_identity_config, Id=origin_access_identity_id)
            return self.paginated_response(func)
        except botocore.exceptions.ClientError as e:
            self.module.fail_json(msg="Error describing origin access identity configuration - " + str(e),
                                  exception=traceback.format_exc(),
                                  **camel_dict_to_snake_dict(e.response))

    def get_invalidation(self, distribution_id, invalidation_id):
        """Return details of one invalidation of the given distribution."""
        try:
            func = partial(self.client.get_invalidation, DistributionId=distribution_id, Id=invalidation_id)
            return self.paginated_response(func)
        except botocore.exceptions.ClientError as e:
            self.module.fail_json(msg="Error describing invalidation - " + str(e),
                                  exception=traceback.format_exc(),
                                  **camel_dict_to_snake_dict(e.response))

    def get_streaming_distribution(self, distribution_id):
        """Return full details of an RTMP (streaming) distribution."""
        try:
            func = partial(self.client.get_streaming_distribution, Id=distribution_id)
            return self.paginated_response(func)
        except botocore.exceptions.ClientError as e:
            self.module.fail_json(msg="Error describing streaming distribution - " + str(e),
                                  exception=traceback.format_exc(),
                                  **camel_dict_to_snake_dict(e.response))

    def get_streaming_distribution_config(self, distribution_id):
        """Return only the configuration of an RTMP (streaming) distribution."""
        try:
            func = partial(self.client.get_streaming_distribution_config, Id=distribution_id)
            return self.paginated_response(func)
        except botocore.exceptions.ClientError as e:
            self.module.fail_json(msg="Error describing streaming distribution - " + str(e),
                                  exception=traceback.format_exc(),
                                  **camel_dict_to_snake_dict(e.response))

    def list_origin_access_identities(self):
        """Return the list of origin access identity summaries (or {} if none)."""
        try:
            func = partial(self.client.list_cloud_front_origin_access_identities)
            origin_access_identity_list = self.paginated_response(func, 'CloudFrontOriginAccessIdentityList')
            if origin_access_identity_list['Quantity'] > 0:
                return origin_access_identity_list['Items']
            return {}
        except botocore.exceptions.ClientError as e:
            self.module.fail_json(msg="Error listing cloud front origin access identities - " + str(e),
                                  exception=traceback.format_exc(),
                                  **camel_dict_to_snake_dict(e.response))

    def list_distributions(self, keyed=True):
        """List web distributions.

        When keyed is True the result is a dict keyed by both distribution
        id and each alias (see keyed_list_helper); otherwise a plain list.
        """
        try:
            func = partial(self.client.list_distributions)
            distribution_list = self.paginated_response(func, 'DistributionList')
            if distribution_list['Quantity'] == 0:
                return {}
            else:
                distribution_list = distribution_list['Items']
            if not keyed:
                return distribution_list
            return self.keyed_list_helper(distribution_list)
        except botocore.exceptions.ClientError as e:
            self.module.fail_json(msg="Error listing distributions - " + str(e),
                                  exception=traceback.format_exc(),
                                  **camel_dict_to_snake_dict(e.response))

    def list_distributions_by_web_acl_id(self, web_acl_id):
        """List web distributions attached to the given WAF web ACL."""
        try:
            # Bug fix: the boto3 parameter is spelled WebACLId, not WebAclId;
            # the old spelling raised a ParamValidationError.
            func = partial(self.client.list_distributions_by_web_acl_id, WebACLId=web_acl_id)
            distribution_list = self.paginated_response(func, 'DistributionList')
            if distribution_list['Quantity'] == 0:
                return {}
            else:
                distribution_list = distribution_list['Items']
            return self.keyed_list_helper(distribution_list)
        except botocore.exceptions.ClientError as e:
            self.module.fail_json(msg="Error listing distributions by web acl id - " + str(e),
                                  exception=traceback.format_exc(),
                                  **camel_dict_to_snake_dict(e.response))

    def list_invalidations(self, distribution_id):
        """Return invalidation summaries for a distribution (or {} if none)."""
        try:
            func = partial(self.client.list_invalidations, DistributionId=distribution_id)
            invalidation_list = self.paginated_response(func, 'InvalidationList')
            if invalidation_list['Quantity'] > 0:
                return invalidation_list['Items']
            return {}
        except botocore.exceptions.ClientError as e:
            self.module.fail_json(msg="Error listing invalidations - " + str(e),
                                  exception=traceback.format_exc(),
                                  **camel_dict_to_snake_dict(e.response))

    def list_streaming_distributions(self, keyed=True):
        """List RTMP distributions; same keying behaviour as list_distributions."""
        try:
            func = partial(self.client.list_streaming_distributions)
            streaming_distribution_list = self.paginated_response(func, 'StreamingDistributionList')
            if streaming_distribution_list['Quantity'] == 0:
                return {}
            else:
                streaming_distribution_list = streaming_distribution_list['Items']
            if not keyed:
                return streaming_distribution_list
            return self.keyed_list_helper(streaming_distribution_list)
        except botocore.exceptions.ClientError as e:
            self.module.fail_json(msg="Error listing streaming distributions - " + str(e),
                                  exception=traceback.format_exc(),
                                  **camel_dict_to_snake_dict(e.response))

    def summary(self):
        """Combine distribution, streaming distribution and OAI summaries."""
        summary_dict = {}
        summary_dict.update(self.summary_get_distribution_list(False))
        summary_dict.update(self.summary_get_distribution_list(True))
        summary_dict.update(self.summary_get_origin_access_identity_list())
        return summary_dict

    def summary_get_origin_access_identity_list(self):
        """Return {'origin_access_identities': [{'Id', 'ETag'}, ...]}."""
        try:
            origin_access_identity_list = {'origin_access_identities': []}
            origin_access_identities = self.list_origin_access_identities()
            for origin_access_identity in origin_access_identities:
                oai_id = origin_access_identity['Id']
                # A second lookup per identity is needed to obtain the ETag.
                oai_full_response = self.get_origin_access_identity(oai_id)
                oai_summary = {'Id': oai_id, 'ETag': oai_full_response['ETag']}
                origin_access_identity_list['origin_access_identities'].append(oai_summary)
            return origin_access_identity_list
        except botocore.exceptions.ClientError as e:
            self.module.fail_json(msg="Error generating summary of origin access identities - " + str(e),
                                  exception=traceback.format_exc(),
                                  **camel_dict_to_snake_dict(e.response))

    def summary_get_distribution_list(self, streaming=False):
        """Return a trimmed summary of web or streaming distributions.

        Each entry carries a fixed key subset plus Aliases, ETag and Tags;
        web distributions additionally get WebACLId and Invalidations.
        """
        try:
            list_name = 'streaming_distributions' if streaming else 'distributions'
            key_list = ['Id', 'ARN', 'Status', 'LastModifiedTime', 'DomainName', 'Comment', 'PriceClass', 'Enabled']
            distribution_list = {list_name: []}
            distributions = self.list_streaming_distributions(False) if streaming else self.list_distributions(False)
            for dist in distributions:
                temp_distribution = {}
                for key_name in key_list:
                    temp_distribution[key_name] = dist[key_name]
                temp_distribution['Aliases'] = [alias for alias in dist['Aliases'].get('Items', [])]
                temp_distribution['ETag'] = self.get_etag_from_distribution_id(dist['Id'], streaming)
                if not streaming:
                    temp_distribution['WebACLId'] = dist['WebACLId']
                    invalidation_ids = self.get_list_of_invalidation_ids_from_distribution_id(dist['Id'])
                    if invalidation_ids:
                        temp_distribution['Invalidations'] = invalidation_ids
                resource_tags = self.client.list_tags_for_resource(Resource=dist['ARN'])
                temp_distribution['Tags'] = boto3_tag_list_to_ansible_dict(resource_tags['Tags'].get('Items', []))
                distribution_list[list_name].append(temp_distribution)
            return distribution_list
        except botocore.exceptions.ClientError as e:
            self.module.fail_json(msg="Error generating summary of distributions - " + str(e),
                                  exception=traceback.format_exc(),
                                  **camel_dict_to_snake_dict(e.response))
        except Exception as e:
            # Non-AWS failures (e.g. unexpected response shape) still produce
            # a clean module failure rather than a traceback on stdout.
            self.module.fail_json(msg="Error generating summary of distributions - " + str(e),
                                  exception=traceback.format_exc())

    def get_etag_from_distribution_id(self, distribution_id, streaming):
        """Fetch the ETag for a (streaming) distribution id."""
        distribution = {}
        if not streaming:
            distribution = self.get_distribution(distribution_id)
        else:
            distribution = self.get_streaming_distribution(distribution_id)
        return distribution['ETag']

    def get_list_of_invalidation_ids_from_distribution_id(self, distribution_id):
        """Return the list of invalidation ids for a distribution."""
        try:
            invalidation_ids = []
            invalidations = self.list_invalidations(distribution_id)
            for invalidation in invalidations:
                invalidation_ids.append(invalidation['Id'])
            return invalidation_ids
        except botocore.exceptions.ClientError as e:
            self.module.fail_json(msg="Error getting list of invalidation ids - " + str(e),
                                  exception=traceback.format_exc(),
                                  **camel_dict_to_snake_dict(e.response))

    def get_distribution_id_from_domain_name(self, domain_name):
        """Resolve a CNAME alias to a distribution id ("" if not found).

        Searches both web and streaming distributions; comparison is
        case-insensitive.
        """
        try:
            distribution_id = ""
            distributions = self.list_distributions(False)
            distributions += self.list_streaming_distributions(False)
            for dist in distributions:
                if 'Items' in dist['Aliases']:
                    for alias in dist['Aliases']['Items']:
                        if str(alias).lower() == domain_name.lower():
                            distribution_id = dist['Id']
                            break
            return distribution_id
        except botocore.exceptions.ClientError as e:
            self.module.fail_json(msg="Error getting distribution id from domain name - " + str(e),
                                  exception=traceback.format_exc(),
                                  **camel_dict_to_snake_dict(e.response))

    def get_aliases_from_distribution_id(self, distribution_id):
        """Return all CNAME aliases configured on a web distribution."""
        aliases = []
        try:
            distributions = self.list_distributions(False)
            for dist in distributions:
                if dist['Id'] == distribution_id and 'Items' in dist['Aliases']:
                    for alias in dist['Aliases']['Items']:
                        aliases.append(alias)
                    break
            return aliases
        except botocore.exceptions.ClientError as e:
            self.module.fail_json(msg="Error getting list of aliases from distribution_id - " + str(e),
                                  exception=traceback.format_exc(),
                                  **camel_dict_to_snake_dict(e.response))

    def paginated_response(self, func, result_key=""):
        '''
        Returns expanded response for paginated operations.
        The 'result_key' is used to define the concatenated results that are combined from each paginated response.

        NOTE(review): CloudFront list_* APIs paginate with Marker/NextMarker
        rather than NextToken, so this loop likely only ever fetches the
        first page of results — confirm against the boto3 CloudFront docs
        before relying on it for accounts with truncated listings.
        '''
        args = dict()
        results = dict()
        loop = True
        while loop:
            response = func(**args)
            if result_key == "":
                result = response
                result.pop('ResponseMetadata', None)
            else:
                result = response.get(result_key)
            results.update(result)
            args['NextToken'] = response.get('NextToken')
            loop = args['NextToken'] is not None
        return results

    def keyed_list_helper(self, list_to_key):
        """Re-key a distribution list by id and by every alias.

        Each item appears once per alias plus once under its id, all
        referencing the same dict.
        """
        keyed_list = dict()
        for item in list_to_key:
            distribution_id = item['Id']
            if 'Items' in item['Aliases']:
                aliases = item['Aliases']['Items']
                for alias in aliases:
                    keyed_list.update({alias: item})
            keyed_list.update({distribution_id: item})
        return keyed_list
def set_facts_for_distribution_id_and_alias(details, facts, distribution_id, aliases):
    """Merge *details* into the facts entry for the distribution id and for
    every alias, then return the (mutated) facts dict."""
    for fact_key in [distribution_id] + list(aliases):
        facts[fact_key].update(details)
    return facts
def main():
    """Module entry point: parse parameters, validate option combinations,
    gather the requested CloudFront facts and exit with ansible_facts."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        distribution_id=dict(required=False, type='str'),
        invalidation_id=dict(required=False, type='str'),
        origin_access_identity_id=dict(required=False, type='str'),
        # Bug fix: web_acl_id is documented and read below via
        # module.params.get('web_acl_id'), but was never declared here, so
        # it always came back None and list_distributions_by_web_acl_id
        # could never be used.
        web_acl_id=dict(required=False, type='str'),
        domain_name_alias=dict(required=False, type='str'),
        all_lists=dict(required=False, default=False, type='bool'),
        distribution=dict(required=False, default=False, type='bool'),
        distribution_config=dict(required=False, default=False, type='bool'),
        origin_access_identity=dict(required=False, default=False, type='bool'),
        origin_access_identity_config=dict(required=False, default=False, type='bool'),
        invalidation=dict(required=False, default=False, type='bool'),
        streaming_distribution=dict(required=False, default=False, type='bool'),
        streaming_distribution_config=dict(required=False, default=False, type='bool'),
        list_origin_access_identities=dict(required=False, default=False, type='bool'),
        list_distributions=dict(required=False, default=False, type='bool'),
        list_distributions_by_web_acl_id=dict(required=False, default=False, type='bool'),
        list_invalidations=dict(required=False, default=False, type='bool'),
        list_streaming_distributions=dict(required=False, default=False, type='bool'),
        summary=dict(required=False, default=False, type='bool')
    ))

    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)

    if not HAS_BOTO3:
        module.fail_json(msg='boto3 is required.')

    service_mgr = CloudFrontServiceManager(module)

    distribution_id = module.params.get('distribution_id')
    invalidation_id = module.params.get('invalidation_id')
    origin_access_identity_id = module.params.get('origin_access_identity_id')
    web_acl_id = module.params.get('web_acl_id')
    domain_name_alias = module.params.get('domain_name_alias')
    all_lists = module.params.get('all_lists')
    distribution = module.params.get('distribution')
    distribution_config = module.params.get('distribution_config')
    origin_access_identity = module.params.get('origin_access_identity')
    origin_access_identity_config = module.params.get('origin_access_identity_config')
    invalidation = module.params.get('invalidation')
    streaming_distribution = module.params.get('streaming_distribution')
    streaming_distribution_config = module.params.get('streaming_distribution_config')
    list_origin_access_identities = module.params.get('list_origin_access_identities')
    list_distributions = module.params.get('list_distributions')
    list_distributions_by_web_acl_id = module.params.get('list_distributions_by_web_acl_id')
    list_invalidations = module.params.get('list_invalidations')
    list_streaming_distributions = module.params.get('list_streaming_distributions')
    summary = module.params.get('summary')

    aliases = []
    result = {'cloudfront': {}}
    facts = {}

    require_distribution_id = (distribution or distribution_config or invalidation or streaming_distribution or
                               streaming_distribution_config or list_invalidations)

    # Set default to summary if no option specified.
    summary = summary or not (distribution or distribution_config or origin_access_identity or
                              origin_access_identity_config or invalidation or streaming_distribution or
                              streaming_distribution_config or list_origin_access_identities or
                              list_distributions_by_web_acl_id or list_invalidations or
                              list_streaming_distributions or list_distributions)

    # Validations: fail early on missing companion parameters.
    if require_distribution_id and distribution_id is None and domain_name_alias is None:
        module.fail_json(msg='Error distribution_id or domain_name_alias have not been specified.')
    if (invalidation and invalidation_id is None):
        module.fail_json(msg='Error invalidation_id has not been specified.')
    if (origin_access_identity or origin_access_identity_config) and origin_access_identity_id is None:
        module.fail_json(msg='Error origin_access_identity_id has not been specified.')
    if list_distributions_by_web_acl_id and web_acl_id is None:
        module.fail_json(msg='Error web_acl_id has not been specified.')

    # Resolve the distribution id from a domain name alias when needed.
    if require_distribution_id and distribution_id is None:
        distribution_id = service_mgr.get_distribution_id_from_domain_name(domain_name_alias)
        if not distribution_id:
            module.fail_json(msg='Error unable to source a distribution id from domain_name_alias')

    # Pre-create empty facts entries keyed by the appropriate cloudfront id
    # (distribution id plus each alias, invalidation id, OAI id or web ACL id).
    if distribution_id and not list_invalidations:
        facts = {distribution_id: {}}
        aliases = service_mgr.get_aliases_from_distribution_id(distribution_id)
        for alias in aliases:
            facts.update({alias: {}})
        if invalidation_id:
            facts.update({invalidation_id: {}})
    elif distribution_id and list_invalidations:
        facts = {distribution_id: {}}
        aliases = service_mgr.get_aliases_from_distribution_id(distribution_id)
        for alias in aliases:
            facts.update({alias: {}})
    elif origin_access_identity_id:
        facts = {origin_access_identity_id: {}}
    elif web_acl_id:
        facts = {web_acl_id: {}}

    # Get details based on the options selected.
    if distribution:
        facts_to_set = service_mgr.get_distribution(distribution_id)
    if distribution_config:
        facts_to_set = service_mgr.get_distribution_config(distribution_id)
    if origin_access_identity:
        facts[origin_access_identity_id].update(service_mgr.get_origin_access_identity(origin_access_identity_id))
    if origin_access_identity_config:
        facts[origin_access_identity_id].update(service_mgr.get_origin_access_identity_config(origin_access_identity_id))
    if invalidation:
        facts_to_set = service_mgr.get_invalidation(distribution_id, invalidation_id)
        facts[invalidation_id].update(facts_to_set)
    if streaming_distribution:
        facts_to_set = service_mgr.get_streaming_distribution(distribution_id)
    if streaming_distribution_config:
        facts_to_set = service_mgr.get_streaming_distribution_config(distribution_id)
    if list_invalidations:
        facts_to_set = {'invalidations': service_mgr.list_invalidations(distribution_id)}
    if 'facts_to_set' in vars():
        facts = set_facts_for_distribution_id_and_alias(facts_to_set, facts, distribution_id, aliases)

    # Get list facts based on the options selected.
    if all_lists or list_origin_access_identities:
        facts['origin_access_identities'] = service_mgr.list_origin_access_identities()
    if all_lists or list_distributions:
        facts['distributions'] = service_mgr.list_distributions()
    if all_lists or list_streaming_distributions:
        facts['streaming_distributions'] = service_mgr.list_streaming_distributions()
    if list_distributions_by_web_acl_id:
        facts['distributions_by_web_acl_id'] = service_mgr.list_distributions_by_web_acl_id(web_acl_id)
    if list_invalidations:
        facts['invalidations'] = service_mgr.list_invalidations(distribution_id)

    # Default summary option.
    if summary:
        facts['summary'] = service_mgr.summary()

    result['changed'] = False
    result['cloudfront'].update(facts)
    module.exit_json(msg="Retrieved cloudfront facts.", ansible_facts=result)
if __name__ == '__main__':
main()
| gpl-3.0 |
tmurph/todo-sync | todo_sync/edit_script.py | 1 | 5505 | import copy
import todo_sync.node as node
import todo_sync.helpers as helpers
def lcs2(X, Y, equal):
    """
    Apply the greedy LCS/SES algorithm (Myers' O(ND) diff) between the
    sequences X and Y (may be any Python sequences).

    equal is a predicate comparing an element of X with an element of Y;
    it must return a falsy value if they differ and a truthy value if they
    match.

    Return the longest common subsequence as a list of (x_elem, y_elem)
    pairs of matched elements.
    """
    N, M = len(X), len(Y)
    if not X or not Y:
        return []
    # Upper bound on the number of edit steps D; also sizes the diagonal
    # arrays.  (Renamed from `max`, which shadowed the builtin.)
    max_d = N + M
    # v[k] is the furthest x reached on diagonal k; common[k] collects the
    # matched index pairs along the path to that point.  Negative k works
    # via Python's negative indexing, which the 2*max_d+1 sizing allows.
    v = [0 for i in range(2 * max_d + 1)]
    common = [[] for i in range(2 * max_d + 1)]
    for D in range(max_d + 1):
        for k in range(-D, D + 1, 2):
            # Choose whether to extend from the diagonal above or below.
            if k == -D or k != D and v[k - 1] < v[k + 1]:
                x = v[k + 1]
                common[k] = common[k + 1][:]
            else:
                x = v[k - 1] + 1
                common[k] = common[k - 1][:]
            y = x - k
            # Follow the "snake": consume a run of matching elements.
            while x < N and y < M and equal(X[x], Y[y]):
                common[k].append((x, y))
                x += 1
                y += 1
            v[k] = x
            if x >= N and y >= M:
                return [(X[x], Y[y]) for x, y in common[k]]
def edit_script(s_tree, t_tree,
                s_maps_to_t_p, s_equals_t_p, make_s_from_t,
                no_delete=False):
    """Update S_TREE to match T_TREE.

    Implements an algorithm as described in "Change detection in
    hierarchically structured information" by S. Chawathe, A. Rajaraman,
    H. Garcia-Molina and J. Widom ([CRGMW95]): nodes are matched between
    the two trees, then insert/update/move/align/delete operations are
    applied to S_TREE (both locally and, via the ``external_*`` node
    methods, on whatever remote store backs it).

    :param s_tree: source tree; must be rooted at a ``node.RootNode``
    :param t_tree: target tree; must be rooted at a ``node.RootNode``
    :param s_maps_to_t_p: predicate deciding whether a source node
        corresponds to a target node
    :param s_equals_t_p: predicate deciding whether a matched pair is
        already identical (no update needed)
    :param make_s_from_t: factory building a source-tree node from a
        target-tree node
    :param no_delete: when True, skip the final delete phase, leaving
        unmatched source nodes in place
    """
    if not isinstance(s_tree, node.RootNode):
        raise Exception("Source tree is not rooted.")
    if not isinstance(t_tree, node.RootNode):
        raise Exception("Target tree is not rooted.")
    # initialize mapping dictionaries (the two roots always correspond)
    s_from_t, t_from_s = {}, {}
    s_from_t[t_tree] = s_tree
    t_from_s[s_tree] = t_tree
    # Match remaining nodes pairwise, breadth-first, skipping the roots.
    # Each target node is claimed by at most one source node (removed from
    # t_list once matched).  This is O(|S|*|T|) in the worst case.
    s_list = helpers.breadth_first_order(s_tree)[1:]
    t_list = helpers.breadth_first_order(t_tree)[1:]
    for s_node in s_list:
        for t_node in t_list:
            if s_maps_to_t_p(s_node, t_node):
                s_from_t[t_node] = s_node
                t_from_s[s_node] = t_node
                t_list.remove(t_node)
                break
    # define helper functions
    def mapped_nodes_p(s_node, t_node):
        # True when s_node is exactly the source node matched to t_node.
        return s_node is s_from_t.get(t_node)
    def s_left_sibling_id_from_t(target_node):
        # Find the id of the source-tree counterpart of target_node's
        # nearest in-order left sibling (None when it should become the
        # leftmost child).  This is the "find position" step of [CRGMW95].
        target_parent = target_node.parent
        target_index = target_parent.children.index(target_node)
        target_ordered_nodes = [t_node for t_node
                                in target_parent.children[:target_index]
                                if t_node.in_order]
        result = None
        if target_ordered_nodes:
            # NOTE(review): target_ordered_nodes only contains siblings
            # strictly to the left of target_node, so the identity check
            # below looks unreachable -- TODO confirm against [CRGMW95].
            if target_node is target_ordered_nodes[0]:
                result = None
            else:
                # NOTE(review): if the rightmost ordered sibling has no
                # source counterpart this getattr is called on None and
                # raises AttributeError -- presumably the matching phase
                # guarantees a counterpart exists; verify.
                result = getattr(
                    s_from_t.get(target_ordered_nodes[-1]), 'id')
        return result
    # Main pass: walk the target tree breadth-first and bring the source
    # tree into agreement one node at a time.
    for t_node in helpers.breadth_first_order(t_tree):
        s_node = s_from_t.get(t_node)
        t_parent = t_node.parent
        s_parent = s_from_t.get(t_parent)
        # insert: target node has no counterpart, so create one under the
        # counterpart of its parent.
        if not s_node:
            t_node.in_order = True
            s_left_sibling_id = s_left_sibling_id_from_t(t_node)
            s_node = make_s_from_t(t_node)
            s_from_t[t_node] = s_node
            t_from_s[s_node] = t_node
            # external_* mirrors the change to the remote store; the plain
            # call mutates the local tree.
            s_node.external_insert_as_child(s_left_sibling_id, s_parent)
            s_node.insert_as_child(s_left_sibling_id, s_parent)
            s_node.in_order = True
        elif t_parent:
            s_node = s_from_t.get(t_node)
            s_parent = s_node.parent
            # update: matched but contents differ.
            if not s_equals_t_p(s_node, t_node):
                model_s_node = make_s_from_t(t_node)
                s_node.external_update(model_s_node)
                s_node.update(model_s_node)
            # move: matched but attached under the wrong parent.
            if not mapped_nodes_p(s_parent, t_node.parent):
                t_node.in_order = True
                s_parent = s_from_t.get(t_node.parent)
                s_left_sibling_id = s_left_sibling_id_from_t(t_node)
                s_node.external_move_to(s_left_sibling_id, s_parent)
                s_node.move_to(s_left_sibling_id, s_parent)
                s_node.in_order = True
        # align: children are under the right parent but possibly in the
        # wrong order.  Compute the LCS of matched children; anything not
        # on the LCS is moved into place.
        s_list, t_list = [], []
        for s_child in s_node.children:
            s_child.in_order = False
            t_child = t_from_s.get(s_child)
            if t_child and t_child.parent is t_node:
                s_list.append(s_child)
        for t_child in t_node.children:
            t_child.in_order = False
            s_child = s_from_t.get(t_child)
            if s_child and s_child.parent is s_node:
                t_list.append(t_child)
        s = lcs2(s_list, t_list, mapped_nodes_p)
        # Children on the LCS are already in relative order.
        for s_child, t_child in s:
            s_child.in_order = t_child.in_order = True
        for s_child in s_list:
            t_child = t_from_s.get(s_child)
            if (t_child not in t_list) or ((s_child, t_child) in s):
                continue
            s_left_sibling_id = s_left_sibling_id_from_t(t_child)
            s_child.external_move_to(s_left_sibling_id, s_node)
            s_child.move_to(s_left_sibling_id, s_node)
            s_child.in_order = t_child.in_order = True
    # delete: remove source nodes with no target counterpart, bottom-up so
    # children are deleted before their parents.
    if not no_delete:
        s_list = helpers.breadth_first_order(s_tree)
        s_list.reverse()
        for s_node in s_list:
            if not t_from_s.get(s_node):
                s_node.external_delete()
                s_node.delete()
| gpl-3.0 |
BonexGu/Blik2D-SDK | Blik2D/addon/tensorflow-1.2.1_for_blik/tensorflow/contrib/opt/python/training/nadam_optimizer_test.py | 4 | 6265 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Nadam."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.contrib.opt.python.training import nadam_optimizer
def nadam_update_numpy(param,
                       g_t,
                       t,
                       m,
                       v,
                       alpha=0.001,
                       beta1=0.9,
                       beta2=0.999,
                       epsilon=1e-8):
  """Reference NumPy implementation of a single Nadam update step.

  Args:
    param: current parameter value(s).
    g_t: gradient at step t.
    t: 1-based step number, used for bias correction.
    m: first-moment (momentum) accumulator from the previous step.
    v: second-moment accumulator from the previous step.
    alpha: base learning rate.
    beta1: exponential decay rate for the first moment.
    beta2: exponential decay rate for the second moment.
    epsilon: small constant for numerical stability.

  Returns:
    Tuple of (updated param, updated m, updated v).
  """
  # Bias-corrected step size for step t.
  lr_t = alpha * np.sqrt(1 - beta2**t) / (1 - beta1**t)
  one_minus_b1 = 1 - beta1
  one_minus_b2 = 1 - beta2
  # Standard Adam moment updates.
  m_t = beta1 * m + one_minus_b1 * g_t
  v_t = beta2 * v + one_minus_b2 * g_t * g_t
  # Nadam's Nesterov twist: blend the raw gradient with the new momentum.
  m_bar = one_minus_b1 * g_t + beta1 * m_t
  new_param = param - lr_t * m_bar / (np.sqrt(v_t) + epsilon)
  return new_param, m_t, v_t
class NadamOptimizerTest(test.TestCase):
  """Checks NadamOptimizer against the NumPy reference implementation.

  Each test runs three optimizer steps on two variables and compares the
  TensorFlow results against nadam_update_numpy, for half, float32 and
  float64, with both regular and resource variables.
  """
  def doTestSparse(self, use_resource=False):
    # Exercises the sparse (IndexedSlices) gradient path.
    for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
      with self.test_session():
        # Initialize variables for numpy implementation.
        m0, v0, m1, v1 = 0.0, 0.0, 0.0, 0.0
        var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype)
        grads0_np = np.array([0.1, 0.1], dtype=dtype.as_numpy_dtype)
        var1_np = np.array([3.0, 4.0], dtype=dtype.as_numpy_dtype)
        grads1_np = np.array([0.01, 0.01], dtype=dtype.as_numpy_dtype)
        if use_resource:
          var0 = resource_variable_ops.ResourceVariable(var0_np)
          var1 = resource_variable_ops.ResourceVariable(var1_np)
        else:
          var0 = variables.Variable(var0_np)
          var1 = variables.Variable(var1_np)
        # Wrap dense gradients as IndexedSlices covering every row, so
        # the sparse kernel is used while the math matches the dense case.
        grads0_np_indices = np.array([0, 1], dtype=np.int32)
        grads0 = ops.IndexedSlices(
            constant_op.constant(grads0_np),
            constant_op.constant(grads0_np_indices), constant_op.constant([2]))
        grads1_np_indices = np.array([0, 1], dtype=np.int32)
        grads1 = ops.IndexedSlices(
            constant_op.constant(grads1_np),
            constant_op.constant(grads1_np_indices), constant_op.constant([2]))
        opt = nadam_optimizer.NadamOptimizer()
        update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
        variables.global_variables_initializer().run()
        # Fetch params to validate initial values
        self.assertAllClose([1.0, 2.0], var0.eval())
        self.assertAllClose([3.0, 4.0], var1.eval())
        beta1_power, beta2_power = opt._get_beta_accumulators()
        # Run 3 steps of Nadam
        for t in range(1, 4):
          # The beta-power accumulators must track beta**t before each step.
          self.assertAllCloseAccordingToType(0.9**t, beta1_power.eval())
          self.assertAllCloseAccordingToType(0.999**t, beta2_power.eval())
          update.run()
          var0_np, m0, v0 = nadam_update_numpy(var0_np, grads0_np, t, m0, v0)
          var1_np, m1, v1 = nadam_update_numpy(var1_np, grads1_np, t, m1, v1)
          # Validate updated params
          self.assertAllCloseAccordingToType(var0_np, var0.eval())
          self.assertAllCloseAccordingToType(var1_np, var1.eval())
  def testSparse(self):
    self.doTestSparse(use_resource=False)
  def testResourceSparse(self):
    self.doTestSparse(use_resource=True)
  def doTestBasic(self, use_resource=False):
    # Exercises the dense gradient path; mirrors doTestSparse otherwise.
    for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
      with self.test_session():
        # Initialize variables for numpy implementation.
        m0, v0, m1, v1 = 0.0, 0.0, 0.0, 0.0
        var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype)
        grads0_np = np.array([0.1, 0.1], dtype=dtype.as_numpy_dtype)
        var1_np = np.array([3.0, 4.0], dtype=dtype.as_numpy_dtype)
        grads1_np = np.array([0.01, 0.01], dtype=dtype.as_numpy_dtype)
        if use_resource:
          var0 = resource_variable_ops.ResourceVariable(var0_np)
          var1 = resource_variable_ops.ResourceVariable(var1_np)
        else:
          var0 = variables.Variable(var0_np)
          var1 = variables.Variable(var1_np)
        grads0 = constant_op.constant(grads0_np)
        grads1 = constant_op.constant(grads1_np)
        opt = nadam_optimizer.NadamOptimizer()
        update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
        variables.global_variables_initializer().run()
        # Fetch params to validate initial values
        self.assertAllClose([1.0, 2.0], var0.eval())
        self.assertAllClose([3.0, 4.0], var1.eval())
        beta1_power, beta2_power = opt._get_beta_accumulators()
        # Run 3 steps of Nadam
        for t in range(1, 4):
          self.assertAllCloseAccordingToType(0.9**t, beta1_power.eval())
          self.assertAllCloseAccordingToType(0.999**t, beta2_power.eval())
          update.run()
          var0_np, m0, v0 = nadam_update_numpy(var0_np, grads0_np, t, m0, v0)
          var1_np, m1, v1 = nadam_update_numpy(var1_np, grads1_np, t, m1, v1)
          # Validate updated params
          self.assertAllCloseAccordingToType(var0_np, var0.eval())
          self.assertAllCloseAccordingToType(var1_np, var1.eval())
  def testBasic(self):
    self.doTestBasic(use_resource=False)
  def testResourceBasic(self):
    self.doTestBasic(use_resource=True)
if __name__ == "__main__":
test.main()
| mit |
flyfei/python-for-android | python-modules/twisted/twisted/words/xish/domish.py | 49 | 29568 | # -*- test-case-name: twisted.words.test.test_domish -*-
# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
DOM-like XML processing support.
This module provides support for parsing XML into DOM-like object structures
and serializing such structures to an XML string representation, optimized
for use in streaming XML applications.
"""
import types
from zope.interface import implements, Interface, Attribute
def _splitPrefix(name):
""" Internal method for splitting a prefixed Element name into its
respective parts """
ntok = name.split(":", 1)
if len(ntok) == 2:
return ntok
else:
return (None, ntok[0])
# Global map of prefixes that always get injected
# into the serializers prefix map (note, that doesn't
# mean they're always _USED_)
G_PREFIXES = { "http://www.w3.org/XML/1998/namespace":"xml" }
class _ListSerializer:
    """ Internal class which serializes an Element tree into a buffer.

    Output is accumulated as a list of string fragments (joined lazily by
    L{getValue}) to avoid quadratic string concatenation.  Namespace
    prefixes are tracked in C{self.prefixes} (uri -> prefix) and a stack
    of per-element prefix lists in C{self.prefixStack}.
    """
    def __init__(self, prefixes=None, prefixesInScope=None):
        self.writelist = []
        self.prefixes = {}
        if prefixes:
            self.prefixes.update(prefixes)
        # G_PREFIXES entries always win over caller-supplied mappings.
        self.prefixes.update(G_PREFIXES)
        self.prefixStack = [G_PREFIXES.values()] + (prefixesInScope or [])
        self.prefixCounter = 0
    def getValue(self):
        # Join all accumulated fragments into the final unicode document.
        return u"".join(self.writelist)
    def getPrefix(self, uri):
        # Return the prefix bound to uri, inventing a fresh "xnN" prefix
        # on first sight.
        if not self.prefixes.has_key(uri):
            self.prefixes[uri] = "xn%d" % (self.prefixCounter)
            self.prefixCounter = self.prefixCounter + 1
        return self.prefixes[uri]
    def prefixInScope(self, prefix):
        # Scan the prefix stack from innermost scope outwards.
        stack = self.prefixStack
        for i in range(-1, (len(self.prefixStack)+1) * -1, -1):
            if prefix in stack[i]:
                return True
        return False
    def serialize(self, elem, closeElement=1, defaultUri=''):
        """ Recursively serialize elem and its children onto the buffer.

        @param closeElement: when 0, emit only the start tag (used for
            streaming stanzas such as <stream:stream>).
        @param defaultUri: default namespace inherited from the parent.
        """
        # Optimization shortcuts
        write = self.writelist.append
        # Shortcut, check to see if elem is actually a chunk o' serialized XML
        if isinstance(elem, SerializedXML):
            write(elem)
            return
        # Shortcut, check to see if elem is actually a string (aka Cdata)
        if isinstance(elem, types.StringTypes):
            write(escapeToXml(elem))
            return
        # Further optimizations
        name = elem.name
        uri = elem.uri
        defaultUri, currentDefaultUri = elem.defaultUri, defaultUri
        # Register this element's local prefix declarations.
        for p, u in elem.localPrefixes.iteritems():
            self.prefixes[u] = p
        self.prefixStack.append(elem.localPrefixes.keys())
        # Inherit the default namespace
        if defaultUri is None:
            defaultUri = currentDefaultUri
        if uri is None:
            uri = defaultUri
        prefix = None
        # A prefix is needed when the element's uri differs from the
        # default namespace, or when one is already mapped for it.
        # NOTE: inScope is only assigned (and later read) on this path;
        # all reads below are guarded by `prefix` being truthy.
        if uri != defaultUri or uri in self.prefixes:
            prefix = self.getPrefix(uri)
            inScope = self.prefixInScope(prefix)
        # Create the starttag
        if not prefix:
            write("<%s" % (name))
        else:
            write("<%s:%s" % (prefix, name))
            if not inScope:
                # Declare the prefix on this element and record it so
                # descendants don't re-declare it.
                write(" xmlns:%s='%s'" % (prefix, uri))
                self.prefixStack[-1].append(prefix)
                inScope = True
        if defaultUri != currentDefaultUri and \
           (uri != defaultUri or not prefix or not inScope):
            write(" xmlns='%s'" % (defaultUri))
        for p, u in elem.localPrefixes.iteritems():
            write(" xmlns:%s='%s'" % (p, u))
        # Serialize attributes
        for k,v in elem.attributes.items():
            # If the attribute name is a tuple, it's a qualified attribute
            if isinstance(k, types.TupleType):
                attr_uri, attr_name = k
                attr_prefix = self.getPrefix(attr_uri)
                if not self.prefixInScope(attr_prefix):
                    write(" xmlns:%s='%s'" % (attr_prefix, attr_uri))
                    self.prefixStack[-1].append(attr_prefix)
                write(" %s:%s='%s'" % (attr_prefix, attr_name,
                                       escapeToXml(v, 1)))
            else:
                write((" %s='%s'" % ( k, escapeToXml(v, 1))))
        # Shortcut out if this is only going to return
        # the element (i.e. no children)
        if closeElement == 0:
            write(">")
            return
        # Serialize children
        if len(elem.children) > 0:
            write(">")
            for c in elem.children:
                self.serialize(c, defaultUri=defaultUri)
            # Add closing tag
            if not prefix:
                write("</%s>" % (name))
            else:
                write("</%s:%s>" % (prefix, name))
        else:
            write("/>")
        self.prefixStack.pop()
SerializerClass = _ListSerializer
def escapeToXml(text, isattrib = 0):
    """ Escape text to proper XML form, per section 2.3 in the XML
    specification.

    C{&}, C{<} and C{>} are always rewritten to entities; when
    C{isattrib} is C{1}, single and double quotes are rewritten as well
    so the result is safe inside a quoted attribute value.

    @type text: L{str}
    @param text: Text to escape
    @type isattrib: L{bool}
    @param isattrib: Triggers escaping of characters necessary for use as
                     attribute values
    """
    # '&' must be rewritten first, otherwise the ampersands introduced by
    # the other substitutions would themselves be escaped.
    substitutions = [("&", "&amp;"), ("<", "&lt;"), (">", "&gt;")]
    if isattrib == 1:
        substitutions.append(("'", "&apos;"))
        substitutions.append(("\"", "&quot;"))
    for raw, entity in substitutions:
        text = text.replace(raw, entity)
    return text
def unescapeFromXml(text):
    """ Expand the five predefined XML entities back to literal text.

    The inverse of L{escapeToXml}.  C{&amp;} is expanded last so that
    doubly-escaped input such as C{&amp;lt;} yields the literal text
    C{&lt;} rather than C{<}.
    """
    for entity, raw in (("&lt;", "<"),
                        ("&gt;", ">"),
                        ("&apos;", "'"),
                        ("&quot;", "\""),
                        ("&amp;", "&")):
        text = text.replace(entity, raw)
    return text
def generateOnlyInterface(list, int):
    """ Yield only the items of C{list} that provide the interface C{int}.

    NOTE(review): the parameter names shadow the builtins C{list} and
    C{int}; they are kept as-is because they are part of the public
    signature.
    """
    return (item for item in list if int.providedBy(item))
def generateElementsQNamed(list, name, uri):
    """ Yield the Element items in C{list} whose name and URI both match. """
    for candidate in list:
        if not IElement.providedBy(candidate):
            continue
        if candidate.name == name and candidate.uri == uri:
            yield candidate
def generateElementsNamed(list, name):
    """ Yield the Element items in C{list} whose name matches, regardless
    of URI.
    """
    for candidate in list:
        if IElement.providedBy(candidate) and candidate.name == name:
            yield candidate
class SerializedXML(unicode):
    """ Marker class for pre-serialized XML in the DOM.

    Subclasses C{unicode} so instances can live in an Element's children
    list like ordinary character data, while signalling to the serializer
    that the content is already XML and must be written out verbatim
    (no escaping).
    """
    pass
class Namespace:
    """ Convenience object for tracking namespace declarations.

    Both attribute and item access yield fully qualified name tuples:
    C{Namespace('uri').tag == Namespace('uri')['tag'] == ('uri', 'tag')}.
    """
    def __init__(self, uri):
        # Stored under a private name; __getattr__ never fires for it
        # because instance attributes are found before __getattr__.
        self._uri = uri
    def _qualify(self, localname):
        return (self._uri, localname)
    def __getattr__(self, n):
        return self._qualify(n)
    def __getitem__(self, n):
        return self._qualify(n)
class IElement(Interface):
    """
    Interface to XML element nodes.

    See L{Element} for a detailed example of its general use.
    Warning: this Interface is not yet complete!
    """
    uri = Attribute(""" Element's namespace URI """)
    name = Attribute(""" Element's local name """)
    defaultUri = Attribute(""" Default namespace URI of child elements """)
    attributes = Attribute(""" Dictionary of element attributes """)
    children = Attribute(""" List of child nodes """)
    parent = Attribute(""" Reference to element's parent element """)
    localPrefixes = Attribute(""" Dictionary of local prefixes """)
    def toXml(prefixes=None, closeElement=1, defaultUri='',
              prefixesInScope=None):
        """ Serializes object to a (partial) XML document

        @param prefixes: dictionary that maps namespace URIs to suggested
                         prefix names.
        @type prefixes: L{dict}
        @param closeElement: flag that determines whether to include the
                             closing tag of the element in the serialized
                             string. A value of C{0} only generates the
                             element's start tag. A value of C{1} yields a
                             complete serialization.
        @type closeElement: L{int}
        @param defaultUri: Initial default namespace URI. This is most useful
                           for partial rendering, where the logical parent
                           element (of which the starttag was already
                           serialized) declares a default namespace that should
                           be inherited.
        @type defaultUri: L{str}
        @param prefixesInScope: list of prefixes that are assumed to be
                                declared by ancestors.
        @type prefixesInScope: L{list}
        @return: (partial) serialized XML
        @rtype: L{unicode}
        """
    def addElement(name, defaultUri = None, content = None):
        """ Create an element and add as child.

        The new element is added to this element as a child, and will have
        this element as its parent.

        @param name: element name. This can be either a L{unicode} object that
                     contains the local name, or a tuple of (uri, local_name)
                     for a fully qualified name. In the former case,
                     the namespace URI is inherited from this element.
        @type name: L{unicode} or L{tuple} of (L{unicode}, L{unicode})
        @param defaultUri: default namespace URI for child elements. If
                           C{None}, this is inherited from this element.
        @type defaultUri: L{unicode}
        @param content: text contained by the new element.
        @type content: L{unicode}
        @return: the created element
        @rtype: object providing L{IElement}
        """
    def addChild(node):
        """ Adds a node as child of this element.

        The C{node} will be added to the list of childs of this element, and
        will have this element set as its parent when C{node} provides
        L{IElement}.

        @param node: the child node.
        @type node: L{unicode} or object implementing L{IElement}
        """
class Element(object):
    """ Represents an XML element node.

    An Element contains a series of attributes (name/value pairs), content
    (character data), and other child Element objects. When building a document
    with markup (such as HTML or XML), use this object as the starting point.

    Element objects fully support XML Namespaces. The fully qualified name of
    the XML Element it represents is stored in the C{uri} and C{name}
    attributes, where C{uri} holds the namespace URI. There is also a default
    namespace, for child elements. This is stored in the C{defaultUri}
    attribute. Note that C{''} means the empty namespace.

    Serialization of Elements through C{toXml()} will use these attributes
    for generating proper serialized XML. When both C{uri} and C{defaultUri}
    are not None in the Element and all of its descendents, serialization
    proceeds as expected:

    >>> from twisted.words.xish import domish
    >>> root = domish.Element(('myns', 'root'))
    >>> root.addElement('child', content='test')
    <twisted.words.xish.domish.Element object at 0x83002ac>
    >>> root.toXml()
    u"<root xmlns='myns'><child>test</child></root>"

    For partial serialization, needed for streaming XML, a special value for
    namespace URIs can be used: C{None}.

    Using C{None} as the value for C{uri} means: this element is in whatever
    namespace inherited by the closest logical ancestor when the complete XML
    document has been serialized. The serialized start tag will have a
    non-prefixed name, and no xmlns declaration will be generated.

    Similarly, C{None} for C{defaultUri} means: the default namespace for my
    child elements is inherited from the logical ancestors of this element,
    when the complete XML document has been serialized.

    To illustrate, an example from a Jabber stream. Assume the start tag of the
    root element of the stream has already been serialized, along with several
    complete child elements, and sent off, looking like this::

      <stream:stream xmlns:stream='http://etherx.jabber.org/streams'
                     xmlns='jabber:client' to='example.com'>
          ...

    Now suppose we want to send a complete element represented by an
    object C{message} created like:

    >>> message = domish.Element((None, 'message'))
    >>> message['to'] = 'user@example.com'
    >>> message.addElement('body', content='Hi!')
    <twisted.words.xish.domish.Element object at 0x8276e8c>
    >>> message.toXml()
    u"<message to='user@example.com'><body>Hi!</body></message>"

    As, you can see, this XML snippet has no xmlns declaration. When sent
    off, it inherits the C{jabber:client} namespace from the root element.
    Note that this renders the same as using C{''} instead of C{None}:

    >>> presence = domish.Element(('', 'presence'))
    >>> presence.toXml()
    u"<presence/>"

    However, if this object has a parent defined, the difference becomes
    clear:

    >>> child = message.addElement(('http://example.com/', 'envelope'))
    >>> child.addChild(presence)
    <twisted.words.xish.domish.Element object at 0x8276fac>
    >>> message.toXml()
    u"<message to='user@example.com'><body>Hi!</body><envelope xmlns='http://example.com/'><presence xmlns=''/></envelope></message>"

    As, you can see, the <presence/> element is now in the empty namespace, not
    in the default namespace of the parent or the streams'.

    @type uri: L{unicode} or None
    @ivar uri: URI of this Element's name

    @type name: L{unicode}
    @ivar name: Name of this Element

    @type defaultUri: L{unicode} or None
    @ivar defaultUri: URI this Element exists within

    @type children: L{list}
    @ivar children: List of child Elements and content

    @type parent: L{Element}
    @ivar parent: Reference to the parent Element, if any.

    @type attributes: L{dict}
    @ivar attributes: Dictionary of attributes associated with this Element.

    @type localPrefixes: L{dict}
    @ivar localPrefixes: Dictionary of namespace declarations on this
                         element. The key is the prefix to bind the
                         namespace uri to.
    """
    implements(IElement)
    # Process-wide counter backing addUniqueId().
    _idCounter = 0
    def __init__(self, qname, defaultUri=None, attribs=None,
                 localPrefixes=None):
        """
        @param qname: Tuple of (uri, name)
        @param defaultUri: The default URI of the element; defaults to the URI
                           specified in L{qname}
        @param attribs: Dictionary of attributes
        @param localPrefixes: Dictionary of namespace declarations on this
                              element. The key is the prefix to bind the
                              namespace uri to.
        """
        self.localPrefixes = localPrefixes or {}
        self.uri, self.name = qname
        # Inherit the element's own URI as the default for children,
        # unless that URI is already bound to an explicit local prefix.
        if defaultUri is None and \
           self.uri not in self.localPrefixes.itervalues():
            self.defaultUri = self.uri
        else:
            self.defaultUri = defaultUri
        self.attributes = attribs or {}
        self.children = []
        self.parent = None
    def __getattr__(self, key):
        """ Return the first child element whose name matches C{key}, or
        C{None} when there is no such child (except for underscored names,
        which raise AttributeError as usual).
        """
        # Check child list for first Element with a name matching the key
        for n in self.children:
            if IElement.providedBy(n) and n.name == key:
                return n
        # Tweak the behaviour so that it's more friendly about not
        # finding elements -- we need to document this somewhere :)
        if key.startswith('_'):
            raise AttributeError(key)
        else:
            return None
    def __getitem__(self, key):
        # Attribute access; key may be a plain name or a (uri, name) tuple.
        return self.attributes[self._dqa(key)]
    def __delitem__(self, key):
        del self.attributes[self._dqa(key)];
    def __setitem__(self, key, value):
        self.attributes[self._dqa(key)] = value
    def __str__(self):
        """ Retrieve the first CData (content) node
        """
        for n in self.children:
            if isinstance(n, types.StringTypes): return n
        return ""
    def _dqa(self, attr):
        """ Dequalify an attribute key as needed.

        A (uri, name) tuple with an empty/None uri collapses to just the
        local name; everything else is passed through unchanged.
        """
        if isinstance(attr, types.TupleType) and not attr[0]:
            return attr[1]
        else:
            return attr
    def getAttribute(self, attribname, default = None):
        """ Retrieve the value of attribname, if it exists """
        return self.attributes.get(attribname, default)
    def hasAttribute(self, attrib):
        """ Determine if the specified attribute exists """
        return self.attributes.has_key(self._dqa(attrib))
    def compareAttribute(self, attrib, value):
        """ Safely compare the value of an attribute against a provided value.

        C{None}-safe.
        """
        return self.attributes.get(self._dqa(attrib), None) == value
    def swapAttributeValues(self, left, right):
        """ Swap the values of two attributes.

        Both attributes must exist; raises KeyError otherwise.
        """
        d = self.attributes
        l = d[left]
        d[left] = d[right]
        d[right] = l
    def addChild(self, node):
        """ Add a child to this Element.

        Sets C{node.parent} to this element when node provides IElement;
        returns the node that was added.
        """
        if IElement.providedBy(node):
            node.parent = self
        self.children.append(node)
        return self.children[-1]
    def addContent(self, text):
        """ Add some text data to this Element.

        Consecutive text is coalesced into the trailing string child
        rather than appended as a separate node.
        """
        c = self.children
        if len(c) > 0 and isinstance(c[-1], types.StringTypes):
            c[-1] = c[-1] + text
        else:
            c.append(text)
        return c[-1]
    def addElement(self, name, defaultUri = None, content = None):
        """ Create a new child element and return it.

        C{name} is either a local name (inheriting this element's
        defaultUri) or a (uri, local_name) tuple; optional C{content} is
        appended as the new child's text.
        """
        result = None
        if isinstance(name, type(())):
            if defaultUri is None:
                defaultUri = name[0]
            self.children.append(Element(name, defaultUri))
        else:
            if defaultUri is None:
                defaultUri = self.defaultUri
            self.children.append(Element((defaultUri, name), defaultUri))
        result = self.children[-1]
        result.parent = self
        if content:
            result.children.append(content)
        return result
    def addRawXml(self, rawxmlstring):
        """ Add a pre-serialized chunk o' XML as a child of this Element. """
        self.children.append(SerializedXML(rawxmlstring))
    def addUniqueId(self):
        """ Add a unique (across a given Python session) id attribute to this
        Element.
        """
        self.attributes["id"] = "H_%d" % Element._idCounter
        Element._idCounter = Element._idCounter + 1
    def elements(self, uri=None, name=None):
        """
        Iterate across all children of this Element that are Elements.

        Returns a generator over the child elements. If both the C{uri} and
        C{name} parameters are set, the returned generator will only yield
        on elements matching the qualified name.

        @param uri: Optional element URI.
        @type uri: C{unicode}
        @param name: Optional element name.
        @type name: C{unicode}
        @return: Iterator that yields objects implementing L{IElement}.
        """
        # NOTE: when name is given but uri is None, children are matched
        # against a literal None uri (not "any uri").
        if name is None:
            return generateOnlyInterface(self.children, IElement)
        else:
            return generateElementsQNamed(self.children, name, uri)
    def toXml(self, prefixes=None, closeElement=1, defaultUri='',
              prefixesInScope=None):
        """ Serialize this Element and all children to a string. """
        s = SerializerClass(prefixes=prefixes, prefixesInScope=prefixesInScope)
        s.serialize(self, closeElement=closeElement, defaultUri=defaultUri)
        return s.getValue()
    def firstChildElement(self):
        """ Return the first child that is an Element, or C{None}. """
        for c in self.children:
            if IElement.providedBy(c):
                return c
        return None
class ParserError(Exception):
    """ Exception thrown when a parsing error occurs.

    Raised by both stream implementations, wrapping the underlying
    Expat or Sux error message.
    """
    pass
def elementStream():
    """ Preferred way to construct an ElementStream.

    Tries the Expat-based stream first and falls back to the Sux-based
    stream when pyexpat is unavailable; raises when neither parser can
    be used.
    """
    try:
        return ExpatElementStream()
    except ImportError:
        if SuxElementStream is None:
            raise Exception("No parsers available :(")
        return SuxElementStream()
try:
    from twisted.web import sux
# NOTE(review): bare except also hides unrelated errors (not just
# ImportError) while probing for the optional sux parser.
except:
    SuxElementStream = None
else:
    class SuxElementStream(sux.XMLParser):
        """ ElementStream implementation on top of twisted.web.sux.

        Fallback parser used when pyexpat is unavailable; drives the same
        DocumentStartEvent/ElementEvent/DocumentEndEvent callbacks as
        ExpatElementStream.
        """
        def __init__(self):
            self.connectionMade()
            # Callbacks to be assigned by the consumer of the stream.
            self.DocumentStartEvent = None
            self.ElementEvent = None
            self.DocumentEndEvent = None
            self.currElem = None
            self.rootElem = None
            self.documentStarted = False
            # Stacks of inherited default namespaces / prefix mappings,
            # one entry per open element.
            self.defaultNsStack = []
            self.prefixStack = []
        def parse(self, buffer):
            # Feed a chunk of data; translate parser errors to ParserError.
            try:
                self.dataReceived(buffer)
            except sux.ParseError, e:
                raise ParserError, str(e)
        def findUri(self, prefix):
            # Walk prefix stack backwards, looking for the uri
            # matching the specified prefix
            stack = self.prefixStack
            for i in range(-1, (len(self.prefixStack)+1) * -1, -1):
                if prefix in stack[i]:
                    return stack[i][prefix]
            return None
        def gotTagStart(self, name, attributes):
            defaultUri = None
            localPrefixes = {}
            attribs = {}
            uri = None
            # Pass 1 - Identify namespace decls
            for k, v in attributes.items():
                if k.startswith("xmlns"):
                    x, p = _splitPrefix(k)
                    if (x is None): # I.e. default declaration
                        defaultUri = v
                    else:
                        localPrefixes[p] = v
                    del attributes[k]
            # Push namespace decls onto prefix stack
            self.prefixStack.append(localPrefixes)
            # Determine default namespace for this element; if there
            # is one
            if defaultUri is None:
                if len(self.defaultNsStack) > 0:
                    defaultUri = self.defaultNsStack[-1]
                else:
                    defaultUri = ''
            # Fix up name
            prefix, name = _splitPrefix(name)
            if prefix is None: # This element is in the default namespace
                uri = defaultUri
            else:
                # Find the URI for the prefix
                uri = self.findUri(prefix)
            # Pass 2 - Fix up and escape attributes
            for k, v in attributes.items():
                p, n = _splitPrefix(k)
                if p is None:
                    attribs[n] = v
                else:
                    attribs[(self.findUri(p)), n] = unescapeFromXml(v)
            # Construct the actual Element object
            e = Element((uri, name), defaultUri, attribs, localPrefixes)
            # Save current default namespace
            self.defaultNsStack.append(defaultUri)
            # Document already started
            if self.documentStarted:
                # Starting a new packet
                if self.currElem is None:
                    self.currElem = e
                # Adding to existing element
                else:
                    self.currElem = self.currElem.addChild(e)
            # New document
            else:
                self.rootElem = e
                self.documentStarted = True
                self.DocumentStartEvent(e)
        def gotText(self, data):
            # Character data outside any stanza (currElem None) is dropped.
            if self.currElem != None:
                self.currElem.addContent(data)
        def gotCData(self, data):
            if self.currElem != None:
                self.currElem.addContent(data)
        def gotComment(self, data):
            # Ignore comments for the moment
            pass
        # Expansion table for the five predefined XML entities.
        entities = { "amp" : "&",
                     "lt" : "<",
                     "gt" : ">",
                     "apos": "'",
                     "quot": "\"" }
        def gotEntityReference(self, entityRef):
            # If this is an entity we know about, add it as content
            # to the current element
            if entityRef in SuxElementStream.entities:
                self.currElem.addContent(SuxElementStream.entities[entityRef])
        def gotTagEnd(self, name):
            # Ensure the document hasn't already ended
            if self.rootElem is None:
                # XXX: Write more legible explanation
                raise ParserError, "Element closed after end of document."
            # Fix up name
            prefix, name = _splitPrefix(name)
            if prefix is None:
                uri = self.defaultNsStack[-1]
            else:
                uri = self.findUri(prefix)
            # End of document
            if self.currElem is None:
                # Ensure element name and uri matches
                if self.rootElem.name != name or self.rootElem.uri != uri:
                    raise ParserError, "Mismatched root elements"
                self.DocumentEndEvent()
                self.rootElem = None
            # Other elements
            else:
                # Ensure the tag being closed matches the name of the current
                # element
                if self.currElem.name != name or self.currElem.uri != uri:
                    # XXX: Write more legible explanation
                    raise ParserError, "Malformed element close"
                # Pop prefix and default NS stack
                self.prefixStack.pop()
                self.defaultNsStack.pop()
                # Check for parent null parent of current elem;
                # that's the top of the stack
                if self.currElem.parent is None:
                    # A complete top-level stanza: emit it and reset.
                    self.currElem.parent = self.rootElem
                    self.ElementEvent(self.currElem)
                    self.currElem = None
                # Anything else is just some element wrapping up
                else:
                    self.currElem = self.currElem.parent
class ExpatElementStream:
    """ ElementStream implementation on top of pyexpat.

    Parses a (possibly endless) stream of XML and fires the
    DocumentStartEvent/ElementEvent/DocumentEndEvent callbacks assigned
    by the consumer: the root's start tag begins the document, and each
    completed direct child of the root is delivered as one Element.
    """
    def __init__(self):
        # Imported here so construction raises ImportError when pyexpat
        # is unavailable; elementStream() catches that to fall back.
        import pyexpat
        # Callbacks to be assigned by the consumer of the stream.
        self.DocumentStartEvent = None
        self.ElementEvent = None
        self.DocumentEndEvent = None
        self.error = pyexpat.error
        # Namespace-aware parser; qualified names come back as
        # "uri localname" separated by the given space character.
        self.parser = pyexpat.ParserCreate("UTF-8", " ")
        self.parser.StartElementHandler = self._onStartElement
        self.parser.EndElementHandler = self._onEndElement
        self.parser.CharacterDataHandler = self._onCdata
        self.parser.StartNamespaceDeclHandler = self._onStartNamespace
        self.parser.EndNamespaceDeclHandler = self._onEndNamespace
        self.currElem = None
        self.defaultNsStack = ['']
        self.documentStarted = 0
        self.localPrefixes = {}
    def parse(self, buffer):
        # Feed a chunk of data; translate expat errors to ParserError.
        try:
            self.parser.Parse(buffer)
        except self.error, e:
            raise ParserError, str(e)
    def _onStartElement(self, name, attrs):
        # Generate a qname tuple from the provided name. See
        # http://docs.python.org/library/pyexpat.html#xml.parsers.expat.ParserCreate
        # for an explanation of the formatting of name.
        qname = name.rsplit(" ", 1)
        if len(qname) == 1:
            qname = ('', name)
        # Process attributes: rewrite "uri name" keys into (uri, name)
        # tuple keys in place.
        for k, v in attrs.items():
            if " " in k:
                aqname = k.rsplit(" ", 1)
                attrs[(aqname[0], aqname[1])] = v
                del attrs[k]
        # Construct the new element
        e = Element(qname, self.defaultNsStack[-1], attrs, self.localPrefixes)
        # Prefixes collected by _onStartNamespace belong to this element.
        self.localPrefixes = {}
        # Document already started
        if self.documentStarted == 1:
            if self.currElem != None:
                self.currElem.children.append(e)
                e.parent = self.currElem
            self.currElem = e
        # New document
        else:
            self.documentStarted = 1
            self.DocumentStartEvent(e)
    def _onEndElement(self, _):
        # Check for null current elem; end of doc
        if self.currElem is None:
            self.DocumentEndEvent()
        # Check for parent that is None; that's
        # the top of the stack
        elif self.currElem.parent is None:
            self.ElementEvent(self.currElem)
            self.currElem = None
        # Anything else is just some element in the current
        # packet wrapping up
        else:
            self.currElem = self.currElem.parent
    def _onCdata(self, data):
        # Character data outside any stanza is dropped.
        if self.currElem != None:
            self.currElem.addContent(data)
    def _onStartNamespace(self, prefix, uri):
        # If this is the default namespace, put
        # it on the stack
        if prefix is None:
            self.defaultNsStack.append(uri)
        else:
            self.localPrefixes[prefix] = uri
    def _onEndNamespace(self, prefix):
        # Remove last element on the stack
        if prefix is None:
            self.defaultNsStack.pop()
## class FileParser(ElementStream):
## def __init__(self):
## ElementStream.__init__(self)
## self.DocumentStartEvent = self.docStart
## self.ElementEvent = self.elem
## self.DocumentEndEvent = self.docEnd
## self.done = 0
## def docStart(self, elem):
## self.document = elem
## def elem(self, elem):
## self.document.addChild(elem)
## def docEnd(self):
## self.done = 1
## def parse(self, filename):
## for l in open(filename).readlines():
## self.parser.Parse(l)
## assert self.done == 1
## return self.document
## def parseFile(filename):
## return FileParser().parse(filename)
| apache-2.0 |
inveniosoftware/invenio-circulation | tests/helpers.py | 1 | 2547 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2018-2019 CERN.
# Copyright (C) 2018-2019 RERO.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Helper methods for tests."""
import copy
import uuid
from flask import current_app
from invenio_db import db
from invenio_pidstore import current_pidstore
from invenio_records_rest.utils import deny_all
from six.moves import reduce
from invenio_circulation.api import Loan
from invenio_circulation.permissions import has_read_loan_permission
from invenio_circulation.pidstore.pids import CIRCULATION_LOAN_MINTER
class SwappedConfig:
    """Context manager that temporarily overrides one app config value."""
    def __init__(self, key, value):
        """Remember which key to override and the value to install."""
        self.key = key
        self.new_value = value
    def __enter__(self):
        """Stash the current value and install the replacement."""
        config = current_app.config
        self.prev_value = config[self.key]
        config[self.key] = self.new_value
    def __exit__(self, type, value, traceback):
        """Put the original value back, even if the body raised."""
        current_app.config[self.key] = self.prev_value
class SwappedNestedConfig:
    """Context manager that temporarily replaces a nested config value.

    ``nested_keys`` addresses the target as a path of dictionary keys,
    e.g. ``["RECORDS_REST_ENDPOINTS", "loanid", "read_permission_factory"]``.
    On exit the previous value is restored, even if the body raised.
    """

    def __init__(self, nested_keys, value):
        """Store the key path and the temporary value to install."""
        self.nested_keys = nested_keys
        self.new_value = value

    def _container(self):
        """Return the dict that directly holds the final key of the path."""
        return reduce(dict.__getitem__, self.nested_keys[:-1],
                      current_app.config)

    def __enter__(self):
        """Save the previous value and swap it with the new."""
        config_obj = self._container()
        self.prev_value = config_obj[self.nested_keys[-1]]
        config_obj[self.nested_keys[-1]] = self.new_value
        # Return self so the `with ... as cfg:` form is usable
        # (the original returned None).
        return self

    def __exit__(self, type, value, traceback):
        """Restore previous value."""
        self._container()[self.nested_keys[-1]] = self.prev_value
def create_loan(data):
    """Create a test Loan record and return a ``(pid, record)`` tuple.

    The input dict is deep-copied first so the caller's fixture data is
    never mutated by the minter or by record creation.
    """
    payload = copy.deepcopy(data)
    record_id = uuid.uuid4()
    with db.session.begin_nested():
        minter = current_pidstore.minters[CIRCULATION_LOAN_MINTER]
        pid = minter(record_id, payload)
        record = Loan.create(payload, id_=record_id)
    return pid, record
def test_views_permissions_factory(action):
    """Test views permissions factory.

    Grants loan read access for the ``loan-read-access`` action; every
    other action is denied.
    """
    allowed = action == 'loan-read-access'
    return has_read_loan_permission() if allowed else deny_all()
| mit |
redhat-imaging/imagefactory | imgfac/BaseImageImporter.py | 1 | 3593 | import guestfs
import sys
import time
import json
import logging
from .FactoryUtils import launch_inspect_and_mount, qemu_convert_cmd, subprocess_check_output
from .ApplicationConfiguration import ApplicationConfiguration
from .PersistentImageManager import PersistentImageManager
from .BaseImage import BaseImage
from oz.ozutil import copyfile_sparse
class BaseImageImporter(object):
    """Imports an existing local disk image into Factory as a BaseImage.

    The image is inspected with libguestfs to identify the installed OS,
    a minimal TDL template describing that OS is synthesized, and the
    image data is converted to qcow2 and stored via the
    PersistentImageManager.
    """

    def __init__(self, image_file=None):
        """
        @param image_file The name of a local file to be imported as a BaseImage
        """
        self.log = logging.getLogger('%s.%s' % (__name__, self.__class__.__name__))
        self.image_file = image_file

    def do_import(self):
        """
        Import file as a base_image and return the resulting BaseImage object

        Raises Exception if the guest is not Linux or the distro is not
        one of the explicitly supported families below.
        """
        # Mount the image read-only and let libguestfs probe for an OS.
        g = launch_inspect_and_mount(self.image_file, readonly=True)
        inspection = g.inspect_os()
        # NOTE(review): only the first detected OS root is used; a
        # multi-boot image would be partially ignored — confirm intended.
        os_root = inspection[0]
        i_type=g.inspect_get_type(os_root)
        i_name=g.inspect_get_product_name(os_root)
        i_distro=g.inspect_get_distro(os_root)
        i_major_version=g.inspect_get_major_version(os_root)
        i_minor_version=g.inspect_get_minor_version(os_root)
        ins_res = "guestfs inspection result - type: %s - name: %s - distro: %s - major version: %s - minor version: %s" % \
            (i_type, i_name, i_distro, i_major_version, i_minor_version)
        self.log.debug(ins_res)
        if i_type != "linux":
            raise Exception("Can only import Linux distros into Factory at the moment")
        # Map the guestfs distro ID onto the OS name/version pair used in
        # the TDL <os> element.  RHEL-family rebuilds share the RHEL name.
        if i_distro in [ 'centos', 'rhel', 'scientificlinux' ]:
            tdl_os_name = "RHEL-%d" % (i_major_version)
            tdl_os_version = "%d" % (i_minor_version)
        elif i_distro == 'fedora':
            tdl_os_name = "Fedora"
            tdl_os_version = "%d" % (i_major_version)
        elif i_distro == 'ubuntu':
            # Ubuntu versions are year.month, e.g. 14.04 — zero-pad the minor.
            tdl_os_name = "Ubuntu"
            tdl_os_version = "%d.%02d" % (i_major_version, i_minor_version)
        elif i_distro == 'debian':
            tdl_os_name = "Debian"
            tdl_os_version = "%d" % (i_major_version)
        else:
            raise Exception("Unsupported distro for import: %s" % (i_distro))
        # Timestamp makes the generated template name unique per import.
        ftime = time.strftime("%Y-%m-%d--%H:%M:%S", time.localtime())
        tname = "%s-%s-import-%s" % (tdl_os_name, tdl_os_version, ftime)
        # Synthetic TDL: the install URL is a placeholder because the
        # image already exists and will never be built from this template.
        tdl_template="""<template>
  <name>%s</name>
  <os>
    <name>%s</name>
    <version>%s</version>
    <arch>x86_64</arch>
    <install type='url'>
      <url>http://foo.com/imported/image/do/not/use/url</url>
    </install>
  </os>
  <description>image imported on %s</description>
</template>
""" % (tname, tdl_os_name, tdl_os_version, ftime)
        # Register the new image record first so base_image.data points at
        # the final storage location before we write the converted bits.
        pim = PersistentImageManager.default_manager()
        base_image = BaseImage()
        pim.add_image(base_image)
        base_image.template=tdl_template
        # The input image can be in any format that libguestfs understands
        # Here we convert it to qcow2 - If it is already in qcow2 this is benign
        # and in some cases can tidy up and serialize it
        self.log.debug("Converting and saving intput file %s to final data location %s" % \
            (self.image_file, base_image.data))
        cmd = qemu_convert_cmd(self.image_file, base_image.data)
        (stdout, stderr, retcode) = subprocess_check_output(cmd)
        # NOTE(review): retcode/stderr are captured but never checked —
        # presumably subprocess_check_output raises on failure, as its
        # name suggests; confirm, otherwise a failed conversion would
        # still be marked COMPLETE below.
        base_image.status="COMPLETE"
        base_image.percent_complete=100
        pim.save_image(base_image)
        return base_image
| apache-2.0 |
calebtrahan/KujiIn_Python | backup/guitemplates/designer2.py | 1 | 47646 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'designer.ui'
#
# Created: Tue Dec 23 17:26:50 2014
# by: PyQt4 UI code generator 4.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
    # PyQt4 with the QString v1 API: use Qt's own UTF-8 conversion.
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # QString is absent (v2 API / Python 3): native str is already
    # Unicode, so pass values through unchanged.
    def _fromUtf8(s):
        return s
try:
    # Older PyQt4 builds: translate() takes an explicit encoding argument.
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Newer builds dropped the encoding parameter entirely.
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(1300, 833)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.frame = QtGui.QFrame(self.centralwidget)
self.frame.setGeometry(QtCore.QRect(0, 0, 1301, 791))
self.frame.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame.setFrameShadow(QtGui.QFrame.Raised)
self.frame.setObjectName(_fromUtf8("frame"))
self.horizontalLayoutWidget_3 = QtGui.QWidget(self.frame)
self.horizontalLayoutWidget_3.setGeometry(QtCore.QRect(20, 80, 731, 51))
self.horizontalLayoutWidget_3.setObjectName(_fromUtf8("horizontalLayoutWidget_3"))
self.DurationLabels_2 = QtGui.QHBoxLayout(self.horizontalLayoutWidget_3)
self.DurationLabels_2.setMargin(0)
self.DurationLabels_2.setObjectName(_fromUtf8("DurationLabels_2"))
self.preLabel_2 = QtGui.QLabel(self.horizontalLayoutWidget_3)
self.preLabel_2.setAlignment(QtCore.Qt.AlignCenter)
self.preLabel_2.setObjectName(_fromUtf8("preLabel_2"))
self.DurationLabels_2.addWidget(self.preLabel_2)
self.rinLabel_2 = QtGui.QLabel(self.horizontalLayoutWidget_3)
self.rinLabel_2.setAlignment(QtCore.Qt.AlignCenter)
self.rinLabel_2.setObjectName(_fromUtf8("rinLabel_2"))
self.DurationLabels_2.addWidget(self.rinLabel_2)
self.kyoLabel_2 = QtGui.QLabel(self.horizontalLayoutWidget_3)
self.kyoLabel_2.setAlignment(QtCore.Qt.AlignCenter)
self.kyoLabel_2.setObjectName(_fromUtf8("kyoLabel_2"))
self.DurationLabels_2.addWidget(self.kyoLabel_2)
self.tohLabel_2 = QtGui.QLabel(self.horizontalLayoutWidget_3)
self.tohLabel_2.setAlignment(QtCore.Qt.AlignCenter)
self.tohLabel_2.setObjectName(_fromUtf8("tohLabel_2"))
self.DurationLabels_2.addWidget(self.tohLabel_2)
self.shaLabel_2 = QtGui.QLabel(self.horizontalLayoutWidget_3)
self.shaLabel_2.setAlignment(QtCore.Qt.AlignCenter)
self.shaLabel_2.setObjectName(_fromUtf8("shaLabel_2"))
self.DurationLabels_2.addWidget(self.shaLabel_2)
self.kaiLabel_2 = QtGui.QLabel(self.horizontalLayoutWidget_3)
self.kaiLabel_2.setAlignment(QtCore.Qt.AlignCenter)
self.kaiLabel_2.setObjectName(_fromUtf8("kaiLabel_2"))
self.DurationLabels_2.addWidget(self.kaiLabel_2)
self.jinLabel_2 = QtGui.QLabel(self.horizontalLayoutWidget_3)
self.jinLabel_2.setAlignment(QtCore.Qt.AlignCenter)
self.jinLabel_2.setObjectName(_fromUtf8("jinLabel_2"))
self.DurationLabels_2.addWidget(self.jinLabel_2)
self.retsuLabel_2 = QtGui.QLabel(self.horizontalLayoutWidget_3)
self.retsuLabel_2.setAlignment(QtCore.Qt.AlignCenter)
self.retsuLabel_2.setObjectName(_fromUtf8("retsuLabel_2"))
self.DurationLabels_2.addWidget(self.retsuLabel_2)
self.zaiLabel_2 = QtGui.QLabel(self.horizontalLayoutWidget_3)
self.zaiLabel_2.setAlignment(QtCore.Qt.AlignCenter)
self.zaiLabel_2.setObjectName(_fromUtf8("zaiLabel_2"))
self.DurationLabels_2.addWidget(self.zaiLabel_2)
self.zenLabel_2 = QtGui.QLabel(self.horizontalLayoutWidget_3)
self.zenLabel_2.setAlignment(QtCore.Qt.AlignCenter)
self.zenLabel_2.setObjectName(_fromUtf8("zenLabel_2"))
self.DurationLabels_2.addWidget(self.zenLabel_2)
self.postLabel_2 = QtGui.QLabel(self.horizontalLayoutWidget_3)
self.postLabel_2.setAlignment(QtCore.Qt.AlignCenter)
self.postLabel_2.setObjectName(_fromUtf8("postLabel_2"))
self.DurationLabels_2.addWidget(self.postLabel_2)
self.sessionslidersLayout = QtGui.QWidget(self.frame)
self.sessionslidersLayout.setGeometry(QtCore.QRect(10, 130, 761, 451))
self.sessionslidersLayout.setObjectName(_fromUtf8("sessionslidersLayout"))
self.DurationSliders_2 = QtGui.QHBoxLayout(self.sessionslidersLayout)
self.DurationSliders_2.setMargin(0)
self.DurationSliders_2.setObjectName(_fromUtf8("DurationSliders_2"))
self.preSlider_2 = QtGui.QSlider(self.sessionslidersLayout)
self.preSlider_2.setMaximum(90)
self.preSlider_2.setSingleStep(5)
self.preSlider_2.setPageStep(5)
self.preSlider_2.setTracking(False)
self.preSlider_2.setOrientation(QtCore.Qt.Vertical)
self.preSlider_2.setTickPosition(QtGui.QSlider.TicksBothSides)
self.preSlider_2.setObjectName(_fromUtf8("preSlider_2"))
self.DurationSliders_2.addWidget(self.preSlider_2)
self.rinSlider_2 = QtGui.QSlider(self.sessionslidersLayout)
self.rinSlider_2.setMaximum(90)
self.rinSlider_2.setSingleStep(5)
self.rinSlider_2.setPageStep(5)
self.rinSlider_2.setOrientation(QtCore.Qt.Vertical)
self.rinSlider_2.setTickPosition(QtGui.QSlider.TicksBothSides)
self.rinSlider_2.setObjectName(_fromUtf8("rinSlider_2"))
self.DurationSliders_2.addWidget(self.rinSlider_2)
self.kyoSlider_2 = QtGui.QSlider(self.sessionslidersLayout)
self.kyoSlider_2.setMaximum(90)
self.kyoSlider_2.setSingleStep(5)
self.kyoSlider_2.setPageStep(5)
self.kyoSlider_2.setOrientation(QtCore.Qt.Vertical)
self.kyoSlider_2.setTickPosition(QtGui.QSlider.TicksBothSides)
self.kyoSlider_2.setObjectName(_fromUtf8("kyoSlider_2"))
self.DurationSliders_2.addWidget(self.kyoSlider_2)
self.tohSlider_2 = QtGui.QSlider(self.sessionslidersLayout)
self.tohSlider_2.setMaximum(90)
self.tohSlider_2.setSingleStep(5)
self.tohSlider_2.setPageStep(5)
self.tohSlider_2.setOrientation(QtCore.Qt.Vertical)
self.tohSlider_2.setTickPosition(QtGui.QSlider.TicksBothSides)
self.tohSlider_2.setObjectName(_fromUtf8("tohSlider_2"))
self.DurationSliders_2.addWidget(self.tohSlider_2)
self.shaSlider_2 = QtGui.QSlider(self.sessionslidersLayout)
self.shaSlider_2.setMaximum(90)
self.shaSlider_2.setSingleStep(5)
self.shaSlider_2.setPageStep(5)
self.shaSlider_2.setOrientation(QtCore.Qt.Vertical)
self.shaSlider_2.setTickPosition(QtGui.QSlider.TicksBothSides)
self.shaSlider_2.setObjectName(_fromUtf8("shaSlider_2"))
self.DurationSliders_2.addWidget(self.shaSlider_2)
self.kaiSlider_2 = QtGui.QSlider(self.sessionslidersLayout)
self.kaiSlider_2.setMaximum(90)
self.kaiSlider_2.setSingleStep(5)
self.kaiSlider_2.setPageStep(5)
self.kaiSlider_2.setOrientation(QtCore.Qt.Vertical)
self.kaiSlider_2.setTickPosition(QtGui.QSlider.TicksBothSides)
self.kaiSlider_2.setObjectName(_fromUtf8("kaiSlider_2"))
self.DurationSliders_2.addWidget(self.kaiSlider_2)
self.jinSlider_2 = QtGui.QSlider(self.sessionslidersLayout)
self.jinSlider_2.setMaximum(90)
self.jinSlider_2.setSingleStep(5)
self.jinSlider_2.setPageStep(5)
self.jinSlider_2.setOrientation(QtCore.Qt.Vertical)
self.jinSlider_2.setTickPosition(QtGui.QSlider.TicksBothSides)
self.jinSlider_2.setObjectName(_fromUtf8("jinSlider_2"))
self.DurationSliders_2.addWidget(self.jinSlider_2)
self.retsuSlider_2 = QtGui.QSlider(self.sessionslidersLayout)
self.retsuSlider_2.setMaximum(90)
self.retsuSlider_2.setSingleStep(5)
self.retsuSlider_2.setPageStep(5)
self.retsuSlider_2.setOrientation(QtCore.Qt.Vertical)
self.retsuSlider_2.setTickPosition(QtGui.QSlider.TicksBothSides)
self.retsuSlider_2.setObjectName(_fromUtf8("retsuSlider_2"))
self.DurationSliders_2.addWidget(self.retsuSlider_2)
self.zaiSlider_2 = QtGui.QSlider(self.sessionslidersLayout)
self.zaiSlider_2.setMaximum(90)
self.zaiSlider_2.setSingleStep(5)
self.zaiSlider_2.setPageStep(5)
self.zaiSlider_2.setOrientation(QtCore.Qt.Vertical)
self.zaiSlider_2.setTickPosition(QtGui.QSlider.TicksBothSides)
self.zaiSlider_2.setObjectName(_fromUtf8("zaiSlider_2"))
self.DurationSliders_2.addWidget(self.zaiSlider_2)
self.zenSlider_2 = QtGui.QSlider(self.sessionslidersLayout)
self.zenSlider_2.setMaximum(90)
self.zenSlider_2.setSingleStep(5)
self.zenSlider_2.setPageStep(5)
self.zenSlider_2.setOrientation(QtCore.Qt.Vertical)
self.zenSlider_2.setTickPosition(QtGui.QSlider.TicksBothSides)
self.zenSlider_2.setObjectName(_fromUtf8("zenSlider_2"))
self.DurationSliders_2.addWidget(self.zenSlider_2)
self.postSlider_2 = QtGui.QSlider(self.sessionslidersLayout)
self.postSlider_2.setMaximum(90)
self.postSlider_2.setSingleStep(5)
self.postSlider_2.setPageStep(5)
self.postSlider_2.setOrientation(QtCore.Qt.Vertical)
self.postSlider_2.setTickPosition(QtGui.QSlider.TicksBothSides)
self.postSlider_2.setObjectName(_fromUtf8("postSlider_2"))
self.DurationSliders_2.addWidget(self.postSlider_2)
self.toptotalsLabel = QtGui.QLabel(self.frame)
self.toptotalsLabel.setGeometry(QtCore.QRect(280, 40, 221, 21))
self.toptotalsLabel.setAlignment(QtCore.Qt.AlignCenter)
self.toptotalsLabel.setObjectName(_fromUtf8("toptotalsLabel"))
self.horizontalLayoutWidget_2 = QtGui.QWidget(self.frame)
self.horizontalLayoutWidget_2.setGeometry(QtCore.QRect(20, 590, 721, 41))
self.horizontalLayoutWidget_2.setObjectName(_fromUtf8("horizontalLayoutWidget_2"))
self.CutDurationDisplays_2 = QtGui.QHBoxLayout(self.horizontalLayoutWidget_2)
self.CutDurationDisplays_2.setMargin(0)
self.CutDurationDisplays_2.setObjectName(_fromUtf8("CutDurationDisplays_2"))
self.preDisplay_2 = QtGui.QLCDNumber(self.horizontalLayoutWidget_2)
self.preDisplay_2.setSmallDecimalPoint(False)
self.preDisplay_2.setNumDigits(2)
self.preDisplay_2.setObjectName(_fromUtf8("preDisplay_2"))
self.CutDurationDisplays_2.addWidget(self.preDisplay_2)
self.rinDisplay_2 = QtGui.QLCDNumber(self.horizontalLayoutWidget_2)
self.rinDisplay_2.setNumDigits(2)
self.rinDisplay_2.setObjectName(_fromUtf8("rinDisplay_2"))
self.CutDurationDisplays_2.addWidget(self.rinDisplay_2)
self.kyoDisplay_2 = QtGui.QLCDNumber(self.horizontalLayoutWidget_2)
self.kyoDisplay_2.setNumDigits(2)
self.kyoDisplay_2.setObjectName(_fromUtf8("kyoDisplay_2"))
self.CutDurationDisplays_2.addWidget(self.kyoDisplay_2)
self.tohDisplay_2 = QtGui.QLCDNumber(self.horizontalLayoutWidget_2)
self.tohDisplay_2.setNumDigits(2)
self.tohDisplay_2.setObjectName(_fromUtf8("tohDisplay_2"))
self.CutDurationDisplays_2.addWidget(self.tohDisplay_2)
self.shaDisplay_2 = QtGui.QLCDNumber(self.horizontalLayoutWidget_2)
self.shaDisplay_2.setNumDigits(2)
self.shaDisplay_2.setObjectName(_fromUtf8("shaDisplay_2"))
self.CutDurationDisplays_2.addWidget(self.shaDisplay_2)
self.kaiDisplay_2 = QtGui.QLCDNumber(self.horizontalLayoutWidget_2)
self.kaiDisplay_2.setNumDigits(2)
self.kaiDisplay_2.setObjectName(_fromUtf8("kaiDisplay_2"))
self.CutDurationDisplays_2.addWidget(self.kaiDisplay_2)
self.jinDisplay_2 = QtGui.QLCDNumber(self.horizontalLayoutWidget_2)
self.jinDisplay_2.setNumDigits(2)
self.jinDisplay_2.setObjectName(_fromUtf8("jinDisplay_2"))
self.CutDurationDisplays_2.addWidget(self.jinDisplay_2)
self.retsuDisplay_2 = QtGui.QLCDNumber(self.horizontalLayoutWidget_2)
self.retsuDisplay_2.setNumDigits(2)
self.retsuDisplay_2.setObjectName(_fromUtf8("retsuDisplay_2"))
self.CutDurationDisplays_2.addWidget(self.retsuDisplay_2)
self.zaiDisplay_2 = QtGui.QLCDNumber(self.horizontalLayoutWidget_2)
self.zaiDisplay_2.setNumDigits(2)
self.zaiDisplay_2.setObjectName(_fromUtf8("zaiDisplay_2"))
self.CutDurationDisplays_2.addWidget(self.zaiDisplay_2)
self.zenDisplay_2 = QtGui.QLCDNumber(self.horizontalLayoutWidget_2)
self.zenDisplay_2.setNumDigits(2)
self.zenDisplay_2.setObjectName(_fromUtf8("zenDisplay_2"))
self.CutDurationDisplays_2.addWidget(self.zenDisplay_2)
self.postDisplay_2 = QtGui.QLCDNumber(self.horizontalLayoutWidget_2)
self.postDisplay_2.setNumDigits(2)
self.postDisplay_2.setObjectName(_fromUtf8("postDisplay_2"))
self.CutDurationDisplays_2.addWidget(self.postDisplay_2)
self.verticalLayoutWidget = QtGui.QWidget(self.frame)
self.verticalLayoutWidget.setGeometry(QtCore.QRect(790, 80, 91, 561))
self.verticalLayoutWidget.setObjectName(_fromUtf8("verticalLayoutWidget"))
self.totalprogressLabels = QtGui.QVBoxLayout(self.verticalLayoutWidget)
self.totalprogressLabels.setMargin(0)
self.totalprogressLabels.setObjectName(_fromUtf8("totalprogressLabels"))
self.rintotalLabel = QtGui.QLabel(self.verticalLayoutWidget)
self.rintotalLabel.setAlignment(QtCore.Qt.AlignCenter)
self.rintotalLabel.setObjectName(_fromUtf8("rintotalLabel"))
self.totalprogressLabels.addWidget(self.rintotalLabel)
self.kyototalLabel = QtGui.QLabel(self.verticalLayoutWidget)
self.kyototalLabel.setAlignment(QtCore.Qt.AlignCenter)
self.kyototalLabel.setObjectName(_fromUtf8("kyototalLabel"))
self.totalprogressLabels.addWidget(self.kyototalLabel)
self.tohtotalLabel = QtGui.QLabel(self.verticalLayoutWidget)
self.tohtotalLabel.setAlignment(QtCore.Qt.AlignCenter)
self.tohtotalLabel.setObjectName(_fromUtf8("tohtotalLabel"))
self.totalprogressLabels.addWidget(self.tohtotalLabel)
self.shatotalLabel = QtGui.QLabel(self.verticalLayoutWidget)
self.shatotalLabel.setAlignment(QtCore.Qt.AlignCenter)
self.shatotalLabel.setObjectName(_fromUtf8("shatotalLabel"))
self.totalprogressLabels.addWidget(self.shatotalLabel)
self.kaitotalLabel = QtGui.QLabel(self.verticalLayoutWidget)
self.kaitotalLabel.setAlignment(QtCore.Qt.AlignCenter)
self.kaitotalLabel.setObjectName(_fromUtf8("kaitotalLabel"))
self.totalprogressLabels.addWidget(self.kaitotalLabel)
self.jintotalLabel = QtGui.QLabel(self.verticalLayoutWidget)
self.jintotalLabel.setAlignment(QtCore.Qt.AlignCenter)
self.jintotalLabel.setObjectName(_fromUtf8("jintotalLabel"))
self.totalprogressLabels.addWidget(self.jintotalLabel)
self.retsutotalLabel = QtGui.QLabel(self.verticalLayoutWidget)
self.retsutotalLabel.setAlignment(QtCore.Qt.AlignCenter)
self.retsutotalLabel.setObjectName(_fromUtf8("retsutotalLabel"))
self.totalprogressLabels.addWidget(self.retsutotalLabel)
self.zaitotalLabel = QtGui.QLabel(self.verticalLayoutWidget)
self.zaitotalLabel.setAlignment(QtCore.Qt.AlignCenter)
self.zaitotalLabel.setObjectName(_fromUtf8("zaitotalLabel"))
self.totalprogressLabels.addWidget(self.zaitotalLabel)
self.zentotalLabel = QtGui.QLabel(self.verticalLayoutWidget)
self.zentotalLabel.setAlignment(QtCore.Qt.AlignCenter)
self.zentotalLabel.setObjectName(_fromUtf8("zentotalLabel"))
self.totalprogressLabels.addWidget(self.zentotalLabel)
self.verticalLayoutWidget_2 = QtGui.QWidget(self.frame)
self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(880, 80, 66, 561))
self.verticalLayoutWidget_2.setObjectName(_fromUtf8("verticalLayoutWidget_2"))
self.totaldaysLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget_2)
self.totaldaysLayout.setMargin(0)
self.totaldaysLayout.setObjectName(_fromUtf8("totaldaysLayout"))
self.rintotaldaysDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_2)
self.rintotaldaysDisplay.setObjectName(_fromUtf8("rintotaldaysDisplay"))
self.totaldaysLayout.addWidget(self.rintotaldaysDisplay)
self.kyototaldaysDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_2)
self.kyototaldaysDisplay.setObjectName(_fromUtf8("kyototaldaysDisplay"))
self.totaldaysLayout.addWidget(self.kyototaldaysDisplay)
self.tohtotaldaysDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_2)
self.tohtotaldaysDisplay.setObjectName(_fromUtf8("tohtotaldaysDisplay"))
self.totaldaysLayout.addWidget(self.tohtotaldaysDisplay)
self.shatotaldaysDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_2)
self.shatotaldaysDisplay.setObjectName(_fromUtf8("shatotaldaysDisplay"))
self.totaldaysLayout.addWidget(self.shatotaldaysDisplay)
self.kaitotaldaysDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_2)
self.kaitotaldaysDisplay.setObjectName(_fromUtf8("kaitotaldaysDisplay"))
self.totaldaysLayout.addWidget(self.kaitotaldaysDisplay)
self.jintotaldaysDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_2)
self.jintotaldaysDisplay.setObjectName(_fromUtf8("jintotaldaysDisplay"))
self.totaldaysLayout.addWidget(self.jintotaldaysDisplay)
self.retsutotaldaysDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_2)
self.retsutotaldaysDisplay.setObjectName(_fromUtf8("retsutotaldaysDisplay"))
self.totaldaysLayout.addWidget(self.retsutotaldaysDisplay)
self.zaitotaldaysDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_2)
self.zaitotaldaysDisplay.setObjectName(_fromUtf8("zaitotaldaysDisplay"))
self.totaldaysLayout.addWidget(self.zaitotaldaysDisplay)
self.zentotaldaysDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_2)
self.zentotaldaysDisplay.setObjectName(_fromUtf8("zentotaldaysDisplay"))
self.totaldaysLayout.addWidget(self.zentotaldaysDisplay)
self.verticalLayoutWidget_3 = QtGui.QWidget(self.frame)
self.verticalLayoutWidget_3.setGeometry(QtCore.QRect(950, 80, 55, 561))
self.verticalLayoutWidget_3.setObjectName(_fromUtf8("verticalLayoutWidget_3"))
self.totaldaysLabels = QtGui.QVBoxLayout(self.verticalLayoutWidget_3)
self.totaldaysLabels.setMargin(0)
self.totaldaysLabels.setObjectName(_fromUtf8("totaldaysLabels"))
self.label_6 = QtGui.QLabel(self.verticalLayoutWidget_3)
self.label_6.setObjectName(_fromUtf8("label_6"))
self.totaldaysLabels.addWidget(self.label_6)
self.label_5 = QtGui.QLabel(self.verticalLayoutWidget_3)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.totaldaysLabels.addWidget(self.label_5)
self.label_4 = QtGui.QLabel(self.verticalLayoutWidget_3)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.totaldaysLabels.addWidget(self.label_4)
self.label_7 = QtGui.QLabel(self.verticalLayoutWidget_3)
self.label_7.setObjectName(_fromUtf8("label_7"))
self.totaldaysLabels.addWidget(self.label_7)
self.label_9 = QtGui.QLabel(self.verticalLayoutWidget_3)
self.label_9.setObjectName(_fromUtf8("label_9"))
self.totaldaysLabels.addWidget(self.label_9)
self.label_8 = QtGui.QLabel(self.verticalLayoutWidget_3)
self.label_8.setObjectName(_fromUtf8("label_8"))
self.totaldaysLabels.addWidget(self.label_8)
self.label_3 = QtGui.QLabel(self.verticalLayoutWidget_3)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.totaldaysLabels.addWidget(self.label_3)
self.label_2 = QtGui.QLabel(self.verticalLayoutWidget_3)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.totaldaysLabels.addWidget(self.label_2)
self.label = QtGui.QLabel(self.verticalLayoutWidget_3)
self.label.setObjectName(_fromUtf8("label"))
self.totaldaysLabels.addWidget(self.label)
self.verticalLayoutWidget_4 = QtGui.QWidget(self.frame)
self.verticalLayoutWidget_4.setGeometry(QtCore.QRect(1010, 80, 66, 561))
self.verticalLayoutWidget_4.setObjectName(_fromUtf8("verticalLayoutWidget_4"))
self.totalhoursLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget_4)
self.totalhoursLayout.setMargin(0)
self.totalhoursLayout.setObjectName(_fromUtf8("totalhoursLayout"))
self.rintotalhoursDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_4)
self.rintotalhoursDisplay.setObjectName(_fromUtf8("rintotalhoursDisplay"))
self.totalhoursLayout.addWidget(self.rintotalhoursDisplay)
self.kyototalhoursDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_4)
self.kyototalhoursDisplay.setObjectName(_fromUtf8("kyototalhoursDisplay"))
self.totalhoursLayout.addWidget(self.kyototalhoursDisplay)
self.tohtotalhoursDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_4)
self.tohtotalhoursDisplay.setObjectName(_fromUtf8("tohtotalhoursDisplay"))
self.totalhoursLayout.addWidget(self.tohtotalhoursDisplay)
self.shatotalhoursDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_4)
self.shatotalhoursDisplay.setObjectName(_fromUtf8("shatotalhoursDisplay"))
self.totalhoursLayout.addWidget(self.shatotalhoursDisplay)
self.kaitotalhoursDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_4)
self.kaitotalhoursDisplay.setObjectName(_fromUtf8("kaitotalhoursDisplay"))
self.totalhoursLayout.addWidget(self.kaitotalhoursDisplay)
self.jintotalhoursDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_4)
self.jintotalhoursDisplay.setObjectName(_fromUtf8("jintotalhoursDisplay"))
self.totalhoursLayout.addWidget(self.jintotalhoursDisplay)
self.retsutotalhoursDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_4)
self.retsutotalhoursDisplay.setObjectName(_fromUtf8("retsutotalhoursDisplay"))
self.totalhoursLayout.addWidget(self.retsutotalhoursDisplay)
self.zaitotalhoursDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_4)
self.zaitotalhoursDisplay.setObjectName(_fromUtf8("zaitotalhoursDisplay"))
self.totalhoursLayout.addWidget(self.zaitotalhoursDisplay)
self.zentotalhoursDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_4)
self.zentotalhoursDisplay.setObjectName(_fromUtf8("zentotalhoursDisplay"))
self.totalhoursLayout.addWidget(self.zentotalhoursDisplay)
self.verticalLayoutWidget_5 = QtGui.QWidget(self.frame)
self.verticalLayoutWidget_5.setGeometry(QtCore.QRect(1080, 80, 55, 561))
self.verticalLayoutWidget_5.setObjectName(_fromUtf8("verticalLayoutWidget_5"))
self.totalhoursLabels = QtGui.QVBoxLayout(self.verticalLayoutWidget_5)
self.totalhoursLabels.setMargin(0)
self.totalhoursLabels.setObjectName(_fromUtf8("totalhoursLabels"))
self.label_11 = QtGui.QLabel(self.verticalLayoutWidget_5)
self.label_11.setObjectName(_fromUtf8("label_11"))
self.totalhoursLabels.addWidget(self.label_11)
self.label_14 = QtGui.QLabel(self.verticalLayoutWidget_5)
self.label_14.setObjectName(_fromUtf8("label_14"))
self.totalhoursLabels.addWidget(self.label_14)
self.label_15 = QtGui.QLabel(self.verticalLayoutWidget_5)
self.label_15.setObjectName(_fromUtf8("label_15"))
self.totalhoursLabels.addWidget(self.label_15)
self.label_18 = QtGui.QLabel(self.verticalLayoutWidget_5)
self.label_18.setObjectName(_fromUtf8("label_18"))
self.totalhoursLabels.addWidget(self.label_18)
self.label_17 = QtGui.QLabel(self.verticalLayoutWidget_5)
self.label_17.setObjectName(_fromUtf8("label_17"))
self.totalhoursLabels.addWidget(self.label_17)
self.label_16 = QtGui.QLabel(self.verticalLayoutWidget_5)
self.label_16.setObjectName(_fromUtf8("label_16"))
self.totalhoursLabels.addWidget(self.label_16)
self.label_13 = QtGui.QLabel(self.verticalLayoutWidget_5)
self.label_13.setObjectName(_fromUtf8("label_13"))
self.totalhoursLabels.addWidget(self.label_13)
self.label_12 = QtGui.QLabel(self.verticalLayoutWidget_5)
self.label_12.setObjectName(_fromUtf8("label_12"))
self.totalhoursLabels.addWidget(self.label_12)
self.label_10 = QtGui.QLabel(self.verticalLayoutWidget_5)
self.label_10.setObjectName(_fromUtf8("label_10"))
self.totalhoursLabels.addWidget(self.label_10)
self.verticalLayoutWidget_6 = QtGui.QWidget(self.frame)
self.verticalLayoutWidget_6.setGeometry(QtCore.QRect(1140, 80, 66, 561))
self.verticalLayoutWidget_6.setObjectName(_fromUtf8("verticalLayoutWidget_6"))
self.totalminutesLayout = QtGui.QVBoxLayout(self.verticalLayoutWidget_6)
self.totalminutesLayout.setMargin(0)
self.totalminutesLayout.setObjectName(_fromUtf8("totalminutesLayout"))
self.rintotalminutesDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_6)
self.rintotalminutesDisplay.setObjectName(_fromUtf8("rintotalminutesDisplay"))
self.totalminutesLayout.addWidget(self.rintotalminutesDisplay)
self.kyototalminutesDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_6)
self.kyototalminutesDisplay.setObjectName(_fromUtf8("kyototalminutesDisplay"))
self.totalminutesLayout.addWidget(self.kyototalminutesDisplay)
self.tohtotalminutesDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_6)
self.tohtotalminutesDisplay.setObjectName(_fromUtf8("tohtotalminutesDisplay"))
self.totalminutesLayout.addWidget(self.tohtotalminutesDisplay)
self.shatotalminutesDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_6)
self.shatotalminutesDisplay.setObjectName(_fromUtf8("shatotalminutesDisplay"))
self.totalminutesLayout.addWidget(self.shatotalminutesDisplay)
self.kaitotalminutesDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_6)
self.kaitotalminutesDisplay.setObjectName(_fromUtf8("kaitotalminutesDisplay"))
self.totalminutesLayout.addWidget(self.kaitotalminutesDisplay)
self.jintotalminutesDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_6)
self.jintotalminutesDisplay.setObjectName(_fromUtf8("jintotalminutesDisplay"))
self.totalminutesLayout.addWidget(self.jintotalminutesDisplay)
self.retsutotalminutesDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_6)
self.retsutotalminutesDisplay.setObjectName(_fromUtf8("retsutotalminutesDisplay"))
self.totalminutesLayout.addWidget(self.retsutotalminutesDisplay)
self.zaitotalminutesDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_6)
self.zaitotalminutesDisplay.setObjectName(_fromUtf8("zaitotalminutesDisplay"))
self.totalminutesLayout.addWidget(self.zaitotalminutesDisplay)
self.zentotalminutesDisplay = QtGui.QLCDNumber(self.verticalLayoutWidget_6)
self.zentotalminutesDisplay.setObjectName(_fromUtf8("zentotalminutesDisplay"))
self.totalminutesLayout.addWidget(self.zentotalminutesDisplay)
self.verticalLayoutWidget_7 = QtGui.QWidget(self.frame)
self.verticalLayoutWidget_7.setGeometry(QtCore.QRect(1210, 80, 71, 561))
self.verticalLayoutWidget_7.setObjectName(_fromUtf8("verticalLayoutWidget_7"))
self.totaldaysLayout_6 = QtGui.QVBoxLayout(self.verticalLayoutWidget_7)
self.totaldaysLayout_6.setMargin(0)
self.totaldaysLayout_6.setObjectName(_fromUtf8("totaldaysLayout_6"))
self.label_19 = QtGui.QLabel(self.verticalLayoutWidget_7)
self.label_19.setObjectName(_fromUtf8("label_19"))
self.totaldaysLayout_6.addWidget(self.label_19)
self.label_23 = QtGui.QLabel(self.verticalLayoutWidget_7)
self.label_23.setObjectName(_fromUtf8("label_23"))
self.totaldaysLayout_6.addWidget(self.label_23)
self.label_22 = QtGui.QLabel(self.verticalLayoutWidget_7)
self.label_22.setObjectName(_fromUtf8("label_22"))
self.totaldaysLayout_6.addWidget(self.label_22)
self.label_21 = QtGui.QLabel(self.verticalLayoutWidget_7)
self.label_21.setObjectName(_fromUtf8("label_21"))
self.totaldaysLayout_6.addWidget(self.label_21)
self.label_25 = QtGui.QLabel(self.verticalLayoutWidget_7)
self.label_25.setObjectName(_fromUtf8("label_25"))
self.totaldaysLayout_6.addWidget(self.label_25)
self.label_20 = QtGui.QLabel(self.verticalLayoutWidget_7)
self.label_20.setObjectName(_fromUtf8("label_20"))
self.totaldaysLayout_6.addWidget(self.label_20)
self.label_26 = QtGui.QLabel(self.verticalLayoutWidget_7)
self.label_26.setObjectName(_fromUtf8("label_26"))
self.totaldaysLayout_6.addWidget(self.label_26)
self.label_24 = QtGui.QLabel(self.verticalLayoutWidget_7)
self.label_24.setObjectName(_fromUtf8("label_24"))
self.totaldaysLayout_6.addWidget(self.label_24)
self.label_27 = QtGui.QLabel(self.verticalLayoutWidget_7)
self.label_27.setObjectName(_fromUtf8("label_27"))
self.totaldaysLayout_6.addWidget(self.label_27)
self.horizontalLayoutWidget_6 = QtGui.QWidget(self.frame)
self.horizontalLayoutWidget_6.setGeometry(QtCore.QRect(790, 19, 491, 61))
self.horizontalLayoutWidget_6.setObjectName(_fromUtf8("horizontalLayoutWidget_6"))
self.horizontalLayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget_6)
self.horizontalLayout.setMargin(0)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.topsessionLabel = QtGui.QLabel(self.horizontalLayoutWidget_6)
self.topsessionLabel.setAlignment(QtCore.Qt.AlignCenter)
self.topsessionLabel.setObjectName(_fromUtf8("topsessionLabel"))
self.horizontalLayout.addWidget(self.topsessionLabel)
self.verticalLayout = QtGui.QVBoxLayout()
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.pushButton = QtGui.QPushButton(self.horizontalLayoutWidget_6)
self.pushButton.setObjectName(_fromUtf8("pushButton"))
self.verticalLayout.addWidget(self.pushButton)
self.pushButton_3 = QtGui.QPushButton(self.horizontalLayoutWidget_6)
self.pushButton_3.setObjectName(_fromUtf8("pushButton_3"))
self.verticalLayout.addWidget(self.pushButton_3)
self.horizontalLayout.addLayout(self.verticalLayout)
self.horizontalLayoutWidget = QtGui.QWidget(self.frame)
self.horizontalLayoutWidget.setGeometry(QtCore.QRect(70, 730, 291, 41))
self.horizontalLayoutWidget.setObjectName(_fromUtf8("horizontalLayoutWidget"))
self.horizontalLayout_2 = QtGui.QHBoxLayout(self.horizontalLayoutWidget)
self.horizontalLayout_2.setMargin(0)
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.AmbienceOption = QtGui.QCheckBox(self.horizontalLayoutWidget)
self.AmbienceOption.setObjectName(_fromUtf8("AmbienceOption"))
self.horizontalLayout_2.addWidget(self.AmbienceOption)
self.ReferebceDisplayOption = QtGui.QCheckBox(self.horizontalLayoutWidget)
self.ReferebceDisplayOption.setObjectName(_fromUtf8("ReferebceDisplayOption"))
self.horizontalLayout_2.addWidget(self.ReferebceDisplayOption)
self.gridLayoutWidget = QtGui.QWidget(self.frame)
self.gridLayoutWidget.setGeometry(QtCore.QRect(390, 720, 351, 61))
self.gridLayoutWidget.setObjectName(_fromUtf8("gridLayoutWidget"))
self.gridLayout = QtGui.QGridLayout(self.gridLayoutWidget)
self.gridLayout.setMargin(0)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.actionsbuttonsLayout = QtGui.QHBoxLayout()
self.actionsbuttonsLayout.setObjectName(_fromUtf8("actionsbuttonsLayout"))
self.CreateButton = QtGui.QPushButton(self.gridLayoutWidget)
self.CreateButton.setObjectName(_fromUtf8("CreateButton"))
self.actionsbuttonsLayout.addWidget(self.CreateButton)
self.exportButton = QtGui.QPushButton(self.gridLayoutWidget)
self.exportButton.setObjectName(_fromUtf8("exportButton"))
self.actionsbuttonsLayout.addWidget(self.exportButton)
self.gridLayout.addLayout(self.actionsbuttonsLayout, 0, 0, 1, 1)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.PlayButton = QtGui.QPushButton(self.gridLayoutWidget)
self.PlayButton.setObjectName(_fromUtf8("PlayButton"))
self.horizontalLayout_3.addWidget(self.PlayButton)
self.pushButton_2 = QtGui.QPushButton(self.gridLayoutWidget)
self.pushButton_2.setObjectName(_fromUtf8("pushButton_2"))
self.horizontalLayout_3.addWidget(self.pushButton_2)
self.gridLayout.addLayout(self.horizontalLayout_3, 1, 0, 1, 1)
self.horizontalLayoutWidget_7 = QtGui.QWidget(self.frame)
self.horizontalLayoutWidget_7.setGeometry(QtCore.QRect(910, 740, 371, 41))
self.horizontalLayoutWidget_7.setObjectName(_fromUtf8("horizontalLayoutWidget_7"))
self.goalButtonsLayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget_7)
self.goalButtonsLayout.setMargin(0)
self.goalButtonsLayout.setObjectName(_fromUtf8("goalButtonsLayout"))
self.setgoalButton = QtGui.QPushButton(self.horizontalLayoutWidget_7)
self.setgoalButton.setObjectName(_fromUtf8("setgoalButton"))
self.goalButtonsLayout.addWidget(self.setgoalButton)
self.goalpacingButton = QtGui.QPushButton(self.horizontalLayoutWidget_7)
self.goalpacingButton.setObjectName(_fromUtf8("goalpacingButton"))
self.goalButtonsLayout.addWidget(self.goalpacingButton)
self.viewgoalsButton = QtGui.QPushButton(self.horizontalLayoutWidget_7)
self.viewgoalsButton.setObjectName(_fromUtf8("viewgoalsButton"))
self.goalButtonsLayout.addWidget(self.viewgoalsButton)
self.horizontalLayoutWidget_8 = QtGui.QWidget(self.frame)
self.horizontalLayoutWidget_8.setGeometry(QtCore.QRect(390, 650, 271, 51))
self.horizontalLayoutWidget_8.setObjectName(_fromUtf8("horizontalLayoutWidget_8"))
self.totalsessiondisplayLayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget_8)
self.totalsessiondisplayLayout.setMargin(0)
self.totalsessiondisplayLayout.setObjectName(_fromUtf8("totalsessiondisplayLayout"))
self.totalhoursDisplay = QtGui.QLCDNumber(self.horizontalLayoutWidget_8)
self.totalhoursDisplay.setObjectName(_fromUtf8("totalhoursDisplay"))
self.totalsessiondisplayLayout.addWidget(self.totalhoursDisplay)
self.totalhoursLabel = QtGui.QLabel(self.horizontalLayoutWidget_8)
self.totalhoursLabel.setObjectName(_fromUtf8("totalhoursLabel"))
self.totalsessiondisplayLayout.addWidget(self.totalhoursLabel)
self.totalMinutesDisplay = QtGui.QLCDNumber(self.horizontalLayoutWidget_8)
self.totalMinutesDisplay.setObjectName(_fromUtf8("totalMinutesDisplay"))
self.totalsessiondisplayLayout.addWidget(self.totalMinutesDisplay)
self.totalminutesLabel = QtGui.QLabel(self.horizontalLayoutWidget_8)
self.totalminutesLabel.setObjectName(_fromUtf8("totalminutesLabel"))
self.totalsessiondisplayLayout.addWidget(self.totalminutesLabel)
self.calculateTotalSessionTimeButton = QtGui.QPushButton(self.frame)
self.calculateTotalSessionTimeButton.setGeometry(QtCore.QRect(160, 670, 201, 23))
self.calculateTotalSessionTimeButton.setObjectName(_fromUtf8("calculateTotalSessionTimeButton"))
self.layoutWidget = QtGui.QWidget(self.frame)
self.layoutWidget.setGeometry(QtCore.QRect(790, 640, 491, 101))
self.layoutWidget.setObjectName(_fromUtf8("layoutWidget"))
self.goalsVLayout = QtGui.QVBoxLayout(self.layoutWidget)
self.goalsVLayout.setMargin(0)
self.goalsVLayout.setObjectName(_fromUtf8("goalsVLayout"))
self.goallabelsLayout = QtGui.QHBoxLayout()
self.goallabelsLayout.setObjectName(_fromUtf8("goallabelsLayout"))
self.currenttopLabel = QtGui.QLabel(self.layoutWidget)
self.currenttopLabel.setStyleSheet(_fromUtf8("border-top: 1px solid black;\n"
"border-left: 1px solid black;\n"
"border-right: 1px solid black;"))
self.currenttopLabel.setAlignment(QtCore.Qt.AlignCenter)
self.currenttopLabel.setObjectName(_fromUtf8("currenttopLabel"))
self.goallabelsLayout.addWidget(self.currenttopLabel)
self.progresstopLabel = QtGui.QLabel(self.layoutWidget)
self.progresstopLabel.setMinimumSize(QtCore.QSize(290, 0))
self.progresstopLabel.setStyleSheet(_fromUtf8("border-top: 1px solid black;"))
self.progresstopLabel.setAlignment(QtCore.Qt.AlignCenter)
self.progresstopLabel.setObjectName(_fromUtf8("progresstopLabel"))
self.goallabelsLayout.addWidget(self.progresstopLabel)
self.goaltopLabel = QtGui.QLabel(self.layoutWidget)
self.goaltopLabel.setStyleSheet(_fromUtf8("border-top: 1px solid black;\n"
"border-right: 1px solid black;\n"
"border-left: 1px solid black;"))
self.goaltopLabel.setAlignment(QtCore.Qt.AlignCenter)
self.goaltopLabel.setObjectName(_fromUtf8("goaltopLabel"))
self.goallabelsLayout.addWidget(self.goaltopLabel)
self.goalsVLayout.addLayout(self.goallabelsLayout)
self.goalactualLayout = QtGui.QHBoxLayout()
self.goalactualLayout.setObjectName(_fromUtf8("goalactualLayout"))
self.currentgoalLabel = QtGui.QLabel(self.layoutWidget)
self.currentgoalLabel.setMinimumSize(QtCore.QSize(100, 0))
self.currentgoalLabel.setBaseSize(QtCore.QSize(0, 0))
self.currentgoalLabel.setObjectName(_fromUtf8("currentgoalLabel"))
self.currentgoalLabel.setStyleSheet(_fromUtf8("color: #FFFFFF; border-left: 1px solid black;\n"
"border-bottom: 1px solid black;\n"
"border-right: 1px solid black;"))
self.currentgoalLabel.setAlignment(QtCore.Qt.AlignCenter)
self.currentgoalLabel.setMargin(0)
self.goalactualLayout.addWidget(self.currentgoalLabel)
self.goalProgressBar = QtGui.QProgressBar(self.layoutWidget)
self.goalProgressBar.setProperty("value", 24)
self.goalProgressBar.setObjectName(_fromUtf8("goalProgressBar"))
self.goalactualLayout.addWidget(self.goalProgressBar)
self.goalLabel = QtGui.QLabel(self.layoutWidget)
self.goalLabel.setMinimumSize(QtCore.QSize(100, 0))
self.goalLabel.setStyleSheet(_fromUtf8("border-right: 1px solid black;\n"
"border-left: 1px solid black;\n"
"border-bottom: 1px solid black;"))
self.goalLabel.setAlignment(QtCore.Qt.AlignCenter)
self.goalLabel.setObjectName(_fromUtf8("goalLabel"))
self.goalactualLayout.addWidget(self.goalLabel)
self.goalsVLayout.addLayout(self.goalactualLayout)
MainWindow.setCentralWidget(self.centralwidget)
self.statusBar = QtGui.QStatusBar(MainWindow)
self.statusBar.setObjectName(_fromUtf8("statusBar"))
MainWindow.setStatusBar(self.statusBar)
self.menuBar = QtGui.QMenuBar(MainWindow)
self.menuBar.setGeometry(QtCore.QRect(0, 0, 1300, 22))
self.menuBar.setObjectName(_fromUtf8("menuBar"))
self.menuFile = QtGui.QMenu(self.menuBar)
self.menuFile.setObjectName(_fromUtf8("menuFile"))
self.menuTools = QtGui.QMenu(self.menuBar)
self.menuTools.setObjectName(_fromUtf8("menuTools"))
self.menuHelp = QtGui.QMenu(self.menuBar)
self.menuHelp.setObjectName(_fromUtf8("menuHelp"))
MainWindow.setMenuBar(self.menuBar)
self.actionOpen_Session = QtGui.QAction(MainWindow)
self.actionOpen_Session.setObjectName(_fromUtf8("actionOpen_Session"))
self.actionExit = QtGui.QAction(MainWindow)
self.actionExit.setObjectName(_fromUtf8("actionExit"))
self.actionCheck_Integrity = QtGui.QAction(MainWindow)
self.actionCheck_Integrity.setObjectName(_fromUtf8("actionCheck_Integrity"))
self.actionAbout_This_Program = QtGui.QAction(MainWindow)
self.actionAbout_This_Program.setObjectName(_fromUtf8("actionAbout_This_Program"))
self.actionAbout_Qt = QtGui.QAction(MainWindow)
self.actionAbout_Qt.setObjectName(_fromUtf8("actionAbout_Qt"))
self.menuFile.addAction(self.actionOpen_Session)
self.menuFile.addAction(self.actionExit)
self.menuTools.addAction(self.actionCheck_Integrity)
self.menuHelp.addAction(self.actionAbout_This_Program)
self.menuHelp.addAction(self.actionAbout_Qt)
self.menuBar.addAction(self.menuFile.menuAction())
self.menuBar.addAction(self.menuTools.menuAction())
self.menuBar.addAction(self.menuHelp.menuAction())
self.retranslateUi(MainWindow)
QtCore.QObject.connect(self.PlayButton, QtCore.SIGNAL(_fromUtf8("clicked()")), self.statusBar.clearMessage)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow", None))
self.preLabel_2.setText(_translate("MainWindow", "PRE", None))
self.rinLabel_2.setText(_translate("MainWindow", "RIN", None))
self.kyoLabel_2.setText(_translate("MainWindow", "KYO", None))
self.tohLabel_2.setText(_translate("MainWindow", "TOH", None))
self.shaLabel_2.setText(_translate("MainWindow", "SHA", None))
self.kaiLabel_2.setText(_translate("MainWindow", "KAI", None))
self.jinLabel_2.setText(_translate("MainWindow", "JIN", None))
self.retsuLabel_2.setText(_translate("MainWindow", "RETSU", None))
self.zaiLabel_2.setText(_translate("MainWindow", "ZAI", None))
self.zenLabel_2.setText(_translate("MainWindow", "ZEN", None))
self.postLabel_2.setText(_translate("MainWindow", "POST", None))
self.toptotalsLabel.setText(_translate("MainWindow", "THIS SESSION", None))
self.rintotalLabel.setText(_translate("MainWindow", "RIN", None))
self.kyototalLabel.setText(_translate("MainWindow", "KYO", None))
self.tohtotalLabel.setText(_translate("MainWindow", "TOH", None))
self.shatotalLabel.setText(_translate("MainWindow", "SHA", None))
self.kaitotalLabel.setText(_translate("MainWindow", "KAI", None))
self.jintotalLabel.setText(_translate("MainWindow", "JIN", None))
self.retsutotalLabel.setText(_translate("MainWindow", "RETSU", None))
self.zaitotalLabel.setText(_translate("MainWindow", "ZAI", None))
self.zentotalLabel.setText(_translate("MainWindow", "ZEN", None))
self.label_6.setText(_translate("MainWindow", "Days", None))
self.label_5.setText(_translate("MainWindow", "Days", None))
self.label_4.setText(_translate("MainWindow", "Days", None))
self.label_7.setText(_translate("MainWindow", "Days", None))
self.label_9.setText(_translate("MainWindow", "Days", None))
self.label_8.setText(_translate("MainWindow", "Days", None))
self.label_3.setText(_translate("MainWindow", "Days", None))
self.label_2.setText(_translate("MainWindow", "Days", None))
self.label.setText(_translate("MainWindow", "Days", None))
self.label_11.setText(_translate("MainWindow", "Hours", None))
self.label_14.setText(_translate("MainWindow", "Hours", None))
self.label_15.setText(_translate("MainWindow", "Hours", None))
self.label_18.setText(_translate("MainWindow", "Hours", None))
self.label_17.setText(_translate("MainWindow", "Hours", None))
self.label_16.setText(_translate("MainWindow", "Hours", None))
self.label_13.setText(_translate("MainWindow", "Hours", None))
self.label_12.setText(_translate("MainWindow", "Hours", None))
self.label_10.setText(_translate("MainWindow", "Hours", None))
self.label_19.setText(_translate("MainWindow", "Minutes", None))
self.label_23.setText(_translate("MainWindow", "Minutes", None))
self.label_22.setText(_translate("MainWindow", "Minutes", None))
self.label_21.setText(_translate("MainWindow", "Minutes", None))
self.label_25.setText(_translate("MainWindow", "Minutes", None))
self.label_20.setText(_translate("MainWindow", "Minutes", None))
self.label_26.setText(_translate("MainWindow", "Minutes", None))
self.label_24.setText(_translate("MainWindow", "Minutes", None))
self.label_27.setText(_translate("MainWindow", "Minutes", None))
self.topsessionLabel.setText(_translate("MainWindow", "TOTAL PROGRESS", None))
self.pushButton.setText(_translate("MainWindow", "View List Of Sessions", None))
self.pushButton_3.setText(_translate("MainWindow", "View Premature Endings", None))
self.AmbienceOption.setText(_translate("MainWindow", "AMBIENCE", None))
self.ReferebceDisplayOption.setText(_translate("MainWindow", "REFERENCE FILES", None))
self.CreateButton.setText(_translate("MainWindow", "CREATE", None))
self.exportButton.setText(_translate("MainWindow", "EXPORT", None))
self.PlayButton.setText(_translate("MainWindow", "PLAY", None))
self.pushButton_2.setText(_translate("MainWindow", "PAUSE", None))
self.setgoalButton.setText(_translate("MainWindow", "Set New Goal", None))
self.goalpacingButton.setText(_translate("MainWindow", "Goal Pacing", None))
self.viewgoalsButton.setText(_translate("MainWindow", "View Goals", None))
self.totalhoursLabel.setText(_translate("MainWindow", "Hours", None))
self.totalminutesLabel.setText(_translate("MainWindow", "Minutes", None))
self.calculateTotalSessionTimeButton.setText(_translate("MainWindow", "Calculate Total Session Time", None))
self.currenttopLabel.setText(_translate("MainWindow", "Current", None))
self.progresstopLabel.setText(_translate("MainWindow", "Progress To Goal", None))
self.goaltopLabel.setText(_translate("MainWindow", "Goal", None))
self.goalLabel.setText(_translate("MainWindow", "300 hrs", None))
self.menuFile.setTitle(_translate("MainWindow", "File", None))
self.menuTools.setTitle(_translate("MainWindow", "Tools", None))
self.menuHelp.setTitle(_translate("MainWindow", "Help", None))
self.actionOpen_Session.setText(_translate("MainWindow", "Open Session", None))
self.actionExit.setText(_translate("MainWindow", "Exit", None))
self.actionCheck_Integrity.setText(_translate("MainWindow", "Check Integrity", None))
self.actionAbout_This_Program.setText(_translate("MainWindow", "About This Program", None))
self.actionAbout_Qt.setText(_translate("MainWindow", "About Qt", None))
| mit |
mtnman38/Aggregate | Executables/Aggregate 0.8.1 for Macintosh.app/Contents/Resources/lib/python2.7/email/message.py | 235 | 30720 | # Copyright (C) 2001-2006 Python Software Foundation
# Author: Barry Warsaw
# Contact: email-sig@python.org
"""Basic message object for the email package object model."""
# Public API of this module: only the Message class is exported.
__all__ = ['Message']
import re
import uu
import binascii
import warnings
from cStringIO import StringIO
# Intrapackage imports
import email.charset
from email import utils
from email import errors
# Separator used when joining a header value with its parameters.
SEMISPACE = '; '
# Regular expression that matches `special' characters in parameters, the
# existence of which force quoting of the parameter value.
tspecials = re.compile(r'[ \(\)<>@,;:\\"/\[\]\?=]')
# Helper functions
def _splitparam(param):
# Split header parameters. BAW: this may be too simple. It isn't
# strictly RFC 2045 (section 5.1) compliant, but it catches most headers
# found in the wild. We may eventually need a full fledged parser
# eventually.
a, sep, b = param.partition(';')
if not sep:
return a.strip(), None
return a.strip(), b.strip()
def _formatparam(param, value=None, quote=True):
"""Convenience function to format and return a key=value pair.
This will quote the value if needed or if quote is true. If value is a
three tuple (charset, language, value), it will be encoded according
to RFC2231 rules.
"""
if value is not None and len(value) > 0:
# A tuple is used for RFC 2231 encoded parameter values where items
# are (charset, language, value). charset is a string, not a Charset
# instance.
if isinstance(value, tuple):
# Encode as per RFC 2231
param += '*'
value = utils.encode_rfc2231(value[2], value[0], value[1])
# BAW: Please check this. I think that if quote is set it should
# force quoting even if not necessary.
if quote or tspecials.search(value):
return '%s="%s"' % (param, utils.quote(value))
else:
return '%s=%s' % (param, value)
else:
return param
def _parseparam(s):
plist = []
while s[:1] == ';':
s = s[1:]
end = s.find(';')
while end > 0 and (s.count('"', 0, end) - s.count('\\"', 0, end)) % 2:
end = s.find(';', end + 1)
if end < 0:
end = len(s)
f = s[:end]
if '=' in f:
i = f.index('=')
f = f[:i].strip().lower() + '=' + f[i+1:].strip()
plist.append(f.strip())
s = s[end:]
return plist
def _unquotevalue(value):
# This is different than utils.collapse_rfc2231_value() because it doesn't
# try to convert the value to a unicode. Message.get_param() and
# Message.get_params() are both currently defined to return the tuple in
# the face of RFC 2231 parameters.
if isinstance(value, tuple):
return value[0], value[1], utils.unquote(value[2])
else:
return utils.unquote(value)
class Message:
"""Basic message object.
A message object is defined as something that has a bunch of RFC 2822
headers and a payload. It may optionally have an envelope header
(a.k.a. Unix-From or From_ header). If the message is a container (i.e. a
multipart or a message/rfc822), then the payload is a list of Message
objects, otherwise it is a string.
Message objects implement part of the `mapping' interface, which assumes
there is exactly one occurrence of the header per message. Some headers
do in fact appear multiple times (e.g. Received) and for those headers,
you must use the explicit API to set or get all the headers. Not all of
the mapping methods are implemented.
"""
def __init__(self):
self._headers = []
self._unixfrom = None
self._payload = None
self._charset = None
# Defaults for multipart messages
self.preamble = self.epilogue = None
self.defects = []
# Default content type
self._default_type = 'text/plain'
    def __str__(self):
        """Return the entire formatted message as a string.
        This includes the headers, body, and envelope header.
        Equivalent to as_string(unixfrom=True).
        """
        return self.as_string(unixfrom=True)
def as_string(self, unixfrom=False):
"""Return the entire formatted message as a string.
Optional `unixfrom' when True, means include the Unix From_ envelope
header.
This is a convenience method and may not generate the message exactly
as you intend because by default it mangles lines that begin with
"From ". For more flexibility, use the flatten() method of a
Generator instance.
"""
from email.generator import Generator
fp = StringIO()
g = Generator(fp)
g.flatten(self, unixfrom=unixfrom)
return fp.getvalue()
def is_multipart(self):
"""Return True if the message consists of multiple parts."""
return isinstance(self._payload, list)
#
# Unix From_ line
#
    def set_unixfrom(self, unixfrom):
        """Record the Unix "From " envelope line, stored verbatim."""
        self._unixfrom = unixfrom
    def get_unixfrom(self):
        """Return the Unix "From " envelope line, or None if never set."""
        return self._unixfrom
#
# Payload manipulation.
#
def attach(self, payload):
"""Add the given payload to the current payload.
The current payload will always be a list of objects after this method
is called. If you want to set the payload to a scalar object, use
set_payload() instead.
"""
if self._payload is None:
self._payload = [payload]
else:
self._payload.append(payload)
    def get_payload(self, i=None, decode=False):
        """Return a reference to the payload.
        The payload will either be a list object or a string.  If you mutate
        the list object, you modify the message's payload in place.  Optional
        i returns that index into the payload.
        Optional decode is a flag indicating whether the payload should be
        decoded or not, according to the Content-Transfer-Encoding header
        (default is False).
        When True and the message is not a multipart, the payload will be
        decoded if this header's value is `quoted-printable' or `base64'.  If
        some other encoding is used, or the header is missing, or if the
        payload has bogus data (i.e. bogus base64 or uuencoded data), the
        payload is returned as-is.
        If the message is a multipart and the decode flag is True, then None
        is returned.
        """
        # Select the whole payload, or the i-th subpart for multiparts.
        if i is None:
            payload = self._payload
        elif not isinstance(self._payload, list):
            # An index only makes sense when the payload is a list of parts.
            raise TypeError('Expected list, got %s' % type(self._payload))
        else:
            payload = self._payload[i]
        if decode:
            # Decoding a multipart is meaningless; the API returns None.
            if self.is_multipart():
                return None
            cte = self.get('content-transfer-encoding', '').lower()
            if cte == 'quoted-printable':
                return utils._qdecode(payload)
            elif cte == 'base64':
                try:
                    return utils._bdecode(payload)
                except binascii.Error:
                    # Incorrect padding: hand back the raw payload as-is.
                    return payload
            elif cte in ('x-uuencode', 'uuencode', 'uue', 'x-uue'):
                # quiet=True suppresses uu's stderr chatter; the appended
                # newline keeps uu.decode happy with unterminated input.
                sfp = StringIO()
                try:
                    uu.decode(StringIO(payload+'\n'), sfp, quiet=True)
                    payload = sfp.getvalue()
                except uu.Error:
                    # Some decoding problem: return the payload as-is.
                    return payload
        # Everything else, including encodings with 8bit or 7bit are returned
        # unchanged.
        return payload
def set_payload(self, payload, charset=None):
"""Set the payload to the given value.
Optional charset sets the message's default character set. See
set_charset() for details.
"""
self._payload = payload
if charset is not None:
self.set_charset(charset)
    def set_charset(self, charset):
        """Set the charset of the payload to a given character set.
        charset can be a Charset instance, a string naming a character set, or
        None.  If it is a string it will be converted to a Charset instance.
        If charset is None, the charset parameter will be removed from the
        Content-Type field.  Anything else will generate a TypeError.
        The message will be assumed to be of type text/* encoded with
        charset.input_charset.  It will be converted to charset.output_charset
        and encoded properly, if needed, when generating the plain text
        representation of the message.  MIME headers (MIME-Version,
        Content-Type, Content-Transfer-Encoding) will be added as needed.
        """
        # None means "forget any charset": drop the parameter and the cache.
        if charset is None:
            self.del_param('charset')
            self._charset = None
            return
        # Accept a charset name and promote it to a Charset instance.
        if isinstance(charset, basestring):
            charset = email.charset.Charset(charset)
        if not isinstance(charset, email.charset.Charset):
            raise TypeError(charset)
        # BAW: should we accept strings that can serve as arguments to the
        # Charset constructor?
        self._charset = charset
        # Ensure the MIME framing headers exist before touching parameters.
        if 'MIME-Version' not in self:
            self.add_header('MIME-Version', '1.0')
        if 'Content-Type' not in self:
            self.add_header('Content-Type', 'text/plain',
                            charset=charset.get_output_charset())
        else:
            self.set_param('charset', charset.get_output_charset())
        # Unicode payloads are encoded to the charset's output encoding.
        if isinstance(self._payload, unicode):
            self._payload = self._payload.encode(charset.output_charset)
        # Transcode when the input and output charsets differ.
        if str(charset) != charset.get_output_charset():
            self._payload = charset.body_encode(self._payload)
        if 'Content-Transfer-Encoding' not in self:
            cte = charset.get_body_encoding()
            # get_body_encoding() returns either a callable that encodes the
            # message in place, or a plain CTE string to record as-is.
            try:
                cte(self)
            except TypeError:
                self._payload = charset.body_encode(self._payload)
                self.add_header('Content-Transfer-Encoding', cte)
    def get_charset(self):
        """Return the Charset instance associated with the message's payload.
        Returns None when set_charset() has never installed one.
        """
        return self._charset
#
# MAPPING INTERFACE (partial)
#
def __len__(self):
"""Return the total number of headers, including duplicates."""
return len(self._headers)
    def __getitem__(self, name):
        """Get a header value.
        Return None if the header is missing instead of raising an exception.
        Note that if the header appeared multiple times, exactly which
        occurrence gets returned is undefined.  Use get_all() to get all
        the values matching a header field name.
        """
        return self.get(name)
    def __setitem__(self, name, val):
        """Set the value of a header.
        Note: this does not overwrite an existing header with the same field
        name.  Use __delitem__() first to delete any existing headers.
        """
        self._headers.append((name, val))
def __delitem__(self, name):
"""Delete all occurrences of a header, if present.
Does not raise an exception if the header is missing.
"""
name = name.lower()
newheaders = []
for k, v in self._headers:
if k.lower() != name:
newheaders.append((k, v))
self._headers = newheaders
def __contains__(self, name):
return name.lower() in [k.lower() for k, v in self._headers]
def has_key(self, name):
"""Return true if the message contains the header."""
missing = object()
return self.get(name, missing) is not missing
def keys(self):
"""Return a list of all the message's header field names.
These will be sorted in the order they appeared in the original
message, or were added to the message, and may contain duplicates.
Any fields deleted and re-inserted are always appended to the header
list.
"""
return [k for k, v in self._headers]
def values(self):
"""Return a list of all the message's header values.
These will be sorted in the order they appeared in the original
message, or were added to the message, and may contain duplicates.
Any fields deleted and re-inserted are always appended to the header
list.
"""
return [v for k, v in self._headers]
def items(self):
"""Get all the message's header fields and values.
These will be sorted in the order they appeared in the original
message, or were added to the message, and may contain duplicates.
Any fields deleted and re-inserted are always appended to the header
list.
"""
return self._headers[:]
def get(self, name, failobj=None):
"""Get a header value.
Like __getitem__() but return failobj instead of None when the field
is missing.
"""
name = name.lower()
for k, v in self._headers:
if k.lower() == name:
return v
return failobj
#
# Additional useful stuff
#
def get_all(self, name, failobj=None):
"""Return a list of all the values for the named field.
These will be sorted in the order they appeared in the original
message, and may contain duplicates. Any fields deleted and
re-inserted are always appended to the header list.
If no such fields exist, failobj is returned (defaults to None).
"""
values = []
name = name.lower()
for k, v in self._headers:
if k.lower() == name:
values.append(v)
if not values:
return failobj
return values
def add_header(self, _name, _value, **_params):
"""Extended header setting.
name is the header field to add. keyword arguments can be used to set
additional parameters for the header field, with underscores converted
to dashes. Normally the parameter will be added as key="value" unless
value is None, in which case only the key will be added. If a
parameter value contains non-ASCII characters it must be specified as a
three-tuple of (charset, language, value), in which case it will be
encoded according to RFC2231 rules.
Example:
msg.add_header('content-disposition', 'attachment', filename='bud.gif')
"""
parts = []
for k, v in _params.items():
if v is None:
parts.append(k.replace('_', '-'))
else:
parts.append(_formatparam(k.replace('_', '-'), v))
if _value is not None:
parts.insert(0, _value)
self._headers.append((_name, SEMISPACE.join(parts)))
def replace_header(self, _name, _value):
"""Replace a header.
Replace the first matching header found in the message, retaining
header order and case. If no matching header was found, a KeyError is
raised.
"""
_name = _name.lower()
for i, (k, v) in zip(range(len(self._headers)), self._headers):
if k.lower() == _name:
self._headers[i] = (k, _value)
break
else:
raise KeyError(_name)
#
# Use these three methods instead of the three above.
#
def get_content_type(self):
"""Return the message's content type.
The returned string is coerced to lower case of the form
`maintype/subtype'. If there was no Content-Type header in the
message, the default type as given by get_default_type() will be
returned. Since according to RFC 2045, messages always have a default
type this will always return a value.
RFC 2045 defines a message's default type to be text/plain unless it
appears inside a multipart/digest container, in which case it would be
message/rfc822.
"""
missing = object()
value = self.get('content-type', missing)
if value is missing:
# This should have no parameters
return self.get_default_type()
ctype = _splitparam(value)[0].lower()
# RFC 2045, section 5.2 says if its invalid, use text/plain
if ctype.count('/') != 1:
return 'text/plain'
return ctype
def get_content_maintype(self):
"""Return the message's main content type.
This is the `maintype' part of the string returned by
get_content_type().
"""
ctype = self.get_content_type()
return ctype.split('/')[0]
def get_content_subtype(self):
"""Returns the message's sub-content type.
This is the `subtype' part of the string returned by
get_content_type().
"""
ctype = self.get_content_type()
return ctype.split('/')[1]
    def get_default_type(self):
        """Return the `default' content type.
        Most messages have a default content type of text/plain, except for
        messages that are subparts of multipart/digest containers.  Such
        subparts have a default content type of message/rfc822.
        """
        return self._default_type
    def set_default_type(self, ctype):
        """Set the `default' content type.
        ctype should be either "text/plain" or "message/rfc822", although this
        is not enforced.  The default content type is not stored in the
        Content-Type header.
        """
        self._default_type = ctype
def _get_params_preserve(self, failobj, header):
# Like get_params() but preserves the quoting of values. BAW:
# should this be part of the public interface?
missing = object()
value = self.get(header, missing)
if value is missing:
return failobj
params = []
for p in _parseparam(';' + value):
try:
name, val = p.split('=', 1)
name = name.strip()
val = val.strip()
except ValueError:
# Must have been a bare attribute
name = p.strip()
val = ''
params.append((name, val))
params = utils.decode_params(params)
return params
def get_params(self, failobj=None, header='content-type', unquote=True):
"""Return the message's Content-Type parameters, as a list.
The elements of the returned list are 2-tuples of key/value pairs, as
split on the `=' sign. The left hand side of the `=' is the key,
while the right hand side is the value. If there is no `=' sign in
the parameter the value is the empty string. The value is as
described in the get_param() method.
Optional failobj is the object to return if there is no Content-Type
header. Optional header is the header to search instead of
Content-Type. If unquote is True, the value is unquoted.
"""
missing = object()
params = self._get_params_preserve(missing, header)
if params is missing:
return failobj
if unquote:
return [(k, _unquotevalue(v)) for k, v in params]
else:
return params
def get_param(self, param, failobj=None, header='content-type',
unquote=True):
"""Return the parameter value if found in the Content-Type header.
Optional failobj is the object to return if there is no Content-Type
header, or the Content-Type header has no such parameter. Optional
header is the header to search instead of Content-Type.
Parameter keys are always compared case insensitively. The return
value can either be a string, or a 3-tuple if the parameter was RFC
2231 encoded. When it's a 3-tuple, the elements of the value are of
the form (CHARSET, LANGUAGE, VALUE). Note that both CHARSET and
LANGUAGE can be None, in which case you should consider VALUE to be
encoded in the us-ascii charset. You can usually ignore LANGUAGE.
Your application should be prepared to deal with 3-tuple return
values, and can convert the parameter to a Unicode string like so:
param = msg.get_param('foo')
if isinstance(param, tuple):
param = unicode(param[2], param[0] or 'us-ascii')
In any case, the parameter value (either the returned string, or the
VALUE item in the 3-tuple) is always unquoted, unless unquote is set
to False.
"""
if header not in self:
return failobj
for k, v in self._get_params_preserve(failobj, header):
if k.lower() == param.lower():
if unquote:
return _unquotevalue(v)
else:
return v
return failobj
    def set_param(self, param, value, header='Content-Type', requote=True,
                  charset=None, language=''):
        """Set a parameter in the Content-Type header.

        If the parameter already exists in the header, its value will be
        replaced with the new value.

        If header is Content-Type and has not yet been defined for this
        message, it will be set to "text/plain" and the new parameter and
        value will be appended as per RFC 2045.

        An alternate header can specified in the header argument, and all
        parameters will be quoted as necessary unless requote is False.

        If charset is specified, the parameter will be encoded according to RFC
        2231.  Optional language specifies the RFC 2231 language, defaulting
        to the empty string.  Both charset and language should be strings.
        """
        # An RFC 2231 charset turns the value into a (charset, language,
        # value) triple that _formatparam() knows how to encode.
        if not isinstance(value, tuple) and charset:
            value = (charset, language, value)
        if header not in self and header.lower() == 'content-type':
            ctype = 'text/plain'
        else:
            ctype = self.get(header)
        if not self.get_param(param, header=header):
            # Parameter not present yet: simply append it to the header.
            if not ctype:
                ctype = _formatparam(param, value, requote)
            else:
                ctype = SEMISPACE.join(
                    [ctype, _formatparam(param, value, requote)])
        else:
            # Parameter exists: rebuild the entire header value so the new
            # value lands in the same position, preserving parameter order.
            ctype = ''
            for old_param, old_value in self.get_params(header=header,
                                                        unquote=requote):
                append_param = ''
                if old_param.lower() == param.lower():
                    append_param = _formatparam(param, value, requote)
                else:
                    append_param = _formatparam(old_param, old_value, requote)
                if not ctype:
                    ctype = append_param
                else:
                    ctype = SEMISPACE.join([ctype, append_param])
        # Only rewrite the header (delete + re-add) if something changed.
        if ctype != self.get(header):
            del self[header]
            self[header] = ctype
def del_param(self, param, header='content-type', requote=True):
"""Remove the given parameter completely from the Content-Type header.
The header will be re-written in place without the parameter or its
value. All values will be quoted as necessary unless requote is
False. Optional header specifies an alternative to the Content-Type
header.
"""
if header not in self:
return
new_ctype = ''
for p, v in self.get_params(header=header, unquote=requote):
if p.lower() != param.lower():
if not new_ctype:
new_ctype = _formatparam(p, v, requote)
else:
new_ctype = SEMISPACE.join([new_ctype,
_formatparam(p, v, requote)])
if new_ctype != self.get(header):
del self[header]
self[header] = new_ctype
def set_type(self, type, header='Content-Type', requote=True):
"""Set the main type and subtype for the Content-Type header.
type must be a string in the form "maintype/subtype", otherwise a
ValueError is raised.
This method replaces the Content-Type header, keeping all the
parameters in place. If requote is False, this leaves the existing
header's quoting as is. Otherwise, the parameters will be quoted (the
default).
An alternative header can be specified in the header argument. When
the Content-Type header is set, we'll always also add a MIME-Version
header.
"""
# BAW: should we be strict?
if not type.count('/') == 1:
raise ValueError
# Set the Content-Type, you get a MIME-Version
if header.lower() == 'content-type':
del self['mime-version']
self['MIME-Version'] = '1.0'
if header not in self:
self[header] = type
return
params = self.get_params(header=header, unquote=requote)
del self[header]
self[header] = type
# Skip the first param; it's the old type.
for p, v in params[1:]:
self.set_param(p, v, header, requote)
def get_filename(self, failobj=None):
"""Return the filename associated with the payload if present.
The filename is extracted from the Content-Disposition header's
`filename' parameter, and it is unquoted. If that header is missing
the `filename' parameter, this method falls back to looking for the
`name' parameter.
"""
missing = object()
filename = self.get_param('filename', missing, 'content-disposition')
if filename is missing:
filename = self.get_param('name', missing, 'content-type')
if filename is missing:
return failobj
return utils.collapse_rfc2231_value(filename).strip()
def get_boundary(self, failobj=None):
"""Return the boundary associated with the payload if present.
The boundary is extracted from the Content-Type header's `boundary'
parameter, and it is unquoted.
"""
missing = object()
boundary = self.get_param('boundary', missing)
if boundary is missing:
return failobj
# RFC 2046 says that boundaries may begin but not end in w/s
return utils.collapse_rfc2231_value(boundary).rstrip()
    def set_boundary(self, boundary):
        """Set the boundary parameter in Content-Type to 'boundary'.

        This is subtly different than deleting the Content-Type header and
        adding a new one with a new boundary parameter via add_header().  The
        main difference is that using the set_boundary() method preserves the
        order of the Content-Type header in the original message.

        HeaderParseError is raised if the message has no Content-Type header.
        """
        missing = object()
        params = self._get_params_preserve(missing, 'content-type')
        if params is missing:
            # There was no Content-Type header, and we don't know what type
            # to set it to, so raise an exception.
            raise errors.HeaderParseError('No Content-Type header found')
        # Rebuild the parameter list, swapping in the new boundary while
        # keeping every other parameter (and its position) unchanged.
        newparams = []
        foundp = False
        for pk, pv in params:
            if pk.lower() == 'boundary':
                newparams.append(('boundary', '"%s"' % boundary))
                foundp = True
            else:
                newparams.append((pk, pv))
        if not foundp:
            # The original Content-Type header had no boundary attribute.
            # Tack one on the end.  BAW: should we raise an exception
            # instead???
            newparams.append(('boundary', '"%s"' % boundary))
        # Replace the existing Content-Type header with the new value,
        # leaving all other headers in place.
        newheaders = []
        for h, v in self._headers:
            if h.lower() == 'content-type':
                parts = []
                # Note: this inner loop reuses the name 'v', shadowing the
                # outer header value; harmless since the outer 'v' is not
                # used again in this branch.
                for k, v in newparams:
                    if v == '':
                        parts.append(k)
                    else:
                        parts.append('%s=%s' % (k, v))
                newheaders.append((h, SEMISPACE.join(parts)))
            else:
                newheaders.append((h, v))
        self._headers = newheaders
    def get_content_charset(self, failobj=None):
        """Return the charset parameter of the Content-Type header.

        The returned string is always coerced to lower case.  If there is no
        Content-Type header, or if that header has no charset parameter,
        failobj is returned.
        """
        missing = object()
        charset = self.get_param('charset', missing)
        if charset is missing:
            return failobj
        if isinstance(charset, tuple):
            # RFC 2231 encoded, so decode it, and it better end up as ascii.
            pcharset = charset[0] or 'us-ascii'
            try:
                # LookupError will be raised if the charset isn't known to
                # Python.  UnicodeError will be raised if the encoded text
                # contains a character not in the charset.
                # NOTE: this module targets Python 2 (uses the 'unicode'
                # builtin).
                charset = unicode(charset[2], pcharset).encode('us-ascii')
            except (LookupError, UnicodeError):
                charset = charset[2]
        # charset characters must be in the us-ascii range; anything else
        # is treated the same as a missing charset parameter.
        try:
            if isinstance(charset, str):
                charset = unicode(charset, 'us-ascii')
            charset = charset.encode('us-ascii')
        except UnicodeError:
            return failobj
        # RFC 2046, $4.1.2 says charsets are not case sensitive
        return charset.lower()
def get_charsets(self, failobj=None):
"""Return a list containing the charset(s) used in this message.
The returned list of items describes the Content-Type headers'
charset parameter for this message and all the subparts in its
payload.
Each item will either be a string (the value of the charset parameter
in the Content-Type header of that part) or the value of the
'failobj' parameter (defaults to None), if the part does not have a
main MIME type of "text", or the charset is not defined.
The list will contain one string for each part of the message, plus
one for the container message (i.e. self), so that a non-multipart
message will still return a list of length 1.
"""
return [part.get_content_charset(failobj) for part in self.walk()]
# I.e. def walk(self): ...
from email.iterators import walk
| gpl-2.0 |
wemanuel/smry | server-auth/ls/google-cloud-sdk/lib/requests/packages/chardet/constants.py | 3008 | 1335 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Module-wide debug flag (0 = off).
_debug = 0
# Detector result states — presumably: still scanning, confident match
# found, and "this prober has ruled itself out" (names suggest so; confirm
# against universaldetector usage).
eDetecting = 0
eFoundIt = 1
eNotMe = 2
# Coding state machine states shared by the charset probers.
eStart = 0
eError = 1
eItsMe = 2
# Confidence level above which detection can take a shortcut and stop early.
SHORTCUT_THRESHOLD = 0.95
| apache-2.0 |
lamastex/scalable-data-science | db/xtraResources/edXBigDataSeries2015/CS100-1x/Module 1: Lectures.py | 2 | 3202 | # Databricks notebook source exported at Mon, 14 Mar 2016 02:33:13 UTC
# MAGIC %md
# MAGIC **SOURCE:** This is from the Community Edition of databricks and has been added to this databricks shard at [/#workspace/scalable-data-science/xtraResources/edXBigDataSeries2015/CS100-1x](/#workspace/scalable-data-science/xtraResources/edXBigDataSeries2015/CS100-1x) as extra resources for the project-focussed course [Scalable Data Science](http://www.math.canterbury.ac.nz/~r.sainudiin/courses/ScalableDataScience/) that is prepared by [Raazesh Sainudiin](https://nz.linkedin.com/in/raazesh-sainudiin-45955845) and [Sivanand Sivaram](https://www.linkedin.com/in/sivanand), and *supported by* [](https://databricks.com/)
# MAGIC and
# MAGIC [](https://www.awseducate.com/microsite/CommunitiesEngageHome).
# COMMAND ----------
# MAGIC %md
# MAGIC ## Module One Lectures
# COMMAND ----------
# MAGIC %md
# MAGIC ### Lecture 1: Introduction to Big Data and Data Science
# COMMAND ----------
displayHTML('https://youtube.com/embed/7YtNEQRDMQE')
# COMMAND ----------
# MAGIC %md
# MAGIC #### Course Goals
# COMMAND ----------
displayHTML('https://youtube.com/embed/pYiOeHP6DuQ')
# COMMAND ----------
# MAGIC %md
# MAGIC #### Brief History of Data Analysis
# COMMAND ----------
displayHTML('https://youtube.com/embed/5fSSvYlDkag')
# COMMAND ----------
# MAGIC %md
# MAGIC #### Data Makes Everything Clearer (part 1)
# COMMAND ----------
displayHTML('https://youtube.com/embed/rbj189ATV2I')
# COMMAND ----------
# MAGIC %md
# MAGIC #### Nowcasting vs. Forecasting
# COMMAND ----------
displayHTML('https://youtube.com/embed/16wqonWTAsI')
# COMMAND ----------
# MAGIC %md
# MAGIC #### USA 2012 Presidential Election
# COMMAND ----------
displayHTML('https://youtube.com/embed/Smb-bWiGfMo')
# COMMAND ----------
# MAGIC %md
# MAGIC #### Facebook Lexicon
# COMMAND ----------
displayHTML('https://youtube.com/embed/XVEl6hzWfXA')
# COMMAND ----------
# MAGIC %md
# MAGIC #### Data Makes Everything Clear (part 2)
# COMMAND ----------
displayHTML('https://youtube.com/embed/_cwcr1EEYns')
# COMMAND ----------
# MAGIC %md
# MAGIC #### Where does Big Data come from?
# COMMAND ----------
displayHTML('https://youtube.com/embed/eEJFlHE7Gt4')
# COMMAND ----------
# MAGIC %md
# MAGIC ### Lecture 2: Performing Data Science and Preparing Data
# COMMAND ----------
# Each cell below embeds one YouTube lecture video in the notebook output.
# displayHTML is a Databricks notebook builtin (not importable outside the
# Databricks runtime); the '# COMMAND ----------' markers separate cells.
displayHTML('https://youtube.com/embed/_k5ViDVeZZs')
# COMMAND ----------
displayHTML('https://youtube.com/embed/g4ujW1m2QNc')
# COMMAND ----------
displayHTML('https://youtube.com/embed/c7KG0c3ADk0')
# COMMAND ----------
displayHTML('https://youtube.com/embed/yAOEyeDVn8s')
# COMMAND ----------
displayHTML('https://youtube.com/embed/TAZvh0WmOHM')
# COMMAND ----------
displayHTML('https://youtube.com/embed/MIqbwJ6AbIY')
# COMMAND ----------
displayHTML('https://youtube.com/embed/3V6ws_VEzaE')
# COMMAND ----------
displayHTML('https://youtube.com/embed/gB-9rdM6W1A') | unlicense |
garnaat/troposphere | troposphere/validators.py | 20 | 1842 | # Copyright (c) 2012-2013, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
def boolean(x):
    """Normalize a boolean-like value to the strings "true"/"false".

    Accepts Python bools, the ints 0/1 and their common string spellings;
    anything else raises ValueError.
    """
    truthy = (True, 1, '1', 'true', 'True')
    falsy = (False, 0, '0', 'false', 'False')
    if x in truthy:
        return "true"
    if x in falsy:
        return "false"
    raise ValueError
def integer(x):
    """Validate that x is convertible to int; return x unchanged."""
    try:
        int(x)
    except (ValueError, TypeError):
        raise ValueError("%r is not a valid integer" % x)
    return x


def positive_integer(x):
    """Validate a non-negative integer value (zero is accepted); return x."""
    checked = integer(x)
    if int(checked) < 0:
        raise ValueError("%r is not a positive integer" % x)
    return x
def integer_range(minimum_val, maximum_val):
    """Build a validator accepting integers in [minimum_val, maximum_val]."""
    def integer_range_checker(x):
        if not (minimum_val <= int(x) <= maximum_val):
            raise ValueError('Integer must be between %d and %d' % (
                minimum_val, maximum_val))
        return x
    return integer_range_checker
def network_port(x):
    """Validate a network port value; returns x unchanged.

    CloudFormation ``Ref``-style helper objects are passed through untouched
    (their value is only known at stack creation time).  -1 is accepted,
    presumably because some AWS resources use it to mean "all ports" —
    confirm against the relevant resource docs.
    """
    from . import AWSHelperFn

    # Network ports can be Ref items
    if isinstance(x, AWSHelperFn):
        return x

    i = integer(x)
    if int(i) < -1 or int(i) > 65535:
        # Fixed message: the accepted range actually starts at -1 (see the
        # check above), and the old text read "must been between 0 and 65535".
        raise ValueError("network port %r must be between -1 and 65535" % i)
    return x
def s3_bucket_name(b):
    """Validate an S3 bucket name; return it unchanged if valid.

    Enforces 3-63 characters of lowercase letters, digits, hyphens and
    periods, starting and ending with a letter or digit.  Consecutive
    periods are rejected as well, since S3 forbids them and the
    character-class regex alone cannot express that restriction.
    """
    import re

    # '..' is illegal anywhere in a bucket name.
    if '..' in b:
        raise ValueError("%s is not a valid s3 bucket name" % b)

    s3_bucket_name_re = re.compile(r'^[a-z\d][a-z\d\.-]{1,61}[a-z\d]$')
    if s3_bucket_name_re.match(b):
        return b
    raise ValueError("%s is not a valid s3 bucket name" % b)
def encoding(encoding):
    """Validate a content encoding; must be 'plain' or 'base64'."""
    valid_encodings = ['plain', 'base64']
    if encoding in valid_encodings:
        return encoding
    raise ValueError('Encoding needs to be one of %r' % valid_encodings)
def status(status):
    """Validate a status value; must be 'Active' or 'Inactive'."""
    valid_statuses = ['Active', 'Inactive']
    if status in valid_statuses:
        return status
    raise ValueError('Status needs to be one of %r' % valid_statuses)
| bsd-2-clause |
DucQuang1/youtube-dl | youtube_dl/extractor/theplatform.py | 29 | 8058 | from __future__ import unicode_literals
import re
import json
import time
import hmac
import binascii
import hashlib
from .common import InfoExtractor
from ..compat import (
compat_str,
)
from ..utils import (
determine_ext,
ExtractorError,
xpath_with_ns,
unsmuggle_url,
int_or_none,
)
# Helper: qualify an XPath expression with the SMIL 2.1 namespace used by
# thePlatform's SMIL metadata documents.
_x = lambda p: xpath_with_ns(p, {'smil': 'http://www.w3.org/2005/SMIL21/Language'})
class ThePlatformIE(InfoExtractor):
    """Extractor for videos hosted on thePlatform (link/player.theplatform.com
    URLs and 'theplatform:<id>' shortcuts), a backend used by several US
    broadcaster sites (metacafe, cnet, syfy, nbc per the tests below)."""
    _VALID_URL = r'''(?x)
        (?:https?://(?:link|player)\.theplatform\.com/[sp]/(?P<provider_id>[^/]+)/
           (?:(?P<media>(?:[^/]+/)+select/media/)|(?P<config>(?:[^/\?]+/(?:swf|config)|onsite)/select/))?
         |theplatform:)(?P<id>[^/\?&]+)'''

    _TESTS = [{
        # from http://www.metacafe.com/watch/cb-e9I_cZgTgIPd/blackberrys_big_bold_z30/
        'url': 'http://link.theplatform.com/s/dJ5BDC/e9I_cZgTgIPd/meta.smil?format=smil&Tracking=true&mbr=true',
        'info_dict': {
            'id': 'e9I_cZgTgIPd',
            'ext': 'flv',
            'title': 'Blackberry\'s big, bold Z30',
            'description': 'The Z30 is Blackberry\'s biggest, baddest mobile messaging device yet.',
            'duration': 247,
        },
        'params': {
            # rtmp download
            'skip_download': True,
        },
    }, {
        # from http://www.cnet.com/videos/tesla-model-s-a-second-step-towards-a-cleaner-motoring-future/
        'url': 'http://link.theplatform.com/s/kYEXFC/22d_qsQ6MIRT',
        'info_dict': {
            'id': '22d_qsQ6MIRT',
            'ext': 'flv',
            'description': 'md5:ac330c9258c04f9d7512cf26b9595409',
            'title': 'Tesla Model S: A second step towards a cleaner motoring future',
        },
        'params': {
            # rtmp download
            'skip_download': True,
        }
    }, {
        'url': 'https://player.theplatform.com/p/D6x-PC/pulse_preview/embed/select/media/yMBg9E8KFxZD',
        'info_dict': {
            'id': 'yMBg9E8KFxZD',
            'ext': 'mp4',
            'description': 'md5:644ad9188d655b742f942bf2e06b002d',
            'title': 'HIGHLIGHTS: USA bag first ever series Cup win',
        }
    }, {
        'url': 'http://player.theplatform.com/p/NnzsPC/widget/select/media/4Y0TlYUr_ZT7',
        'only_matching': True,
    }]

    @staticmethod
    def _sign_url(url, sig_key, sig_secret, life=600, include_qs=False):
        # Build thePlatform's URL signature: flags + hex expiry timestamp +
        # HMAC-SHA1 checksum of those over the relative path + hex-encoded
        # secret, appended as the 'sig' query argument.
        flags = '10' if include_qs else '00'
        expiration_date = '%x' % (int(time.time()) + life)

        def str_to_hex(str):
            return binascii.b2a_hex(str.encode('ascii')).decode('ascii')

        def hex_to_str(hex):
            return binascii.a2b_hex(hex)

        relative_path = url.split('http://link.theplatform.com/s/')[1].split('?')[0]
        clear_text = hex_to_str(flags + expiration_date + str_to_hex(relative_path))
        checksum = hmac.new(sig_key.encode('ascii'), clear_text, hashlib.sha1).hexdigest()
        sig = flags + expiration_date + checksum + str_to_hex(sig_secret)
        return '%s&sig=%s' % (url, sig)

    def _real_extract(self, url):
        url, smuggled_data = unsmuggle_url(url, {})

        mobj = re.match(self._VALID_URL, url)
        provider_id = mobj.group('provider_id')
        video_id = mobj.group('id')

        # 'theplatform:<id>' URLs carry no provider; fall back to a default.
        if not provider_id:
            provider_id = 'dJ5BDC'

        path = provider_id
        if mobj.group('media'):
            path += '/media'
        path += '/' + video_id

        # Resolve the SMIL metadata URL, either directly, via the player
        # config JSON, or from the canonical link.theplatform.com pattern.
        if smuggled_data.get('force_smil_url', False):
            smil_url = url
        elif mobj.group('config'):
            config_url = url + '&form=json'
            config_url = config_url.replace('swf/', 'config/')
            config_url = config_url.replace('onsite/', 'onsite/config/')
            config = self._download_json(config_url, video_id, 'Downloading config')
            smil_url = config['releaseUrl'] + '&format=SMIL&formats=MPEG4&manifest=f4m'
        else:
            smil_url = 'http://link.theplatform.com/s/%s/meta.smil?format=smil&mbr=true' % path

        sig = smuggled_data.get('sig')
        if sig:
            smil_url = self._sign_url(smil_url, sig['key'], sig['secret'])

        meta = self._download_xml(smil_url, video_id)
        # A 'Geographic Restriction' or 'Expired' <ref> in the SMIL marks an
        # unplayable item; surface its 'abstract' text as the error message.
        try:
            error_msg = next(
                n.attrib['abstract']
                for n in meta.findall(_x('.//smil:ref'))
                if n.attrib.get('title') == 'Geographic Restriction' or n.attrib.get('title') == 'Expired')
        except StopIteration:
            pass
        else:
            raise ExtractorError(error_msg, expected=True)

        info_url = 'http://link.theplatform.com/s/%s?format=preview' % path
        info_json = self._download_webpage(info_url, video_id)
        info = json.loads(info_json)

        subtitles = {}
        captions = info.get('captions')
        if isinstance(captions, list):
            for caption in captions:
                lang, src, mime = caption.get('lang', 'en'), caption.get('src'), caption.get('type')
                subtitles[lang] = [{
                    'ext': 'srt' if mime == 'text/srt' else 'ttml',
                    'url': src,
                }]

        head = meta.find(_x('smil:head'))
        body = meta.find(_x('smil:body'))

        # Prefer an Adobe HDS (f4m) manifest when the SMIL points at one;
        # otherwise collect the individual variants from the <switch> nodes.
        f4m_node = body.find(_x('smil:seq//smil:video'))
        if f4m_node is None:
            f4m_node = body.find(_x('smil:seq/smil:video'))
        if f4m_node is not None and '.f4m' in f4m_node.attrib['src']:
            f4m_url = f4m_node.attrib['src']
            if 'manifest.f4m?' not in f4m_url:
                f4m_url += '?'
            # the parameters are from syfy.com, other sites may use others,
            # they also work for nbc.com
            f4m_url += '&g=UXWGVKRWHFSP&hdcore=3.0.3'
            formats = self._extract_f4m_formats(f4m_url, video_id)
        else:
            formats = []
            switch = body.find(_x('smil:switch'))
            if switch is None:
                switch = body.find(_x('smil:par//smil:switch'))
            if switch is None:
                switch = body.find(_x('smil:par/smil:switch'))
            if switch is None:
                switch = body.find(_x('smil:par'))
            if switch is not None:
                # RTMP-style variants: base URL in <meta>, play path per node.
                base_url = head.find(_x('smil:meta')).attrib['base']
                for f in switch.findall(_x('smil:video')):
                    attr = f.attrib
                    width = int_or_none(attr.get('width'))
                    height = int_or_none(attr.get('height'))
                    vbr = int_or_none(attr.get('system-bitrate'), 1000)
                    # NOTE(review): width/height/vbr may be None when the
                    # attribute is absent, which would make this %d
                    # formatting raise — confirm the feed always sets them.
                    format_id = '%dx%d_%dk' % (width, height, vbr)
                    formats.append({
                        'format_id': format_id,
                        'url': base_url,
                        'play_path': 'mp4:' + attr['src'],
                        'ext': 'flv',
                        'width': width,
                        'height': height,
                        'vbr': vbr,
                    })
            else:
                switch = body.find(_x('smil:seq//smil:switch'))
                if switch is None:
                    switch = body.find(_x('smil:seq/smil:switch'))
                for f in switch.findall(_x('smil:video')):
                    attr = f.attrib
                    vbr = int_or_none(attr.get('system-bitrate'), 1000)
                    ext = determine_ext(attr['src'])
                    if ext == 'once':
                        ext = 'mp4'
                    formats.append({
                        'format_id': compat_str(vbr),
                        'url': attr['src'],
                        'vbr': vbr,
                        'ext': ext,
                    })
            self._sort_formats(formats)

        return {
            'id': video_id,
            'title': info['title'],
            'subtitles': subtitles,
            'formats': formats,
            'description': info['description'],
            'thumbnail': info['defaultThumbnailUrl'],
            'duration': int_or_none(info.get('duration'), 1000),
        }
| unlicense |
proxysh/Safejumper-for-Mac | buildmac/Resources/env/lib/python2.7/site-packages/pyasn1/codec/cer/encoder.py | 5 | 5924 | #
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://pyasn1.sf.net/license.html
#
from pyasn1.type import univ
from pyasn1.type import useful
from pyasn1.codec.ber import encoder
from pyasn1.compat.octets import int2oct, str2octs, null
from pyasn1 import error
class BooleanEncoder(encoder.IntegerEncoder):
    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
        # CER pins BOOLEAN to a single content octet: 0x00 for FALSE and
        # 0xFF for TRUE (BER would accept any non-zero octet).
        if client == 0:
            octet = 0
        else:
            octet = 255
        return int2oct(octet), 0
class BitStringEncoder(encoder.BitStringEncoder):
    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
        # CER mandates segmentation of long values into 1000-octet chunks,
        # so the caller-supplied maxChunkSize is overridden with 1000.
        return encoder.BitStringEncoder.encodeValue(
            self, encodeFun, client, defMode, 1000
        )
class OctetStringEncoder(encoder.OctetStringEncoder):
    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
        # Same as the BER encoder except for CER's fixed 1000-octet
        # segmentation chunk size.
        return encoder.OctetStringEncoder.encodeValue(
            self, encodeFun, client, defMode, 1000
        )
class RealEncoder(encoder.RealEncoder):
    def _chooseEncBase(self, value):
        # Always normalize through the base encoder's _dropFloatingPoint()
        # instead of letting it pick an encoding base heuristically.
        m, b, e = value
        return self._dropFloatingPoint(m, b, e)
# specialized GeneralStringEncoder here
class GeneralizedTimeEncoder(OctetStringEncoder):
    """CER encoder for GeneralizedTime.

    CER/DER constrain GeneralizedTime to a UTC value terminated by 'Z'
    with no '+hhmm'/'-hhmm' offset; the checks below enforce that before
    delegating to the segmenting octet-string encoder.
    """
    zchar = str2octs('Z')
    pluschar = str2octs('+')
    minuschar = str2octs('-')
    zero = str2octs('0')

    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
        octets = client.asOctets()
        # This breaks too many existing data items
        # if '.' not in octets:
        #     raise error.PyAsn1Error('Format must include fraction of second: %r' % octets)
        if len(octets) < 15:
            # Was 'Bad UTC time length: %r', which misnamed the type being
            # encoded; same PyAsn1Error type, clearer diagnostic.
            raise error.PyAsn1Error('Bad GeneralizedTime length: %r' % octets)
        if self.pluschar in octets or self.minuschar in octets:
            raise error.PyAsn1Error('Must be UTC time: %r' % octets)
        if octets[-1] != self.zchar[0]:
            raise error.PyAsn1Error('Missing timezone specifier: %r' % octets)
        return encoder.OctetStringEncoder.encodeValue(
            self, encodeFun, client, defMode, 1000
        )
class UTCTimeEncoder(encoder.OctetStringEncoder):
    # Byte constants compared against the encoded time string.
    zchar = str2octs('Z')
    pluschar = str2octs('+')
    minuschar = str2octs('-')

    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
        # CER restricts UTCTime to the 13-character 'YYMMDDHHMMSSZ' form:
        # no explicit +/- offsets, and a trailing 'Z' (appended here when
        # the value lacks one).
        octets = client.asOctets()
        if self.pluschar in octets or self.minuschar in octets:
            raise error.PyAsn1Error('Must be UTC time: %r' % octets)
        if octets and octets[-1] != self.zchar[0]:
            client = client.clone(octets + self.zchar)
        if len(client) != 13:
            raise error.PyAsn1Error('Bad UTC time length: %r' % client)
        return encoder.OctetStringEncoder.encodeValue(
            self, encodeFun, client, defMode, 1000
        )
class SetOfEncoder(encoder.SequenceOfEncoder):
    def encodeValue(self, encodeFun, client, defMode, maxChunkSize):
        # CER requires canonical component ordering: SET components sorted
        # by tag, SET OF components sorted by their encoded octets.
        if isinstance(client, univ.SequenceAndSetBase):
            client.setDefaultComponents()
        client.verifySizeSpec()
        substrate = null
        idx = len(client)
        # This is certainly a hack but how else do I distinguish SetOf
        # from Set if they have the same tags&constraints?
        if isinstance(client, univ.SequenceAndSetBase):
            # Set: drop absent optional components and components equal to
            # their defaults, then sort the remainder by tag.
            comps = []
            while idx > 0:
                idx -= 1
                if client[idx] is None:  # Optional component
                    continue
                if client.getDefaultComponentByPosition(idx) == client[idx]:
                    continue
                comps.append(client[idx])
            comps.sort(key=lambda x: isinstance(x, univ.Choice) and x.getMinTagSet() or x.getTagSet())
            for c in comps:
                substrate += encodeFun(c, defMode, maxChunkSize)
        else:
            # SetOf: encode every component, then sort the encodings
            # lexicographically.
            compSubs = []
            while idx > 0:
                idx -= 1
                compSubs.append(
                    encodeFun(client[idx], defMode, maxChunkSize)
                )
            compSubs.sort()  # perhaps padding's not needed
            substrate = null
            for compSub in compSubs:
                substrate += compSub
        return substrate, 1
# Encoder lookup tables: start from the BER maps and override only the
# types whose CER encodings differ (canonical BOOLEAN/REAL forms,
# 1000-octet segmentation, sorted SET/SET OF components).
tagMap = encoder.tagMap.copy()
tagMap.update({
    univ.Boolean.tagSet: BooleanEncoder(),
    univ.BitString.tagSet: BitStringEncoder(),
    univ.OctetString.tagSet: OctetStringEncoder(),
    univ.Real.tagSet: RealEncoder(),
    useful.GeneralizedTime.tagSet: GeneralizedTimeEncoder(),
    useful.UTCTime.tagSet: UTCTimeEncoder(),
    univ.SetOf().tagSet: SetOfEncoder()  # conflicts with Set
})
typeMap = encoder.typeMap.copy()
typeMap.update({
    univ.Set.typeId: SetOfEncoder(),
    univ.SetOf.typeId: SetOfEncoder()
})
class Encoder(encoder.Encoder):
    # Same machinery as the BER Encoder, but defaulting to indefinite
    # length form (defMode=False), which is what CER prescribes.
    def __call__(self, client, defMode=False, maxChunkSize=0):
        return encoder.Encoder.__call__(self, client, defMode, maxChunkSize)
#: Turns ASN.1 object into CER octet stream.
#:
#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
#: walks all its components recursively and produces a CER octet stream.
#:
#: Parameters
#: ----------
# value: any pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative)
#: A pyasn1 object to encode
#:
#: defMode: :py:class:`bool`
#: If `False`, produces indefinite length encoding
#:
#: maxChunkSize: :py:class:`int`
#: Maximum chunk size in chunked encoding mode (0 denotes unlimited chunk size)
#:
#: Returns
#: -------
#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2)
#: Given ASN.1 object encoded into BER octetstream
#:
#: Raises
#: ------
#: : :py:class:`pyasn1.error.PyAsn1Error`
#: On encoding errors
# Single module-level CER encoder callable, parameterized with this
# module's tag/type maps (documented by the block comment above).
encode = Encoder(tagMap, typeMap)
# EncoderFactory queries class instance and builds a map of tags -> encoders
| gpl-2.0 |
solazver/conteo | conteo/conteo/settings.py | 1 | 3000 | """
Django settings for conteo project.
Generated by 'django-admin startproject' using Django 1.8.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; in production it
# should be loaded from the environment (e.g. os.environ) instead.
SECRET_KEY = '(mtm))=g7p(!81w=g6ljtfuh8j&td3xt&g!a!r#+tpn%+lqggu'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Must list the served hostnames once DEBUG is False, or Django rejects
# every request with a 400.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
    # Django contrib apps.
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Project-local apps.
    'comun',
    'presupuesto',
    'contabilidad',
    'compras',
    'finanzas',
    'almacen',
    'bienes',
    'nomina',
    #desarrollo
    # NOTE(review): django_extensions is a development aid; consider
    # enabling it only in a dev-specific settings module.
    'django_extensions'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'conteo.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'conteo.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
    'default': {
        # Commented-out lines below are the stock sqlite3 configuration;
        # this project switched to PostgreSQL via psycopg2.
        #'ENGINE': 'django.db.backends.sqlite3',
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        #'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
        'NAME': 'conteo',
        # NOTE(review): credentials are hardcoded in source control; prefer
        # environment variables or a local settings override for deployment.
        'USER': 'ssole',
        'PASSWORD': 'ssole',
        'HOST': 'localhost',
        'PORT': '5432',
    }
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
| gpl-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.