prefix stringlengths 0 918k | middle stringlengths 0 812k | suffix stringlengths 0 962k |
|---|---|---|
import os
import shutil
import unittest
import pytest
from pyontutils import obo_io as oio
from .common import temp_path
obo_test_string = """format-version: 1.2
ontology: uberon/core
subsetdef: cumbo "CUMBO"
treat-xrefs-as-has-subclass: EV
import: http://purl.obolibrary.org/obo/uberon/chebi_import.owl
treat-xrefs-as-reverse-genus-differentia: TGMA part_of NCBITaxon:44484
[Term]
id: UBERON:0000003
xref: SCTID:272650008
relationship: in_lateral_side_of UBERON:0000033 {gci_relation="part_of", gci_filler="NCBITaxon:7776", notes="hagfish have median nostril"} ! head
!relationship: in_lateral_side_of UBERON:0000034 {gci_filler="NCBITaxon:7776", gci_relation="part_of", notes="hagfish have median nostril"} ! can't use this due to robot non-determinism
comment: robot does reorder the gci_ so that relation always comes before filler
property_value: external_definition "One of paired external openings of the nasal chamber.[AAO]" xsd:string {date_retrieved="2012-06-20", external_class="AAO:0000311", ontology="AAO", source="AAO:EJS"}
replaced_by: GO:0045202
consider: FMA:67408
[Term]
id: UBERON:0000033
name: head
comment: needed to prevent robot from throwing a null pointer on the relationship axiom above
[Term]
id: UBERON:0000034
[Typedef]
id: in_lateral_side_of
property_value: seeAlso FMA:86003
name: in_lateral_side_of
comment: id needed to prevent robot from throwing a null pointer on the relationship axiom above
comment: apparently also have to have name strangely enough and robot doesn't roundtrip random comments
is_transitive: true
"""
class TMHelper:
parse = oio.TVPair._parse_modifiers
serialize = oio.TVPair._format_trailing_modifiers
class TestOboIo(unittest.TestCase):
@classmethod
def setUpClass(cls):
if temp_path.exists():
shutil.rmtree(temp_path)
temp_path.mkdir()
@classmethod
def tearDownClass(cls):
shutil.rmtree(temp_path)
def test_parse_trailing_modifiers(self):
thm = TMHelper()
lines = (
(('relationship: part_of UBERON:0000949 '
'{source="AAO", source="FMA", source="XAO"} ! endocrine system'),
(('source', 'AAO'), ('source', 'FMA'), ('source', 'XAO'))),
('{oh="look", a="thing!"}', (('oh', 'look'), ('a', 'thing!'))),
('some randome values {oh="look", a="thing!"} ! yay!', (('oh', 'look'), ('a', 'thing!'))),
('some rando}me values {oh="l{ook", a="t{hing!"} ! yay!', (('oh', 'l{ook'), ('a', 't{hing!'))),
('some rando}me values {oh="l{ook", a="t}hing!"} ! yay!', (('oh', 'l{ook'), ('a', 't}hing!'))),
)
bads = [(expect, actual) for line, expect in lines
for _, actual in (thm.parse(line),)
if actual != expect]
assert not bads, '\n' + '\n\n'.join(f'{e}\n{a}' for e, a in bads)
def test_construct_simple_file(self):
of = oio.OboFile()
ids_names = [['123', 'test'],
['234', 'yee'],
['345', 'haw'],
['456', 'oio']]
terms = [oio.Term(id=i, name=n) for i, n in ids_names]
of.add(*terms)
str(of)
def test_header_treat_xrefs(self):
of = oio.OboFile()
test_tag = 'treat-xrefs-as-is_a'
tags_values = [
[test_tag, 'TEMP:test1'],
[test_tag, 'TEMP:test2'],
]
tvpairs = [oio.TVPair(tag=t, value=v) for t, v in tags_values]
of.header.add(*tvpairs)
tv = of.asObo()
assert len(tv.split(test_tag)) > 2, tv
def test_property_value_bug(self):
def _test(string):
pv = oio.Property_value.parse(string)
assert pv.value() == string
tv = oio.TVPair(string)
assert str(tv) == string
return pv, tv
minimal = ('property_value: any " ! " xsd:string')
pv, tv = _test(minimal)
darn = ('property_value: external_ontology_notes "see also MA:0002165 !'
' lieno-pancreatic vein" xsd:string {external_ontology="MA"}')
pv, tv = _test(darn)
ouch = ('property_value: editor_note "TODO -'
' this string breaks the parser A:0 ! wat" xsd:string')
pv, | tv = _test(ouch)
hrm = ('p | roperty_value: editor_note "TODO -'
' consider relationship to UBERON:0000091 ! bilaminar disc" xsd:string')
pv, tv = _test(hrm)
def test_robot(self):
of1 = oio.OboFile(data=obo_test_string)
obo1 = of1.asObo(stamp=False)
obor1 = of1.asObo(stamp=False, version=oio.OBO_VER_ROBOT)
of2 = oio.OboFile(data=obo1)
obo2 = of2.asObo(stamp=False)
# can't test against obor2 because obo1 reordered the trailing qualifiers
# and since there is seemingly no rational way to predict those, we simply
# preserve the ordering that we got
obor2 = of2.asObo(stamp=False, version=oio.OBO_VER_ROBOT)
of3 = oio.OboFile(data=obor1)
obo3 = of3.asObo(stamp=False)
obor3 = of3.asObo(stamp=False, version=oio.OBO_VER_ROBOT)
print(obo1)
print(obo2)
print(obor1)
print(obor2)
assert obo1 == obo2 == obo3 != obor1
assert obor1 == obor3
@pytest.mark.skipif(not shutil.which('robot'), reason='robot not installed')
def test_robot_rt(self):
of = oio.OboFile(data=obo_test_string)
obor1 = of.asObo(stamp=False, version=oio.OBO_VER_ROBOT)
rtp = temp_path / 'robot-test.obo'
robot_path = temp_path / 'robot-test.test.obo'
of.write(rtp, stamp=False, version=oio.OBO_VER_ROBOT)
cmd = f'robot convert -vvv -i {rtp.as_posix()} -o {robot_path.as_posix()}'
wat = os.system(cmd)
if wat:
raise ValueError(wat)
datas = []
for path in (rtp, robot_path):
with open(path, 'rt') as f:
datas.append(f.read())
ours, rob = datas
assert ours == rob
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Script for a testing an existing SDK.
This script is normally run immediately after build_sdk.py.
"""
import argparse
import os
import subprocess
import sys
import buildbot_common
import build_projects
import build_sdk
import build_version
import parse_dsc
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
SDK_SRC_DIR = os.path.dirname(SCRIPT_DIR)
SDK_LIBRARY_DIR = os.path.join(SDK_SRC_DIR, 'libraries')
SDK_DIR = os.path.dirname(SDK_SRC_DIR)
SRC_DIR = os.path.dirname(SDK_DIR)
OUT_DIR = os.path.join(SRC_DIR, 'out')
sys.path.append(os.path.join(SDK_SRC_DIR, 'tools'))
import getos
def StepBuildExamples(pepperdir):
for config in ('Debug', 'Release'):
build_sdk.BuildStepMakeAll(pepperdir, 'getting_started',
'Build Getting Started (%s)' % config,
deps=False, config=config)
build_sdk.BuildStepMakeAll(pepperdir, 'examples',
'Build Examples (%s)' % config,
deps=False, config=config)
def StepCopyTests(pepperdir, toolchains, build_experimental):
buildbot_common.BuildStep('Copy Tests')
# Update test libraries and test apps
filters = {
'DEST': ['tests']
}
if not build_experimental:
filters['EXPERIMENTAL'] = False
tree = parse_dsc.LoadProjectTree(SDK_SRC_DIR, include=filters)
build_projects.UpdateHelpers(pepperdir, clobber=False)
build_projects.UpdateProjects(pepperdir, tree, clobber=False,
toolchains=toolchains)
def StepBuildLibraries(pepperdir, sanitizer):
for config in ('Debug', 'Release'):
title = 'Build Libs (%s)[sanitizer=%s]' % (config, sanitizer)
build_sdk.BuildStepMakeAll(pepperdir, 'src', title, config=config,
args=GetSanitizerArgs(sanitizer))
def StepBuildTests(pepperdir, sanitizer):
for config in ('Debug', 'Release'):
title = 'Build Tests (%s)' % config
if sanitizer:
title += '[sanitizer=%s]' % sanitizer
build_sdk.BuildStepMakeAll(pepperdir, 'tests', title, deps=False,
config=config, args=GetSanitizerArgs(sanitizer))
def GetSanitizerArgs(sanitizer):
if sanitizer == 'valgrind':
return ['TOOLCHAIN=linux', 'RUN_UNDER=valgrind']
elif sanitizer == 'address':
return ['TOOLCHAIN=linux', 'ASAN=1']
elif sanitizer == 'thread':
return ['TOOLCHAIN=linux', 'TSAN=1']
return []
def StepRunSelLdrTests(pepperdir, sanitizer):
filters = {
'SEL_LDR': True
}
tree = parse_dsc.LoadProjectTree(SDK_SRC_DIR, include=filters)
def RunTest(test, toolchain, config, arch=None):
args = ['STANDALONE=1', 'TOOLCHAIN=%s' % toolchain]
args += GetSanitizerArgs(sanitizer)
if arch is not None:
args.append('NACL_ARCH=%s' % arch)
build_projects.BuildProjectsBranch(pepperdir, test, clean=False,
deps=False, config=config,
args=args + ['run'])
if getos.GetPlatform() == 'win':
# On win32 we only support running on the system
# arch
archs = (getos.GetSystemArch('win'),)
elif getos.GetPlatform() == 'mac':
# We only ship 32-bit version of sel_ldr on mac.
archs = ('x86_32',)
else:
# On linux we can run both 32 and 64-bit, and arm (via qemu)
archs = ('x86_64', 'x86_32', 'arm')
for root, projects in tree.iteritems():
for project in projects:
if sanitizer:
sanitizer_name = '[sanitizer=%s]' % sanitizer
else:
sanitizer_name = ''
title = 'standalone test%s: %s' % (sanitizer_name,
os.path.basename(project['NAME']))
location = os.path.join(root, project['NAME'])
buildbot_common.BuildStep(title)
configs = ('Debug', 'Release')
# On linux we can run the standalone tests natively using the host
# compiler.
if getos.GetPlatform() == 'linux':
if sanitizer:
configs = ('Debug',)
for config in configs:
RunTest(location, 'linux', config)
if sanitizer:
continue
for toolchain in ('clang-newlib', 'glibc', 'pnacl'):
for arch in archs:
for config in configs:
RunTest(location, toolchain, config, arch)
def StepRunBrowserTests(toolchains, experimental):
buildbot_common.BuildStep('Run Tests')
args = [
sys.executable,
os.path.join(SCRIPT_DIR, 'test_projects.py'),
'--retry-times=3',
]
if experimental:
args.append('-x')
for toolchain in toolchains:
args.extend(['-t', toolchain])
try:
subprocess.check_call(args)
except subprocess.CalledProcessError:
buildbot_common.ErrorExit('Error running tests.')
def main(args):
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('--experimental', help='build experimental tests',
action='store_true')
parser.add_argument('--sanitizer',
help='Run sanitizer (asan/tsan/valgrind) tests',
action='store_true')
parser.add_argument('--verbose', '-v', help='Verbose output',
action='store_true')
parser.add_argument('phases', nargs="*")
if 'NACL_SDK_ROOT' in os.environ:
# We don't want the currently configured NACL_SDK_ROOT to have any effect
# of the build.
del os.environ['NACL_SDK_ROOT']
# To setup bash completion for this command first install optcomplete
# and then add this line to your .bashrc:
# complete -F _optcomplete test_sdk.py
try:
import optcomplete
optcomplete.autocomplete(parser)
except ImportError:
pass
options = parser.parse_args(args)
pepper_ver = str(int(build_version.ChromeMajorVersion()))
pepperdir = os.path.join(OUT_DIR, 'pepper_' + pepper_ver)
toolchains = ['clang-newlib', 'glibc', 'pnacl']
toolchains.append(getos.GetPlatform())
if options.verbose:
build_projects.verbose = True
phases = [
('build_examples', StepBuildExamples, pepperdir),
('copy_tests', StepCopyTests, pepperdir, toolchains, options.experimental),
('build_tests', StepBuildTests, pepperdir, None),
]
if options.sanitizer:
if getos.GetPlatform() != 'linux':
buildbot_common.ErrorExit('sanitizer tests only run on linux.')
clang_dir = os.path.join(SRC_DIR, 'third_party', 'llvm-build',
'Release+Asserts', 'bin')
os.environ['PATH'] = clang_dir + os.pathsep + os.environ['PATH']
phases += [
('build_libs_asan', StepBuildLibraries, pepperdir, 'address'),
('build_libs_tsan', StepBuildLibraries, pepperdir, 'thread'),
('build_tests_asan', StepBuildTests, pepperdir, 'address'),
('build_tests_tsan', StepBuildTests, pepperdir, 'thread'),
('sel_ldr_tests_asan', StepRunSelLdrTests, pepperdir, 'address'),
('sel_ldr_tests_tsan', StepRunSelLdrTests, pepperdir, 'thread'),
# TODO(sbc): get valgrind installed on the bots to enable this
# configuration
#('sel_ldr_tests_valgrind', StepRunSelLdrTests, pepperdir, 'valgrind')
]
else:
phases += [
('sel_ldr_tests', StepRunSelLdrTests, pepperdir, None),
('browser_tests', StepRunBrowserTests, toolchains, options.experimental),
| ]
if options.phases:
phase_names = [p[0] for p in phases]
for arg in options.phases:
if arg not in phase_names:
msg = 'Invalid argument: %s\n' % arg
msg += 'Possible | arguments:\n'
for name in phase_names:
msg += ' %s\n' % name
parser.error(msg.strip())
for phase in phases:
phase_name = phase[0]
if options.phases and phase_name not in options.phases:
continue
phase_func = phase[1]
phase_args = phase[2:]
phase_func(*phase_args)
return 0
if __name__ == '__main__':
try:
sys.exit(main(sys.argv[1:]))
except KeyboardInterrupt:
buildbot_common.ErrorExit('test_sdk: interrupted')
|
class | dstat_plugin(dstat):
def __init__(self):
self.name = 'snooze'
self.vars = ('snooze',)
self.type = 's'
self.width = 6
self.scale = 0
self.before = time.time()
def extract(self):
now = time.time()
if loop != 0:
self.val['snooze'] = now - self.before
else:
| self.val['snooze'] = self.before
if step == op.delay:
self.before = now
def show(self):
if self.val['snooze'] > step + 1:
return ansi['default'] + ' -'
color = 'white'
if step != op.delay:
color = 'gray'
snoze, c = fchg(self.val['snooze'], 6, 1000)
return ansi[color] + snoze
|
ltithread: if set to True, lock send method.
skip_utf8_validation: skip utf8 validation.
"""
def __init__(self, get_mask_key=None, sockopt=None, sslopt=None,
fire_cont_frame=False, enable_multithread=False,
skip_utf8_validation=False):
"""
Initalize WebSocket object.
"""
self.sock_opt = sock_opt(sockopt, sslopt)
self.handshake_response = None
self.sock = None
self.connected = False
self.get_mask_key = get_mask_key
# These buffer over the build-up of a single frame.
self.frame_buffer = frame_buffer(self._recv, skip_utf8_validation)
self.cont_frame = continuous_frame(fire_cont_frame, skip_utf8_validation)
if enable_multithread:
self.lock = threading.Lock()
else:
self.lock = NoLock()
def __iter__(self):
"""
Allow iteration over websocket, implying sequential `recv` executions.
"""
while True:
yield self.recv()
def __next__(self):
return self.recv()
def next(self):
return self.__next__()
def fileno(self):
return self.sock.fileno()
def set_mask_key(self, func):
"""
set function to create musk key. You can custumize mask key generator.
Mainly, this is for testing purpose.
func: callable object. the fuct mu | st 1 argument as integer.
The argument means length of mask key.
This func must be return string(byte array),
which length is argument specified.
| """
self.get_mask_key = func
def gettimeout(self):
"""
Get the websocket timeout(second).
"""
return self.sock_opt.timeout
def settimeout(self, timeout):
"""
Set the timeout to the websocket.
timeout: timeout time(second).
"""
self.sock_opt.timeout = timeout
if self.sock:
self.sock.settimeout(timeout)
timeout = property(gettimeout, settimeout)
def getsubprotocol(self):
"""
get subprotocol
"""
if self.handshake_response:
return self.handshake_response.subprotocol
else:
return None
subprotocol = property(getsubprotocol)
def getstatus(self):
"""
get handshake status
"""
if self.handshake_response:
return self.handshake_response.status
else:
return None
status = property(getstatus)
def getheaders(self):
"""
get handshake response header
"""
if self.handshake_response:
return self.handshake_response.headers
else:
return None
headers = property(getheaders)
def connect(self, url, **options):
"""
Connect to url. url is websocket url scheme.
ie. ws://host:port/resource
You can customize using 'options'.
If you set "header" list object, you can set your own custom header.
>>> ws = WebSocket()
>>> ws.connect("ws://echo.websocket.org/",
... header=["User-Agent: MyProgram",
... "x-custom: header"])
timeout: socket timeout time. This value is integer.
if you set None for this value,
it means "use default_timeout value"
options: "header" -> custom http header list or dict.
"cookie" -> cookie value.
"origin" -> custom origin url.
"host" -> custom host header string.
"http_proxy_host" - http proxy host name.
"http_proxy_port" - http proxy port. If not set, set to 80.
"http_no_proxy" - host names, which doesn't use proxy.
"http_proxy_auth" - http proxy auth infomation.
tuple of username and password.
defualt is None
"subprotocols" - array of available sub protocols.
default is None.
"""
self.sock, addrs = connect(url, self.sock_opt, proxy_info(**options))
try:
self.handshake_response = handshake(self.sock, *addrs, **options)
self.connected = True
except:
if self.sock:
self.sock.close()
self.sock = None
raise
def send(self, payload, opcode=ABNF.OPCODE_TEXT):
"""
Send the data as string.
payload: Payload must be utf-8 string or unicode,
if the opcode is OPCODE_TEXT.
Otherwise, it must be string(byte array)
opcode: operation code to send. Please see OPCODE_XXX.
"""
frame = ABNF.create_frame(payload, opcode)
return self.send_frame(frame)
def send_frame(self, frame):
"""
Send the data frame.
frame: frame data created by ABNF.create_frame
>>> ws = create_connection("ws://echo.websocket.org/")
>>> frame = ABNF.create_frame("Hello", ABNF.OPCODE_TEXT)
>>> ws.send_frame(frame)
>>> cont_frame = ABNF.create_frame("My name is ", ABNF.OPCODE_CONT, 0)
>>> ws.send_frame(frame)
>>> cont_frame = ABNF.create_frame("Foo Bar", ABNF.OPCODE_CONT, 1)
>>> ws.send_frame(frame)
"""
if self.get_mask_key:
frame.get_mask_key = self.get_mask_key
data = frame.format()
length = len(data)
trace("send: " + repr(data))
with self.lock:
while data:
l = self._send(data)
data = data[l:]
return length
def send_binary(self, payload):
return self.send(payload, ABNF.OPCODE_BINARY)
def ping(self, payload=""):
"""
send ping data.
payload: data payload to send server.
"""
if isinstance(payload, six.text_type):
payload = payload.encode("utf-8")
self.send(payload, ABNF.OPCODE_PING)
def pong(self, payload):
"""
send pong data.
payload: data payload to send server.
"""
if isinstance(payload, six.text_type):
payload = payload.encode("utf-8")
self.send(payload, ABNF.OPCODE_PONG)
def recv(self):
"""
Receive string data(byte array) from the server.
return value: string(byte array) value.
"""
opcode, data = self.recv_data()
if six.PY3 and opcode == ABNF.OPCODE_TEXT:
return data.decode("utf-8")
elif opcode == ABNF.OPCODE_TEXT or opcode == ABNF.OPCODE_BINARY:
return data
else:
return ''
def recv_data(self, control_frame=False):
"""
Recieve data with operation code.
control_frame: a boolean flag indicating whether to return control frame
data, defaults to False
return value: tuple of operation code and string(byte array) value.
"""
opcode, frame = self.recv_data_frame(control_frame)
return opcode, frame.data
def recv_data_frame(self, control_frame=False):
"""
Recieve data with operation code.
control_frame: a boolean flag indicating whether to return control frame
data, defaults to False
return value: tuple of operation code and string(byte array) value.
"""
while True:
frame = self.recv_frame()
if not frame:
# handle error:
# 'NoneType' object has no attribute 'opcode'
raise WebSocketProtocolException("Not a valid frame %s" % frame)
elif frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY, ABNF.OPCODE_CONT):
self.cont_frame.validate(frame)
self.cont_frame.add(frame)
if self.cont_frame.is_fire(frame):
return self.cont_frame.extract(frame)
elif frame.opcode == ABNF.OPCODE_CLOSE:
self.send_close()
re |
import cherrypy
from cherrypy.test import helper
script_names = ["", "/path/to/myapp"]
class ProxyTest(helper.CPWebCase):
def setup_server():
# Set up site
cherrypy.config.update({
'tools.proxy.on': True,
'tools.proxy.base': 'www.mydomain.test',
})
# Set up application
class Root:
def __init__(self, sn):
# Calculate a URL outside of any requests.
self.thisnewpage = cherrypy.url(
"/this/new/page", script_name=sn)
def pageurl(self):
return self.thisnewpage
pageurl.exposed = True
def index(self):
raise cherrypy.HTTPRedirect('dummy')
index.exposed = True
def remoteip(self):
return cherrypy.request.remote.ip
remoteip.exposed = True
def xhost(self):
raise cherrypy.HTTPRedirect('blah')
xhost.exposed = True
xhost._cp_config = {'tools.proxy.local': 'X-Host',
'tools.trailing_slash.extra': True,
}
def base(self):
return cherrypy.request.base
base.exposed = True
def ssl(self):
return cherrypy.request.base
ssl.exposed = True
ssl._cp_config = {'tools.proxy.scheme': 'X-Forwarded-Ssl'}
def newurl(self):
return ("Browse to <a href='%s'>this page</a>."
% cherrypy.url("/this/new/page"))
newurl.exposed = True
for sn in script_names:
cherrypy.tree.mount(Root(sn), sn)
setup_server = staticmethod(setup_server)
def testProxy(self):
self.getPage("/")
self.assertHeader('Location',
"%s://www.mydomain.test%s/dummy" %
(self.scheme, self.prefix()))
# Test X-Forwarded-Host (Apache 1.3.33+ and Apache 2)
self.getPage(
"/", headers=[('X-Forwarded-Host', 'http://www.example.test')])
self.assertHeader('Location', "http://www.example.test/dummy")
self.getPage("/", headers=[('X-Forwarded-Host', 'www.example.test')])
self.assertHeader('Location', "%s://www.example.test/dummy" %
self.scheme)
# Test multiple X-Forwarded-Host headers
self.getPage("/", headers=[
('X-Forwarded-Host', 'http://www.example.test, www.cherrypy.test'),
])
self.assertHeader('Location', "http://www.example.test/dummy")
# Test X-Forwarded-For (Apache2)
self.getPage("/remoteip",
headers=[('X-Forwarded-For', '192.168.0.20')])
self.assertBody("192.168.0.20")
#Fix bug #1268
self.getPage("/remoteip",
headers=[
('X-Forwarded-For', '67.15.36.43, 192.168.0.20')
])
self.assertBody("67.15.36.43")
# Test X-Host (lighttpd; see https://trac.lighttpd.net/trac/ticket/418)
self.getPage("/xhost", headers=[('X-Host', 'www.example.test')])
self.assertHeader('Location', "%s://www.example.test/blah" %
self.scheme)
# Test X-Forwarded-Proto (lighttpd)
self.getPage("/base", headers=[('X-Forwarded-Proto', 'https')])
self.assertBody("https://www.mydomain.test")
# Test X-Forwarded-Ssl (webfaction?)
self.getPage("/ssl", headers=[('X-Forwarded-Ssl', 'on')])
self.assertBody("https://www.mydomain.test")
# Test cherrypy.url()
for sn in script_names:
# Test the value inside requests
self.getPage(sn + "/newurl")
self.assertBody(
"Browse to <a href='%s://www.mydomain.test" % self.scheme
+ sn + "/this/new/page'>this page</a>.")
self.getPage(sn + "/newurl", headers=[('X-Forwarded-Host',
'http://www.example.test')])
self.assertBody("Browse to <a href='http://www.example.test"
+ sn + "/this/new/page'>this page</a>.")
# Test the value outside requests
port = ""
if self.scheme == "http" and self.PORT != 80:
port = ":%s" % self.PORT
elif self.scheme == "https" and self.PORT != 443:
port = ":%s" % self.PORT
host = self.HOST
if host in ('0.0.0.0', '::'):
import socket
host = socket.gethostname()
expected = ("%s://%s%s%s/this/new/page"
% (self.scheme, host, port, sn))
self.getPage(sn + "/pageurl")
self.assertBody(expected)
# Test trailing slash (see
# https://github.com/cherrypy/cher | rypy/issues/562).
self.getPage("/xhost/", headers=[('X-Host', 'www.example.test')])
self.assertHeader('Location', "%s://www.example.test/x | host"
% self.scheme)
|
import os
import sys
import django
def main():
"""
Standalone django model test with a 'memory-only-django-installation'.
You can play with a django model without a complete django app installation.
http://www.djangosnippets.org/snippets/1044/
"""
sys.path.append(os.path.abspath(os.path.dirname(__file__)))
os.environ["DJANGO_SETTINGS_MODULE"] = "django.conf.global_settings"
from django.conf import global_settings
global_settings.INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.sites',
'django.contr | ib.contenttypes',
'websettings',
)
global_settings.DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
global_settings.MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
| 'django.contrib.auth.middleware.AuthenticationMiddleware',
)
global_settings.SECRET_KEY = "secret_key_for_testing"
global_settings.ROOT_URLCONF = "websettings.urls"
global_settings.WEBSETTINGS_MODULE = 'websettings.tests.settingstore'
from django.test.utils import get_runner
test_runner = get_runner(global_settings)
test_runner = test_runner()
failures = test_runner.run_tests(['websettings'])
sys.exit(failures)
if __name__ == '__main__':
main()
|
#! /usr/bin/env python
# --*-- coding:utf-8 --*--
import os
import sys
sys.path.append(os.path.split(os.path.split(os.path.abspath(sys.path[0]))[0])[0])
from CountMan.monitor.util import *
from CountMan.monitor.setting import *
class Queryer(object):
def __init__(self):
self.dao = DatabaseInterface()
self.dataSet = dict()
self.logger = getLogger('root')
de | f getData(self):
for queryKey in QUERYPARAM:
self.dataSet[queryKey] = getResponse(QUERYPARAM.get(queryKey))
@property
def set2db(self):
self.getData()
self.logger.info('get query data: {0} success'.format(self.dataSet))
self.dao.insertC | ollection(self.dataSet)
if __name__ == '__main__':
q = Queryer()
if ISDEBUG:
import cProfile
cProfile.run("q.set2db")
else:
q.set2db
|
# Copyright (c) 2015, Lars Tingelstad
# All rights reserved.
#
# All righ | ts reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright n | otice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of pyversor nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Operations on directions in 3D conformal geometric algebra."""
from __pyversor__.c3d.directions import (
DirectionVector, DirectionBivector, DirectionTrivector)
|
from django.conf.urls import include, url
fro | m . import views
urlpatterns = [
url(r'^$', views.post_list), #URL para lista de todos los | post
url(r'^post/(?P<pk>[0-9]+)/$', views.post_detail), #URL para ver los detalles del post
url(r'^post/new/$', views.post_new, name='post_new'), #URL para crear un nuevo post sin el panel de admin
url(r'^post/(?P<pk>[0-9]+)/edit/$', views.post_edit, name='post_edit'), #URL para editar los post sin el panel de admin
] |
# SPDX-License-Identifier: Apache-2.0
# Copyright Contributors to the Rez Project
'''
Bundle a context and its packages into a relocatable dir.
'''
from __future__ import print_function
import os
import os.path
import sys
def setup_parser(parser, completions=False):
group = parser.add_mutually_exclusive_group()
group.add_argument(
"-s", "--skip-non-relocatable", action="store_true",
help="leave non-relocatable packages non-bun | dled, rather than raise an error")
group.add_argument(
"-f", "--force", action="store_true",
help="bundle package even if it isn't relocatable (use at your own risk)")
group.add_argument(
"-n", "--no-lib-patch", action="store_true",
help="don't apply library patching within the bundle")
parser.add_argument(
"RXT",
help="context to bundle")
parser.add_argument(
"DEST_DIR",
help="directory to create bundle in; must not exist | ")
def command(opts, parser, extra_arg_groups=None):
from rez.utils.logging_ import print_error
from rez.bundle_context import bundle_context
from rez.resolved_context import ResolvedContext
rxt_filepath = os.path.abspath(os.path.expanduser(opts.RXT))
dest_dir = os.path.abspath(os.path.expanduser(opts.DEST_DIR))
# sanity checks
if not os.path.exists(rxt_filepath):
print_error("File does not exist: %s", rxt_filepath)
sys.exit(1)
context = ResolvedContext.load(rxt_filepath)
bundle_context(
context=context,
dest_dir=dest_dir,
force=opts.force,
skip_non_relocatable=opts.skip_non_relocatable,
verbose=opts.verbose,
patch_libs=(not opts.no_lib_patch)
)
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at you | r option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICUL | AR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from report import report_sxw
from common_report_header import common_report_header
#
# Use period and Journal for selection or resources
#
class journal_print(report_sxw.rml_parse, common_report_header):
def __init__(self, cr, uid, name, context=None):
if context is None:
context = {}
super(journal_print, self).__init__(cr, uid, name, context=context)
self.period_ids = []
self.journal_ids = []
self.localcontext.update({
'time': time,
'lines': self.lines,
'sum_debit': self._sum_debit,
'sum_credit': self._sum_credit,
'get_filter': self._get_filter,
'get_fiscalyear': self._get_fiscalyear,
'get_account': self._get_account,
'get_start_period': self.get_start_period,
'get_end_period': self.get_end_period,
'get_sortby': self._get_sortby,
'get_start_date':self._get_start_date,
'get_end_date':self._get_end_date,
'display_currency':self._display_currency,
'get_target_move': self._get_target_move,
})
def set_context(self, objects, data, ids, report_type=None):
obj_move = self.pool.get('account.move.line')
new_ids = ids
self.query_get_clause = ''
self.target_move = data['form'].get('target_move', 'all')
if (data['model'] == 'ir.ui.menu'):
new_ids = 'active_ids' in data['form'] and data['form']['active_ids'] or []
self.query_get_clause = 'AND '
self.query_get_clause += obj_move._query_get(self.cr, self.uid, obj='l', context=data['form'].get('used_context', {}))
objects = self.pool.get('account.journal.period').browse(self.cr, self.uid, new_ids)
if new_ids:
self.cr.execute('SELECT period_id, journal_id FROM account_journal_period WHERE id IN %s', (tuple(new_ids),))
res = self.cr.fetchall()
self.period_ids, self.journal_ids = zip(*res)
return super(journal_print, self).set_context(objects, data, ids, report_type=report_type)
def lines(self, period_id, journal_id):
move_state = ['draft','posted']
if self.target_move == 'posted':
move_state = ['posted']
self.cr.execute('SELECT a.currency_id, a.code, a.name, c.symbol AS currency_code, l.currency_id, l.amount_currency, SUM(debit) AS debit, SUM(credit) AS credit \
from account_move_line l \
LEFT JOIN account_move am ON (l.move_id=am.id) \
LEFT JOIN account_account a ON (l.account_id=a.id) \
LEFT JOIN res_currency c on (l.currency_id=c.id) WHERE am.state IN %s AND l.period_id=%s AND l.journal_id=%s '+self.query_get_clause+' GROUP BY a.id, a.code, a.name,l.amount_currency,c.symbol, a.currency_id,l.currency_id', (tuple(move_state), period_id, journal_id))
return self.cr.dictfetchall()
def _set_get_account_currency_code(self, account_id):
self.cr.execute("SELECT c.symbol as code "\
"FROM res_currency c,account_account as ac "\
"WHERE ac.id = %s AND ac.currency_id = c.id"%(account_id))
result = self.cr.fetchone()
if result:
self.account_currency = result[0]
else:
self.account_currency = False
def _get_account(self, data):
if data['model'] == 'account.journal.period':
return self.pool.get('account.journal.period').browse(self.cr, self.uid, data['id']).company_id.name
return super(journal_print,self)._get_account(data)
def _get_fiscalyear(self, data):
    """Return the fiscal year name for journal-period reports; defer to parent otherwise."""
    if data['model'] != 'account.journal.period':
        return super(journal_print, self)._get_fiscalyear(data)
    journal_period = self.pool.get('account.journal.period').browse(
        self.cr, self.uid, data['id'])
    return journal_period.fiscalyear_id.name
def _display_currency(self, data):
if data['model'] == 'account.journal.period':
return True
return data['form']['amount_currency']
# Register this parser with OpenERP's report engine: service name
# 'report.account.central.journal' on model account.journal.period, rendered
# from the RML template with the internal header.
report_sxw.report_sxw('report.account.central.journal', 'account.journal.period', 'addons/account/report/account_central_journal.rml', parser=journal_print, header='internal')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
from io import StringIO
import re
import httpretty
from django.core.management import call_command
from oppia.test import OppiaTestCase
from settings import constants
from settings.models import SettingProperties
from tests.utils import get_file_contents
class CartoDBUpdateTest(OppiaTestCase):
    """Tests for the ``cartodb_update`` management command."""

    fixtures = ['tests/test_user.json',
                'tests/test_oppia.json',
                'tests/test_quiz.json',
                'tests/test_permissions.json',
                'default_badges.json',
                'tests/test_course_permissions.json',
                'tests/test_viz.json']

    cartodb_valid_response = './oppia/fixtures/tests/cartodb/200_valid.json'
    cartodb_uri_regex = re.compile(
        "https://[A-Za-z0-9-]+.cartodb.com/api/v2/sql??(?:&?[^=&]*=[^=&]*)*")

    def _mock_cartodb_api(self):
        """Intercept CartoDB SQL API calls with the canned 200 response."""
        httpretty.register_uri(httpretty.GET,
                               self.cartodb_uri_regex,
                               body=get_file_contents(self.cartodb_valid_response))

    @httpretty.activate
    def test_cartodb_output(self):
        """Command runs cleanly when account/key/hostname are configured."""
        self._mock_cartodb_api()
        SettingProperties.set_string(constants.OPPIA_CARTODB_ACCOUNT,
                                     "account")
        SettingProperties.set_string(constants.OPPIA_CARTODB_KEY,
                                     "FAKE_APIKEY")
        SettingProperties.set_string(constants.OPPIA_HOSTNAME, "localhost")
        call_command('cartodb_update', stdout=StringIO())

    @httpretty.activate
    def test_cartodb_no_key_account(self):
        """Command still runs cleanly when the settings are unset."""
        self._mock_cartodb_api()
        SettingProperties.set_string(constants.OPPIA_CARTODB_ACCOUNT, None)
        SettingProperties.set_string(constants.OPPIA_CARTODB_KEY, None)
        SettingProperties.set_string(constants.OPPIA_HOSTNAME, None)
        call_command('cartodb_update', stdout=StringIO())
|
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2011 Nick Hall
# Copyright (C) 2011 Tim G L Lyons
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
#
from gramps.gui.listmodel import ListModel, NOSORT
from gramps.gen.utils.db import navigation_label
from gramps.gen.plug import Gramplet
from gramps.gen.ggettext import gettext as _
from gi.repository import Gtk
class Backlinks(Gramplet):
    """
    Displays the back references for an object.
    """
    def init(self):
        self.gui.WIDGET = self.build_gui()
        container = self.gui.get_container_widget()
        # Swap the default text view for our tree view.
        container.remove(self.gui.textview)
        container.add_with_viewport(self.gui.WIDGET)
        self.gui.WIDGET.show()

    def build_gui(self):
        """
        Build the GUI interface.
        """
        view = Gtk.TreeView()
        columns = [(_('Type'), 1, 100),
                   (_('Name'), 2, 100)]
        self.model = ListModel(view, columns)
        return view

    def display_backlinks(self, active_handle):
        """
        Fill the model with every object that refers to *active_handle*.
        """
        backlinks = self.dbstate.db.find_backlink_handles(active_handle)
        for classname, handle in backlinks:
            label = navigation_label(self.dbstate.db, classname, handle)[0]
            self.model.add((_(classname), label))
        self.set_has_data(self.model.count > 0)

    def get_has_data(self, active_handle):
        """
        Return True if the gramplet has data, else return False.
        """
        if active_handle is None:
            return False
        # A single backlink is enough; stop at the first one.
        links = iter(self.dbstate.db.find_backlink_handles(active_handle))
        return next(links, None) is not None
class PersonBacklinks(Backlinks):
    """
    Displays the back references for a person.
    """
    def db_changed(self):
        self.dbstate.db.connect('person-update', self.update)

    def active_changed(self, handle):
        self.update()

    def update_has_data(self):
        self.set_has_data(self.get_has_data(self.get_active('Person')))

    def main(self):
        handle = self.get_active('Person')
        self.model.clear()
        if not handle:
            self.set_has_data(False)
            return
        self.display_backlinks(handle)
class EventBacklinks(Backlinks):
    """
    Displays the back references for an event.
    """
    def db_changed(self):
        self.dbstate.db.connect('event-update', self.update)
        self.connect_signal('Event', self.update)

    def update_has_data(self):
        self.set_has_data(self.get_has_data(self.get_active('Event')))

    def main(self):
        handle = self.get_active('Event')
        self.model.clear()
        if not handle:
            self.set_has_data(False)
            return
        self.display_backlinks(handle)
class FamilyBacklinks(Backlinks):
    """
    Displays the back references for a family.
    """
    def db_changed(self):
        self.dbstate.db.connect('family-update', self.update)
        self.connect_signal('Family', self.update)

    def update_has_data(self):
        self.set_has_data(self.get_has_data(self.get_active('Family')))

    def main(self):
        handle = self.get_active('Family')
        self.model.clear()
        if not handle:
            self.set_has_data(False)
            return
        self.display_backlinks(handle)
class PlaceBacklinks(Backlinks):
    """
    Displays the back references for a place.
    """
    def db_changed(self):
        self.dbstate.db.connect('place-update', self.update)
        self.connect_signal('Place', self.update)

    def update_has_data(self):
        self.set_has_data(self.get_has_data(self.get_active('Place')))

    def main(self):
        handle = self.get_active('Place')
        self.model.clear()
        if not handle:
            self.set_has_data(False)
            return
        self.display_backlinks(handle)
class SourceBacklinks(Backlinks):
    """
    Displays the back references for a source.
    """
    def db_changed(self):
        self.dbstate.db.connect('source-update', self.update)
        self.connect_signal('Source', self.update)

    def update_has_data(self):
        self.set_has_data(self.get_has_data(self.get_active('Source')))

    def main(self):
        handle = self.get_active('Source')
        self.model.clear()
        if not handle:
            self.set_has_data(False)
            return
        self.display_backlinks(handle)
class CitationBacklinks(Backlinks):
    """
    Displays the back references for a citation.
    """
    def db_changed(self):
        self.dbstate.db.connect('citation-update', self.update)
        self.connect_signal('Citation', self.update)

    def update_has_data(self):
        self.set_has_data(self.get_has_data(self.get_active('Citation')))

    def main(self):
        handle = self.get_active('Citation')
        self.model.clear()
        if not handle:
            self.set_has_data(False)
            return
        self.display_backlinks(handle)
class RepositoryBacklinks(Backlinks):
    """
    Displays the back references for a repository.
    """
    def db_changed(self):
        self.dbstate.db.connect('repository-update', self.update)
        self.connect_signal('Repository', self.update)

    def update_has_data(self):
        self.set_has_data(self.get_has_data(self.get_active('Repository')))

    def main(self):
        handle = self.get_active('Repository')
        self.model.clear()
        if not handle:
            self.set_has_data(False)
            return
        self.display_backlinks(handle)
class MediaBacklinks(Backlinks):
    """
    Displays the back references for a media object.
    """
    def db_changed(self):
        self.dbstate.db.connect('media-update', self.update)
        self.connect_signal('Media', self.update)

    def update_has_data(self):
        self.set_has_data(self.get_has_data(self.get_active('Media')))

    def main(self):
        handle = self.get_active('Media')
        self.model.clear()
        if not handle:
            self.set_has_data(False)
            return
        self.display_backlinks(handle)
class NoteBacklinks(Backlinks):
    """
    Displays the back references for a note.
    """
    def db_changed(self):
        self.dbstate.db.connect('note-update', self.update)
        self.connect_signal('Note', self.update)

    def update_has_data(self):
        self.set_has_data(self.get_has_data(self.get_active('Note')))

    def main(self):
        handle = self.get_active('Note')
        self.model.clear()
        if not handle:
            self.set_has_data(False)
            return
        self.display_backlinks(handle)
|
"""
Mapping functions
"""
# pylint: disable=invalid-name, missing-docstring, no-member
from __future__ import print_function
import pandas
from mpl_toolkits.basemap import Basemap
def create_map(lon, lat):
    """
    Create a map projection.

    Returns a transverse-Mercator Basemap centred on the first point of the
    given longitude/latitude sequences.
    """
    return Basemap(
        lon_0=lon[0],
        lat_0=lat[0],
        projection='tmerc',
        width=1e-5,
        height=1e-5)
def project_lat_lon(df):
    # Build a local transverse-Mercator projection centred on the first GPS fix.
    # NOTE(review): width=11e-5 here vs width=1e-5 in create_map above --
    # possibly a typo; confirm the intended extent.
    gps_map = Basemap(lat_0=df.GPS_Lat.values[0],
                      lon_0=df.GPS_Lon.values[0],
                      width=11e-5, height=1e-5, projection='tmerc')
    # NOTE(review): calling a Basemap instance returns (x, y); unpacking into
    # (gps_y, gps_x) swaps the axes -- confirm this is intentional.
    gps_y, gps_x = gps_map(df.GPS_Lon.values, df.GPS_Lat.values)
    # Altitude relative to the first fix.
    gps_z = df.GPS_Alt - df.GPS_Alt.values[0]
    df_new = pandas.DataFrame(pandas.DataFrame({
        'GPS_X': gps_x, 'GPS_Y': gps_y, 'GPS_Z': gps_z}, index=df.index))
    # Append the projected columns to the original frame.
    return pandas.concat([df, df_new], axis=1)
# vim: set et fenc= ff=unix sts=0 sw=4 ts=4 :
|
# My files
from handlers import MainPage
from handlers import WelcomePage
from handlers import SignUpPage
from handlers import SignIn
from handlers import SignOut
from handlers import NewPost
from handlers import EditPost
from handlers import DeletePost
from handlers import SinglePost
from handlers import LikePost
from handlers import DislikePost
from handlers import EditComment
from handlers import DeleteComment
import webapp2
# URL routing table for the blog application. webapp2 matches routes top to
# bottom; regex groups such as ([0-9]+) are passed to the handler as
# positional arguments (the post id).
app = webapp2.WSGIApplication([
    ('/', MainPage),
    ('/signup', SignUpPage),
    ('/welcome', WelcomePage),
    ('/post/([0-9]+)', SinglePost),
    ('/new-post', NewPost),
    ('/edit-post/([0-9]+)', EditPost),
    ('/delete-post', DeletePost),
    ('/like-post', LikePost),
    ('/dislike-post', DislikePost),
    ('/edit-comment', EditComment),
    ('/delete-comment', DeleteComment),
    ('/login', SignIn),
    ('/logout', SignOut)
], debug=True)  # debug=True surfaces stack traces in responses
|
# -*- coding: utf-8 -*-
from __future__ import unic | ode_literals, absolute_import, division
from alchemist import management
from flask import Flask
class TestManager:
    """Tests for alchemist's management.Manager command discovery."""

    def setup(self):
        app = Flask('alchemist')
        app.config['COMPONENTS'] = ['alchemist']
        self.app = app

    def test_discover_commands(self):
        """Should discover commands from registered components.
        """
        manager = management.Manager(self.app)
        assert 'run' in manager._commands
|
#!/bin/env python
# -*- coding: utf-8; -*-
#
# (c) 2016 FABtotum, http://www.fabtotum.com
#
# This file is | part of FABUI.
#
# FABUI is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# FABUI is distributed in the hope that it wi | ll be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with FABUI. If not, see <http://www.gnu.org/licenses/>.
# Import standard python module
# Import external modules
# Import internal modules
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from rcr.robots.dagucar.DaguCar import DaguCar
def main():
    """Exercise every DaguCar motion command at speed 15, pausing 1s between each."""
    car = DaguCar( "/dev/rfcomm1", 500 )
    # Same sequence as before: each motion followed by a 1000 ms pause.
    motions = ("MoveForward", "MoveBackward", "MoveLeft", "MoveRight",
               "MoveForwardLeft", "MoveForwardRight",
               "MoveBackwardLeft", "MoveBackwardRight")
    for motion in motions:
        getattr(car, motion)(15)
        car.Pause(1000)
    car.Stop()
    car.Close()
###
# Guard the entry point so importing this module does not immediately drive
# the car over the serial port; the routine only runs as a script.
if __name__ == "__main__":
    main()
|
imp | ort sys
from mailpile.app import Mai | n
def main():
    """Start the Mailpile application, forwarding command-line args (sans argv[0])."""
    Main(sys.argv[1:])

if __name__ == "__main__":
    main()
|
import os
from tito.builder import UpstreamBuilder
from tito.common import debug, run_command
class DistributionBuilder(UpstreamBuilder):
    """ This class is used for building packages for distributions.
    Parent class UpstreamBuilder build one big patch from upstream and create e.g.:
    Patch0: foo-1.2.13-1-to-foo-1.2.13-3-sat.patch
    This class create one patch per each release. E.g.:
    Patch0: foo-1.2.13-1-to-foo-1.2.13-2-sat.patch
    Patch1: foo-1.2.13-2-to-foo-1.2.13-3-sat.patch
    """
    def __init__(self, name=None, version=None, tag=None, build_dir=None,
            pkg_config=None, global_config=None, user_config=None, options=None):
        UpstreamBuilder.__init__(self, name, version, tag, build_dir, pkg_config,
                global_config, user_config, options)
        # One patch file name per intermediate release; filled by patch_upstream().
        self.patch_files = []

    def patch_upstream(self):
        """ Create one patch per each release """
        # Work from the project directory inside the git checkout.
        os.chdir(os.path.join(self.git_root, self.relative_project_dir))
        debug("Running /usr/bin/generate-patches.pl -d %s %s %s-1 %s %s" \
                % (self.rpmbuild_gitcopy, self.project_name, self.upstream_version, self.build_version, self.git_commit_id))
        output = run_command("/usr/bin/generate-patches.pl -d %s %s %s-1 %s %s" \
                % (self.rpmbuild_gitcopy, self.project_name, self.upstream_version, self.build_version, self.git_commit_id))
        # NOTE(review): if the script's output ends with a newline, split("\n")
        # leaves a trailing empty entry -- confirm generate-patches.pl output.
        self.patch_files = output.split("\n")
        # Copy each generated patch into the rpmbuild SOURCES dir.
        for p_file in self.patch_files:
            run_command("cp %s/%s %s" % (self.rpmbuild_gitcopy, p_file, self.rpmbuild_sourcedir))
        (patch_number, patch_insert_index, patch_apply_index, lines) = self._patch_upstream()
        for patch in self.patch_files:
            lines.insert(patch_insert_index, "Patch%s: %s\n" % (patch_number, patch))
            lines.insert(patch_apply_index, "%%patch%s -p1\n" % (patch_number))
            # Each insert shifts later indices: the PatchN: insert moves both
            # subsequent insertion points, hence +1 and +2 respectively.
            patch_number += 1
            patch_insert_index += 1
            patch_apply_index += 2
        self._write_spec(lines)
|
# -*- coding: UTF-8 -*-
from django.conf import settings as dsettings
from django.contrib.syndication.views import Feed, FeedDoesNotExist
from django.core.urlresolvers import reverse
from microblog import models
from microblog impo | rt settings
import os.path
class FeedsDict(dict):
    """
    Custom dict that raises FeedDoesNotExist instead of KeyError,
    as Django's feed framework expects.
    """
    def __getitem__(self, key):
        try:
            return super(FeedsDict, self).__getitem__(key)
        except KeyError:
            # Translate into the exception the syndication framework handles.
            raise FeedDoesNotExist()
languages = FeedsDict((l, l) for l, n in ds | ettings.LANGUAGES)
languages[None] = settings.MICROBLOG_DEFAULT_LANGUAGE
class LatestPosts(Feed):
    """Feed of the ten most recent published posts in one language."""

    def get_object(self, request, lang_code=None):
        """Resolve the feed's language; unknown codes raise FeedDoesNotExist."""
        return languages[lang_code]

    def link(self, obj):
        """Absolute URL of the feed page."""
        try:
            path = reverse('microblog-feeds-latest')
        except Exception:
            # FIX: was a bare ``except:``, which also swallowed SystemExit and
            # KeyboardInterrupt. The fallback covers URL configs whose route
            # requires an explicit language code.
            path = reverse('microblog-feeds-latest', kwargs={'lang_code': obj})
        return os.path.join(dsettings.DEFAULT_URL_PREFIX, path)

    title = settings.MICROBLOG_TITLE
    description = settings.MICROBLOG_DESCRIPTION
    description_template = 'microblog/feeds/item_description.html'
    author_name = settings.MICROBLOG_AUTHOR_NAME
    author_email = settings.MICROBLOG_AUTHOR_EMAIL
    author_link = settings.MICROBLOG_AUTHOR_LINK

    def items(self, obj):
        """Latest 10 published, non-empty post contents for the language."""
        return models.PostContent.objects\
            .all()\
            .filter(language=obj, post__status='P')\
            .exclude(headline='')\
            .select_related('post', 'post__author')\
            .order_by('-post__date')[:10]

    def item_title(self, item):
        return item.headline

    def item_description(self, item):
        return item.body

    def item_pubdate(self, item):
        return item.post.date

    def item_categories(self, item):
        return [ x.name for x in item.post.tags.all()]

    def item_author_name(self, item):
        user = item.post.author
        return '%s %s' % (user.first_name, user.last_name)
|
"""Test module for API."""
from django.contrib.auth.models import User
from django.core.files.uploadedfile import SimpleUploadedFile
from django.urls import reverse
import factory
from imager_images.models import Album
from imager_images.models import Photo
from imagersite.settings import MEDIA_ROOT
import os
from rest_framework.test import APITestCase
class UserFactory(factory.django.DjangoModelFactory):
    """Setting up users for tests."""

    class Meta(object):
        """Meta."""
        model = User

    # Sequential usernames/emails (user0, user1, ...) keep instances unique.
    username = factory.Sequence(lambda n: "user{}".format(n))
    email = factory.Sequence(
        lambda n: "user{}@example.com".format(n)
    )
class PhotoFactory(factory.django.DjangoModelFactory):
    """Create photos for testing."""

    class Meta(object):
        """Meta."""
        model = Photo

    title = factory.Sequence(lambda n: "photo{}".format(n))
    # NOTE: the test image is read once at class-definition (import) time;
    # every generated Photo shares this same uploaded file content.
    image = SimpleUploadedFile(
        name="testing.png",
        content=open(MEDIA_ROOT + '/test/testing.png', 'rb').read(),
        content_type="image/png"
    )
class AlbumFactory(factory.django.DjangoModelFactory):
    """Create albums for testing."""

    class Meta(object):
        """Meta."""
        model = Album

    # Sequential titles (album0, album1, ...) keep instances unique.
    title = factory.Sequence(lambda n: "album{}".format(n))
class ApiTests(APITestCase):
    """Tests for the Api."""

    def setUp(self):
        """Create a user owning 20 photos gathered into one album."""
        user = UserFactory.create()
        user.set_password('caaarlos')
        user.save()
        self.user = user
        photos = [PhotoFactory.create(profile=user.profile) for _ in range(20)]
        album = AlbumFactory.build()
        album.profile = user.profile
        album.save()
        for photo in photos:
            album.photos.add(photo)
        album.cover_photo = photos[0]
        album.save()

    def tearDown(self):
        """Remove test image files written under MEDIA_ROOT."""
        import glob
        # FIX: was ``os.system('rm -rf ' + to_delete)`` -- shelling out is
        # unportable and risky with unquoted paths; delete matches in-process.
        pattern = os.path.join(MEDIA_ROOT, 'photos', 'testing*.png')
        for path in glob.glob(pattern):
            os.remove(path)

    def test_get_route_status_200(self):
        """Status 200."""
        response = self.client.get(reverse('api'))
        self.assertEqual(response.status_code, 200)

    def test_get_route_sends_photos(self):
        """Sends Json Photos."""
        response = self.client.get(reverse('api'))
        self.assertEqual(
            len(response.json()),
            Photo.objects.count()
        )

    def test_get_route_photos_have_meta_info(self):
        """Meta info on photos from api."""
        response = self.client.get(reverse('api'))
        image_meta = response.json()[0]
        expected_keys = ('title', 'description', 'profile', 'image',
                         'date_uploaded', 'date_modified', 'date_published',
                         'published')
        for key in expected_keys:
            self.assertTrue(key in image_meta)
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.monkey}.
"""
from __future__ import division, absolute_import
from twisted.trial import unittest
from twisted.python.monkey import MonkeyPatcher
class TestObj:
    """Simple value holder used as a monkey-patching target."""
    def __init__(self):
        # Three independent attributes for the tests to patch and restore.
        for attr in ('foo', 'bar', 'baz'):
            setattr(self, attr, '%s value' % attr)
class MonkeyPatcherTests(unittest.SynchronousTestCase):
    """
    Tests for L{MonkeyPatcher} monkey-patching class.
    """
    def setUp(self):
        # A fresh patch target plus an untouched twin used as the reference
        # for "unchanged"/"restored" assertions.
        self.testObject = TestObj()
        self.originalObject = TestObj()
        self.monkeyPatcher = MonkeyPatcher()

    def test_empty(self):
        """
        A monkey patcher without patches shouldn't change a thing.
        """
        self.monkeyPatcher.patch()
        # We can't assert that all state is unchanged, but at least we can
        # check our test object.
        self.assertEqual(self.originalObject.foo, self.testObject.foo)
        self.assertEqual(self.originalObject.bar, self.testObject.bar)
        self.assertEqual(self.originalObject.baz, self.testObject.baz)

    def test_constructWithPatches(self):
        """
        Constructing a L{MonkeyPatcher} with patches should add all of the
        given patches to the patch list.
        """
        patcher = MonkeyPatcher((self.testObject, 'foo', 'haha'),
                                (self.testObject, 'bar', 'hehe'))
        patcher.patch()
        self.assertEqual('haha', self.testObject.foo)
        self.assertEqual('hehe', self.testObject.bar)
        self.assertEqual(self.originalObject.baz, self.testObject.baz)

    def test_patchExisting(self):
        """
        Patching an attribute that exists sets it to the value defined in the
        patch.
        """
        self.monkeyPatcher.addPatch(self.testObject, 'foo', 'haha')
        self.monkeyPatcher.patch()
        self.assertEqual(self.testObject.foo, 'haha')

    def test_patchNonExisting(self):
        """
        Patching a non-existing attribute fails with an C{AttributeError}.
        """
        self.monkeyPatcher.addPatch(self.testObject, 'nowhere',
                                    'blow up please')
        self.assertRaises(AttributeError, self.monkeyPatcher.patch)

    def test_patchAlreadyPatched(self):
        """
        Adding a patch for an object and attribute that already have a patch
        overrides the existing patch.
        """
        self.monkeyPatcher.addPatch(self.testObject, 'foo', 'blah')
        self.monkeyPatcher.addPatch(self.testObject, 'foo', 'BLAH')
        self.monkeyPatcher.patch()
        self.assertEqual(self.testObject.foo, 'BLAH')
        # restore() must bring back the pre-patch value, not 'blah'.
        self.monkeyPatcher.restore()
        self.assertEqual(self.testObject.foo, self.originalObject.foo)

    def test_restoreTwiceIsANoOp(self):
        """
        Restoring an already-restored monkey patch is a no-op.
        """
        self.monkeyPatcher.addPatch(self.testObject, 'foo', 'blah')
        self.monkeyPatcher.patch()
        self.monkeyPatcher.restore()
        self.assertEqual(self.testObject.foo, self.originalObject.foo)
        self.monkeyPatcher.restore()
        self.assertEqual(self.testObject.foo, self.originalObject.foo)

    def test_runWithPatchesDecoration(self):
        """
        runWithPatches should run the given callable, passing in all arguments
        and keyword arguments, and return the return value of the callable.
        """
        log = []

        def f(a, b, c=None):
            log.append((a, b, c))
            return 'foo'

        result = self.monkeyPatcher.runWithPatches(f, 1, 2, c=10)
        self.assertEqual('foo', result)
        self.assertEqual([(1, 2, 10)], log)

    def test_repeatedRunWithPatches(self):
        """
        We should be able to call the same function with runWithPatches more
        than once. All patches should apply for each call.
        """
        def f():
            return (self.testObject.foo, self.testObject.bar,
                    self.testObject.baz)

        self.monkeyPatcher.addPatch(self.testObject, 'foo', 'haha')
        result = self.monkeyPatcher.runWithPatches(f)
        self.assertEqual(
            ('haha', self.originalObject.bar, self.originalObject.baz), result)
        result = self.monkeyPatcher.runWithPatches(f)
        self.assertEqual(
            ('haha', self.originalObject.bar, self.originalObject.baz),
            result)

    def test_runWithPatchesRestores(self):
        """
        C{runWithPatches} should restore the original values after the function
        has executed.
        """
        self.monkeyPatcher.addPatch(self.testObject, 'foo', 'haha')
        self.assertEqual(self.originalObject.foo, self.testObject.foo)
        self.monkeyPatcher.runWithPatches(lambda: None)
        self.assertEqual(self.originalObject.foo, self.testObject.foo)

    def test_runWithPatchesRestoresOnException(self):
        """
        Test runWithPatches restores the original values even when the function
        raises an exception.
        """
        def _():
            # The patches are active while the callable runs...
            self.assertEqual(self.testObject.foo, 'haha')
            self.assertEqual(self.testObject.bar, 'blahblah')
            raise RuntimeError("Something went wrong!")

        self.monkeyPatcher.addPatch(self.testObject, 'foo', 'haha')
        self.monkeyPatcher.addPatch(self.testObject, 'bar', 'blahblah')
        self.assertRaises(RuntimeError, self.monkeyPatcher.runWithPatches, _)
        # ...and restored afterwards despite the exception.
        self.assertEqual(self.testObject.foo, self.originalObject.foo)
        self.assertEqual(self.testObject.bar, self.originalObject.bar)
|
from eisoil.core.exception import CoreException
class ScheduleException(CoreException):
    """Base class for schedule-related errors."""
    def __init__(self, desc):
        # NOTE(review): CoreException.__init__ is deliberately not invoked,
        # matching the original behavior.
        self._desc = desc

    def __str__(self):
        return "Schedule: {0}".format(self._desc)
class ScheduleOverbookingError(ScheduleException):
    """Raised when a reservation collides with existing ones."""
    def __init__(self, schedule_subject, resource_id, start_time, end_time):
        """All parameters should be strings or be able to str(...) itself."""
        message = (
            "There are already reservations for %s during [%s - %s] in the %s schedule."
            % (str(resource_id), str(start_time), str(end_time),
               str(schedule_subject)))
        super(ScheduleOverbookingError, self).__init__(message)
class ScheduleNoSuchReservationError(ScheduleException):
    """Raised when a reservation id cannot be found."""
    def __init__(self, reservation_id):
        message = "Could not find reservation with id %d." % (reservation_id)
        super(ScheduleNoSuchReservationError, self).__init__(message)
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# (c) 2018 Luca 'remix_tj' Lorenzetto
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the f | ollowing disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of condit | ions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Common Ansible module arguments shared by EMC VNX modules: the storage
# processor address plus its credentials (both default to 'sysadmin').
emc_vnx_argument_spec = {
    'sp_address': dict(type='str', required=True),
    'sp_user': dict(type='str', required=False, default='sysadmin'),
    # no_log keeps the password out of Ansible's logs/output
    'sp_password': dict(type='str', required=False, default='sysadmin',
                        no_log=True),
}
|
from __future__ import absolute_import
import weakref
import threading
import asyncore
import socket
from walky.objects import *
from walky.port import *
from walky.engine import *
class Client(object):
    """Walky RPC client: owns an Engine and one connection to a server.

    Factory classes (engine/port/object) can be overridden per-instance via
    keyword settings.
    """

    engine = None
    settings = None
    connection = None
    port = None

    engine_class = Engine
    # FIX: __init__ reads self.port_class for its setdefault, but the
    # original class never defined the attribute, so constructing a Client
    # raised AttributeError. Default to Port (from walky.port), mirroring
    # engine_class/object_class.
    port_class = Port
    object_class = ObjectStub

    def __init__( self,
                  **settings ):
        settings.setdefault('engine_class',self.engine_class)
        settings.setdefault('port_class',self.port_class)
        settings.setdefault('object_class',self.object_class)
        self.port = settings.get('port')
        self.settings = settings
        self.reset()

    def reset(self):
        """Shut down any existing engine and build a fresh one."""
        if self.engine: self.engine.shutdown()
        self.engine = self.settings['engine_class']()

    def connect(self,*args,**kwargs):
        """ Start the engine and the asyncore
        """
        self.engine.start()
        self.connection = self.engine.connection_new(*args,**kwargs)

    def run(self):
        # Placeholder: subclasses drive the event loop.
        pass

    def on_readline(self,line):
        # Placeholder hook for incoming lines; currently a no-op.
        try:
            pass
        except Exception as ex:
            pass

    def sendline(self,line):
        # NOTE(review): self.port is called before use -- it appears to hold
        # a weakref/factory for the port object; confirm against walky.port.
        self.port().sendline(line)

    def object_get(self,reg_obj_id):
        """Return a stub proxy for the remote object id on this connection."""
        return self.object_class(self.connection,reg_obj_id)

    def close(self):
        """Shut down the engine (and with it, the connection)."""
        self.engine.shutdown()
|
# -*- coding: utf-8 -*-
import os
import codecs
import random
# Folder that contains the tweet files.
tweets_folder = os.path.join("D:", os.sep, "Documents", "PycharmProjects",
                             "easy_group_classifier", "text_files")
# Create the folder on first run.
if not os.path.isdir(tweets_folder):
    os.makedirs(tweets_folder)
# Name of the file with clean tweets to scramble.
filename = "technology"
tweets_file = os.path.join(tweets_folder, "%s.txt" % filename)
shuffled_file = os.path.join(tweets_folder, "%s_shuffled.txt" % filename)
# Read every tweet (one per line), shuffle in place, write back out.
with codecs.open(tweets_file, "rb", encoding="utf-8") as f:
    tweet_list = [line.strip() for line in f]
random.shuffle(tweet_list)
with codecs.open(shuffled_file, "wb", encoding="utf-8") as f:
    f.writelines("%s\n" % tweet for tweet in tweet_list)
|
# generated from catkin/cmake/template/cfg-extras. | context.py.in
DEVELSPACE = 'FALSE' == 'TRUE'
INSTALLSPACE = 'TRUE' == 'TRUE'
CATKIN_DEVEL_PREFIX = '/home/pi/Documents/desenvolvimentoRos/de | vel'
CATKIN_GLOBAL_BIN_DESTINATION = 'bin'
CATKIN_GLOBAL_ETC_DESTINATION = 'etc'
CATKIN_GLOBAL_INCLUDE_DESTINATION = 'include'
CATKIN_GLOBAL_LIB_DESTINATION = 'lib'
CATKIN_GLOBAL_LIBEXEC_DESTINATION = 'lib'
CATKIN_GLOBAL_PYTHON_DESTINATION = 'lib/python2.7/dist-packages'
CATKIN_GLOBAL_SHARE_DESTINATION = 'share'
CATKIN_PACKAGE_BIN_DESTINATION = 'lib/rosserial_arduino'
CATKIN_PACKAGE_ETC_DESTINATION = 'etc/rosserial_arduino'
CATKIN_PACKAGE_INCLUDE_DESTINATION = 'include/rosserial_arduino'
CATKIN_PACKAGE_LIB_DESTINATION = 'lib'
CATKIN_PACKAGE_LIBEXEC_DESTINATION = ''
CATKIN_PACKAGE_PYTHON_DESTINATION = 'lib/python2.7/dist-packages/rosserial_arduino'
CATKIN_PACKAGE_SHARE_DESTINATION = 'share/rosserial_arduino'
CMAKE_BINARY_DIR = '/home/pi/Documents/desenvolvimentoRos/build'
CMAKE_CURRENT_BINARY_DIR = '/home/pi/Documents/desenvolvimentoRos/build/rosserial/rosserial_arduino'
CMAKE_CURRENT_SOURCE_DIR = '/home/pi/Documents/desenvolvimentoRos/src/rosserial/rosserial_arduino'
CMAKE_INSTALL_PREFIX = '/home/pi/Documents/desenvolvimentoRos/install'
CMAKE_SOURCE_DIR = '/home/pi/Documents/desenvolvimentoRos/src'
PKG_CMAKE_DIR = '${rosserial_arduino_DIR}'
PROJECT_NAME = 'rosserial_arduino'
PROJECT_BINARY_DIR = '/home/pi/Documents/desenvolvimentoRos/build/rosserial/rosserial_arduino'
PROJECT_SOURCE_DIR = '/home/pi/Documents/desenvolvimentoRos/src/rosserial/rosserial_arduino'
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
###
# Copyright (2016-2019) Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
import pytest
from mock import mock
from hpe_test_utils import OneViewBaseFactsTest
from oneview_module_loader import EthernetNetworkFactsModule
# Shared fixtures for the EthernetNetworkFactsModule tests below.
ERROR_MSG = 'Fake message error'

# Parameter sets mirroring the Ansible module's three invocation shapes.
PARAMS_GET_ALL = dict(
    config='config.json',
    name=None
)
PARAMS_GET_BY_NAME = dict(
    config='config.json',
    name="Test Ethernet Network",
    options=[]
)
PARAMS_GET_BY_NAME_WITH_OPTIONS = dict(
    config='config.json',
    name="Test Ethernet Network",
    options=['associatedProfiles', 'associatedUplinkGroups']
)

# Canned OneView resource data returned by the mocked client.
PRESENT_ENETS = [{
    "name": "Test Ethernet Network",
    "uri": "/rest/ethernet-networks/d34dcf5e-0d8e-441c-b00d-e1dd6a067188"
}]
ENET_ASSOCIATED_UPLINK_GROUP_URIS = [
    "/rest/uplink-sets/c6bf9af9-48e7-4236-b08a-77684dc258a5",
    "/rest/uplink-sets/e2f0031b-52bd-4223-9ac1-d91cb519d548"
]
ENET_ASSOCIATED_PROFILE_URIS = [
    "/rest/server-profiles/83e2e117-59dc-4e33-9f24-462af951cbbe",
    "/rest/server-profiles/57d3af2a-b6d2-4446-8645-f38dd808ea4d"
]
# Expanded records the module is expected to resolve the URIs into.
ENET_ASSOCIATED_UPLINK_GROUPS = [dict(uri=ENET_ASSOCIATED_UPLINK_GROUP_URIS[0], name='Uplink Set 1'),
                                 dict(uri=ENET_ASSOCIATED_UPLINK_GROUP_URIS[1], name='Uplink Set 2')]
ENET_ASSOCIATED_PROFILES = [dict(uri=ENET_ASSOCIATED_PROFILE_URIS[0], name='Server Profile 1'),
                            dict(uri=ENET_ASSOCIATED_PROFILE_URIS[1], name='Server Profile 2')]
@pytest.mark.resource(TestEthernetNetworkFactsModule='ethernet_networks')
class TestEthernetNetworkFactsModule(OneViewBaseFactsTest):
    """Unit tests for EthernetNetworkFactsModule fact gathering."""

    def test_should_get_all_enets(self):
        """Without a name, every network from get_all() is returned."""
        self.resource.get_all.return_value = PRESENT_ENETS
        self.mock_ansible_module.params = PARAMS_GET_ALL
        EthernetNetworkFactsModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts=dict(ethernet_networks=PRESENT_ENETS))

    def test_should_get_enet_by_name(self):
        """A name restricts the facts to the matching network."""
        self.resource.data = PRESENT_ENETS
        self.mock_ansible_module.params = PARAMS_GET_BY_NAME
        EthernetNetworkFactsModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts=dict(ethernet_networks=PRESENT_ENETS))

    def test_should_get_enet_by_name_with_options(self):
        """Options add associated profiles and uplink groups to the facts."""
        self.resource.data = PRESENT_ENETS
        self.resource.get_associated_profiles.return_value = ENET_ASSOCIATED_PROFILE_URIS
        self.resource.get_associated_uplink_groups.return_value = ENET_ASSOCIATED_UPLINK_GROUP_URIS
        # Each URI lookup yields a mock whose .data is the associated record.
        self.mock_ov_client.server_profiles.get_by_uri.side_effect = [
            mock.Mock(data=record) for record in ENET_ASSOCIATED_PROFILES]
        self.mock_ov_client.uplink_sets.get_by_uri.side_effect = [
            mock.Mock(data=record) for record in ENET_ASSOCIATED_UPLINK_GROUPS]
        self.mock_ansible_module.params = PARAMS_GET_BY_NAME_WITH_OPTIONS
        EthernetNetworkFactsModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts=dict(ethernet_networks=PRESENT_ENETS,
                               enet_associated_profiles=ENET_ASSOCIATED_PROFILES,
                               enet_associated_uplink_groups=ENET_ASSOCIATED_UPLINK_GROUPS))
# Allow running this test module directly; delegate collection to pytest.
if __name__ == '__main__':
    pytest.main([__file__])
|
f Equals(self, x):
if x is self: return 1
if self.has_service_call_name_ != x.has_service_call_name_: return 0
if self.has_service_call_name_ and self.service_call_name_ != x.service_call_name_: return 0
if self.has_total_amount_of_calls_ != x.has_total_amount_of_calls_: return 0
if self.has_total_amount_of_calls_ and self.total_amount_of_calls_ != x.total_amount_of_calls_: return 0
if self.has_total_cost_of_calls_microdollars_ != x.has_total_cost_of_calls_microdollars_: return 0
if self.has_total_cost_of_calls_microdollars_ and self.total_cost_of_calls_microdollars_ != x.total_cost_of_calls_microdollars_: return 0
if len(self.total_billed_ops_) != len(x.total_billed_ops_): return 0
for e1, e2 in zip(self.total_billed_ops_, x.total_billed_ops_):
if e1 != e2: return 0
return 1
  def IsInitialized(self, debug_strs=None):
    # Protobuf validity check: both required fields must be present and every
    # repeated total_billed_ops sub-message must itself be initialized.
    # Appends a human-readable reason to debug_strs (if given) per violation.
    initialized = 1
    if (not self.has_service_call_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: service_call_name not set.')
    if (not self.has_total_amount_of_calls_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: total_amount_of_calls not set.')
    for p in self.total_billed_ops_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    # Encoded size assuming all required fields are set; the trailing +2
    # covers the one-byte tags of the two required fields.
    n = 0
    n += self.lengthString(len(self.service_call_name_))
    n += self.lengthVarInt64(self.total_amount_of_calls_)
    if (self.has_total_cost_of_calls_microdollars_): n += 1 + self.lengthVarInt64(self.total_cost_of_calls_microdollars_)
    n += 1 * len(self.total_billed_ops_)
    for i in xrange(len(self.total_billed_ops_)): n += self.lengthString(self.total_billed_ops_[i].ByteSize())
    return n + 2
  def ByteSizePartial(self):
    # Like ByteSize, but tolerates missing required fields: each field only
    # contributes (tag byte + payload) when it is actually present.
    n = 0
    if (self.has_service_call_name_):
      n += 1
      n += self.lengthString(len(self.service_call_name_))
    if (self.has_total_amount_of_calls_):
      n += 1
      n += self.lengthVarInt64(self.total_amount_of_calls_)
    if (self.has_total_cost_of_calls_microdollars_): n += 1 + self.lengthVarInt64(self.total_cost_of_calls_microdollars_)
    n += 1 * len(self.total_billed_ops_)
    for i in xrange(len(self.total_billed_ops_)): n += self.lengthString(self.total_billed_ops_[i].ByteSizePartial())
    return n
  def Clear(self):
    # Reset every field (and its has_ flag) to the default state.
    self.clear_service_call_name()
    self.clear_total_amount_of_calls()
    self.clear_total_cost_of_calls_microdollars()
    self.clear_total_billed_ops()
  def OutputUnchecked(self, out):
    # Serialize without presence checks. Wire tags: 10 = field 1 (string),
    # 24 = field 3 (varint), 32 = field 4 (varint), 42 = field 5 (message).
    out.putVarInt32(10)
    out.putPrefixedString(self.service_call_name_)
    out.putVarInt32(24)
    out.putVarInt64(self.total_amount_of_calls_)
    if (self.has_total_cost_of_calls_microdollars_):
      out.putVarInt32(32)
      out.putVarInt64(self.total_cost_of_calls_microdollars_)
    for i in xrange(len(self.total_billed_ops_)):
      out.putVarInt32(42)
      out.putVarInt32(self.total_billed_ops_[i].ByteSize())
      self.total_billed_ops_[i].OutputUnchecked(out)
  def OutputPartial(self, out):
    # Serialize only the fields that are present (safe for partially
    # initialized messages); tag numbers match OutputUnchecked.
    if (self.has_service_call_name_):
      out.putVarInt32(10)
      out.putPrefixedString(self.service_call_name_)
    if (self.has_total_amount_of_calls_):
      out.putVarInt32(24)
      out.putVarInt64(self.total_amount_of_calls_)
    if (self.has_total_cost_of_calls_microdollars_):
      out.putVarInt32(32)
      out.putVarInt64(self.total_cost_of_calls_microdollars_)
    for i in xrange(len(self.total_billed_ops_)):
      out.putVarInt32(42)
      out.putVarInt32(self.total_billed_ops_[i].ByteSizePartial())
      self.total_billed_ops_[i].OutputPartial(out)
  def TryMerge(self, d):
    # Decode fields from decoder `d`, merging into self. Unknown tags are
    # skipped; tag 0 indicates a corrupt/truncated buffer and raises.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_service_call_name(d.getPrefixedString())
        continue
      if tt == 24:
        self.set_total_amount_of_calls(d.getVarInt64())
        continue
      if tt == 32:
        self.set_total_cost_of_calls_microdollars(d.getVarInt64())
        continue
      if tt == 42:
        # Length-prefixed sub-message: decode it from a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_total_billed_ops().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # Text-format style debug dump; repeated sub-messages are rendered in
    # angle brackets, optionally numbered when printElemNumber is truthy.
    res=""
    if self.has_service_call_name_: res+=prefix+("service_call_name: %s\n" % self.DebugFormatString(self.service_call_name_))
    if self.has_total_amount_of_calls_: res+=prefix+("total_amount_of_calls: %s\n" % self.DebugFormatInt64(self.total_amount_of_calls_))
    if self.has_total_cost_of_calls_microdollars_: res+=prefix+("total_cost_of_calls_microdollars: %s\n" % self.DebugFormatInt64(self.total_cost_of_calls_microdollars_))
    cnt=0
    for e in self.total_billed_ops_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("total_billed_ops%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Densify a sparse {tag: value} map into a tuple indexed by tag number.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  # Field numbers from the .proto definition.
  kservice_call_name = 1
  ktotal_amount_of_calls = 3
  ktotal_cost_of_calls_microdollars = 4
  ktotal_billed_ops = 5

  # Tag-number -> field-name table used for debug/text output.
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "service_call_name",
    3: "total_amount_of_calls",
    4: "total_cost_of_calls_microdollars",
    5: "total_billed_ops",
  }, 5)

  # Tag-number -> wire-type table used by the generic codec.
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.NUMERIC,
    4: ProtocolBuffer.Encoder.NUMERIC,
    5: ProtocolBuffer.Encoder.STRING,
  }, 5, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
  _PROTO_DESCRIPTOR_NAME = 'apphosting.AggregateRpcStatsProto'
class KeyValProto(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = ""
has_value_ = 0
value_ = ""
  def __init__(self, contents=None):
    # Optionally decode an already-encoded message on construction.
    if contents is not None: self.MergeFromString(contents)
  # Generated accessors for required string field `key` (field 1).
  def key(self): return self.key_

  def set_key(self, x):
    self.has_key_ = 1
    self.key_ = x

  def clear_key(self):
    # Only reset when set, keeping the has_key_ presence flag consistent.
    if self.has_key_:
      self.has_key_ = 0
      self.key_ = ""

  def has_key(self): return self.has_key_
  # Generated accessors for required string field `value` (field 2).
  def value(self): return self.value_

  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x

  def clear_value(self):
    # Only reset when set, keeping the has_value_ presence flag consistent.
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = ""

  def has_value(self): return self.has_value_
  def MergeFrom(self, x):
    # Copy every present field from `x` into self (self must differ from x).
    assert x is not self
    if (x.has_key()): self.set_key(x.key())
    if (x.has_value()): self.set_value(x.value())
  def Equals(self, x):
    # Field-wise equality: presence flags and values must both match.
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Both fields are required; report each missing one via debug_strs.
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized
  def ByteSize(self):
    # Encoded size assuming both required fields are set; +2 covers their tags.
    n = 0
    n += self.lengthString(len(self.key_))
    n += self.lengthString(len(self.value_))
    return n + 2
  def ByteSizePartial(self):
    # Like ByteSize, but each field only counts when actually present.
    n = 0
    if (self.has_key_):
      n += 1
      n += self.lengthString(len(self.key_))
    if (self.has_value_):
      n += 1
      n += self.lengthString(len(self.value_))
    return n
  def Clear(self):
    # Reset both fields (and their presence flags) to defaults.
    self.clear_key()
    self.clear_value()
  def OutputUnchecked(self, out):
    # Serialize without presence checks. Tags: 10 = field 1, 18 = field 2.
    out.putVarInt32(10)
    out.putPrefixedString(self.key_)
    out.putVarInt32(18)
    out.putPrefixedString(self.value_)
def OutputPartial(self, out):
if (self.has_key_):
out.putVarInt32(10)
out.putPrefixedString(self.key_)
if (self.has_value_):
out.putVarInt32(18)
out.putPrefixedS |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from lazagne.config.module_info import ModuleInfo
try:
from ConfigParser import RawConfigParser # Python 2.7
except ImportError:
from configparser import RawConfigParser # Python 3
from collections import OrderedDict
class Wifi(ModuleInfo):
    """Harvest wifi credentials stored by NetworkManager."""

    def __init__(self):
        ModuleInfo.__init__(self, 'wifi', 'wifi')

    def run(self):
        """Return a list of {SSID, Password} dicts found on disk."""
        pwd_found = []
        directory = u'/etc/NetworkManager/system-connections'
        # Connection files are root-readable only; bail out early otherwise.
        if not os.path.exists(directory):
            return pwd_found
        if os.getuid() != 0:
            self.info('You need sudo privileges')
            return pwd_found
        for entry in os.listdir(directory):
            path = os.path.join(directory, entry)
            if not os.path.isfile(path):
                continue
            parser = RawConfigParser()
            parser.read(path)
            creds = OrderedDict()
            try:
                creds['SSID'] = parser.get('wifi', 'ssid')
                creds['Password'] = parser.get('wifi-security', 'psk')
            except Exception:
                # Open networks / other profiles lack these sections; skip.
                continue
            pwd_found.append(creds)
        return pwd_found
|
import sys
import os.path
import subprocess

# Robust major-version check (string compare `sys.version >= '3'` breaks
# for hypothetical versions like "10.x").
PY3 = sys.version_info[0] >= 3

from setuptools import setup, find_packages

# http://blogs.nopcode.org/brainstorm/2013/05/20/pragmatic-python-versioning-via-setuptools-and-git-tags/
# Fetch version from git tags, and write to version.py.
# Also, when git is not available (PyPi package), use stored version.py.
version_py = os.path.join(os.path.dirname(__file__), 'dame', 'version.py')
try:
    version_git = subprocess.check_output(
        ["git", "describe", "--always"]).rstrip()
    # Convert bytes to str for Python3
    if PY3:
        version_git = version_git.decode()
except (OSError, subprocess.CalledProcessError):
    # Fix: was a bare `except:` which also swallowed SystemExit and
    # KeyboardInterrupt. Only "git missing" (OSError) or "not a git
    # checkout" (CalledProcessError) should trigger the fallback.
    with open(version_py, 'r') as fh:
        version_git = fh.read().strip().split('=')[-1].replace('"', '')

version_msg = ("# Do not edit this file, "
               "pipeline versioning is governed by git tags")
# Persist the resolved version so sdist/PyPI installs work without git.
with open(version_py, 'w') as fh:
    fh.write(version_msg + os.linesep +
             "__version__='{}'\n".format(version_git))

setup(
    name="dame",
    author="Richard Lindsley",
    version=version_git,
    packages=find_packages(),
    license="MIT",
    entry_points={
        'gui_scripts': [
            'dame = dame.dame:main'
        ]
    },
)
|
from distutils import log
import distutils.command.sdist as orig
import os
import sys
import io
import contextlib
from setuptools.extern import six, ordered_set
from .py36compat import sdist_add_defaults
import pkg_resources
# Fallback file-finder: called with a dirname, returns an empty list.
_default_revctrl = list
def walk_revctrl(dirname=''):
    """Find all files under revision control"""
    # Each registered finder plugin yields paths for the given directory.
    entry_points = pkg_resources.iter_entry_points('setuptools.file_finders')
    for entry_point in entry_points:
        finder = entry_point.load()
        for filename in finder(dirname):
            yield filename
class sdist(sdist_add_defaults, orig.sdist):
    """Smart sdist that finds anything supported by revision control"""
    user_options = [
        ('formats=', None,
         "formats for source distribution (comma-separated list)"),
        ('keep-temp', 'k',
         "keep the distribution tree around after creating " +
         "archive file(s)"),
        ('dist-dir=', 'd',
         "directory to put the source distribution archive(s) in "
         "[default: dist]"),
    ]
    negative_opt = {}
    README_EXTENSIONS = ['', '.rst', '.txt', '.md']
    READMES = tuple('README{0}'.format(ext) for ext in README_EXTENSIONS)
    def run(self):
        # Build the file list from egg_info's manifest instead of re-scanning.
        self.run_command('egg_info')
        ei_cmd = self.get_finalized_command('egg_info')
        self.filelist = ei_cmd.filelist
        self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt'))
        self.check_readme()
        # Run sub commands
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)
        self.make_distribution()
        # Record produced archives so e.g. `upload` can find them later.
        dist_files = getattr(self.distribution, 'dist_files', [])
        for file in self.archive_files:
            data = ('sdist', '', file)
            if data not in dist_files:
                dist_files.append(data)
    def initialize_options(self):
        orig.sdist.initialize_options(self)
        self._default_to_gztar()
    def _default_to_gztar(self):
        # only needed on Python prior to 3.6.
        if sys.version_info >= (3, 6, 0, 'beta', 1):
            return
        self.formats = ['gztar']
    def make_distribution(self):
        """
        Workaround for #516
        """
        with self._remove_os_link():
            orig.sdist.make_distribution(self)
    @staticmethod
    @contextlib.contextmanager
    def _remove_os_link():
        """
        In a context, remove and restore os.link if it exists
        """
        class NoValue:
            # Sentinel distinguishing "os.link absent" from a None value.
            pass
        orig_val = getattr(os, 'link', NoValue)
        try:
            del os.link
        except Exception:
            pass
        try:
            yield
        finally:
            if orig_val is not NoValue:
                setattr(os, 'link', orig_val)
    def __read_template_hack(self):
        # This grody hack closes the template file (MANIFEST.in) if an
        # exception occurs during read_template.
        # Doing so prevents an error when easy_install attempts to delete the
        # file.
        try:
            orig.sdist.read_template(self)
        except Exception:
            # Reach into the failing frame to close its 'template' local.
            _, _, tb = sys.exc_info()
            tb.tb_next.tb_frame.f_locals['template'].close()
            raise
    # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
    # has been fixed, so only override the method if we're using an earlier
    # Python.
    has_leaky_handle = (
        sys.version_info < (2, 7, 2)
        or (3, 0) <= sys.version_info < (3, 1, 4)
        or (3, 2) <= sys.version_info < (3, 2, 1)
    )
    if has_leaky_handle:
        read_template = __read_template_hack
    def _add_defaults_optional(self):
        # py2 has no zero-arg super(); dispatch explicitly.
        if six.PY2:
            sdist_add_defaults._add_defaults_optional(self)
        else:
            super()._add_defaults_optional()
        if os.path.isfile('pyproject.toml'):
            self.filelist.append('pyproject.toml')
    def _add_defaults_python(self):
        """getting python files"""
        if self.distribution.has_pure_modules():
            build_py = self.get_finalized_command('build_py')
            self.filelist.extend(build_py.get_source_files())
            self._add_data_files(self._safe_data_files(build_py))
    def _safe_data_files(self, build_py):
        """
        Extracting data_files from build_py is known to cause
        infinite recursion errors when `include_package_data`
        is enabled, so suppress it in that case.
        """
        if self.distribution.include_package_data:
            return ()
        return build_py.data_files
    def _add_data_files(self, data_files):
        """
        Add data files as found in build_py.data_files.
        """
        self.filelist.extend(
            os.path.join(src_dir, name)
            for _, src_dir, _, filenames in data_files
            for name in filenames
        )
    def _add_defaults_data_files(self):
        try:
            if six.PY2:
                sdist_add_defaults._add_defaults_data_files(self)
            else:
                super()._add_defaults_data_files()
        except TypeError:
            log.warn("data_files contains unexpected objects")
    def check_readme(self):
        # Warn (don't fail) when no recognized README variant exists.
        for f in self.READMES:
            if os.path.exists(f):
                return
        else:
            self.warn(
                "standard file not found: should have one of " +
                ', '.join(self.READMES)
            )
    def make_release_tree(self, base_dir, files):
        orig.sdist.make_release_tree(self, base_dir, files)
        # Save any egg_info command line options used to create this sdist
        dest = os.path.join(base_dir, 'setup.cfg')
        if hasattr(os, 'link') and os.path.exists(dest):
            # unlink and re-copy, since it might be hard-linked, and
            # we don't want to change the source version
            os.unlink(dest)
            self.copy_file('setup.cfg', dest)
        self.get_finalized_command('egg_info').save_version_info(dest)
    def _manifest_is_not_generated(self):
        # check for special comment used in 2.7.1 and higher
        if not os.path.isfile(self.manifest):
            return False
        with io.open(self.manifest, 'rb') as fp:
            first_line = fp.readline()
        return (first_line !=
                '# file GENERATED by distutils, do NOT edit\n'.encode())
    def read_manifest(self):
        """Read the manifest file (named by 'self.manifest') and use it to
        fill in 'self.filelist', the list of files to include in the source
        distribution.
        """
        log.info("reading manifest file '%s'", self.manifest)
        manifest = open(self.manifest, 'rb')
        for line in manifest:
            # The manifest must contain UTF-8. See #303.
            if not six.PY2:
                try:
                    line = line.decode('UTF-8')
                except UnicodeDecodeError:
                    log.warn("%r not UTF-8 decodable -- skipping" % line)
                    continue
            # ignore comments and blank lines
            line = line.strip()
            if line.startswith('#') or not line:
                continue
            self.filelist.append(line)
        manifest.close()
    def check_license(self):
        """Checks if license_file' or 'license_files' is configured and adds any
        valid paths to 'self.filelist'.
        """
        files = ordered_set.OrderedSet()
        opts = self.distribution.get_option_dict('metadata')
        # ignore the source of the value
        _, license_file = opts.get('license_file', (None, None))
        if license_file is None:
            log.debug("'license_file' option was not specified")
        else:
            files.add(license_file)
        try:
            files.update(self.distribution.metadata.license_files)
        except TypeError:
            log.warn("warning: 'license_files' option is malformed")
        for f in files:
            if not os.path.exists(f):
                log.warn(
                    "warning: Failed to find the configured license file '%s'",
                    f)
                files.remove(f)
        self.filelist.extend(files)
|
ms", {}) # type: Dict[str, Any]
if expand is not None:
query_parameters['$expand'] = _SERIALIZER.query("expand", expand, 'str')
if top is not None:
query_parameters['$top'] = _SERIALIZER.query("top", top, 'int')
if orderby is not None:
query_parameters['$orderby'] = _SERIALIZER.query("orderby", orderby, 'str')
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_offers_request(
    location: str,
    publisher_name: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the GET request that lists VM image offers for a publisher."""
    api_version = "2020-12-01"
    accept = "application/json"

    # URL template (overridable via `template_url`) with path args filled in.
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers')
    path_args = {
        "location": _SERIALIZER.url("location", location, 'str'),
        "publisherName": _SERIALIZER.url("publisher_name", publisher_name, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }
    url = _format_url_section(url, **path_args)

    # Query string: only the API version is required here.
    query_params = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    header_params = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_params['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=query_params,
        headers=header_params,
        **kwargs
    )
def build_list_publishers_request(
    location: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the GET request that lists VM image publishers for a region."""
    api_version = "2020-12-01"
    accept = "application/json"

    # URL template (overridable via `template_url`) with path args filled in.
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers')
    path_args = {
        "location": _SERIALIZER.url("location", location, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }
    url = _format_url_section(url, **path_args)

    # Query string: only the API version is required here.
    query_params = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    header_params = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_params['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=query_params,
        headers=header_params,
        **kwargs
    )
def build_list_skus_request(
    location: str,
    publisher_name: str,
    offer: str,
    subscription_id: str,
    **kwargs: Any
) -> HttpRequest:
    """Build the GET request that lists VM image SKUs for an offer."""
    api_version = "2020-12-01"
    accept = "application/json"

    # URL template (overridable via `template_url`) with path args filled in.
    url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus')
    path_args = {
        "location": _SERIALIZER.url("location", location, 'str'),
        "publisherName": _SERIALIZER.url("publisher_name", publisher_name, 'str'),
        "offer": _SERIALIZER.url("offer", offer, 'str'),
        "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
    }
    url = _format_url_section(url, **path_args)

    # Query string: only the API version is required here.
    query_params = kwargs.pop("params", {})  # type: Dict[str, Any]
    query_params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')

    header_params = kwargs.pop("headers", {})  # type: Dict[str, Any]
    header_params['Accept'] = _SERIALIZER.header("accept", accept, 'str')

    return HttpRequest(
        method="GET",
        url=url,
        params=query_params,
        headers=header_params,
        **kwargs
    )
class VirtualMachineImagesOperations(object):
"""VirtualMachineImagesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2020_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer):
        # Thin holder wiring the shared pipeline client, (de)serializers and
        # client configuration into this operation group.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    @distributed_trace
    def get(
        self,
        location: str,
        publisher_name: str,
        offer: str,
        skus: str,
        version: str,
        **kwargs: Any
    ) -> "_models.VirtualMachineImage":
        """Gets a virtual machine image.
        :param location: The name of a supported Azure region.
        :type location: str
        :param publisher_name: A valid image publisher.
        :type publisher_name: str
        :param offer: A valid image publisher offer.
        :type offer: str
        :param skus: A valid image SKU.
        :type skus: str
        :param version: A valid image SKU version.
        :type version: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: VirtualMachineImage, or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2020_12_01.models.VirtualMachineImage
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualMachineImage"]
        # Map auth/not-found/conflict statuses to typed exceptions; callers
        # may extend or override via the `error_map` kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))

        # Build and normalize the HTTP request, then run the client pipeline.
        request = build_get_request(
            location=location,
            publisher_name=publisher_name,
            offer=offer,
            skus=skus,
            version=version,
            subscription_id=self._config.subscription_id,
            template_url=self.get.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # Only 200 is a success for this operation.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('VirtualMachineImage', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus/{skus}/versions/{version}'}  # type: ignore
@distributed_trace
def list(
self,
location: str,
publisher_name: str,
offer: str,
skus: str,
expand: Optional[str] = None,
top: Optional[int] = None,
orderby: Optional[str] = None,
**kwargs: Any
) -> List["_models.VirtualMachineImageResource"]:
"""Gets a list of all virtual machine image versions for the specified location, publisher, |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Techies' Stasis Trap
:copyright: (c) 2014 Runzhou Li (Leo)
:license: The MIT License (MIT), see LICENSE for details.
"""
import sys
from logging import Handler, NOTSET
_ref_atributes = | [
'%(levelname)s',
'%(name)s',
'%(pathname)s',
'%(module)s',
'%(funcName)s',
'%(lineno)d',
'%(message)s'
]
'''
Reference log format, best used with UniQueue or CountQueue
'''
R | EF_LOG_FORMAT = ':'.join(_ref_atributes)
class QueueHandler(Handler):
    '''
    Queue Logging Handler

    Inherits standard logging.Handler that emits to any standard Queue
    compatible implementations. Including the ones in techies.landmines module
    '''

    def __init__(self, q, level=NOTSET):
        # logging.Handler is an old-style class on Python <= 2.6, where
        # super() cannot be used; call the base class directly there.
        if sys.version_info[:2] > (2, 6):
            super(QueueHandler, self).__init__(level)
        else:
            Handler.__init__(self, level)
        self.q = q

    def emit(self, record):
        '''
        Format the record and put the resulting string onto the queue.
        '''
        try:
            self.q.put(self.format(record))
        except (KeyboardInterrupt, SystemExit):
            raise
        except Exception:
            # Fix: was a bare `except:`, which also swallowed exceptions such
            # as GeneratorExit; delegate real failures to the standard
            # logging error handler instead.
            self.handleError(record)
|
# Copyright 2016 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import mock
import openhtf
from openhtf import plugs
def PlainFunc():
  """Plain Docstring"""
  # No-op phase; the docstring above is asserted on by TestPhaseInfo.
  return None
def NormalTestPhase(test):
  # Ignores its phase argument and returns a fixed marker string.
  return 'return value'
def ExtraArgFunc(input=None):
  # NOTE(review): `input` shadows the builtin, but the name is part of the
  # keyword interface exercised via WithArgs(input=...); keep it as-is.
  return input
class TestPhaseInfo(unittest.TestCase):
  """Unit tests for openhtf.PhaseInfo wrapping, copying and arg binding."""

  def setUp(self):
    self._phase_data = mock.Mock(plug_manager=plugs.PlugManager())

  def testBasics(self):
    wrapped = openhtf.PhaseInfo.WrapOrCopy(PlainFunc)
    self.assertIs(wrapped.func, PlainFunc)
    self.assertEqual(0, len(wrapped.plugs))
    self.assertEqual('PlainFunc', wrapped.name)
    self.assertEqual('Plain Docstring', wrapped.doc)
    wrapped(self._phase_data)
    returning = openhtf.PhaseInfo.WrapOrCopy(NormalTestPhase)
    self.assertEqual('NormalTestPhase', returning.name)
    self.assertEqual('return value', returning(self._phase_data))

  def testMultiplePhases(self):
    first = openhtf.PhaseInfo.WrapOrCopy(PlainFunc)
    second = openhtf.PhaseInfo.WrapOrCopy(first)
    # Copying must duplicate every attribute except the wrapped function.
    for attr in type(first).all_attribute_names:
      if attr != 'func':
        self.assertIsNot(getattr(first, attr), getattr(second, attr))

  def testWithArgs(self):
    bound = openhtf.PhaseInfo.WrapOrCopy(ExtraArgFunc)
    bound = bound.WithArgs(input='input arg')
    self.assertEqual('input arg', bound(self._phase_data))
    # Re-binding returns a new phase; the original binding is untouched.
    rebound = bound.WithArgs(input='second input')
    self.assertEqual('input arg', bound(self._phase_data))
    self.assertEqual('second input', rebound(self._phase_data))
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import platform
import sys
import pytest
import spack
from spack.main import SpackCommand
# Reusable handle on the `spack python` CLI subcommand under test.
python = SpackCommand('python')
def test_python():
    # The spawned `spack python` must report this process's spack version.
    version_out = python('-c', 'import spack; print(spack.spack_version)')
    assert version_out.strip() == spack.spack_version
def test_python_interpreter_path():
    # `--path` prints the interpreter spack itself is running under.
    path_out = python('--path')
    assert path_out.strip() == sys.executable
def test_python_version():
    # `-V` mirrors the underlying interpreter's version string.
    version_out = python('-V')
    assert platform.python_version() in version_out
def test_python_with_module():
    # pytest rewrites a lot of modules, which interferes with runpy, so
    # it's hard to test this. Trying to import a module like sys, that
    # has no code associated with it, raises an error reliably in python
    # 2 and 3, which indicates we successfully ran runpy.run_module.
    with pytest.raises(ImportError, match="No code object"):
        python('-m', 'sys')
def test_python_raises():
    # Unknown flags should surface an argument error rather than crash.
    err_out = python('--foobar', fail_on_error=False)
    assert "Error: Unknown arguments" in err_out
|
import sys
import argparse
import logging
import importlib
from .server import Server, build_endpoint_description_strings
from .access import AccessLogGenerator
logger = logging.getLogger(__name__)
# Fallback bind address/port used when no endpoint options are given.
DEFAULT_HOST = '127.0.0.1'
DEFAULT_PORT = 8000
class CommandLineInterface(object):
    """
    Acts as the main CLI entry point for running the server.
    """

    description = "Django HTTP/WebSocket server"

    def __init__(self):
        # Declare the full argument surface up front; `run()` consumes it.
        self.parser = argparse.ArgumentParser(
            description=self.description,
        )
        self.parser.add_argument(
            '-p',
            '--port',
            type=int,
            help='Port number to listen on',
            default=None,
        )
        self.parser.add_argument(
            '-b',
            '--bind',
            dest='host',
            help='The host/address to bind to',
            default=None,
        )
        self.parser.add_argument(
            '--websocket_timeout',
            type=int,
            help='max time websocket connected. -1 to infinite.',
            default=None,
        )
        self.parser.add_argument(
            '--websocket_connect_timeout',
            type=int,
            help='max time to refuse establishing connection. -1 to infinite',
            default=None,
        )
        self.parser.add_argument(
            '-u',
            '--unix-socket',
            dest='unix_socket',
            help='Bind to a UNIX socket rather than a TCP host/port',
            default=None,
        )
        self.parser.add_argument(
            '--fd',
            type=int,
            dest='file_descriptor',
            help='Bind to a file descriptor rather than a TCP host/port or named unix socket',
            default=None,
        )
        self.parser.add_argument(
            '-e',
            '--endpoint',
            dest='socket_strings',
            action='append',
            help='Use raw server strings passed directly to twisted',
            default=[],
        )
        self.parser.add_argument(
            '-v',
            '--verbosity',
            type=int,
            help='How verbose to make the output',
            default=1,
        )
        self.parser.add_argument(
            '-t',
            '--http-timeout',
            type=int,
            help='How long to wait for worker server before timing out HTTP connections',
            default=120,
        )
        self.parser.add_argument(
            '--access-log',
            help='Where to write the access log (- for stdout, the default for verbosity=1)',
            default=None,
        )
        self.parser.add_argument(
            '--ping-interval',
            type=int,
            help='The number of seconds a WebSocket must be idle before a keepalive ping is sent',
            default=20,
        )
        self.parser.add_argument(
            '--ping-timeout',
            type=int,
            # Fix: help text previously read "WeSocket".
            help='The number of seconds before a WebSocket is closed if no response to a keepalive ping',
            default=30,
        )
        self.parser.add_argument(
            '--ws-protocol',
            nargs='*',
            dest='ws_protocols',
            help='The WebSocket protocols you wish to support',
            default=None,
        )
        self.parser.add_argument(
            '--root-path',
            dest='root_path',
            help='The setting for the ASGI root_path variable',
            default="",
        )
        self.parser.add_argument(
            '--proxy-headers',
            dest='proxy_headers',
            help='Enable parsing and using of X-Forwarded-For and X-Forwarded-Port headers and using that as the '
                 'client address',
            default=False,
            action='store_true',
        )
        self.parser.add_argument(
            '--force-sync',
            dest='force_sync',
            action='store_true',
            help='Force the server to use synchronous mode on its ASGI channel layer',
            default=False,
        )
        self.parser.add_argument(
            'channel_layer',
            help='The ASGI channel layer instance to use as path.to.module:instance.path',
        )
        # Populated by run(); kept for introspection/tests.
        self.server = None

    @classmethod
    def entrypoint(cls):
        """
        Main entrypoint for external starts.
        """
        cls().run(sys.argv[1:])

    def run(self, args):
        """
        Pass in raw argument list and it will decode them
        and run the server.
        """
        # Decode args
        args = self.parser.parse_args(args)
        # Set up logging
        logging.basicConfig(
            level={
                0: logging.WARN,
                1: logging.INFO,
                2: logging.DEBUG,
            }[args.verbosity],
            format="%(asctime)-15s %(levelname)-8s %(message)s",
        )
        # If verbosity is 1 or greater, or they told us explicitly, set up access log
        access_log_stream = None
        if args.access_log:
            if args.access_log == "-":
                access_log_stream = sys.stdout
            else:
                # Line-buffered append so entries appear promptly.
                access_log_stream = open(args.access_log, "a", 1)
        elif args.verbosity >= 1:
            access_log_stream = sys.stdout
        # Import channel layer: "path.to.module:attr.path" is resolved by
        # importing the module and walking the dotted attribute chain.
        sys.path.insert(0, ".")
        module_path, object_path = args.channel_layer.split(":", 1)
        channel_layer = importlib.import_module(module_path)
        for bit in object_path.split("."):
            channel_layer = getattr(channel_layer, bit)
        if not any([args.host, args.port, args.unix_socket, args.file_descriptor, args.socket_strings]):
            # no advanced binding options passed, patch in defaults
            args.host = DEFAULT_HOST
            args.port = DEFAULT_PORT
        elif args.host and not args.port:
            args.port = DEFAULT_PORT
        elif args.port and not args.host:
            args.host = DEFAULT_HOST
        # build endpoint description strings from (optional) cli arguments
        endpoints = build_endpoint_description_strings(
            host=args.host,
            port=args.port,
            unix_socket=args.unix_socket,
            file_descriptor=args.file_descriptor
        )
        endpoints = sorted(
            args.socket_strings + endpoints
        )
        logger.info(
            'Starting server at %s, channel layer %s.' %
            (', '.join(endpoints), args.channel_layer)
        )
        self.server = Server(
            channel_layer=channel_layer,
            endpoints=endpoints,
            http_timeout=args.http_timeout,
            ping_interval=args.ping_interval,
            ping_timeout=args.ping_timeout,
            websocket_timeout=args.websocket_timeout,
            websocket_connect_timeout=args.websocket_connect_timeout,
            action_logger=AccessLogGenerator(access_log_stream) if access_log_stream else None,
            ws_protocols=args.ws_protocols,
            root_path=args.root_path,
            verbosity=args.verbosity,
            proxy_forwarded_address_header='X-Forwarded-For' if args.proxy_headers else None,
            proxy_forwarded_port_header='X-Forwarded-Port' if args.proxy_headers else None,
            force_sync=args.force_sync,
        )
        self.server.run()
|
class Solution:
    """
    @param A : a list of integers
    @param target : an integer to be searched
    @return : a list of length 2, [index1, index2]
    """
    def searchRange(self, A, target):
        """Return [first, last] indices of target in sorted A, or [-1, -1].

        Two binary searches in O(log n): one for the leftmost index whose
        value is >= target, one for the rightmost index whose value is
        <= target.
        """
        res = []
        l, r = 0, len(A) - 1
        # Leftmost position: first index with A[m] >= target.
        while l <= r:
            m = (l + r) // 2  # // keeps the midpoint an int on Python 3
            if A[m] >= target:
                r = m - 1
            else:
                l = m + 1
        res.append(l)
        l, r = 0, len(A) - 1
        # Rightmost position: last index with A[m] <= target.
        while l <= r:
            m = (l + r) // 2
            if A[m] > target:
                r = m - 1
            else:
                l = m + 1  # was `l = l + 1`, which degraded to a linear scan
        res.append(r)
        # If the bounds crossed, target does not occur in A.
        if res[0] > res[1]: return [-1, -1]
        return res
|
from ajenti.api import *
@interface
class UserSyncProvider (BasePlugin):
    """Interface for plugins that synchronize Ajenti users against an
    external user source. Concrete providers override these stubs.
    """
    # Whether the provider supports renaming synced users.
    allows_renaming = False
    # Whether the provider also synchronizes the root account.
    syncs_root = False
    def test(self):
        # Probe whether the backing user source is reachable; stub says no.
        return False
    def check_password(self, username, password):
        # Validate credentials against the external source; stub rejects all.
        return False
    def sync(self):
        # Pull users from the external source; stub does nothing.
        pass
f):
self.cts = HttpCtsRetriever("http://domainname.com/rest/cts")
def test_request_Cts_getCapabilities_arguments(self):
""" Tests that methods getCapabilities maps correctly to request"""
with patch('requests.get') as patched_get:
self.cts.getCapabilities(inventory="inventory")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "inventory",
"request": "GetCapabilities"
}
)
def test_request_Cts_getValidReff_arguments(self):
""" Tests that methods getValidReff maps correctly to request"""
with patch('requests.get') as patched_get:
self.cts.getValidReff(u | rn="urn", inventory="inventory", leve | l=1)
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "inventory",
"request": "GetValidReff",
"level": "1",
"urn": "urn"
}
)
with patch('requests.get') as patched_get:
self.cts.getValidReff(urn="urn", inventory="inventory")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "inventory",
"request": "GetValidReff",
"urn": "urn"
}
)
def test_request_Cts_getPassage_arguments(self):
""" Tests that methods getPassage maps correctly to request"""
with patch('requests.get') as patched_get:
self.cts.getPassage(urn="urn", inventory="inventory", context=1)
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "inventory",
"request": "GetPassage",
"context": "1",
"urn": "urn"
}
)
with patch('requests.get') as patched_get:
self.cts.getPassage(urn="urn", inventory="inventory")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "inventory",
"request": "GetPassage",
"urn": "urn"
}
)
def test_call_with_default(self):
inv = HttpCtsRetriever("http://domainname.com/rest/cts", inventory="annotsrc")
with patch('requests.get') as patched_get:
inv.getPassage(urn="urn")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "annotsrc",
"request": "GetPassage",
"urn": "urn"
}
)
def test_request_Cts_getPassagePlus_arguments(self):
""" Tests that methods getPassagePlus maps correctly to request"""
with patch('requests.get') as patched_get:
self.cts.getPassagePlus(
urn="urn", inventory="inventory", context=1)
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "inventory",
"request": "GetPassagePlus",
"context": "1",
"urn": "urn"
}
)
with patch('requests.get') as patched_get:
self.cts.getPassagePlus(urn="urn", inventory="inventory")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "inventory",
"request": "GetPassagePlus",
"urn": "urn"
}
)
def test_request_Cts_getFirstUrn_arguments(self):
""" Tests that methods getFirstUrn maps correctly to request"""
with patch('requests.get') as patched_get:
self.cts.getFirstUrn(urn="urn", inventory="inventory")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "inventory",
"request": "GetFirstUrn",
"urn": "urn"
}
)
def test_request_Cts_getPrevNextUrn_arguments(self):
""" Tests that methods getPrevNextUrn maps correctly to request"""
with patch('requests.get') as patched_get:
self.cts.getPrevNextUrn(urn="urn", inventory="inventory")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "inventory",
"request": "GetPrevNextUrn",
"urn": "urn"
}
)
def test_request_Cts_getLabel_arguments(self):
""" Tests that methods getLabel maps correctly to request"""
with patch('requests.get') as patched_get:
self.cts.getLabel(urn="urn", inventory="inventory")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"inv": "inventory",
"request": "GetLabel",
"urn": "urn"
}
)
def test_get_siblings(self):
""" Ensure Citable CtsTextMetadata Service getMetadata is correctly routed """
with patch('requests.get') as patched_get:
self.cts.getSiblings("urn:cts:latinLit:phi1294.phi002.perseus-lat2", "1.1")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"request": "GetPrevNextUrn",
"urn": "urn:cts:latinLit:phi1294.phi002.perseus-lat2:1.1"
}
)
def test_get_children(self):
""" Ensure Citable CtsTextMetadata Service getMetadata is correctly routed """
with patch('requests.get') as patched_get:
self.cts.getReffs("urn:cts:latinLit:phi1294.phi002.perseus-lat2")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"request": "GetValidReff",
"urn": "urn:cts:latinLit:phi1294.phi002.perseus-lat2",
"level": "1"
}
)
with patch('requests.get') as patched_get:
self.cts.getReffs("urn:cts:latinLit:phi1294.phi002.perseus-lat2", subreference="1.1")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"request": "GetValidReff",
"urn": "urn:cts:latinLit:phi1294.phi002.perseus-lat2:1.1",
"level": "3"
}
)
with patch('requests.get') as patched_get:
self.cts.getReffs("urn:cts:latinLit:phi1294.phi002.perseus-lat2", subreference="1", level=2)
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"request": "GetValidReff",
"urn": "urn:cts:latinLit:phi1294.phi002.perseus-lat2:1",
"level": "3"
}
)
def test_get_metadata(self):
""" Ensure Citable CtsTextMetadata Service getMetadata is correctly routed """
with patch('requests.get') as patched_get:
self.cts.getMetadata()
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"request": "GetCapabilities"
}
)
with patch('requests.get') as patched_get:
self.cts.getMetadata(objectId="urn")
patched_get.assert_called_once_with(
"http://domainname.com/rest/cts", params={
"request": "GetCapabilities",
"urn": "urn"
}
)
def test_get_text(self):
""" Ensure Citable CtsTextMetadata Service getText is correctly routed """
with patch('reques |
prefiltered_projects[p_id] = p_info
else:
if filter_projects == 'aborted':
filtered_projects[p_id] = p_info
else:
filtered_projects = projects
if filter_projects == 'pending':
for p_id, p_info in prefiltered_projects.iteritems():
if not 'open_date' in p_info:
filtered_projects[p_id] = p_info
elif filter_projects == 'open':
for p_id, p_info in prefiltered_projects.iteritems():
if 'open_date' in p_info:
filtered_projects[p_id] = p_info
elif filter_projects == 'reception_control':
for p_id, p_info in prefiltered_projects.iteritems():
if 'open_date' in p_info and not 'queued' in p_info:
filtered_projects[p_id] = p_info
elif filter_projects == 'ongoing':
for p_id, p_info in prefiltered_projects.iteritems():
if 'queued' in p_info and not 'close_date' in p_info:
filtered_projects[p_id] = p_info
elif filter_projects == 'closed':
for p_id, p_info in prefiltered_projects.iteritems():
if 'close_date' in p_info :
filtered_projects[p_id] = p_info
elif filter_projects == "pending_review":
for p_id, p_info in prefiltered_projects.iteritems():
if 'pending_reviews' in p_info:
filtered_projects[p_id] = p_info
final_projects = self.filter_per_date(filtered_projects, youngest_open_date, oldest_open_date, youngest_queue_date, oldest_queue_date, youngest_close_date, oldest_close_date)
# Include dates for each project:
for row in self.application.projects_db.view("project/summary_dates", descending=True, group_level=1):
if row.key[0] in final_projects:
for date_type, date in row.value.iteritems():
final_projects[row.key[0]][date_type] = date
return final_projects
def filter_per_date(self, plist, yod, ood, yqd, oqd, ycd, ocd):
default_open_date='2012-01-01'
default_close_date=datetime.datetime.now().strftime("%Y-%m-%d")
""" yod : youngest open date
ood : oldest open date
yqd : youngest queue date
oqd : oldest queue date
ycd : youngest close date
ocd : oldest close date"""
filtered_projects = OrderedDict()
for p_id, p_info in plist.iteritems():
if ycd != default_close_date or ocd != default_open_date:
if 'close_date' not in p_info or (p_info['close_date'] > ycd or p_info['close_date'] < ocd):
continue
if yqd != default_close_date or oqd != default_open_date:
if 'queued' not in p_info or (p_info['queued'] > yqd or p_info['queued'] < oqd):
continue
if yod != default_close_date or ood != default_open_date:
if 'open_date' not in p_info or (p_info['open_date'] > yod or p_info['open_date'] < ood):
continue
filtered_projects[p_id]=p_info
return filtered_projects
    def list_project_fields(self, undefined=False, project_list='all'):
        """Collect the set of field names occurring across project records.

        :param undefined: if True, only return fields NOT already covered
            by the pv_columns definitions from StatusDB constants.
        :param project_list: filter passed through to list_projects
            (e.g. 'all', 'open', 'closed').
        :returns: a set of field-name strings.
        """
        # If undefined=True is given, only return fields not in columns defined
        # in constants in StatusDB
        columns = self.application.genstat_defaults.get('pv_columns')
        project_list = self.list_projects(filter_projects=project_list)
        field_items = set()
        for project_id, value in project_list.iteritems():
            for key, _ in value.iteritems():
                field_items.add(key)
        if undefined:
            # Drop every field that a predefined column already displays.
            for column_category, column_dict in columns.iteritems():
                field_items = field_items.difference(set(column_dict.values()))
        return field_items
    def search_project_names(self, search_string=''):
        """Case-insensitively match search_string against project names/IDs.

        Returns a list of {'url', 'name'} dicts for matching projects.
        NOTE(review): an empty search string returns '' instead of [] --
        inconsistent with the list return type; confirm whether callers
        rely on that before changing it.
        """
        if len(search_string) == 0:
            return ''
        projects = []
        summary_view = self.application.projects_db.view("project/summary", descending=True)
        for row in summary_view:
            # row.key[1] is presumably the project ID -- verify against the view definition.
            if search_string.lower() in row.value['project_name'].lower() or search_string.lower() in row.key[1].lower():
                project = {
                    "url": '/project/'+row.key[1],
                    "name": row.value['project_name']
                }
                projects.append(project);
        return projects
def prettify_css_names(s):
    """Replace parentheses with underscores so *s* is usable as a CSS name."""
    for forbidden in ("(", ")"):
        s = s.replace(forbidden, "_")
    return s
class ProjectsDataHandler(ProjectsBaseDataHandler):
    """Serve brief JSON information for projects in the database.

    Mounted at /api/v1/projects
    """
    def get(self):
        self.set_header("Content-type", "application/json")
        wanted_list = self.get_argument('list', 'all')
        self.write(json.dumps(self.list_projects(wanted_list)))
class ProjectsFieldsDataHandler(ProjectsBaseDataHandler):
    """ Serves all fields occurring in the values of the ProjectsDataHandler
    json object.
    Loaded through /api/v1/projects_fields
    """
    def get(self):
        undefined = self.get_argument("undefined", "False")
        # str.lower() instead of string.lower(): the module-level function
        # was removed in Python 3, and the method form is equivalent on 2.
        undefined = (undefined.lower() == "true")
        project_list = self.get_argument("project_list", "all")
        field_items = self.list_project_fields(undefined=undefined, project_list=project_list)
        self.write(json.dumps(list(field_items)))
class ProjectsSearchHandler(ProjectsBaseDataHandler):
    """Search for projects whose name or ID matches the supplied string.

    Mounted at /api/v1/project_search/([^/]*)$
    """
    def get(self, search_string):
        self.set_header("Content-type", "application/json")
        matches = self.search_project_names(search_string)
        self.write(json.dumps(matches))
class ProjectDataHandler(ProjectsBaseDataHandler):
    """ Serves brief information of a given project.
    Loaded through /api/v1/project_summary/([^/]*)$
    """
    def get(self, project):
        self.set_header("Content-type", "application/json")
        self.write(json.dumps(self.project_info(project)))
    def project_info(self, project):
        """Return the summary document for *project*, augmented with dates.

        Looks the project up under the "open" key first, then "closed";
        returns {} unless exactly one matching row is found.
        """
        view = self.application.projects_db.view("project/summary")["open", project]
        if not view.rows:
            view = self.application.projects_db.view("project/summary")["closed", project]
        if not len(view.rows) == 1:
            return {}
        summary_row = view.rows[0]
        summary_row = self.project_summary_data(summary_row)
        date_view = self.application.projects_db.view("project/summary_dates",
                                                      descending=True,
                                                      group_level=1)
        # 'ZZZZ' suffix builds an upper bound that sorts after any real key
        # (descending slice from project+'ZZZZ' down to project).
        date_result = date_view[[project + 'ZZZZ']:[project]]
        if date_result.rows:
            for date_row in date_result.rows:
                for date_type, date in date_row.value.iteritems():
                    summary_row.value[date_type] = date
        return summary_row.value
class ProjectSamplesDataHandler(SafeHandler):
""" Serves brief info about all samples in a given project.
Loaded through /api/v1/project/([^/]*)$
"""
def sample_data(self, sample_data,project, sample):
sample_data["sample_run_metrics"] = []
sample_data["prep_status"] = []
sample_data["prep_finished_date"] = []
sample_data["run_metrics_data"]={}
if "library_prep" in sample_data:
for lib_prep in sorted(sample_data["library_prep"]):
content=sample_data["library_prep"][lib_prep]
if "sample_run_metrics" in content:
for run, id in content["sample_run_metrics"].iteritems():
sample_data["sample_run_metrics"].append(run)
sample_data["run_metrics_data"][run]=self.get_sample_run_metrics(run)
if "prep_status" in content:
if content["prep_status"] == "PASSED":
sample_data["prep_status"].append(content["prep_status"])
else:
sa |
#!/usr/bin/python
# -*- coding: utf-8 -*-
corto = 10
# long() exists only in Python 2; it widens the int to an arbitrary-precision long.
largo = long(corto)
# Python 2 print statements: expected output is <type 'int'> then <type 'long'>.
print type(corto)
print type(largo)
|
.Instance(name=self.INSTANCE_NAME)
klass = self._getTargetClass()
with self.assertRaises(ValueError):
klass.from_pb(instance_pb, client)
def test_name_property(self):
client = _Client(project=self.PROJECT)
instance = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID)
self.assertEqual(instance.name, self.INSTANCE_NAME)
def test___eq__(self):
client = object()
instance1 = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID)
instance2 = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID)
self.assertEqual(instance1, instance2)
def test___eq__type_differ(self):
client = object()
instance1 = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID)
instance2 = object()
self.assertNotEqual(instance1, instance2)
def test___ne__same_value(self):
client = object()
instance1 = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID)
instance2 = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID)
comparison_val = (instance1 != instance2)
self.assertFalse(comparison_val)
def test___ne__(self):
instance1 = self._makeOne('instance_id1', 'client1', self.LOCATION_ID)
instance2 = self._makeOne('instance_id2', 'client2', self.LOCATION_ID)
self.assertNotEqual(instance1, instance2)
    def test_reload(self):
        """reload() issues GetInstance and refreshes display_name in place."""
        from gcloud.bigtable._generated_v2 import (
            instance_pb2 as data_v2_pb2)
        from gcloud.bigtable._generated_v2 import (
            bigtable_instance_admin_pb2 as messages_v2_pb)
        from gcloud.bigtable._testing import _FakeStub
        client = _Client(self.PROJECT, timeout_seconds=self.TIMEOUT_SECONDS)
        instance = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID)
        # Create request_pb
        request_pb = messages_v2_pb.GetInstanceRequest(
            name=self.INSTANCE_NAME)
        # Create response_pb
        DISPLAY_NAME = u'hey-hi-hello'
        response_pb = data_v2_pb2.Instance(
            display_name=DISPLAY_NAME,
        )
        # Patch the stub used by the API method.
        client._instance_stub = stub = _FakeStub(response_pb)
        # Create expected_result.
        expected_result = None  # reload() has no return value.
        # Check Instance optional config values before.
        self.assertEqual(instance.display_name, self.INSTANCE_ID)
        # Perform the method and check the result.
        result = instance.reload()
        self.assertEqual(result, expected_result)
        self.assertEqual(stub.method_calls, [(
            'GetInstance',
            (request_pb, self.TIMEOUT_SECONDS),
            {},
        )])
        # Check Instance optional config values after the reload.
        self.assertEqual(instance.display_name, DISPLAY_NAME)
    def test_create(self):
        """create() calls CreateInstance and wraps the LRO in an Operation."""
        from google.longrunning import operations_pb2
        from gcloud._testing import _Monkey
        from gcloud.bigtable._testing import _FakeStub
        from gcloud.bigtable import instance as MUT
        client = _Client(self.PROJECT, timeout_seconds=self.TIMEOUT_SECONDS)
        instance = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID)
        # Create request_pb. Just a mock since we monkey patch
        # _prepare_create_request
        request_pb = object()
        # Create response_pb
        OP_BEGIN = object()  # sentinel returned by the mocked _process_operation
        response_pb = operations_pb2.Operation(name=self.OP_NAME)
        # Patch the stub used by the API method.
        client._instance_stub = stub = _FakeStub(response_pb)
        # Create expected_result.
        expected_result = MUT.Operation('create', self.OP_ID, OP_BEGIN,
                                        self.LOCATION_ID, instance=instance)
        # Create the mocks.
        prep_create_called = []
        def mock_prep_create_req(instance):
            # Record the instance and hand back the placeholder request.
            prep_create_called.append(instance)
            return request_pb
        process_operation_called = []
        def mock_process_operation(operation_pb):
            process_operation_called.append(operation_pb)
            return self.OP_ID, self.LOCATION_ID, OP_BEGIN
        # Perform the method and check the result.
        with _Monkey(MUT,
                     _prepare_create_request=mock_prep_create_req,
                     _process_operation=mock_process_operation):
            result = instance.create()
        self.assertEqual(result, expected_result)
        self.assertEqual(stub.method_calls, [(
            'CreateInstance',
            (request_pb, self.TIMEOUT_SECONDS),
            {},
        )])
        self.assertEqual(prep_create_called, [instance])
        self.assertEqual(process_operation_called, [response_pb])
    def test_create_w_explicit_serve_nodes(self):
        """create() behaves identically when serve_nodes is set explicitly."""
        from google.longrunning import operations_pb2
        from gcloud._testing import _Monkey
        from gcloud.bigtable._testing import _FakeStub
        from gcloud.bigtable import instance as MUT
        SERVE_NODES = 5
        client = _Client(self.PROJECT, timeout_seconds=self.TIMEOUT_SECONDS)
        instance = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID,
                                 serve_nodes=SERVE_NODES)
        # Create request_pb. Just a mock since we monkey patch
        # _prepare_create_request
        request_pb = object()
        # Create response_pb
        OP_BEGIN = object()  # sentinel returned by the mocked _process_operation
        response_pb = operations_pb2.Operation(name=self.OP_NAME)
        # Patch the stub used by the API method.
        client._instance_stub = stub = _FakeStub(response_pb)
        # Create expected_result.
        expected_result = MUT.Operation('create', self.OP_ID, OP_BEGIN,
                                        self.LOCATION_ID, instance=instance)
        # Create the mocks.
        prep_create_called = []
        def mock_prep_create_req(instance):
            prep_create_called.append(instance)
            return request_pb
        process_operation_called = []
        def mock_process_operation(operation_pb):
            process_operation_called.append(operation_pb)
            return self.OP_ID, self.LOCATION_ID, OP_BEGIN
        # Perform the method and check the result.
        with _Monkey(MUT,
                     _prepare_create_request=mock_prep_create_req,
                     _process_operation=mock_process_operation):
            result = instance.create()
        self.assertEqual(result, expected_result)
        self.assertEqual(stub.method_calls, [(
            'CreateInstance',
            (request_pb, self.TIMEOUT_SECONDS),
            {},
        )])
        self.assertEqual(prep_create_called, [instance])
        self.assertEqual(process_operation_called, [response_pb])
    def test_update(self):
        """update() sends an Instance pb via UpdateInstance and returns None."""
        from gcloud.bigtable._generated_v2 import (
            instance_pb2 as data_v2_pb2)
        from gcloud.bigtable._testing import _FakeStub
        client = _Client(self.PROJECT, timeout_seconds=self.TIMEOUT_SECONDS)
        instance = self._makeOne(self.INSTANCE_ID, client, self.LOCATION_ID,
                                 display_name=self.DISPLAY_NAME)
        # Create request_pb
        request_pb = data_v2_pb2.Instance(
            name=self.INSTANCE_NAME,
            display_name=self.DISPLAY_NAME,
        )
        # Create response_pb
        response_pb = data_v2_pb2.Instance()
        # Patch the stub used by the API method.
        client._instance_stub = stub = _FakeStub(response_pb)
        # Create expected_result.
        expected_result = None
        # Perform the method and check the result.
        result = instance.update()
        self.assertEqual(result, expected_result)
        self.assertEqual(stub.method_calls, [(
            'UpdateInstance',
            (request_pb, self.TIMEOUT_SECONDS),
            {},
        )])
def test_delete(self):
from google.protobuf import empty_pb2
from gcloud.bigtable._generated_v2 import (
bigtable_instance_admin_pb2 as messages_v2_pb)
from gcloud.bigtable._testing import _FakeStub
client = _Client(self.PROJECT, timeout_s |
s, fixtures):
"""Add Awair devices to hass, using specified fixtures for data."""
entry = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG)
with patch("python_awair.AwairClient.query", side_effect=fixtures):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
def assert_expected_properties(
    hass, registry, name, unique_id, state_value, attributes
):
    """Check an entity's registry unique_id, state value, and attributes."""
    registry_entry = registry.async_get(name)
    assert registry_entry.unique_id == unique_id
    state = hass.states.get(name)
    assert state
    assert state.state == state_value
    for attr_name, expected in attributes.items():
        assert state.attributes.get(attr_name) == expected
async def test_awair_gen1_sensors(hass):
    """Test expected sensors on a 1st gen Awair."""
    # Expected readings below come from GEN1_DATA_FIXTURE.
    fixtures = [USER_FIXTURE, DEVICES_FIXTURE, GEN1_DATA_FIXTURE]
    await setup_awair(hass, fixtures)
    registry = er.async_get(hass)
    assert_expected_properties(
        hass,
        registry,
        "sensor.living_room_awair_score",
        f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}",
        "88",
        {ATTR_ICON: "mdi:blur"},
    )
    assert_expected_properties(
        hass,
        registry,
        "sensor.living_room_temperature",
        f"{AWAIR_UUID}_{SENSOR_TYPES[API_TEMP][ATTR_UNIQUE_ID]}",
        "21.8",
        {ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS, "awair_index": 1.0},
    )
    assert_expected_properties(
        hass,
        registry,
        "sensor.living_room_humidity",
        f"{AWAIR_UUID}_{SENSOR_TYPES[API_HUMID][ATTR_UNIQUE_ID]}",
        "41.59",
        {ATTR_UNIT_OF_MEASUREMENT: PERCENTAGE, "awair_index": 0.0},
    )
    assert_expected_properties(
        hass,
        registry,
        "sensor.living_room_carbon_dioxide",
        f"{AWAIR_UUID}_{SENSOR_TYPES[API_CO2][ATTR_UNIQUE_ID]}",
        "654.0",
        {
            ATTR_ICON: "mdi:cloud",
            ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_PARTS_PER_MILLION,
            "awair_index": 0.0,
        },
    )
    assert_expected_properties(
        hass,
        registry,
        "sensor.living_room_volatile_organic_compounds",
        f"{AWAIR_UUID}_{SENSOR_TYPES[API_VOC][ATTR_UNIQUE_ID]}",
        "366",
        {
            ATTR_ICON: "mdi:cloud",
            ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_PARTS_PER_BILLION,
            "awair_index": 1.0,
        },
    )
    assert_expected_properties(
        hass,
        registry,
        "sensor.living_room_pm2_5",
        # gen1 unique_id should be awair_12345-DUST, which matches old integration behavior
        f"{AWAIR_UUID}_DUST",
        "14.3",
        {
            ATTR_ICON: "mdi:blur",
            ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
            "awair_index": 1.0,
        },
    )
    # The gen1 "dust" reading is surfaced as both pm2.5 and pm10.
    assert_expected_properties(
        hass,
        registry,
        "sensor.living_room_pm10",
        f"{AWAIR_UUID}_{SENSOR_TYPES[API_PM10][ATTR_UNIQUE_ID]}",
        "14.3",
        {
            ATTR_ICON: "mdi:blur",
            ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
            "awair_index": 1.0,
        },
    )
    # We should not have a dust sensor; it's aliased as pm2.5
    # and pm10 sensors.
    assert hass.states.get("sensor.living_room_dust") is None
    # We should not have sound or lux sensors.
    assert hass.states.get("sensor.living_room_sound_level") is None
    assert hass.states.get("sensor.living_room_illuminance") is None
async def test_awair_gen2_sensors(hass):
    """Test expected sensors on a 2nd gen Awair."""
    # Expected readings below come from GEN2_DATA_FIXTURE.
    fixtures = [USER_FIXTURE, DEVICES_FIXTURE, GEN2_DATA_FIXTURE]
    await setup_awair(hass, fixtures)
    registry = er.async_get(hass)
    assert_expected_properties(
        hass,
        registry,
        "sensor.living_room_awair_score",
        f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}",
        "97",
        {ATTR_ICON: "mdi:blur"},
    )
    assert_expected_properties(
        hass,
        registry,
        "sensor.living_room_pm2_5",
        f"{AWAIR_UUID}_{SENSOR_TYPES[API_PM25][ATTR_UNIQUE_ID]}",
        "2.0",
        {
            ATTR_ICON: "mdi:blur",
            ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
            "awair_index": 0.0,
        },
    )
    # The Awair 2nd gen reports specifically a pm2.5 sensor,
    # and so we don't alias anything. Make sure we didn't do that.
    assert hass.states.get("sensor.living_room_pm10") is None
async def test_awair_mint_sensors(hass):
    """Test expected sensors on an Awair mint."""
    # Expected readings below come from MINT_DATA_FIXTURE.
    fixtures = [USER_FIXTURE, DEVICES_FIXTURE, MINT_DATA_FIXTURE]
    await setup_awair(hass, fixtures)
    registry = er.async_get(hass)
    assert_expected_properties(
        hass,
        registry,
        "sensor.living_room_awair_score",
        f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}",
        "98",
        {ATTR_ICON: "mdi:blur"},
    )
    assert_expected_properties(
        hass,
        registry,
        "sensor.living_room_pm2_5",
        f"{AWAIR_UUID}_{SENSOR_TYPES[API_PM25][ATTR_UNIQUE_ID]}",
        "1.0",
        {
            ATTR_ICON: "mdi:blur",
            ATTR_UNIT_OF_MEASUREMENT: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
            "awair_index": 0.0,
        },
    )
    # The Mint additionally exposes a light sensor.
    assert_expected_properties(
        hass,
        registry,
        "sensor.living_room_illuminance",
        f"{AWAIR_UUID}_{SENSOR_TYPES[API_LUX][ATTR_UNIQUE_ID]}",
        "441.7",
        {ATTR_UNIT_OF_MEASUREMENT: LIGHT_LUX},
    )
    # The Mint does not have a CO2 sensor.
    assert hass.states.get("sensor.living_room_carbon_dioxide") is None
async def test_awair_glow_sensors(hass):
| """Test expected sensors on an Awair glow."""
fixtures = [USER_FIXTURE, DEVICES_FIXTURE, GLOW_DATA_FIXTURE]
await setup_awair(hass, fixtures)
registry = er.async_get(hass)
assert_expected_properties(
hass,
registry,
"sensor.living_room_awair_score",
f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}",
"93",
{ATTR_ICON: "mdi:blur"},
)
# The glow does not have a particle sensor
assert hass.states.get("sensor.living_room_pm2_5") is None
async def test_awair_omni_sensors(hass):
    """Test expected sensors on an Awair omni."""
    # Expected readings below come from OMNI_DATA_FIXTURE.
    fixtures = [USER_FIXTURE, DEVICES_FIXTURE, OMNI_DATA_FIXTURE]
    await setup_awair(hass, fixtures)
    registry = er.async_get(hass)
    assert_expected_properties(
        hass,
        registry,
        "sensor.living_room_awair_score",
        f"{AWAIR_UUID}_{SENSOR_TYPES[API_SCORE][ATTR_UNIQUE_ID]}",
        "99",
        {ATTR_ICON: "mdi:blur"},
    )
    # The Omni additionally exposes sound and light sensors.
    assert_expected_properties(
        hass,
        registry,
        "sensor.living_room_sound_level",
        f"{AWAIR_UUID}_{SENSOR_TYPES[API_SPL_A][ATTR_UNIQUE_ID]}",
        "47.0",
        {ATTR_ICON: "mdi:ear-hearing", ATTR_UNIT_OF_MEASUREMENT: "dBa"},
    )
    assert_expected_properties(
        hass,
        registry,
        "sensor.living_room_illuminance",
        f"{AWAIR_UUID}_{SENSOR_TYPES[API_LUX][ATTR_UNIQUE_ID]}",
        "804.9",
        {ATTR_UNIT_OF_MEASUREMENT: LIGHT_LUX},
    )
async def test_awair_offline(hass):
    """Test expected behavior when an Awair is offline."""
    # OFFLINE_FIXTURE simulates a device with no data available.
    fixtures = [USER_FIXTURE, DEVICES_FIXTURE, OFFLINE_FIXTURE]
    await setup_awair(hass, fixtures)
    # The expected behavior is that we won't have any sensors
    # if the device is not online when we set it up. python_awair
    # does not make any assumptions about what sensors a device
    # might have - they are created dynamically.
    # We check for the absence of the "awair score", which every
    # device *should* have if it's online. If we don't see it,
    # then we probably didn't set anything up. Which is correct,
    # in this case.
    assert hass.states.get("sensor.living_room_awair_score") is None
async def test_awair_unavailable(hass):
"""Test expected behavior when an Awair becomes offline later."""
fixtures = [USER_ |
d_language_code(endpoint, values):
if flask.g.lang_code is not None and \
app.url_map.is_endpoint_expecting(endpoint, 'lang_code'):
values.setdefault('lang_code', flask.g.lang_code)
@app.url_value_preprocessor
def pull_lang_code(endpoint, values):
flask.g.lang_code = values.pop('lang_code', None)
@app.route('/<lang_code>/')
def index():
return flask.url_for('about')
@app.route('/<lang_code>/about')
def about():
return flask.url_for('something_else')
@app.route('/foo')
def something_else():
return flask.url_for('about', lang_code='en')
c = app.test_client()
self.assert_equal(c.get('/de/').data, b'/de/about')
self.assert_equal(c.get('/de/about').data, b'/foo')
self.assert_equal(c.get('/foo').data, b'/en/about')
    def test_inject_blueprint_url_defaults(self):
        """Blueprint url_defaults should feed values into url_for lookups."""
        app = flask.Flask(__name__)
        bp = flask.Blueprint('foo.bar.baz', __name__,
                             template_folder='template')
        @bp.url_defaults
        def bp_defaults(endpoint, values):
            values['page'] = 'login'
        @bp.route('/<page>')
        def view(page): pass
        app.register_blueprint(bp)
        values = dict()
        app.inject_url_defaults('foo.bar.baz.view', values)
        expected = dict(page='login')
        self.assert_equal(values, expected)
        # url_for should pick up the 'page' default automatically.
        with app.test_request_context('/somepage'):
            url = flask.url_for('foo.bar.baz.view')
        expected = '/login'
        self.assert_equal(url, expected)
    def test_nonascii_pathinfo(self):
        """Routes with non-ASCII (Cyrillic) paths should work end to end."""
        app = flask.Flask(__name__)
        app.testing = True
        @app.route(u'/киртест')
        def index():
            return 'Hello World!'
        c = app.test_client()
        rv = c.get(u'/киртест')
        self.assert_equal(rv.data, b'Hello World!')
    def test_debug_mode_complains_after_first_request(self):
        """In debug mode, adding a route after the first request must raise."""
        app = flask.Flask(__name__)
        app.debug = True
        @app.route('/')
        def index():
            return 'Awesome'
        self.assert_false(app.got_first_request)
        self.assert_equal(app.test_client().get('/').data, b'Awesome')
        try:
            @app.route('/foo')
            def broken():
                return 'Meh'
        except AssertionError as e:
            self.assert_in('A setup function was called', str(e))
        else:
            self.fail('Expected exception')
        # With debug off, late registration is allowed again.
        app.debug = False
        @app.route('/foo')
        def working():
            return 'Meh'
        self.assert_equal(app.test_client().get('/foo').data, b'Meh')
        self.assert_true(app.got_first_request)
    def test_before_first_request_functions(self):
        """before_first_request hooks run exactly once, on the first request."""
        got = []
        app = flask.Flask(__name__)
        @app.before_first_request
        def foo():
            got.append(42)
        c = app.test_client()
        c.get('/')
        self.assert_equal(got, [42])
        # A second request must not trigger the hook again.
        c.get('/')
        self.assert_equal(got, [42])
        self.assert_true(app.got_first_request)
    def test_before_first_request_functions_concurrent(self):
        """The hook runs once even when two requests race during its sleep."""
        got = []
        app = flask.Flask(__name__)
        @app.before_first_request
        def foo():
            # Sleep so the second (concurrent) request arrives mid-hook.
            time.sleep(0.2)
            got.append(42)
        c = app.test_client()
        def get_and_assert():
            c.get("/")
            self.assert_equal(got, [42])
        t = Thread(target=get_and_assert)
        t.start()
        get_and_assert()
        t.join()
        self.assert_true(app.got_first_request)
    def test_routing_redirect_debugging(self):
        """Debug mode turns a POST to a redirecting route into a helpful error."""
        app = flask.Flask(__name__)
        app.debug = True
        @app.route('/foo/', methods=['GET', 'POST'])
        def foo():
            return 'success'
        with app.test_client() as c:
            try:
                c.post('/foo', data={})
            except AssertionError as e:
                self.assert_in('http://localhost/foo/', str(e))
                self.assert_in('Make sure to directly send your POST-request '
                               'to this URL', str(e))
            else:
                self.fail('Expected exception')
            rv = c.get('/foo', data={}, follow_redirects=True)
            self.assert_equal(rv.data, b'success')
        # Without debug the redirect is followed silently.
        app.debug = False
        with app.test_client() as c:
            rv = c.post('/foo', data={}, follow_redirects=True)
            self.assert_equal(rv.data, b'success')
    def test_route_decorator_custom_endpoint(self):
        """@app.route(endpoint=...) overrides the function-name endpoint."""
        app = flask.Flask(__name__)
        app.debug = True
        @app.route('/foo/')
        def foo():
            return flask.request.endpoint
        @app.route('/bar/', endpoint='bar')
        def for_bar():
            return flask.request.endpoint
        @app.route('/bar/123', endpoint='123')
        def for_bar_foo():
            return flask.request.endpoint
        with app.test_request_context():
            assert flask.url_for('foo') == '/foo/'
            assert flask.url_for('bar') == '/bar/'
            assert flask.url_for('123') == '/bar/123'
        c = app.test_client()
        self.assertEqual(c.get('/foo/').data, b'foo')
        self.assertEqual(c.get('/bar/').data, b'bar')
        self.assertEqual(c.get('/bar/123').data, b'123')
    def test_preserve_only_once(self):
        """Debug mode preserves only the last failing request context."""
        app = flask.Flask(__name__)
        app.debug = True
        @app.route('/fail')
        def fail_func():
            1 // 0
        c = app.test_client()
        for x in range(3):
            with self.assert_raises(ZeroDivisionError):
                c.get('/fail')
        self.assert_true(flask._request_ctx_stack.top is not None)
        self.assert_true(flask._app_ctx_stack.top is not None)
        # implicit appctx disappears too
        flask._request_ctx_stack.top.pop()
        self.assert_true(flask._request_ctx_stack.top is None)
        self.assert_true(flask._app_ctx_stack.top is None)
    def test_preserve_remembers_exception(self):
        """A preserved (debug) exception is delivered to the NEXT teardown."""
        app = flask.Flask(__name__)
        app.debug = True
        errors = []
        @app.route('/fail')
        def fail_func():
            1 // 0
        @app.route('/success')
        def success_func():
            return 'Okay'
        @app.teardown_request
        def teardown_handler(exc):
            errors.append(exc)
        c = app.test_client()
        # After this failure we did not yet call the teardown handler
        with self.assert_raises(ZeroDivisionError):
            c.get('/fail')
        self.assert_equal(errors, [])
        # But this request triggers it, and it's an error
        c.get('/success')
        self.assert_equal(len(errors), 2)
        self.assert_true(isinstance(errors[0], ZeroDivisionError))
        # At this point another request does nothing.
        c.get('/success')
        self.assert_equal(len(errors), 3)
        self.assert_equal(errors[1], None)
    def test_get_method_on_g(self):
        """flask.g.get() behaves like dict.get with an optional default."""
        app = flask.Flask(__name__)
        app.testing = True
        with app.app_context():
            self.assert_equal(flask.g.get('x'), None)
            self.assert_equal(flask.g.get('x', 11), 11)
            flask.g.x = 42
            self.assert_equal(flask.g.get('x'), 42)
            self.assert_equal(flask.g.x, 42)
    def test_g_iteration_protocol(self):
        """flask.g supports `in` membership tests and iteration over keys."""
        app = flask.Flask(__name__)
        app.testing = True
        with app.app_context():
            flask.g.foo = 23
            flask.g.bar = 42
            self.assert_equal('foo' in flask.g, True)
            self.assert_equal('foos' in flask.g, False)
            self.assert_equal(sorted(flask.g), ['bar', 'foo'])
class SubdomainTestCase(FlaskTestCase):
    """Tests for routing on subdomains (requires SERVER_NAME to be set)."""

    def test_basic_support(self):
        app = flask.Flask(__name__)
        # Subdomain matching only works when the server name is known.
        app.config['SERVER_NAME'] = 'localhost'

        @app.route('/')
        def normal_index():
            return 'normal index'

        @app.route('/', subdomain='test')
        def test_index():
            return 'test index'

        c = app.test_client()
        # A request to the bare server name hits the non-subdomain route.
        rv = c.get('/', 'http://localhost/')
        self.assert_equal(rv.data, b'normal index')
"""
WSGI config for project.
It exposes the WSG | I callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployme | nt/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at this project's settings module unless the environment
# already specifies one.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "backend.settings")

# The WSGI callable that servers (gunicorn, uWSGI, mod_wsgi, ...) invoke.
application = get_wsgi_application()
|
import sys
import pygame
from pygame.locals import *
pygame.init()
# 600x500 window with a dark green background.
screen = pygame.display.set_mode((600, 500))
pygame.display.set_caption("Drawing Lines")
screen.fill((0, 80, 0))

# draw the line
color = 100, 255, 200  # pale green RGB tuple
width = 8  # line thickness in pixels
pygame.draw.line(screen, color, (100, 100), (500, 400), width)
pygame.display.update()

# Event loop: exit the process on window close or any key press.
while True:
    for event in pygame.event.get():
        if event.type in (QUIT, KEYDOWN):
            sys.exit()
# Part of django-hookbox
# Copyright 2011, Duane Griffin <duaneg@dghda.com>
from django.conf import settings
from django.dispatch import Signal
from django.http import HttpResponse
from django.template import loader, RequestContext
from django.views.decorators.csrf import csrf_exempt
import json
import logging
import random
import string
logger = logging.getLogger('djhookbox')
secret = getattr(settings, 'HOOKBOX_WEBHOOK_SECRET', None)
_callbacks = []
def _call_callbacks(op, *args, **kwargs):
    """Invoke every callback registered for *op* (or for all operations).

    Returns the first non-None result; if more than one callback returns a
    result, later ones are discarded and a warning is logged.
    """
    result = None
    # Callbacks registered with op=None apply to every operation.
    for callback in [cb for (cbop, cb) in _callbacks if cbop is None or cbop == op]:
        oneresult = callback(op, *args, **kwargs)
        if result is None:
            result = oneresult
        elif oneresult is not None:  # was `not oneresult is None`; PEP 8 idiom
            # logger.warn is a deprecated alias of logger.warning
            logger.warning("multiple results returned from %s callback", op)
    return result
def whcallback(arg):
    '''
    Decorator for functions which handle webhook callbacks.

    All functions are called with the operation type and user as the first two
    arguments. Operations on a channel (i.e. not connect/disconnect) will be
    called with a channel name as the third argument, and publish will be
    called with the payload as the fourth argument.

    If a string argument is given the function will only be called for
    matching webhooks. If no argument is given it will be called for all
    webhooks.

    Webhooks may optionally return a result, handling of which is dependent on
    the operation type:

     - The connect/disconnect operations ignore any results.
     - Create callbacks should return either a dict containing the channel
       options or, if they want to disallow channel creation, a failure
       message (string).
       If no create callback returns a response the operation is deemed to have
       *failed*.
     - Other callbacks may return a failure message (string), a
       dictionary (which will be returned as a successful response), or a
       properly formatted hookbox response.
       If no callback returns a response the operation will be deemed to have
       *succeded* and an empty success response will be returned.

    In all cases, including connect/disconnect, if more that one callback
    returns a result the first will be used and a warning will be logged.
    '''
    # Called without op arg: register the callback for all operations
    if callable(arg):
        _callbacks.append((None, arg))
        return arg

    # Otherwise only register the callback for the specified operation
    def decorator(method):
        _callbacks.append((arg, method))
        # BUG FIX: the decorator previously returned None implicitly,
        # which rebound the decorated name to None at the call site.
        return method
    return decorator
# TODO: Not sure these are necessary any more, the callbacks provide a super-set
# of their functionality.
# Django signals fired alongside the callback mechanism; channel-scoped
# operations pass the channel name as a signal argument.
signals = {
    'connect': Signal(),
    'disconnect': Signal(),
    'subscribe': Signal(providing_args = ['channel']),
    'unsubscribe': Signal(providing_args = ['channel']),
}
def webhook(method):
    '''
    Decorator which:
     - checks a WebHook's secret key is correct
     - exempts the view from CSRF checks
     - massages the return result into the format expected by hookbox

    If the secret is required and not present/incorrect, a failure
    response is returned in hookbox's [success, data] JSON format.
    '''
    @csrf_exempt
    def wrapper(*args, **kwargs):
        # Process the request (first positional arg is always the request)
        request = args[0]
        if secret is None or ('secret' in request.POST and request.POST['secret'] == secret):
            try:
                data = method(*args, **kwargs)
                # Normalise the view's return value into [success, payload]:
                #   None -> empty success; dict -> success payload;
                #   str  -> failure message; list -> already formatted.
                if data is None:
                    result = [True, {}]
                elif isinstance(data, dict):
                    result = [True, data]
                elif isinstance(data, str):
                    result = [False, {'msg': data}]
                else:
                    assert isinstance(data, list)
                    assert len(data) == 2
                    result = data
            except Exception as err:
                result = [False, {'msg': str(err)}]
        else:
            result = [False, {'msg': 'webhook secret verification failed'}]

        # Log the result
        if result[0]:
            logger.info("webhook succeeded: %s (%s): %s", method.__name__, request.user.username, str(result[1]))
        else:
            # logger.warn is a deprecated alias of logger.warning
            logger.warning("webhook failed: %s (%s): %s", method.__name__, request.user.username, result[1]['msg'])

        return HttpResponse(json.dumps(result), mimetype = 'application/json')
    return wrapper
@webhook
def connect(request):
    """Hookbox 'connect' webhook: notify listeners and choose a username.

    Authenticated users keep their account name; anonymous users get a
    random guest name prefixed with ' _'.
    """
    signals['connect'].send(request.user)
    _call_callbacks('connect', request.user)
    if request.user.is_authenticated():
        username = request.user.username
    else:
        # string.ascii_letters and range() replace the Python-2-only
        # (and locale-dependent) string.letters and xrange, with the
        # same observable behavior in the default C locale.
        username = ' _' + ''.join(random.choice(string.ascii_letters + string.digits) for i in range(10))
    return {
        'name': username
    }
@webhook
def disconnect(request):
    """Hookbox 'disconnect' webhook; result is ignored (empty success)."""
    # send_robust: a raising signal receiver must not abort disconnect handling.
    signals['disconnect'].send_robust(request.user)
    _call_callbacks('disconnect', request.user)
@webhook
def create_channel(request):
    """Hookbox 'create_channel' webhook.

    Asks registered 'create' callbacks for channel options; if none
    responds, channel creation is rejected.
    """
    result = _call_callbacks('create', request.user, request.POST['channel_name'])
    return result or [False, {'msg': 'unrecognized channel: %s' % request.POST['channel_name']}]
@webhook
def publish(request):
    """Hookbox 'publish' webhook: forwards channel name and payload to callbacks."""
    return _call_callbacks('publish', request.user, request.POST['channel_name'], request.POST['payload'])
@webhook
def destroy_channel(request):
    """Hookbox 'destroy_channel' webhook; channel is passed as a keyword."""
    return _call_callbacks('destroy', request.user, channel = request.POST['channel_name'])
@webhook
def subscribe(request):
    """Hookbox 'subscribe' webhook: fire the signal, then the callbacks."""
    signals['subscribe'].send(request.user, channel = request.POST['channel_name'])
    return _call_callbacks('subscribe', request.user, channel = request.POST['channel_name'])
@webhook
def unsubscribe(request):
    """Hookbox 'unsubscribe' webhook; send_robust so receivers can't abort it."""
    signals['unsubscribe'].send_robust(request.user, channel = request.POST['channel_name'])
    return _call_callbacks('unsubscribe', request.user, channel = request.POST['channel_name'])
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright © 2014 Daniel Tschan <tschan@puzzle.ch>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
import os
from string import Template
# Path to the activate_this.py helper of the OpenShift-provided virtualenv.
VIRTUALENV = os.path.join(
    os.environ['OPENSHIFT_PYTHON_DIR'], 'virtenv', 'bin', 'activate_this.py'
)

# Activate the virtualenv in-process so subsequent imports resolve inside it.
with open(VIRTUALENV) as handle:
    code = compile(handle.read(), 'activate_this.py', 'exec')
    # pylint: disable=exec-used
    exec(code, dict(__file__=VIRTUALENV))  # noqa
def application(environ, start_response):
    """Placeholder WSGI app shown while Weblate is being installed/updated.

    Renders a static status page telling the user to wait and refresh;
    the real Weblate application takes over once installation finishes.
    """
    ctype = 'text/html'
    # $action1/$action2/$log are substituted below depending on whether
    # the install marker file exists yet.
    response_body = Template('''<!doctype html>
<html lang="en">
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta charset="utf-8">
<title>Installing Weblate</title>
<style>
html {
background: #f5f5f5;
height: 100%;
}
body {
color: #404040;
font-family: "Helvetica Neue",Helvetica,"Liberation Sans",Arial,sans-serif;
font-size: 14px;
line-height: 1.4;
}
h1 {
color: #000;
line-height: 1.38em;
margin: .4em 0 .5em;
font-size: 25px;
font-weight: 300;
border-bottom: 1px solid #fff;
}
h1:after {
content: "";
display: block;
height: 1px;
background-color: #ddd;
}
p {
margin: 0 0 2em;
}
pre {
padding: 13.333px 20px;
margin: 0 0 20px;
font-size: 13px;
line-height: 1.4;
background-color: #fff;
border-left: 2px solid rgba(120,120,120,0.35);
font-family: Menlo,Monaco,"Liberation Mono",Consolas,monospace !important;
}
.content {
display: table;
margin-left: -15px;
margin-right: -15px;
position: relative;
min-height: 1px;
padding-left: 30px;
padding-right: 30px;
}
</style>
</head>
<body>
<div class="content">
<h1>$action1 Weblate</h1>
<p>
Weblate is being $action2.
Please wait a few minutes and refresh this page.
</p>
$log
</div>
</body>
</html>''')
    context = {}
    # The '.installed' marker distinguishes a first install from an update.
    if os.path.exists(os.environ['OPENSHIFT_DATA_DIR'] + '/.installed'):
        context['action1'] = 'Updating'
        context['action2'] = 'updated'
        context['log'] = ''
    else:
        context['action1'] = 'Installing'
        context['action2'] = 'installed'
        # Condensed install log: keep top-level and 'setup.py install'
        # lines, and shorten OpenShift gear paths to '~'.
        log_msg = os.popen(
            r"cat ${OPENSHIFT_PYTHON_LOG_DIR}/install.log |"
            r" grep '^[^ ]\|setup.py install' |"
            r" sed 's,/var/lib/openshift/[a-z0-9]\{24\},~,g'"
        ).read()
        context['log'] = '<pre>' + log_msg + '</pre>'
    response_body = response_body.substitute(context)

    status = '200 OK'
    response_headers = [
        ('Content-Type', ctype),
        ('Content-Length', str(len(response_body)))
    ]
    start_response(status, response_headers)
    return [response_body]
|
import bottl | e
@bottle.route('/')
def home_page():
    """Render the 'hello_world' template with a fixed user and item list."""
    mythings = ['apple','orange','banana','peach']
    return bottle.template('hello_world', username='Todd',things=mythings)

# Debug mode: tracebacks in the browser and no template caching.
bottle.debug(True)
bottle.run(host='localhost', port=8082)
|
spacing (kwarg, float in [0,1], default=0.2 / cols):
Space between subplot columns.
Applies to all columns (use 'specs' subplot-dependents spacing)
vertical_spacing (kwarg, float in [0,1], default=0.3 / rows):
Space between subplot rows.
Applies to all rows (use 'specs' subplot-dependents spacing)
subplot_titles (kwarg, list of strings, default=empty list):
Title of each subplot.
"" can be included in the list if no subplot title is desired in
that space so that the titles are properly indexed.
specs (kwarg, list of lists of dictionaries):
Subplot specifications.
ex1: specs=[[{}, {}], [{'colspan': 2}, None]]
ex2: specs=[[{'rowspan': 2}, {}], [None, {}]]
- Indices of the outer list correspond to subplot grid rows
starting from the bottom. The number of rows in 'specs'
must be equal to 'rows'.
- Indices of the inner lists correspond to subplot grid columns
starting from the left. The number of columns in 'specs'
must be equal to 'cols'.
- Each item in the 'specs' list corresponds to one subplot
in a subplot grid. (N.B. The subplot grid has exactly 'rows'
times 'cols' cells.)
- Use None for blank a subplot cell (or to move pass a col/row span).
- Note that specs[0][0] has the specs of the 'start_cell' subplot.
- Each item in 'specs' is a dictionary.
The available keys are:
* is_3d (boolean, default=False): flag for 3d scenes
* colspan (int, default=1): number of subplot columns
for this subplot to span.
* rowspan (int, default=1): number of subplot rows
for this subplot to span.
* l (float, default=0.0): padding left of cell
* r (float, default=0.0): padding right of cell
* t (float, default=0.0): padding right of cell
* b (float, default=0.0): padding bottom of cell
- Use 'horizontal_spacing' and 'vertical_spacing' to adjust
the spacing in between the subplots.
insets (kwarg, list of dictionaries):
Inset specifications.
- Each item in 'insets' is a dictionary.
The available keys are:
* cell (tuple, default=(1,1)): (row, col) index of the
subplot cell to overlay inset axes onto.
* is_3d (boolean, default=False): flag for 3d scenes
* l (float, default=0.0): padding left of inset
in fraction of cell width
* w (float or 'to_end', default='to_end') inset width
in fraction of cell width ('to_end': to cell right edge)
* b (float, default=0.0): padding bottom of inset
in fraction of cell height
* h (float or 'to_end', default='to_end') inset height
in fraction of cell height ('to_end': to cell top edge)
column_width (kwarg, list of numbers)
Column_width specifications
- Functions similarly to `column_width` of `plotly.graph_objs.Table`.
Specify a list that contains numbers where the amount of numbers in
the list is equal to `cols`.
- The numbers in the list indicate the proportions that each column
domains take across the full horizontal domain excluding padding.
- For example, if columns_width=[3, 1], horizontal_spacing=0, and
cols=2, the domains for each column would be [0. 0.75] and [0.75, 1]
row_width (kwargs, list of numbers)
Row_width specifications
- Functions similarly to `column_width`. Specify a list that contains
numbers where the amount of numbers in the list is equal to `rows`.
- The numbers in the list indicate the proportions that each row
domains take along the full vertical domain excluding padding.
- For example, if row_width=[3, 1], vertical_spacing=0, and
cols=2, the domains for each row from top to botton would be
[0. 0.75] and [0.75, 1]
"""
import plotly.subplots
warnings.warn(
"plotly.tools.make_subplots is deprecated, "
"please use plotly.subplots.make_subplots instead",
DeprecationWarning,
stacklevel=1,
)
return plotly.subplots.make_subplots(
rows=rows,
cols=cols,
shared_xaxes=shared_xaxes,
shared_yaxes=shared_yaxes,
start_cell=start_cell,
print_grid=print_grid,
**kwargs
)
# Show the make_subplots deprecation warning on every call, overriding
# Python's default "warn once per location" filtering for this message.
warnings.filterwarnings(
    "default", r"plotly\.tools\.make_subplots is deprecated", DeprecationWarning
)
def get_graph_obj(obj, obj_type=None):
    """Returns a new graph object.

    OLD FUNCTION: this will *silently* strip out invalid pieces of the object.
    NEW FUNCTION: no striping of invalid pieces anymore - only raises error
    on unrecognized graph_objs
    """
    # TODO: Deprecate or move. #283
    from plotly.graph_objs import graph_objs

    # Look the class up by name on the graph_objs module; anything that
    # isn't a known graph object raises a PlotlyError.
    try:
        graph_obj_class = getattr(graph_objs, obj_type)
    except (AttributeError, KeyError):
        raise exceptions.PlotlyError(
            "'{}' is not a recognized graph_obj.".format(obj_type)
        )
    return graph_obj_class(obj)
def _replace_newline(obj):
"""Replaces '\n' with '<br>' for all strings in a collection."""
if isinstance(obj, dict):
d = dict()
for key, val in list(obj.items()):
d[key] = _replace_newline(val)
return d
elif isinstance(obj, list):
l = list()
for index, entry in enumerate(obj):
l += [_replace_newline(entry)]
return l
elif isinstance(obj, six.string_types):
s = obj.replace("\n", "<br>")
if s != obj:
warnings.warn(
"Looks like you used a newline character: '\\n'.\n\n"
"Plotly uses a subset of HTML escape characters\n"
"to do things like newline (<br>), bold (<b></b>),\n"
"italics (<i></i>), etc. Your newline characters \n"
"have been converted to '<br>' so they will show \n"
"up right on your Plotly figure!"
)
return s
else:
return obj # we return the actual reference... but DON'T mutate.
def return_figure_from_figure_or_data(figure_or_data, validate_figure):
    """Coerce a dict, trace list, or Figure into a plain figure dict.

    Raises PlotlyError for unsupported input types or (when
    validate_figure is true) for figures that fail validation, and
    PlotlyEmptyDataError when the figure has no traces.
    """
    from plotly.graph_objs import Figure
    from plotly.basedatatypes import BaseFigure

    # BaseFigure instances are validated on construction, so conversion
    # via to_dict() needs no re-validation below.
    validated = False
    if isinstance(figure_or_data, dict):
        figure = figure_or_data
    elif isinstance(figure_or_data, list):
        # A bare list is interpreted as the figure's trace list.
        figure = {"data": figure_or_data}
    elif isinstance(figure_or_data, BaseFigure):
        figure = figure_or_data.to_dict()
        validated = True
    else:
        raise exceptions.PlotlyError(
            "The `figure_or_data` positional "
            "argument must be "
            "`dict`-like, `list`-like, or an instance of plotly.graph_objs.Figure"
        )

    if validate_figure and not validated:
        # Round-trip through Figure to validate and normalise the dict.
        try:
            figure = Figure(**figure).to_dict()
        except exceptions.PlotlyError as err:
            raise exceptions.PlotlyError(
                "Invalid 'figure_or_data' argument. "
                "Plotly will not be able to properly "
                "parse the resulting JSON. If you "
                "want to send this 'figure_or_data' "
                "to Plotly anyway (not recommended), "
                "you can set 'validate=False' as a "
                "plot option.\nHere's why you're "
                "seeing this error:\n\n{0}"
                "".format(err)
            )
        if not figure["data"]:
            raise exceptions.PlotlyEmptyDataError(
                "Empty data list found. Make sure that you populated the "
                "list of data objects you're sending and try again.\n"
                "Questions? Visit support.plot.ly"
            )

    return figure
# Default colours for finance charts
_DEFAULT_INCREASING_COLOR = "#3D9970"  # http://clrs.cc
_DEFAULT_DECREASING_COLOR = "#FF4136"

# NOTE(review): presumably the valid diagonal-plot kinds for
# scatter-matrix figures — confirm against callers.
DIAG_CHOICES = ["scatter", "histogram", "box"]
VALID_COLORMAP_TYPES = ["ca |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2009-2012 Zuza Software Foundation
#
# This file is part of Pootle.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import locale
from django.conf import settings
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.utils.translation import ugettext as _
from pootle.i18n.gettext import tr_lang
from pootle_app.models import Directory
from pootle_app.models.permissions import (get_matching_permissions,
check_permission)
from pootle_app.views.top_stats import gentopstats_root
from pootle_language.models import Language
from pootle_misc.browser import get_table_headings
from pootle_misc.stats import get_raw_stats
from pootle_profile.models import get_profile
from pootle_pro | ject.mode | ls import Project
from pootle_statistics.models import Submission
def get_items(request, model, get_last_action, name_func):
    """Build browser-table rows for every instance of *model*.

    get_last_action renders an item's latest activity, name_func localises
    its display name. Returns an empty list for users without 'view'
    permission; rows are sorted by display name using locale collation.
    """
    items = []
    # Users without 'view' permission see an empty listing.
    if not check_permission('view', request):
        return items
    for item in model.objects.iterator():
        stats = get_raw_stats(item)
        translated_percentage = stats['translated']['percentage']
        items.append({
            'code': item.code,
            'name': name_func(item.fullname),
            'lastactivity': get_last_action(item),
            'stats': stats,
            'completed_title': _("%(percentage)d%% complete",
                                 {'percentage': translated_percentage}),
        })
    # Locale-aware sort by display name. cmp_to_key keeps this working on
    # Python 3, where list.sort() no longer accepts a raw comparison
    # function (the old `items.sort(lambda x, y: ...)` form is py2-only).
    from functools import cmp_to_key
    items.sort(key=cmp_to_key(
        lambda x, y: locale.strcoll(x['name'], y['name'])))
    return items
def getlanguages(request):
    """Browser rows for all languages, with localised names."""
    def get_last_action(item):
        # Latest submission for the language rendered as HTML; empty
        # string when the language has no submissions yet.
        try:
            return Submission.objects.filter(
                translation_project__language=item).latest().as_html()
        except Submission.DoesNotExist:
            return ''

    return get_items(request, Language, get_last_action, tr_lang)
def getprojects(request):
    """Browser rows for all projects; project names are shown verbatim."""
    def get_last_action(item):
        # Latest submission for the project rendered as HTML; empty
        # string when the project has no submissions yet.
        try:
            return Submission.objects.filter(
                translation_project__project=item).latest().as_html()
        except Submission.DoesNotExist:
            return ''

    return get_items(request, Project, get_last_action, lambda name: name)
def view(request):
    """Render the front page: language and project overview tables."""
    # Permissions are resolved against the root directory for this view.
    request.permissions = get_matching_permissions(get_profile(request.user),
                                                   Directory.objects.root)
    can_edit = request.user.is_superuser

    languages = getlanguages(request)
    languages_table_fields = ['language', 'progress', 'activity']
    languages_table = {
        'id': 'index-languages',
        'proportional': False,
        'fields': languages_table_fields,
        'headings': get_table_headings(languages_table_fields),
        # Hide languages that have no translatable words at all.
        'items': filter(lambda x: x['stats']['total']['words'] != 0, languages),
    }

    projects = getprojects(request)
    projects_table_fields = ['project', 'progress', 'activity']
    projects_table = {
        'id': 'index-projects',
        'proportional': False,
        'fields': projects_table_fields,
        'headings': get_table_headings(projects_table_fields),
        'items': projects,
    }

    templatevars = {
        'description': _(settings.DESCRIPTION),
        'keywords': [
            'Pootle',
            'translate',
            'translation',
            'localisation',
            'localization',
            'l10n',
            'traduction',
            'traduire',
        ],
        'topstats': gentopstats_root(),
        'permissions': request.permissions,
        'can_edit': can_edit,
        'languages_table': languages_table,
        'projects_table': projects_table,
    }

    # Flag when there are more projects than visible (non-empty) languages.
    visible_langs = [l for l in languages if l['stats']['total']['words'] != 0]
    templatevars['moreprojects'] = (len(projects) > len(visible_langs))

    # Superusers additionally get the site-wide settings form.
    if can_edit:
        from pootle_misc.siteconfig import load_site_config
        from pootle_app.forms import GeneralSettingsForm
        siteconfig = load_site_config()
        setting_form = GeneralSettingsForm(siteconfig)
        templatevars['form'] = setting_form

    return render_to_response('index/index.html', templatevars,
                              RequestContext(request))
|
se");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Example code for TensorFlow Wide & Deep Tutorial using TF.Learn API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import shutil
import sys
import tensorflow as tf
# Column names of the Census Income CSV files, in file order.
_CSV_COLUMNS = [
    'age', 'workclass', 'fnlwgt', 'education', 'education_num',
    'marital_status', 'occupation', 'relationship', 'race', 'gender',
    'capital_gain', 'capital_loss', 'hours_per_week', 'native_country',
    'income_bracket'
]

# Per-column defaults for decode_csv; they also fix each column's parsed
# dtype ([0] -> int, [''] -> string).
_CSV_COLUMN_DEFAULTS = [[0], [''], [0], [''], [0], [''], [''], [''], [''], [''],
                        [0], [0], [0], [''], ['']]

# Command-line flags for training configuration.
parser = argparse.ArgumentParser()

parser.add_argument(
    '--model_dir', type=str, default='/tmp/census_model',
    help='Base directory for the model.')

parser.add_argument(
    '--model_type', type=str, default='wide_deep',
    help="Valid model types: {'wide', 'deep', 'wide_deep'}.")

parser.add_argument(
    '--train_epochs', type=int, default=20, help='Number of training epochs.')

parser.add_argument(
    '--epochs_per_eval', type=int, default=2,
    help='The number of training epochs to run between evaluations.')

parser.add_argument(
    '--batch_size', type=int, default=40, help='Number of examples per batch.')

parser.add_argument(
    '--train_data', type=str, default='/tmp/census_data/adult.data',
    help='Path to the training data.')

parser.add_argument(
    '--test_data', type=str, default='/tmp/census_data/adult.test',
    help='Path to the test data.')
def build_model_columns():
    """Builds a set of wide and deep feature columns.

    Returns a (wide_columns, deep_columns) tuple: sparse/crossed columns
    for the linear part and dense/embedded columns for the DNN part.
    """
    # Continuous columns
    age = tf.feature_column.numeric_column('age')
    education_num = tf.feature_column.numeric_column('education_num')
    capital_gain = tf.feature_column.numeric_column('capital_gain')
    capital_loss = tf.feature_column.numeric_column('capital_loss')
    hours_per_week = tf.feature_column.numeric_column('hours_per_week')

    # Categorical columns with fixed vocabularies.
    education = tf.feature_column.categorical_column_with_vocabulary_list(
        'education', [
            'Bachelors', 'HS-grad', '11th', 'Masters', '9th', 'Some-college',
            'Assoc-acdm', 'Assoc-voc', '7th-8th', 'Doctorate', 'Prof-school',
            '5th-6th', '10th', '1st-4th', 'Preschool', '12th'])

    marital_status = tf.feature_column.categorical_column_with_vocabulary_list(
        'marital_status', [
            'Married-civ-spouse', 'Divorced', 'Married-spouse-absent',
            'Never-married', 'Separated', 'Married-AF-spouse', 'Widowed'])

    relationship = tf.feature_column.categorical_column_with_vocabulary_list(
        'relationship', [
            'Husband', 'Not-in-family', 'Wife', 'Own-child', 'Unmarried',
            'Other-relative'])

    workclass = tf.feature_column.categorical_column_with_vocabulary_list(
        'workclass', [
            'Self-emp-not-inc', 'Private', 'State-gov', 'Federal-gov',
            'Local-gov', '?', 'Self-emp-inc', 'Without-pay', 'Never-worked'])

    # To show an example of hashing:
    occupation = tf.feature_column.categorical_column_with_hash_bucket(
        'occupation', hash_bucket_size=1000)

    # Transformations.
    age_buckets = tf.feature_column.bucketized_column(
        age, boundaries=[18, 25, 30, 35, 40, 45, 50, 55, 60, 65])

    # Wide columns and deep columns.
    base_columns = [
        education, marital_status, relationship, workclass, occupation,
        age_buckets,
    ]

    # Feature crosses let the linear model capture interactions.
    crossed_columns = [
        tf.feature_column.crossed_column(
            ['education', 'occupation'], hash_bucket_size=1000),
        tf.feature_column.crossed_column(
            [age_buckets, 'education', 'occupation'], hash_bucket_size=1000),
    ]

    wide_columns = base_columns + crossed_columns

    # Dense inputs for the DNN: raw numerics plus indicator/embedding
    # representations of the categorical columns.
    deep_columns = [
        age,
        education_num,
        capital_gain,
        capital_loss,
        hours_per_week,
        tf.feature_column.indicator_column(workclass),
        tf.feature_column.indicator_column(education),
        tf.feature_column.indicator_column(marital_status),
        tf.feature_column.indicator_column(relationship),
        # To show an example of embedding
        tf.feature_column.embedding_column(occupation, dimension=8),
    ]

    return wide_columns, deep_columns
def build_estimator(model_dir, model_type):
    """Build an estimator appropriate for the given model type.

    model_type selects 'wide' (linear), 'deep' (DNN) or the combined
    wide+deep estimator; anything else falls through to wide_deep.
    """
    wide_columns, deep_columns = build_model_columns()
    hidden_units = [100, 75, 50, 25]

    # Create a tf.estimator.RunConfig to ensure the model is run on CPU, which
    # trains faster than GPU for this model.
    run_config = tf.estimator.RunConfig().replace(
        session_config=tf.ConfigProto(device_count={'GPU': 0}))

    if model_type == 'wide':
        return tf.estimator.LinearClassifier(
            model_dir=model_dir,
            feature_columns=wide_columns,
            config=run_config)
    elif model_type == 'deep':
        return tf.estimator.DNNClassifier(
            model_dir=model_dir,
            feature_columns=deep_columns,
            hidden_units=hidden_units,
            config=run_config)
    else:
        return tf.estimator.DNNLinearCombinedClassifier(
            model_dir=model_dir,
            linear_feature_columns=wide_columns,
            dnn_feature_columns=deep_columns,
            dnn_hidden_units=hidden_units,
            config=run_config)
def input_fn(data_file, num_epochs, shuffle, batch_size):
    """Generate an input function for the Estimator."""
    assert tf.gfile.Exists(data_file), (
        '%s not found. Please make sure you have either run data_download.py or '
        'set both arguments --train_data and --test_data.' % data_file)

    def parse_csv(value):
        # Decode one CSV line into a {column: tensor} dict and a boolean
        # label (income above 50K).
        print('Parsing', data_file)
        columns = tf.decode_csv(value, record_defaults=_CSV_COLUMN_DEFAULTS)
        features = dict(zip(_CSV_COLUMNS, columns))
        labels = features.pop('income_bracket')
        return features, tf.equal(labels, '>50K')

    # Extract lines from input files using the Dataset API.
    dataset = tf.contrib.data.TextLineDataset(data_file)
    dataset = dataset.map(parse_csv, num_threads=5)

    # Apply transformations to the Dataset
    dataset = dataset.batch(batch_size)
    dataset = dataset.repeat(num_epochs)

    # Input function that is called by the Estimator
    def _input_fn():
        if shuffle:
            # Apply shuffle transformation to re-shuffle the dataset in each call.
            shuffled_dataset = dataset.shuffle(buffer_size=100000)
            iterator = shuffled_dataset.make_one_shot_iterator()
        else:
            iterator = dataset.make_one_shot_iterator()
        features, labels = iterator.get_next()
        return features, labels

    return _input_fn
def main(unused_argv):
    """Train the selected model, evaluating every FLAGS.epochs_per_eval epochs."""
    # Clean up the model directory if present
    shutil.rmtree(FLAGS.model_dir, ignore_errors=True)
    model = build_estimator(FLAGS.model_dir, FLAGS.model_type)

    # Set up input function generators for the train and test data files.
    train_input_fn = input_fn(
        data_file=FLAGS.train_data,
        num_epochs=FLAGS.epochs_per_eval,
        shuffle=True,
        batch_size=FLAGS.batch_size)
    eval_input_fn = input_fn(
        data_file=FLAGS.test_data,
        num_epochs=1,
        shuffle=False,
        batch_size=FLAGS.batch_size)

    # Train and evaluate the model every `FLAGS.epochs_per_eval` epochs.
    for n in range(FLAGS.train_epochs // FLAGS.epochs_per_eval):
        model.train(input_fn=train_input_fn)
        results = model.evaluate(input_fn=eval_input_fn)

        # Display evaluation metrics
        print('Results at epoch', (n + 1) * FLAGS.epochs_per_eval)
        print('-' * 30)
        for key in sorted(results):
            print('%s: %s' % (key, results[key]))
if __name__ == '__main__':
tf.logging.set_verbosity(tf.logging.INFO)
FLAGS, unparsed = parser.parse_known_args()
tf.app.run(main=main, argv=[sys.argv[0]] + |
subclass to be used.
#
# This should allow easy re-use of the IPKernelApp entry point to configure and
# launch kernels other than IPython's own.
# c.IPKernelApp.kernel_class = 'IPython.kernel.zmq.ipkernel.Kernel'
# Run the module as a script.
# c.IPKernelApp.module_to_run = ''
# The date format used by logging formatters for %(asctime)s
# c.IPKernelApp.log_datefmt = '%Y-%m-%d %H:%M:%S'
# set the shell (ROUTER) port [default: random]
# c.IPKernelApp.shell_port = 0
# set the control (ROUTER) port [default: random]
# c.IPKernelApp.control_port = 0
# Whether to overwrite existing config files when copying
# c.IPKernelApp.overwrite = False
# Execute the given command string.
# c.IPKernelApp.code_to_run = ''
# set the stdin (ROUTER) port [default: random]
# c.IPKernelApp.stdin_port = 0
# Set the log level by value or name.
# c.IPKernelApp.log_level = 30
# lines of code to run at IPython startup.
# c.IPKernelApp.exec_lines = []
# Path to an extra config file to load.
#
# If specified, load this config file in addition to any other IPython config.
# c.IPKernelApp.extra_config_file = u''
# The importstring for the OutStream factory
# c.IPKernelApp.outstream_class = 'IPython.kernel.zmq.iostream.OutStream'
# Whether to create profile dir if it doesn't exist
# c.IPKernelApp.auto_create = False
# set the heartbeat port [default: random]
# c.IPKernelApp.hb_port = 0
#
# c.IPKernelApp.transport = 'tcp'
# redirect stdout to the null device
# c.IPKernelApp.no_stdout = False
# Should variables loaded at startup (by startup files, exec_lines, etc.) be
# hidden from tools like %who?
# c.IPKernelApp.hide_initial_ns = True
# dotted module name of an IPython extension to load.
# c.IPKernelApp.extra_extension = ''
# A file to be run
# c.IPKernelApp.file_to_run = ''
# The IPython profile to use.
# c.IPKernelApp.profile = u'default'
#
# c.IPKernelApp.parent_appname = u''
# kill this process if its parent dies. On Windows, the argument specifies the
# HANDLE of the parent process, otherwise it is simply boolean.
# c.IPKernelApp.parent_handle = 0
# JSON file in which to store connection info [default: kernel-<pid>.json]
#
# This file will contain the IP, ports, and authentication key needed to connect
# clients to this kernel. By default, this file will be created in the security
# dir of the current profile, but can be sp | ecified by absolute path.
# c.IPKernelApp.connection_file = ''
# If true, IPython will populate the user namespace with numpy, pylab, etc. and
# an ``import *`` is done from numpy and pylab, when using pylab mode.
#
# When False, pylab mode should not import any names into the user namespace.
# c.IPKernelApp.pylab_import_all = True
# The name of the IPython directory. This directory is used for | logging
# configuration (through profiles), history storage, etc. The default is usually
# $HOME/.ipython. This options can also be specified through the environment
# variable IPYTHONDIR.
# c.IPKernelApp.ipython_dir = u''
# Configure matplotlib for interactive use with the default matplotlib backend.
# c.IPKernelApp.matplotlib = None
# ONLY USED ON WINDOWS Interrupt this process when the parent is signaled.
# c.IPKernelApp.interrupt = 0
# Whether to install the default config files into the profile dir. If a new
# profile is being created, and IPython contains config files for that profile,
# then they will be staged into the new directory. Otherwise, default config
# files will be automatically generated.
# c.IPKernelApp.copy_config_files = False
# List of files to run at IPython startup.
# c.IPKernelApp.exec_files = []
# Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk3', 'none',
# 'osx', 'pyglet', 'qt', 'qt4', 'tk', 'wx').
# c.IPKernelApp.gui = None
# A list of dotted module names of IPython extensions to load.
# c.IPKernelApp.extensions = []
# redirect stderr to the null device
# c.IPKernelApp.no_stderr = False
# The Logging format template
# c.IPKernelApp.log_format = '[%(name)s]%(highlevel)s %(message)s'
# set the iopub (PUB) port [default: random]
# c.IPKernelApp.iopub_port = 0
#------------------------------------------------------------------------------
# ZMQInteractiveShell configuration
#------------------------------------------------------------------------------
# A subclass of InteractiveShell for ZMQ.
# ZMQInteractiveShell will inherit config from: InteractiveShell
# Use colors for displaying information about objects. Because this information
# is passed through a pager (like 'less'), and some pagers get confused with
# color codes, this capability can be turned off.
# c.ZMQInteractiveShell.color_info = True
# A list of ast.NodeTransformer subclass instances, which will be applied to
# user input before code is run.
# c.ZMQInteractiveShell.ast_transformers = []
#
# c.ZMQInteractiveShell.history_length = 10000
# Don't call post-execute functions that have failed in the past.
# c.ZMQInteractiveShell.disable_failing_post_execute = False
# Show rewritten input, e.g. for autocall.
# c.ZMQInteractiveShell.show_rewritten_input = True
# Set the color scheme (NoColor, Linux, or LightBG).
# c.ZMQInteractiveShell.colors = 'Linux'
#
# c.ZMQInteractiveShell.separate_in = '\n'
# Deprecated, use PromptManager.in2_template
# c.ZMQInteractiveShell.prompt_in2 = ' .\\D.: '
#
# c.ZMQInteractiveShell.separate_out = ''
# Deprecated, use PromptManager.in_template
# c.ZMQInteractiveShell.prompt_in1 = 'In [\\#]: '
# Enable deep (recursive) reloading by default. IPython can use the deep_reload
# module which reloads changes in modules recursively (it replaces the reload()
# function, so you don't need to change anything to use it). deep_reload()
# forces a full reload of modules whose code may have changed, which the default
# reload() function does not. When deep_reload is off, IPython will use the
# normal reload(), but deep_reload will still be available as dreload().
# c.ZMQInteractiveShell.deep_reload = False
# Make IPython automatically call any callable object even if you didn't type
# explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically.
# The value can be '0' to disable the feature, '1' for 'smart' autocall, where
# it is not applied if there are no more arguments on the line, and '2' for
# 'full' autocall, where all callable objects are automatically called (even if
# no arguments are present).
# c.ZMQInteractiveShell.autocall = 0
#
# c.ZMQInteractiveShell.separate_out2 = ''
# Deprecated, use PromptManager.justify
# c.ZMQInteractiveShell.prompts_pad_left = True
#
# c.ZMQInteractiveShell.readline_parse_and_bind = ['tab: complete', '"\\C-l": clear-screen', 'set show-all-if-ambiguous on', '"\\C-o": tab-insert', '"\\C-r": reverse-search-history', '"\\C-s": forward-search-history', '"\\C-p": history-search-backward', '"\\C-n": history-search-forward', '"\\e[A": history-search-backward', '"\\e[B": history-search-forward', '"\\C-k": kill-line', '"\\C-u": unix-line-discard']
# Enable magic commands to be called without the leading %.
# c.ZMQInteractiveShell.automagic = True
#
# c.ZMQInteractiveShell.debug = False
#
# c.ZMQInteractiveShell.object_info_string_level = 0
#
# c.ZMQInteractiveShell.ipython_dir = ''
#
# c.ZMQInteractiveShell.readline_remove_delims = '-/~'
# Start logging to the default log file.
# c.ZMQInteractiveShell.logstart = False
# The name of the logfile to use.
# c.ZMQInteractiveShell.logfile = ''
#
# c.ZMQInteractiveShell.wildcards_case_sensitive = True
# Save multi-line entries as one entry in readline history
# c.ZMQInteractiveShell.multiline_history = False
# Start logging to the given file in append mode.
# c.ZMQInteractiveShell.logappend = ''
#
# c.ZMQInteractiveShell.xmode = 'Context'
#
# c.ZMQInteractiveShell.quiet = False
# Deprecated, use PromptManager.out_template
# c.ZMQInteractiveShell.prompt_out = 'Out[\\#]: '
# Set the size of the output cache. The default is 1000, you can change it
# permanently in your config file. Setting it to 0 completely disables the
# caching system, and the minimum value accepted is 20 (if you provide a value
# less than 20, it is reset to 0 and a warning is issue |
import pygame as pg
from i18n import i18n
class BaseScene(object):
    """
    Base class that individual game states derive from.

    Provides the shared lifecycle hooks (startup/get_event/update/draw)
    plus references to sounds, assets, shared variables and localization.
    """
    def __init__(self, helpers=None):
        self.done = False
        self.quit = False
        # Pull the shared helper objects out of the dict when one is
        # supplied; a missing key is a programming error, so plain
        # indexing (and its KeyError) is intentional.
        if helpers:
            self.sounds = helpers['sounds']
            self.assets = helpers['assets']
            self.vars = helpers['vars']
            self.game = helpers['vars']['pygame']
        else:
            self.sounds = None
            self.assets = None
            self.vars = None
            self.game = None
        self.i18n = i18n.Localize()
        self.next_state = None
        self.wait = None
        self.screen_rect = pg.display.get_surface().get_rect()
        self.persist = {}
        self.player = None
        self.players = None
        self.font = pg.font.SysFont("Arial", 24)
    def startup(self, persistent):
        """
        Called when a state resumes being active.
        persistent: a dict carried over from the previous state.
        """
        self.persist = persistent
    def get_event(self, event):
        """Handle a single pygame event forwarded by the Game object."""
        pass
    def update(self, dt):
        """Per-frame update hook; dt is the time since the last frame."""
        pass
    def draw(self, surface):
        """Render this state onto the given surface."""
        pass
|
from behave import *
import operator
from django.db.models import Q
# Steps in this file use behave's default "parse" placeholder matcher.
use_step_matcher("parse")
@given('Exists album at group "{group_name}" by "{username}"')
def step_impl(context, group_name, username):
    """Create one Album per context-table row for the given group/user."""
    from django.contrib.auth.models import User
    from musicseaapp.models import Album, Group
    owner = User.objects.get(username=username)
    target_group = Group.objects.get(name=group_name)
    for row in context.table:
        new_album = Album(group=target_group, user=owner)
        for heading in row.headings:
            setattr(new_album, heading, row[heading])
        new_album.save()
@given('Exists album registered by "{username}"')
def step_impl(context, username):
    """Create one Album per context-table row owned by the given user."""
    from django.contrib.auth.models import User
    from musicseaapp.models import Album
    owner = User.objects.get(username=username)
    for row in context.table:
        new_album = Album(user=owner)
        for heading in row.headings:
            setattr(new_album, heading, row[heading])
        new_album.save()
@when('I register album at group "{group_name}"')
def step_impl(context, group_name):
    """Visit the album-creation form and submit it for every table row."""
    from musicseaapp.models import Group
    target_group = Group.objects.get(name=group_name)
    for row in context.table:
        context.browser.visit(context.get_url('musicseaapp:albums_create', target_group.pk))
        # Guard clause: skip the row if we were redirected away.
        if context.browser.url != context.get_url('musicseaapp:albums_create', target_group.pk):
            continue
        form = context.browser.find_by_tag('form').first
        for heading in row.headings:
            context.browser.fill(heading, row[heading])
        form.find_by_value('Submit').first.click()
@then('I\'m viewing the details page for album at group "{group_name}" by "{username}"')
def step_impl(context, group_name, username):
    """
    Assert the browser shows the detail page of the album described by the
    first table row, owned by `username` and belonging to `group_name`.
    """
    # Bug fix: `reduce` is not a builtin on Python 3; import it explicitly
    # (works on Python 2.6+ as well).
    from functools import reduce
    from django.contrib.auth.models import User
    from musicseaapp.models import Album, Group
    q_list = [Q((attribute, context.table.rows[0][attribute])) for attribute in context.table.headings]
    q_list.append(Q(('user', User.objects.get(username=username))))
    q_list.append(Q(('group', Group.objects.get(name=group_name))))
    # AND all the Q objects together; .get() asserts exactly one match.
    album = Album.objects.filter(reduce(operator.and_, q_list)).get()
    assert context.browser.url == context.get_url(album)
@then('There are {count:n} albums')
def step_impl(context, count):
    """Check that the total number of Album rows equals `count`."""
    from musicseaapp.models import Album
    total = Album.objects.count()
    assert count == total
@when('I edit the current album')
def step_impl(context):
    """Follow the 'edit' link, fill the form from the table and submit."""
    context.browser.find_link_by_text('edit').click()
    # TODO: Test also using direct edit view link
    # context.browser.visit(context.get_url('musicseaapp:album_edit', album.pk))
    form = context.browser.find_by_tag('form').first
    for field in context.table.headings:
        context.browser.fill(field, context.table[0][field])
    form.find_by_value('Submit').first.click()
|
import glob
import sys
import string
import Bio.PDB
def parse_noe(filename):
    """Parse an NOE restraint file and return the unique [res_a, res_b]
    residue-number pairs (whitespace tokens 3 and 8 of each line).

    Duplicates are stored once, in order of first appearance.
    """
    noe_pairs = []
    # 'with' guarantees the file is closed even if a line fails to parse
    # (the original leaked the handle on error).
    with open(filename, 'r') as f:
        for line in f:
            # line.split() replaces the deprecated string.split(line),
            # which no longer exists on Python 3.
            fields = line.split()
            res_a = int(fields[2])
            res_b = int(fields[7])
            noe_pair = [res_a, res_b]
            if noe_pair not in noe_pairs:
                noe_pairs.append(noe_pair)
    # Same output as the old Python 2 print statement, but valid on both 2 and 3.
    print("%d CA lines" % len(noe_pairs))
    return noe_pairs
def count_restraints(filename):
    """Count unique NOE atom-pair restraints in `filename`.

    Each restraint is [res_a, res_b, name_a, name_b], built from
    whitespace tokens 3, 8, 6 and 11.  A restraint is skipped when it or
    its mirrored form (B->A) was already seen, so each contact counts once.
    """
    noe_pairs = []
    with open(filename, 'r') as f:
        for line in f:
            # line.split() replaces the deprecated string.split(line).
            fields = line.split()
            res_a = int(fields[2])
            res_b = int(fields[7])
            # Drop the trailing ')' and the final atom-name character.
            name_a = fields[5].rstrip(")")[:-1]
            name_b = fields[10].rstrip(")")[:-1]
            noe_pair = [res_a, res_b, name_a, name_b]
            if noe_pair not in noe_pairs and \
               [res_b, res_a, name_b, name_a] not in noe_pairs:
                noe_pairs.append(noe_pair)
    print("%d NOE contacts" % len(noe_pairs))
    return len(noe_pairs)
# Script entry: load the native structure into PyMOL and, when an NOE file
# is given as a second argument, draw CA-CA distance objects for each pair.
native_pdb = sys.argv[1]
noe_file = ""
noe = len(sys.argv) == 3
if noe:
    noe_file = sys.argv[2]
    count_restraints(noe_file)
cmd.load(native_pdb, "native")
cmd.hide("all")
cmd.show("cartoon", "native")
if noe:
    selection = "native and resi %i and name ca"
    for res_a, res_b in parse_noe(noe_file):
        cmd.distance("noe", selection % (res_a,), selection % (res_b,))
    cmd.hide("labels", "noe")
cmd.color("grey", "native")
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of HSPlasma.
#
# HSPlasma is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HSPlasma is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HSPlasma. If not, see <http://www.gnu.org/licenses/>.
# This script reads a bunch of prp and age files to check whether libHSPlasma shows any error messages or other problems.
# Call "./prp-checkfiles.py --help" for a list of options.
# by Diafero
import sys, glob, os
from optparse import OptionParser
import PyHSPlasma
# Console width used by overprint() when padding status lines.
width = 80
# Temporary file used for the --check-repack round-trip comparison.
kTmpFile = "tmpcomparefile.prp"
def readObjects(location):
    """Snapshot the raw object data of every loaded key in `location`.

    Returns a list indexed by type id; each entry maps key name to the
    object's raw stub data.  Uses the module-level ResManager `rm`.
    """
    data = []
    for type_id in rm.getTypes(location):
        # Grow the list until data[type_id] exists.
        while len(data) <= type_id:
            data.append({})
        bucket = data[type_id]
        for key in rm.getKeys(location, type_id):
            if key.exists() and key.isLoaded():
                bucket[key.name] = key.object.stub.getData()
    return data
def checkObjectsEqual(objs1, objs2, ignorePhysics):
    """Compare two readObjects() snapshots and report differences on stdout.

    objs1, objs2: lists of {key_name: raw data} dicts indexed by type id.
    ignorePhysics: when True, skip content comparison of plGenericPhysical
    objects (missing/added physicals are still reported).
    Raises Exception when the number of types differs.
    """
    if len(objs1) != len(objs2):
        raise Exception('Number of types changed')
    for type in range(0, len(objs1)):
        typeName = PyHSPlasma.plFactory.ClassName(type, rm.getVer())
        # compare the objects of this type
        for name in objs1[type].keys():
            if not name in objs2[type].keys():
                print('Type [%04X]%s, object %s missing' % (type, typeName, name))
                # Bug fix: without this continue, the objs2 lookup below
                # raised a KeyError for every missing object.
                continue
            if ignorePhysics and type == PyHSPlasma.plFactory.kGenericPhysical: continue
            obj1 = objs1[type][name]
            obj2 = objs2[type][name]
            if len(obj1) != len(obj2):
                # Fixed duplicated word in the message ("changed changed").
                print('Type [%04X]%s, object %s changed size (%d => %d)' % (type, typeName, name, len(obj1), len(obj2)))
            elif obj1 != obj2:
                # elif: only claim "same size" when the sizes really matched
                # (previously both messages printed on a size change).
                print('Type [%04X]%s, object %s changed but stay same size' % (type, typeName, name))
        # check if something got added
        for name in objs2[type].keys():
            if not name in objs1[type].keys():
                print('Type [%04X]%s, object %s added' % (type, typeName, name))
def compareFiles(file1, file2, ignorePhysics):
    """Load both pages, snapshot their objects, and diff the snapshots."""
    def snapshot(path):
        # Read a page, capture its objects, then unload it again.
        loc = rm.ReadPage(path, True).location
        objects = readObjects(loc)
        rm.UnloadPage(loc)
        return objects
    oldObjects = snapshot(file1)
    newObjects = snapshot(file2)
    # now compare the objects
    checkObjectsEqual(oldObjects, newObjects, ignorePhysics)
def overprint(text):
    """Overwrite the current console line with `text`, padded to `width`."""
    padding = " " * (width - len(text))
    sys.stdout.write("\r" + text + padding)
    sys.stdout.flush()
### Main app
parser = OptionParser()
parser.add_option("-v", "--verbose",
                  action="count", dest="verbose", default=0,
                  help="If set one time, warnings are printed. If set two or more times, all debug messages are printed.")
parser.add_option("-c", "--check-repack",
                  action="store_true", dest="checkrepack", default=False,
                  help="Re-pack the prp files and compare to the original file (does nothing for age files)")
parser.add_option("-k", "--keep-repacked",
                  action="store_true", dest="keeprepack", default=False,
                  help="Do not remove the temporary repacked file (has no effect if --check-repack is not given)")
parser.add_option("-p", "--ignore-physics",
                  action="store_true", dest="ignorephysics", default=False,
                  help="Do not compare re-packed physicals (has no effect if --check-repack is not given)")
(options, args) = parser.parse_args()

# Map the -v count onto libHSPlasma's debug level.
if options.verbose >= 2:
    debug_level = PyHSPlasma.plDebug.kDLAll
elif options.verbose == 1:
    debug_level = PyHSPlasma.plDebug.kDLWarning
else:
    debug_level = PyHSPlasma.plDebug.kDLError
PyHSPlasma.plDebug.Init(debug_level)

# read files
rm = PyHSPlasma.plResManager()
for pattern in args:
    for file in glob.iglob(pattern):  # do the globbing on Windows, too
        overprint("Reading " + file + "...")
        lowered = file.lower()
        if lowered.endswith(".prp"):
            page = rm.ReadPage(file)
            if options.checkrepack:
                overprint("Writing " + file + "...")
                rm.WritePage(kTmpFile, page)
            rm.UnloadPage(page.location)
            if options.checkrepack:
                overprint("Comparing " + file + "...")
                compareFiles(file, kTmpFile, options.ignorephysics)
                if not options.keeprepack:
                    os.remove(kTmpFile)
        elif lowered.endswith(".age"):
            age = rm.ReadAge(file, True)  # readPages=True
            # Verify every page referenced by the age was actually found.
            for pageNum in range(0, age.getNumPages()):
                loc = age.getPageLoc(pageNum, rm.getVer())
                page = rm.FindPage(loc)
                if (page == None):
                    raise Exception("Unable to completely load age " + age.name + ": Can't find page " + str(loc))
            rm.UnloadAge(age.name)
        else:
            print("Error: Unknown file type!")
overprint("Done!")
sys.stdout.write("\n")
|
import shlex
import textwrap
from ert_gui.shell.libshell import autoCompleteList, ShellFunction, ShellProperty, widthAsPercentageOfConsoleWidth, getTerminalSize
class ShellCollection(object):
    """A named group of shell keywords (properties, functions and nested
    collections) that installs do_/complete_/help_ handlers on a parent
    cmd-style shell and dispatches sub-keywords to its members."""
    # %s is replaced with the collection name when printing guidance.
    command_help_message = "The command: '%s' supports the following keywords:"
    def __init__(self, name, parent=None, description="No description available"):
        super(ShellCollection, self).__init__()
        self.__name = name
        self.__parent = None
        self.__description = description
        if parent is not None:
            self.setParent(parent)
            parent.addChild(self)
        self.__collection = {}  # keyword -> property/function/sub-collection
        self.__model_tracker = {}  # keyword -> model object backing it
        self.__children = []  # nested collections, for cleanup()
    def setParent(self, parent):
        """Attach to `parent` and install the do_/complete_/help_ hooks."""
        if not hasattr(parent, "shellContext"):
            raise ValueError("Parent is missing function: shellContext()")
        if not hasattr(parent, "lastCommandFailed"):
            raise ValueError("Parent is missing function: lastCommandFailed()")
        # Install the cmd.Cmd-style entry points for this keyword.
        setattr(parent, "do_%s" % self.name, self.doKeywords)
        setattr(parent, "complete_%s" % self.name, self.completeKeywords)
        setattr(parent, "help_%s" % self.name, self.helpKeywords)
        self.__parent = parent
    def addChild(self, child):
        # Children are tracked so cleanup() can cascade.
        self.__children.append(child)
    def cleanup(self):
        """Recursively clean up all child collections."""
        for child in self.__children:
            child.cleanup()
    def addCollection(self, collection):
        """
        :type collection: ShellCollection
        """
        self.__collection[collection.name] = collection
        collection.setParent(self)
    def addProperty(self, property):
        """
        :type property: ShellProperty
        """
        self.__collection[property.name] = property
        property.setParent(self)
    def addFunction(self, function):
        """
        :type function: ShellFunction
        """
        self.__collection[function.name] = function
        function.setParent(self)
    def addShellProperty(self, name, getter, setter=None, validator=None, completer=None, help_arguments=None, help_message=None, pretty_attribute=None, model=None):
        """ @rtype: ShellProperty """
        shell_property = ShellProperty(name, getter, setter, validator, completer, help_arguments, help_message, pretty_attribute)
        self.addProperty(shell_property)
        if model is None:
            model = self  # default: the collection itself acts as the model
        self.__model_tracker[name] = model
        return shell_property
    def getModelForProperty(self, property_name):
        # Raises KeyError for unknown property names.
        return self.__model_tracker[property_name]
    def addShellFunction(self, name, function, completer=None, help_arguments=None, help_message=None, model=None):
        """ @rtype: ShellFunction """
        func = ShellFunction(name, function, completer, help_arguments, help_message)
        self.addFunction(func)
        if model is None:
            model = self
        self.__model_tracker[name] = model
        return func
    def getModelForFunction(self, name):
        return self.__model_tracker[name]
    @property
    def name(self):
        return self.__name
    def shellContext(self):
        """ :rtype: ert_gui.shell.libshell.ShellContext """
        return self.__parent.shellContext()
    def lastCommandFailed(self, message):
        # Delegate failure reporting up to the root shell.
        self.__parent.lastCommandFailed(message)
    def findKeywords(self):
        return self.__collection.keys()
    def completeKeywords(self, text, line, begidx, endidx):
        """Tab-completion hook installed on the parent as complete_<name>:
        strips this collection's own keyword from the line, then delegates
        to the member's complete_<keyword> if one exists."""
        arguments = shlex.split(line)
        assert arguments[0] == self.name
        # Rebase line/indices past "<name> " so members see their own line.
        line = line[len(self.name) + 1:]
        begidx = begidx - len(self.name) + 1
        endidx = endidx - len(self.name) + 1
        keyword, sep, arguments = line.partition(' ')
        if begidx >= len(keyword) and keyword in self.findKeywords():
            if hasattr(self, "complete_%s" % keyword):
                func = getattr(self, "complete_%s" % keyword)
                return func(text, line, begidx, endidx)
            else:
                return []
        else:
            # Still completing the keyword itself.
            return autoCompleteList(text, self.findKeywords())
    def doKeywords(self, line):
        """Execution hook installed on the parent as do_<name>: dispatches
        the first word of `line` to the matching member's do_<keyword>."""
        keyword, sep, arguments = line.partition(' ')
        if keyword.strip() == "":
            self.printGuidance()
        elif keyword in self.__collection:
            func = getattr(self, "do_%s" % keyword)
            return func(arguments)
        else:
            self.lastCommandFailed("Unknown keyword: '%s'" % keyword)
            self.printGuidance()
    def printGuidance(self):
        # One-line summary plus a columnized listing of known keywords.
        print(self.command_help_message % self.name)
        self.shellContext().shell().columnize(self.findKeywords(), getTerminalSize()[0])
    def helpKeywords(self):
        """Help hook installed on the parent as help_<name>: prints a
        keyword / parameters / help table for every registered keyword."""
        print(self.command_help_message % self.name)
        keywords = self.findKeywords()
        # Column widths as percentages of the current console width.
        keyword_column_width = widthAsPercentageOfConsoleWidth(20)
        parameter_column_width = widthAsPercentageOfConsoleWidth(30)
        help_column_width = widthAsPercentageOfConsoleWidth(48)
        help_format = " %-" + str(keyword_column_width) + "s %-" + str(parameter_column_width) + "s %-" + str(help_column_width) + "s"
        print(help_format % ("Keyword", "Parameter(s)", "Help"))
        for keyword in keywords:
            message = "No help available!"
            parameters = None
            if hasattr(self, "help_tuple_%s" % keyword):
                func = getattr(self, "help_tuple_%s" % keyword)
                _, parameters, message = func()
            # Wrap long help texts; continuation lines get empty first columns.
            message = textwrap.wrap(message, help_column_width)
            print(help_format % (keyword, parameters, message[0]))
            if len(message) > 1:
                for line in message[1:]:
                    print(help_format % ("", "", line))
import os
import json
import tempfile
import webbrowser
import designer
from kivy.app import App
from kivy.uix.boxlayout import BoxLayout
from kivy.properties import ObjectProperty, ConfigParser, StringProperty
from kivy.uix.settings import Settings, InterfaceWithSidebar, MenuSidebar,\
ContentPanel, SettingsPanel
from designer.uix.settings import SettingList, SettingDict
from pygments.lexers.configs import IniLexer
class SpecContentPanel(ContentPanel):
    """Content panel that reloads the raw .spec editor when switched to it."""
    def on_current_uid(self, *args):
        outcome = super(SpecContentPanel, self).on_current_uid(*args)
        panel = self.current_panel
        if isinstance(panel, SpecCodeInput):
            # Refresh the text from disk whenever the raw editor is shown.
            panel.load_spec()
        return outcome
class SpecMenuSidebar(MenuSidebar):
    """Sidebar whose buttons mirror the currently selected panel."""
    def on_selected_uid(self, *args):
        '''(internal) unselects any currently selected menu buttons, unless
        they represent the current panel.
        '''
        current = self.selected_uid
        for btn in self.buttons_layout.children:
            btn.selected = (btn.uid == current)
class SpecEditorInterface(InterfaceWithSidebar):
    """Settings interface with a shortcut to the buildozer documentation."""
    def open_buildozer_docs(self, *args):
        '''Open the online buildozer documentation in the web browser.'''
        docs_url = 'http://buildozer.readthedocs.org'
        webbrowser.open(docs_url)
class SpecSettingsPanel(SettingsPanel):
    """SettingsPanel backed by a buildozer.spec ConfigParser."""
    def get_value(self, section, key):
        '''Return the value of the section/key from the :attr:`config`
        ConfigParser instance. This function is used by :class:`SettingItem` to
        get the value for a given section/key.
        If you don't want to use a ConfigParser instance, you might want to
        override this function.
        '''
        config = self.config
        if not config:
            return
        # Keys absent from the .spec are presented as empty strings.
        return config.get(section, key) if config.has_option(section, key) else ''
    def set_value(self, section, key, value):
        # some keys are not enabled by default on .spec. If the value is empty
        # and this key is not on .spec, so we don't need to save it
        if not value and not self.config.has_option(section, key):
            return False
        super(SpecSettingsPanel, self).set_value(section, key, value)
class SpecCodeInput(BoxLayout):
    """Raw-text editor widget for buildozer.spec with validation on save."""
    text_input = ObjectProperty(None)
    '''CodeInput with buildozer.spec text.
    Instance of :class:`kivy.config.ObjectProperty` and defaults to None
    '''
    lbl_error = ObjectProperty(None)
    '''(internal) Label to display errors.
    Instance of :class:`kivy.config.ObjectProperty` and defaults to None
    '''
    spec_path = StringProperty('')
    '''buildozer.spec path.
    Instance of :class:`kivy.config.StringProperty` and defaults to ''
    '''
    __events__ = ('on_change', )
    def __init__(self, **kwargs):
        super(SpecCodeInput, self).__init__(**kwargs)
        self.text_input.lexer = IniLexer()
    def load_spec(self, *args):
        '''Read the buildozer.spec and update the CodeInput
        '''
        self.lbl_error.color = [0, 0, 0, 0]
        # Bug fix: close the file handle instead of leaking it.
        with open(self.spec_path, 'r') as spec_file:
            self.text_input.text = spec_file.read()
    def _save_spec(self, *args):
        '''Try to save the spec file. If there is a error, show the label.
        If not, save the file and dispatch on_change
        '''
        designer = App.get_running_app().root
        designer.project_watcher.stop()
        f = tempfile.NamedTemporaryFile()
        f.write(self.text_input.text)
        # Bug fix: NamedTemporaryFile is buffered, so without flushing, the
        # parser below could read an empty/partial file and validate the
        # wrong content.
        f.flush()
        try:
            cfg = ConfigParser()
            cfg.read(f.name)
        except Exception:
            # Invalid spec: show the error label, keep the old file.
            self.lbl_error.color = [1, 0, 0, 1]
        else:
            with open(self.spec_path, 'w') as spec:
                spec.write(self.text_input.text)
            self.dispatch('on_change')
        f.close()
        designer.project_watcher.start_watching(
            designer.project_loader.proj_dir)
    def on_change(self, *args):
        '''Event handler to dispatch a .spec modification
        '''
        pass
class BuildozerSpecEditor(Settings):
    '''Subclass of :class:`kivy.uix.settings.Settings` responsible for
    the UI editor of buildozer spec
    '''
    config_parser = ObjectProperty(None)
    '''Config Parser for this class. Instance
    of :class:`kivy.config.ConfigParser`
    '''
    def __init__(self, **kwargs):
        super(BuildozerSpecEditor, self).__init__(**kwargs)
        # Custom setting widgets for dict- and list-valued spec keys.
        self.register_type('dict', SettingDict)
        self.register_type('list', SettingList)
        self.SPEC_PATH = ''
        self.proj_dir = ''
        # Reuse one named parser so every panel edits the same config.
        self.config_parser = ConfigParser.get_configparser("buildozer_spec")
        if self.config_parser is None:
            self.config_parser = ConfigParser(name="buildozer_spec")
    def load_settings(self, proj_dir):
        '''This function loads project settings
        '''
        # Rebuild all panels from <proj_dir>/buildozer.spec.
        self.interface.menu.buttons_layout.clear_widgets()
        self.proj_dir = proj_dir
        self.SPEC_PATH = os.path.join(proj_dir, 'buildozer.spec')
        # Locate the installed designer package to find its bundled
        # settings JSON descriptions.
        _dir = os.path.dirname(designer.__file__)
        _dir = os.path.split(_dir)[0]
        self.config_parser.read(self.SPEC_PATH)
        self.add_json_panel('Application', self.config_parser,
                            os.path.join(_dir, 'designer',
                                         'settings', 'buildozer_spec_app.json'))
        self.add_json_panel('Android', self.config_parser,
                            os.path.join(_dir, 'designer',
                                         'settings', 'buildozer_spec_android.json'))
        self.add_json_panel('iOS', self.config_parser,
                            os.path.join(_dir, 'designer',
                                         'settings', 'buildozer_spec_ios.json'))
        self.add_json_panel('Buildozer', self.config_parser,
                            os.path.join(_dir, 'designer',
                                         'settings', 'buildozer_spec_buildozer.json'))
        # Raw text editor panel for the spec file itself.
        raw_spec = SpecCodeInput(spec_path=self.SPEC_PATH)
        raw_spec.bind(on_change=self.on_spec_changed)
        self.interface.add_panel(raw_spec, "buildozer.spec", raw_spec.uid)
        menu = self.interface.menu
        menu.selected_uid = menu.buttons_layout.children[-1].uid
    def on_spec_changed(self, *args):
        # The raw editor rewrote the file: reload everything from disk.
        self.load_settings(self.proj_dir)
        # force to show the last panel
        menu = self.interface.menu
        menu.selected_uid = menu.buttons_layout.children[0].uid
    def create_json_panel(self, title, config, filename=None, data=None):
        '''Override the original method to use the custom SpecSettingsPanel
        '''
        if filename is None and data is None:
            raise Exception('You must specify either the filename or data')
        if filename is not None:
            with open(filename, 'r') as fd:
                data = json.loads(fd.read())
        else:
            data = json.loads(data)
        if type(data) != list:
            raise ValueError('The first element must be a list')
        panel = SpecSettingsPanel(title=title, settings=self, config=config)
        for setting in data:
            # determine the type and the class to use
            if not 'type' in setting:
                raise ValueError('One setting are missing the "type" element')
            ttype = setting['type']
            cls = self._types.get(ttype)
            if cls is None:
                raise ValueError(
                    'No class registered to handle the <%s> type' %
                    setting['type'])
            # create a instance of the class, without the type attribute
            del setting['type']
            str_settings = {}
            for key, item in setting.items():
                str_settings[str(key)] = item
            instance = cls(panel=panel, **str_settings)
            # instance created, add to the panel
            panel.add_widget(instance)
        return panel
    def on_config_change(self, *args):
        # Pause the project watcher while writing so our own save does not
        # trigger a spurious project-reload event.
        designer = App.get_running_app().root
        designer.project_watcher.stop()
        self.config_parser.write()
        super(BuildozerSpecEditor, self).on_config_change(*args)
        designer.project_watcher.start_watching(
            designer.project_loader.proj_dir)
|
from pyramid.view | import view_config
from pyramid.config import Configurator
@view_config(route_name='text', renderer='string')
def text(request):
    """Return the plain-text greeting for the /text route."""
    greeting = 'Hello, World!'
    return greeting
config = Configurator()
config.add_route('text', '/text')
config.scan()
app = config.make_wsgi_app | ()
|
class literal:
    """A constant (literal) pattern in the match compiler."""
    def __init__ (self, value):
        self.value = value
        # Bug fix: this branch used to drop into pdb (leftover debug trap).
        # A VAR here means a variable pattern was misclassified as a
        # literal, so fail loudly instead of starting a debugger.
        if is_a (value, VAR):
            raise ValueError ("literal pattern built from a VAR: %r" % (value,))
    def __repr__ (self):
        return 'L%s' % (repr(self.value))
    def __cmp__ (self, other):
        # NOTE: __cmp__ is Python 2 only.  Literals compare by value among
        # themselves and sort before anything that is not a literal.
        if is_a (other, literal):
            v = self.value
            o = other.value
            #return cmp ((v.kind,v.value), (o.kind,o.value))
            return cmp (v, o)
        else:
            return -1
class constructor:
    """A constructor pattern: a datatype/alternative pair plus the
    sub-patterns for the constructor's fields."""
    def __init__ (self, name, subs):
        # <name> has the form "datatype:alt", e.g. "list:cons".
        datatype, alt = name.split (':')
        self.datatype = datatype
        self.alt = alt
        self.subs = subs
    def __len__ (self):
        """Arity of this constructor (number of sub-patterns)."""
        return len (self.subs)
    def __repr__ (self):
        rendered = ' '.join ([repr(sub) for sub in self.subs])
        return '(%s/%s %s)' % (self.datatype, self.alt, rendered)
# bad match
class MatchError (Exception):
    """Raised when a set of match rules is malformed (e.g. arity mismatch)."""
class IncompleteMatch (Exception):
    """Signals an incomplete pattern match (not all cases are covered)."""
# Sentinel code values used by the compiler below.
FAIL = ['%%fail']  # an alternative that fell through (see fatbar())
ERROR = ['%%match-error']  # default action when no rule matches at all
# The next step in this code is to try to optimize the generated tree, which should be a matter of
# using heuristics to pick which pattern out of several to begin with. This code always starts
# with the left-most pattern, and descends recursively; see first_pats_are() below.
class compiler:
def __init__ (self, context):
    """A pattern-match compiler; <context> supplies datatype definitions."""
    self.gensym_counter = 0
    self.context = context
def gensym (self):
    """Return a fresh metavariable name: m0, m1, m2, ..."""
    name = 'm%d' % (self.gensym_counter,)
    self.gensym_counter += 1
    return name
def compile (self, rules, vars):
    """Compile <rules> -- a list of (patterns, result-code) pairs -- into a
    decision tree rooted at the argument variables <vars>.

    Every rule must carry the same number of patterns.  Returns
    (vars, tree); inputs matched by no rule fall through to ERROR.
    """
    # how many pattern args?
    nrules = len (rules)
    pats, result = rules[0]
    npats = len (pats)
    #vars = [ self.gensym() for x in range (npats) ]
    for pats, result in rules[1:]:
        # must have the same number of patterns in each
        assert (len(pats) == npats)
    # Translate raw patterns into literal/variable/constructor objects.
    rules0 = []
    for pats, code in rules:
        kinds = [self.kind (x) for x in pats]
        rules0.append ((kinds, code))
    return vars, self.match (vars, rules0, ERROR)
def kind (self, p):
    """Classify raw pattern <p> as a literal, variable or constructor,
    recursively translating list/tuple structure into cons-cell
    constructor patterns."""
    if is_a (p, list) or is_a (p, tuple):
        if is_a (p, list):
            what = 'list'
        else:
            what = 'tuple'
        if len(p) == 0:
            # () -> (list:nil)
            return constructor ('%s:nil' % what, [])
        elif is_a (p[0], list) and p[0][0] == 'colon' and len(p[0]) == 3:
            # a constructor
            return constructor ('%s:%s' % (p[0][1], p[0][2]), [self.kind (x) for x in p[1:]])
        else:
            # (a b . c) => (list:cons ...)
            # XXX create a metavariable for this dot
            if p[0] == '.':
                # cdr
                return self.kind (p[1])
            else:
                return constructor ('%s:cons' % what, [self.kind (p[0]), self.kind (p[1:])])
    elif is_a (p, VAR):
        return variable (p)
    else:
        return literal (p)
def first_pats_are (self, rules, kind):
    """True iff the leading pattern of every rule is an instance of <kind>.

    Vacuously true for an empty rule set.
    """
    return all (is_a (pats[0], kind) for pats, code in rules)
def match (self, vars, rules, default):
    """Build the decision tree for <rules> over argument variables <vars>,
    dispatching on the kind of each rule's first pattern; <default> is
    the code to fall back to when nothing matches."""
    #print '-------- match -------------'
    #pp ((vars, rules, default))
    # the empty rule
    if not vars:
        if len(rules):
            empty_pat, code = rules[0]
            return code
        else:
            return default
    # if every rule begins with a variable
    if self.first_pats_are (rules, variable):
        return self.variable_rule (vars, rules, default)
    # if every rule is a constructor (i.e., no variables)
    if self.first_pats_are (rules, constructor):
        return self.constructor_rule (vars, rules, default)
    # if every rule is a constant
    if self.first_pats_are (rules, literal):
        return self.constant_rule (vars, rules, default)
    # we have a mixture of variables and constructors..
    return self.mixture_rule (vars, rules, default)
def subst (self, var0, var1, code):
    """Record a rename of <var1> to <var0>, to be applied later during
    node building (nodes.py).  Consecutive substitutions on the same code
    are merged into one let_subst node."""
    if var1 == '_':
        # A wildcard binds nothing, so no substitution is needed.
        return code
    pair = (var1, var0)
    if is_a (code, list) and len(code) and code[0] == 'let_subst':
        # Merge into the existing substitution list.
        return ['let_subst', code[1] + [pair], code[2]]
    return ['let_subst', [pair], code]
def variable_rule (self, vars, rules, default):
    """All rules start with a variable: drop that pattern column and
    substitute the bound variable name into each rule body."""
    var, rest = vars[0], vars[1:]
    rewritten = [
        (pats[1:], self.subst (var, pats[0].name, code))
        for pats, code in rules
        ]
    return self.match (rest, rewritten, default)
def fatbar (self, e1, e2):
    """Combine two alternatives: when either side is FAIL the other wins
    outright, otherwise emit an explicit %%fatbar node."""
    if e1 == FAIL:
        return e2
    if e2 == FAIL:
        return e1
    return ['%%fatbar', e1, e2]
def get_arity (self, rules):
    """Compute the constructor arity shared by a set of polymorphic
    variant rules, raising MatchError when any rule disagrees."""
    first_rule_pats = rules[0][0]
    arity = len (first_rule_pats[0])
    for pats, code in rules[1:]:
        if len (pats[0]) == arity:
            continue
        raise MatchError ("arity mismatch in polymorphic variant pattern", rules)
    return arity
def constructor_rule (self, vars, rules, default):
# Note: this rule is used for normal constructors *and* polymorphic variants.
# ok, group them by constructor (retaining the order within each constructor alt).
alts = {}
datatype = rules[0][0][0].datatype
if datatype != 'None':
dt = self.context.datatypes[datatype]
else:
# polymorphic variant
dt = None
for pats, code in rules:
alt = pats[0].alt
# XXX raise this as a real syntax error...
assert (pats[0].datatype == datatype)
if not alts.has_key (alt):
alts[alt] = [(pats, code)]
else:
alts[alt].append ((pats, code))
cases = []
if default != ERROR:
default0 = FAIL
else:
default0 = default
for alt, rules0 in alts.iteritems():
# new variables to stand for the fields of the constructor
if dt:
arity = dt.arity (alt)
else:
arity = self.get_arity (rules0)
vars0 = [self.gensym() for x in range (arity)]
wild = [True for x in vars0]
rules1 = []
for pats, code in rules0:
rul | es1.append ((pats[0].subs + pats[1:], code))
if len (pats[0].subs) != arity:
raise MatchError ("arity mismatch in variant pattern", rules0)
for i in rang | e (len (pats[0].subs)):
sub = pats[0].subs[i]
if not (is_a (sub, variable) and sub.name == '_'):
wild[i] = False
# if every pattern has a wildcard for this arg of the constructor,
# then use '_' rather than the symbol we generated.
vars1 = vars0[:]
for i in range (len (vars0)):
if wild[i]:
vars1[i] = '_'
cases.append (
[[['colon', None, alt]] + vars1, self.match (vars0 + vars[1:], rules1, default0)]
)
if dt:
if len(alts) < len (dt.alts):
# an incomplete vcase, stick in an else clause.
cases.append (['else', default0])
result = ['vcase', datatype, vars[0]] + cases
else:
# this will turn into 'pvcase' when the missing datatype is detected
result = ['vcase', vars[0]] + cases
if default != ERROR:
return self.fa |
er_corrupt_torrent: This event is emitted when a corrupt .torrent file in the watch folder is found.
The dictionary contains the name of the corrupt torrent file.
- new_version_available: This event is emitted when a new version of Tribler is available.
- tribler_started: An indicator that Tribler has completed the startup procedure and is ready to use.
- channel_discovered: An indicator that Tribler has discovered a new channel. The event contains the name,
description and dispersy community id of the discovered channel.
- torrent_discovered: An indicator that Tribler has discovered a new torrent. The event contains the infohash, name,
list of trackers, list of files with name and size, and the dispersy community id of the discovered torrent.
- torrent_removed_from_channel: An indicator that a torrent has been removed from a channel. | The event contains
the infohash and the dispersy id of the channel which contained the removed torrent.
- torrent_finished: A specific torrent has finished downloading. The event includes the infohash and name of the
torrent that has finished downloading.
- torrent_error: An error has occurred during the download process of a specific torrent. The event includes the
infohash and a readable string of the error message.
- tribler_exception: An exception has occ | urred in Tribler. The event includes a readable string of the error.
- market_ask: Tribler learned about a new ask in the market. The event includes information about the ask.
- market_bid: Tribler learned about a new bid in the market. The event includes information about the bid.
- market_ask_timeout: An ask has expired. The event includes information about the ask.
- market_bid_timeout: A bid has expired. The event includes information about the bid.
- market_transaction_complete: A transaction has been completed in the market. The event contains the transaction
that was completed.
- market_payment_received: We received a payment in the market. The event contains the payment information.
- market_payment_sent: We sent a payment in the market. The event contains the payment information.
- market_iom_input_required: The Internet-of-Money modules requires user input (like a password or challenge
response).
"""
def __init__(self, session):
    """Hook this events endpoint up to the Tribler session: keep the list
    of open event connections and subscribe to every notifier signal that
    should be forwarded to them."""
    resource.Resource.__init__(self)
    self.session = session
    self.events_requests = []  # open event-stream requests
    # De-duplication sets for streamed search results (reset per query,
    # see start_new_query()).
    self.infohashes_sent = set()
    self.channel_cids_sent = set()
    self.session.add_observer(self.on_search_results_channels, SIGNAL_CHANNEL, [SIGNAL_ON_SEARCH_RESULTS])
    self.session.add_observer(self.on_search_results_torrents, SIGNAL_TORRENT, [SIGNAL_ON_SEARCH_RESULTS])
    self.session.add_observer(self.on_upgrader_started, NTFY_UPGRADER, [NTFY_STARTED])
    self.session.add_observer(self.on_upgrader_finished, NTFY_UPGRADER, [NTFY_FINISHED])
    self.session.add_observer(self.on_upgrader_tick, NTFY_UPGRADER_TICK, [NTFY_STARTED])
    self.session.add_observer(self.on_watch_folder_corrupt_torrent,
                              NTFY_WATCH_FOLDER_CORRUPT_TORRENT, [NTFY_INSERT])
    self.session.add_observer(self.on_new_version_available, NTFY_NEW_VERSION, [NTFY_INSERT])
    self.session.add_observer(self.on_tribler_started, NTFY_TRIBLER, [NTFY_STARTED])
    self.session.add_observer(self.on_channel_discovered, NTFY_CHANNEL, [NTFY_DISCOVERED])
    self.session.add_observer(self.on_torrent_discovered, NTFY_TORRENT, [NTFY_DISCOVERED])
    self.session.add_observer(self.on_torrent_removed_from_channel, NTFY_TORRENT, [NTFY_DELETE])
    self.session.add_observer(self.on_torrent_finished, NTFY_TORRENT, [NTFY_FINISHED])
    self.session.add_observer(self.on_torrent_error, NTFY_TORRENT, [NTFY_ERROR])
    self.session.add_observer(self.on_market_ask, NTFY_MARKET_ON_ASK, [NTFY_UPDATE])
    self.session.add_observer(self.on_market_bid, NTFY_MARKET_ON_BID, [NTFY_UPDATE])
    self.session.add_observer(self.on_market_ask_timeout, NTFY_MARKET_ON_ASK_TIMEOUT, [NTFY_UPDATE])
    self.session.add_observer(self.on_market_bid_timeout, NTFY_MARKET_ON_BID_TIMEOUT, [NTFY_UPDATE])
    self.session.add_observer(self.on_market_transaction_complete,
                              NTFY_MARKET_ON_TRANSACTION_COMPLETE, [NTFY_UPDATE])
    self.session.add_observer(self.on_market_payment_received, NTFY_MARKET_ON_PAYMENT_RECEIVED, [NTFY_UPDATE])
    self.session.add_observer(self.on_market_payment_sent, NTFY_MARKET_ON_PAYMENT_SENT, [NTFY_UPDATE])
    self.session.add_observer(self.on_resource_event, SIGNAL_RESOURCE_CHECK, [SIGNAL_LOW_SPACE])
    # NOTE(review): "minig" looks like a typo for "mining", but the name
    # must match the handler defined elsewhere -- rename both together.
    self.session.add_observer(self.on_credit_minig_error, NTFY_CREDIT_MINING, [NTFY_ERROR])
    self.session.add_observer(self.on_shutdown, NTFY_TRIBLER, [STATE_SHUTDOWN])
def write_data(self, message):
    """
    Write data over the event socket if it's open.

    The message is serialized to JSON; on a UnicodeDecodeError the invalid
    characters are repaired with fix_unicode_dict before retrying.
    """
    try:
        message_str = json.dumps(message)
    except UnicodeDecodeError:
        # The message contains invalid characters; fix them
        message_str = json.dumps(fix_unicode_dict(message))
    # Idiom fixes: truthiness check instead of len()==0, and a plain loop
    # instead of a side-effect list comprehension.
    if not self.events_requests:
        return
    for request in self.events_requests:
        request.write(message_str + '\n')
def start_new_query(self):
    """Reset the per-query deduplication state for search results."""
    self.infohashes_sent, self.channel_cids_sent = set(), set()
def on_search_results_channels(self, subject, changetype, objectID, results):
    """
    Forward channel search results to connected event listeners.

    ``results`` holds 'keywords' (query terms) and 'result_list'
    (database channel rows) — exact schema set by the caller; TODO confirm.
    """
    query = ' '.join(results['keywords'])
    for channel in results['result_list']:
        channel_json = convert_db_channel_to_json(channel, include_rel_score=True)
        # Skip XXX-flagged channels when the family filter is enabled.
        if self.session.config.get_family_filter_enabled() and \
                self.session.lm.category.xxx_filter.isXXX(channel_json['name']):
            continue
        # Deduplicate: each channel is sent at most once per query.
        if channel_json['dispersy_cid'] not in self.channel_cids_sent:
            self.write_data({"type": "search_result_channel", "event": {"query": query, "result": channel_json}})
            self.channel_cids_sent.add(channel_json['dispersy_cid'])
def on_search_results_torrents(self, subject, changetype, objectID, results):
    """
    Forward torrent search results to connected event listeners.

    ``results`` holds 'keywords' and 'result_list' — exact schema set by
    the caller; TODO confirm.
    """
    query = ' '.join(results['keywords'])
    for torrent in results['result_list']:
        torrent_json = convert_search_torrent_to_json(torrent)
        torrent_name = torrent_json['name']
        # Remote results may lack a relevance score; compute one locally.
        torrent_json['relevance_score'] = torrent_json['relevance_score'] if 'relevance_score' in torrent_json \
            else self.session.lm.torrent_db.relevance_score_remote_torrent(torrent_name)
        # Skip XXX-categorised torrents when the family filter is enabled.
        if self.session.config.get_family_filter_enabled() and torrent_json['category'] == 'xxx':
            continue
        # Deduplicate on infohash; entries without an infohash are never sent.
        if 'infohash' in torrent_json and torrent_json['infohash'] not in self.infohashes_sent:
            self.write_data({"type": "search_result_torrent", "event": {"query": query, "result": torrent_json}})
            self.infohashes_sent.add(torrent_json['infohash'])
def on_upgrader_started(self, subject, changetype, objectID, *args):
    """Notify listeners that the upgrader has started."""
    payload = {"type": "upgrader_started"}
    self.write_data(payload)
def on_upgrader_finished(self, subject, changetype, objectID, *args):
self.write_data({"type": "upgrader_finished"})
def on_upgrader_tick(self, subject, changetype, objectID, *args):
self.write_data({"type": "upgrader_tick", "event": {"text": args[0]}})
def on_watch_folder_corrupt_torrent(self, subject, changetype, objectID, *args):
self.write_data({"type": "watch_folder_corrupt_torrent", "event": {"name": args[0]}})
def on_new_version_available(self, subject, changetype, objectID, *args):
self.write_data({"type": "new_version_available", "event": {"version": args[0]}})
def on_tribler_started(self, subject, changetype, objectID, *args):
|
"""Tests for the template tests."""
import unittest
from grow.templates import tests
class BuiltinTestsTestCase(unittest.TestCase):
    """Unit tests for the built-in subset/superset template tests."""

    def test_subset_filter(self):
        """A value containing every required item is a subset."""
        have = ['banana', 'apple']
        need = ['banana']
        self.assertTrue(tests.is_subset_of(have, need))

    def test_subset_filter_equal(self):
        """An identical value is a subset."""
        have = ['banana']
        need = ['banana']
        self.assertTrue(tests.is_subset_of(have, need))

    def test_subset_filter_not(self):
        """A value missing required items is not a subset."""
        have = ['banana']
        need = ['banana', 'apple']
        self.assertFalse(tests.is_subset_of(have, need))

    def test_subset_filter_none(self):
        """Two empty values count as a subset."""
        self.assertTrue(tests.is_subset_of([], []))

    def test_superset_filter(self):
        """A value is a superset even when it is missing some of the values."""
        have = ['banana']
        need = ['banana', 'apple']
        self.assertTrue(tests.is_superset_of(have, need))

    def test_superset_filter_equal(self):
        """An identical value is a superset."""
        have = ['banana']
        need = ['banana']
        self.assertTrue(tests.is_superset_of(have, need))

    def test_superset_filter_not(self):
        """A value with extra items is not a superset."""
        have = ['banana', 'apple']
        need = ['banana']
        self.assertFalse(tests.is_superset_of(have, need))

    def test_superset_filter_none(self):
        """Two empty values count as a superset."""
        self.assertTrue(tests.is_superset_of([], []))
# Allow running this test module directly (outside the test runner).
if __name__ == '__main__':
    unittest.main()
|
import pytest
from cfme.containers.provider import ContainersProvider
from cfme.markers.env_markers.provider import providers
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.providers import ProviderFilter
from cfme.utils.wait import wait_for
# Module-wide pytest configuration: every test is tier 1, runs against a
# ContainersProvider advertising the 'cockpit' flag, and gets the provider
# set up per test function via the 'setup_provider' fixture.
pytestmark = [
    pytest.mark.usefixtures('setup_provider'),
    pytest.mark.tier(1),
    pytest.mark.provider(gen_func=providers,
                         filters=[ProviderFilter(classes=[ContainersProvider],
                                                 required_flags=['cockpit'])],
                         scope='function')]
@pytest.mark.uncollectif(lambda appliance: appliance.version < "5.9",
                         reason='Cockpit Feature is only available in 5.9 and greater')
@pytest.mark.parametrize('cockpit', [False, True], ids=['disabled', 'enabled'])
def test_cockpit_button_access(appliance, provider, cockpit, request):
    """ The test verifies the existence of cockpit "Web Console"
    button on each node, click the button if enabled, verify no errors are displayed.
    """
    # Always leave the appliance with the role disabled afterwards.
    request.addfinalizer(lambda: appliance.server.settings.disable_server_roles('cockpit_ws'))
    # 'cockpit' is parametrized to exactly False/True, so a plain if/else
    # covers every case; the former trailing ``else: pytest.skip(...)``
    # branch was unreachable and has been removed.
    if cockpit:
        appliance.server.settings.enable_server_roles('cockpit_ws')
        wait_for(lambda: appliance.server_roles['cockpit_ws'] is True, delay=10, timeout=300)
    else:
        appliance.server.settings.disable_server_roles('cockpit_ws')
        wait_for(lambda: appliance.server_roles['cockpit_ws'] is False, delay=10, timeout=300)
    collection = appliance.collections.container_nodes
    nodes = collection.all()
    for node in nodes:
        # Guard clause replaces the old skip-inside-a-conditional-expression.
        if not node:
            pytest.skip("Could not determine node of {}".format(provider.name))
        view = navigate_to(node, 'Details')
        if cockpit:
            appliance.server.browser.refresh()
            # Button must be enabled; clicking it opens the node's web console.
            assert not view.toolbar.web_console.disabled
            view.toolbar.web_console.click()
            webconsole = node.vm_console
            webconsole.switch_to_console()
            assert not view.is_displayed
            assert node.name in appliance.server.browser.url
            webconsole.close_console_window()
            # After closing the console we are back on the Details view.
            assert view.is_displayed
            view.flash.assert_no_error()
        else:
            appliance.server.browser.refresh()
            # Role disabled: the button must be greyed out.
            assert view.toolbar.web_console.disabled
|
#!/ | usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # testing environment
    # Point Django at the test settings before importing anything that reads
    # django.conf; setdefault lets an explicit environment variable win.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "blastplus.test_settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
exec_command --- execute command in a specified directory and
in the modified environment.
find_executable --- locate a command using info from environment
variable PATH. Equivalent to posix `which`
command.
Author: Pearu Peterson <pearu@cens.ioc.ee>
Created: 11 January 2003
Requires: Python 2.x
Successfully tested on:
======== ============ =================================================
os.name sys.platform comments
======== ============ =================================================
posix linux2 Debian (sid) Linux, Python 2.1.3+, 2.2.3+, 2.3.3
PyCrust 0.9.3, Idle 1.0.2
posix linux2 Red Hat 9 Linux, Python 2.1.3, 2.2.2, 2.3.2
posix sunos5 SunOS 5.9, Python 2.2, 2.3.2
posix darwin Darwin 7.2.0, Python 2.3
nt win32 Windows Me
Python 2.3(EE), Idle 1.0, PyCrust 0.7.2
Python 2.1.1 Idle 0.8
nt win32 Windows 98, Python 2.1.1. Idle 0.8
nt win32 Cygwin 98-4.10, Python 2.1.1(MSC) - echo tests
fail i.e. redefining environment variables may
not work. FIXED: don't use cygwin echo!
Comment: also `cmd /c echo` will not work
but redefining environment variables do work.
posix cygwin Cygwin 98-4.10, Python 2.3.3(cygming special)
nt win32 Windows XP, Python 2.3.3
======== ============ =================================================
Known bugs:
* Tests, that send messages to stderr, fail when executed from MSYS prompt
because the messages are lost at some point.
"""
from __future__ import division, absolute_import, print_function
__all__ = ['exec_command', 'find_executable']
import os
import sys
import subprocess
from numpy.distutils.misc_util import is_sequence, make_temp_file
from numpy.distutils import log
def temp_file_name():
    """Create a temporary file, close it, and return its name."""
    handle, name = make_temp_file()
    handle.close()
    return name
def get_pythonexe():
    """Return the path of the Python interpreter.

    On Windows flavours, prefer the console interpreter (python.exe)
    over the windowed one (pythonw.exe).
    """
    pythonexe = sys.executable
    if os.name in ('nt', 'dos'):
        directory, basename = os.path.split(pythonexe)
        basename = basename.upper().replace('PYTHONW', 'PYTHON')
        pythonexe = os.path.join(directory, basename)
        assert os.path.isfile(pythonexe), '%r is not a file' % (pythonexe,)
    return pythonexe
def find_executable(exe, path=None, _cache={}):
    """Return full path of a executable or None.

    Symbolic links are not followed.

    Results are memoised in ``_cache`` — a deliberately shared mutable
    default argument keyed on ``(exe, path)``. Note that only successful
    lookups are cached; misses are re-scanned on every call.
    """
    key = exe, path
    try:
        return _cache[key]
    except KeyError:
        pass
    log.debug('find_executable(%r)' % exe)
    orig_exe = exe
    if path is None:
        path = os.environ.get('PATH', os.defpath)
    if os.name=='posix':
        realpath = os.path.realpath
    else:
        # Non-posix: no symlink resolution is attempted.
        realpath = lambda a:a
    # Strip surrounding double quotes, e.g. '"C:\\Program Files\\python"'.
    if exe.startswith('"'):
        exe = exe[1:-1]
    # On Windows-like systems, try the conventional executable extensions
    # unless the name already carries one of them.
    suffixes = ['']
    if os.name in ['nt', 'dos', 'os2']:
        fn, ext = os.path.splitext(exe)
        extra_suffixes = ['.exe', '.com', '.bat']
        if ext.lower() not in extra_suffixes:
            suffixes = extra_suffixes
    # Absolute names are probed as-is; otherwise probe every PATH entry.
    if os.path.isabs(exe):
        paths = ['']
    else:
        paths = [ os.path.abspath(p) for p in path.split(os.pathsep) ]
    for path in paths:
        fn = os.path.join(path, exe)
        for s in suffixes:
            f_ext = fn+s
            if not os.path.islink(f_ext):
                f_ext = realpath(f_ext)
            if os.path.isfile(f_ext) and os.access(f_ext, os.X_OK):
                log.info('Found executable %s' % f_ext)
                _cache[key] = f_ext
                return f_ext
    log.warn('Could not locate executable %s' % orig_exe)
    return None
############################################################
def _preserve_environment( names ):
    """Snapshot the current values of the given environment variables.

    Missing variables are recorded as None so they can be distinguished
    from empty-but-set variables on restore.
    """
    log.debug('_preserve_environment(%r)' % (names))
    return {name: os.environ.get(name) for name in names}
def _update_environment( **env ):
    """Write the given variables into os.environ, mapping falsy values to ''."""
    log.debug('_update_environment(...)')
    for key, val in env.items():
        os.environ[key] = val or ''
def _supports_fileno(stream):
"""
Returns True if 'stream' supports the file descriptor and allows fileno().
"""
if hasattr(stream, 'fileno'):
try:
stream.fileno()
return True
except IOError:
return False
else:
return False
def exec_command(command, execute_in='', use_shell=None, use_tee=None,
                 _with_python = 1, **env ):
    """
    Return (status,output) of executed command.

    Parameters
    ----------
    command : str
        A concatenated string of executable and arguments.
    execute_in : str
        Before running command ``cd execute_in`` and after ``cd -``.
    use_shell : {bool, None}, optional
        If True, execute ``sh -c command``. Default None (True)
    use_tee : {bool, None}, optional
        If True use tee. Default None (True)

    Returns
    -------
    res : str
        Both stdout and stderr messages.

    Notes
    -----
    On NT, DOS systems the returned status is correct for external commands.
    Wild cards will not work for non-posix systems or when use_shell=0.
    """
    log.debug('exec_command(%r,%s)' % (command,\
        ','.join(['%s=%r'%kv for kv in env.items()])))
    if use_tee is None:
        use_tee = os.name=='posix'
    if use_shell is None:
        use_shell = os.name=='posix'
    execute_in = os.path.abspath(execute_in)
    oldcwd = os.path.abspath(os.getcwd())
    # NOTE(review): 'exec_dir' is computed below but never used inside this
    # function as visible here — looks like dead code kept for historical
    # reasons; confirm before removing.
    if __name__[-12:] == 'exec_command':
        exec_dir = os.path.dirname(os.path.abspath(__file__))
    elif os.path.isfile('exec_command.py'):
        exec_dir = os.path.abspath('.')
    else:
        exec_dir = os.path.abspath(sys.argv[0])
        if os.path.isfile(exec_dir):
            exec_dir = os.path.dirname(exec_dir)
    # Change into the requested working directory for the duration of the
    # command, restoring the original afterwards.
    if oldcwd!=execute_in:
        os.chdir(execute_in)
        log.debug('New cwd: %s' % execute_in)
    else:
        log.debug('Retaining cwd: %s' % oldcwd)
    # Temporarily overlay the caller-supplied environment variables; the
    # originals are restored in the finally block.
    oldenv = _preserve_environment( list(env.keys()) )
    _update_environment( **env )
    try:
        st = _exec_command(command,
                           use_shell=use_shell,
                           use_tee=use_tee,
                           **env)
    finally:
        if oldcwd!=execute_in:
            os.chdir(oldcwd)
            log.debug('Restored cwd to %s' % oldcwd)
        _update_environment(**oldenv)
    return st
def _exec_command(command, use_shell=None, use_tee = None, **env):
    """
    Internal workhorse for exec_command().

    Runs ``command`` through subprocess and returns ``(returncode, text)``
    where ``text`` is the combined stdout+stderr with one trailing newline
    stripped.  Returns ``(127, '')`` when the executable cannot be spawned.
    """
    if use_shell is None:
        use_shell = os.name=='posix'
    if use_tee is None:
        use_tee = os.name=='posix'
    if os.name == 'posix' and use_shell:
        # On POSIX, subprocess always uses /bin/sh, override
        sh = os.environ.get('SHELL', '/bin/sh')
        if is_sequence(command):
            command = [sh, '-c', ' '.join(command)]
        else:
            command = [sh, '-c', command]
        use_shell = False
    elif os.name == 'nt' and is_sequence(command):
        # On Windows, join the string for CreateProcess() ourselves as
        # subprocess does it a bit differently
        command = ' '.join(_quote_arg(arg) for arg in command)
    # Inherit environment by default
    env = env or None
    try:
        proc = subprocess.Popen(command, shell=use_shell, env=env,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT,
                                universal_newlines=True)
    except EnvironmentError:
        # Return 127, as os.spawn*() and /bin/sh do
        return 127, ''
    text, err = proc.communicate()
    # Another historical oddity
    if text[-1:] == '\n':
        text = text[:-1]
    # Emulate 'tee': echo captured output to stdout as well.
    if use_tee and text:
        print(text)
    return proc.returncode, text
def _quote_arg(arg):
"" | "
Quote the argument for safe use in a shell command line.
"""
# If there is a quote in the string, assume relevant parts of the
# string are already quoted (e.g. '-I"C:\\Program Files\\..."')
if '"' not in |
ral configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'EbookLib'
copyright = u'2014, Aleksandar Erkalovic'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.16'
# The full version, including alpha/beta/rc tags.
release = '0.16'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'EbookLibdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'EbookLib.tex', u'EbookLib Documentation',
u'Aleksandar Erkalovic', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'ebooklib', u'EbookLib Documentation',
[u'Aleksandar Erkalovic'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for T | exin | fo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'EbookLib', u'EbookLib Documentation',
u'Aleksandar Erkalovic', 'EbookLib', 'Python library for EPUB and Kindle formats.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'EbookLib'
epub_author = u'Aleksandar Erkalovic'
epub_publisher = u'Aleksandar Erkalovic'
epub_copyright = u'2014, Aleksandar Erkalovic'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of |
# 'common' sets up the conduit test environment (SimpleSyncTest, ok, finished, ...)
from common import *

# setup test
test = SimpleSyncTest()

# Setup the key to sync: whitelist a single GConf key so exactly one item
# is available on the source side.
gconf = test.get_dataprovider("GConfTwoWay")
gconf.module.whitelist = ['/apps/metacity/general/num_workspaces']
folder = test.get_dataprovider("TestFolderTwoWay")
test.prepare(gconf, folder)
test.set_two_way_policy({"conflict":"ask","deleted":"ask"})
test.set_two_way_sync(True)

# Expect one item on the source and an empty sink before the first sync.
a = test.get_source_count()
b = test.get_sink_count()
ok("Got items to sync (%s,%s)" % (a,b), a == 1 and b == 0)

# Sync four times; from the second pass on, bump the synced file's mtime so
# every pass has a fresh modification to propagate.
for i in (1,2,3,4):
    if i > 1:
        # Now modify the file
        f = folder.module.get(
            folder.module.get_all()[0]
        )
        f._set_file_mtime(datetime.datetime(2008,1,i))
    a,b = test.sync()
    aborted,errored,conflicted = test.get_sync_result()
    ok("Sync #%s: Completed without conflicts" % i, aborted == False and errored == False and conflicted == False)
    ok("Sync #%s: All items (%s,%s)" % (i,a,b), a == b and a == 1)
finished()
|
import re
from vogen.voparser import VoParser, VoVariable
class AS3VoParser( VoParser ):
    """Parse an ActionScript 3 class source string and rebuild it as a
    value object: private vars get a generated constructor and getters.

    NOTE: Python 2 source (uses print statements).
    """

    def parse( self, input_string, verbose ):
        """Extract the class name and private vars from ``input_string``;
        return the rebuilt class text, or False when either is missing."""
        self.input_string = input_string
        self.verbose = verbose
        class_names = re.findall(r"class (\w+)", self.input_string );
        #Find the class_name
        if len(class_names) > 0 :
            if verbose :
                print "Found Class"+class_names[0]
            class_name = class_names[0]
        else:
            print "Couldn't find class_name in Source File"
            return False
        #Find the properties
        variables = list()
        # Match 'private var name : Type' declarations (name may carry []).
        for variable in re.findall(r"private var ([\w\[\]]+) ?: ?(\w+)", self.input_string ):
            vo_variable = VoVariable( variable[0], variable[1] )
            variables.append( vo_variable )
            if self.verbose :
                print "Found Property: " + vo_variable.__str__()
        if len( variables ) <= 0 :
            print "Couldn't find any variables in Source File, can't build a vo"
            return False
        return self.build_class( variables, class_name)

    def build_class(self, variables, class_name ):
        """Emit the rewritten class text: underscore-prefixed backing vars,
        a generated constructor, and one getter per variable."""
        return_text = self.input_string
        #Rename all existing variables
        for variable in variables :
            return_text = return_text.replace( variable.variable_name, "_"+variable.variable_name )
        #Strip the last bracket
        return_text = return_text.rstrip('}')
        return_text = return_text.rstrip('}\n')
        #Print the Constructor
        return_text += "\n\t\tpublic function " + class_name + "( "
        for variable in variables :
            return_text += "\n\t\t\t" + variable.variable_name + " : " + variable.variable_type + " ,"
        # Drop the trailing comma from the parameter list.
        return_text = return_text.rstrip(', ')
        return_text += "\n\t\t){"
        for variable in variables :
            return_text += "\n\t\t\tthis._" + variable.variable_name + " = " + variable.variable_name + ";"
        return_text += "\n\t\t}"
        #A bit of White Space
        return_text += "\n"
        #Print the Getters
        for variable in variables :
            return_text += "\n\t\tpublic function get " + variable.variable_name + "() : " + variable.variable_type + "{"
            return_text += "\n\t\t\treturn this._" + variable.variable_name + ";"
            return_text += "\n\t\t}"
        return_text += "\n"
        return_text += "\n\t}"
        return_text += "\n}"
        return return_text
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
from professor.models import Professor, DiaSemana, DisponibilidadeAula
from professor.models import Professor, DiaSemana, DisponibilidadeAula
class SimpleTest(TestCase):
    """Tests for Professor availability (disponibilidade) handling."""

    # NOTE(review): this method name does not start with 'test_', so the
    # unittest/Django runner will never discover it; renaming it would
    # change the public interface, so it is only flagged here.
    def adicionaDisponibilidade(self):
        """Registering an availability slot stores its day and time."""
        p = Professor.objects.get(id=1)
        p.informarDisponibilidade("qua", "14:30")
        p = Professor.objects.get(id=1)
        result = p.getDisponibilidadeAulas()
        self.assertEqual(result[0].hora, "14:30")
        # The original additionally asserted nome_curto == "quar", which
        # directly contradicted the "qua" expectation on the previous line
        # (both could never pass together); the bogus assertion was removed.
        self.assertEqual(result[0].diaSemana.nome_curto, "qua")
from pikka_bird_collector.parsers.table import Table as Parser
from .base_port_command import BasePortCommand, Base
class Mysql(BasePortCommand):
    """
    Collector for MySQL (https://www.mysql.com/).
    The collector is enabled whenever non-empty settings are passed.
    Multiple instances running on the same box are supported; just specify
    each port within settings.
    By default, core status, master status, slave status, and slave hosts
    are gathered. Optionally, variables can be gathered.
    Because MySQL metrics are inconsistent in their representation of
    booleans (e.g. `ON`, `YES`, `Yes`) and to minimise payload size and
    downstream storage, all values are remapped if they match these. This
    probably won't cause you problems, but if you encounter a string which
    is no longer a string, this is probably why. :)
    DEPENDENCIES:
        mysql
            Available in PATH.
    SETTINGS:
        minimal:
            {
                3306: None}
        supported:
            {
                3306: {
                    'user': "USER",
                    'password': "PASSWORD",
                    'collect': {
                        'master_status': False,
                        'slave_status': False,
                        'slave_hosts': False,
                        'variables': True}}}
    """

    # Sections gathered when per-port settings don't say otherwise.
    COLLECT_SETTING_DEFAULTS = {
        'master_status': True,
        'slave_hosts': True,
        'slave_status': True,
        'variables': False}

    CMD_SHOW_MASTER_STATUS = 'SHOW MASTER STATUS'
    CMD_SHOW_SLAVE_HOSTS = 'SHOW SLAVE HOSTS'
    CMD_SHOW_SLAVE_STATUS = 'SHOW SLAVE STATUS'
    CMD_SHOW_STATUS = 'SHOW /*!50002 GLOBAL */ STATUS'
    CMD_SHOW_VARIABLES = 'SHOW VARIABLES'

    PARSE_BOOLS = { # the stringy booleans are inconsistent
        'ON': True,
        'OFF': False,
        'YES': True,
        'NO': False,
        'Yes': True,
        'No': False}

    @staticmethod
    def command_tool(port, settings, command):
        """Build the `mysql` argv used to run ``command`` against ``port``."""
        settings = settings or {}
        # str(port): the documented settings keys are ints (e.g. {3306: None})
        # and subprocess argument lists must contain strings, so cast
        # defensively (a no-op when the port is already a string).
        c = ['mysql',
             '--host', '127.0.0.1', # socket not (yet) supported
             '--port', str(port),
             '--execute', command,
             '--batch',
             '--raw',
             '--column-names']
        if settings.get('user'):
            c.append('--user=%s' % settings['user'])
        if settings.get('password'):
            c.append('--password=%s' % settings['password'])
        return c

    def collect_port(self, port, settings):
        """Gather metrics for one MySQL instance.

        Returns a dict with a 'status' section plus any optional sections
        enabled in settings; returns an empty dict when the service is down.
        """
        metrics = {}
        o = self.command_output(port, settings, self.CMD_SHOW_STATUS)
        parser = Parser(
            converter_key=Base.parse_str_setting_key,
            converter_value=Mysql.__parse_str_setting_value)
        ms = parser.parse(o)
        if len(ms):
            metrics['status'] = ms
        else:
            return metrics # service down; give up
        if self.collect_setting('master_status', settings):
            o = self.command_output(port, settings, self.CMD_SHOW_MASTER_STATUS)
            parser = Parser(
                converter_key=Base.parse_str_setting_key,
                converter_value=Mysql.__parse_str_setting_value,
                tag_header_col='file')
            ms = parser.parse(o)
            if len(ms):
                metrics['master_status'] = ms
        if self.collect_setting('slave_status', settings):
            o = self.command_output(port, settings, self.CMD_SHOW_SLAVE_STATUS)
            parser = Parser(
                converter_key=Base.parse_str_setting_key,
                converter_value=Mysql.__parse_str_setting_value,
                transpose=True)
            ms = parser.parse(o)
            if len(ms):
                metrics['slave_status'] = ms
        if self.collect_setting('slave_hosts', settings):
            o = self.command_output(port, settings, self.CMD_SHOW_SLAVE_HOSTS)
            parser = Parser(
                converter_key=Base.parse_str_setting_key,
                converter_value=Mysql.__parse_str_setting_value,
                tag_header_col='server_id')
            ms = parser.parse(o)
            if len(ms):
                metrics['slave_hosts'] = ms
        if self.collect_setting('variables', settings):
            o = self.command_output(port, settings, self.CMD_SHOW_VARIABLES)
            parser = Parser(
                converter_key=Base.parse_str_setting_key,
                converter_value=Mysql.__parse_str_setting_value)
            ms = parser.parse(o)
            if len(ms):
                metrics['variables'] = ms
        return metrics

    @staticmethod
    def __parse_str_setting_value(value):
        """Parse a raw value, remapping MySQL's inconsistent stringy booleans."""
        v = Base.parse_str_setting_value(value)
        if v in Mysql.PARSE_BOOLS:
            v = Mysql.PARSE_BOOLS[v]
        return v
|
import itertools
import numpy as np
import pandas as pd
import pytest
from numpy.testing import assert_allclose
from pvlib import atmosphere
from pvlib import solarposition
# Tucson, AZ reference site shared by the airmass tests below.
latitude, longitude, tz, altitude = 32.2, -111, 'US/Arizona', 700
times = pd.date_range(start='20140626', end='20140626', freq='6h', tz=tz)
# Solar positions for the reference day; 'zenith' feeds relativeairmass.
ephem_data = solarposition.get_solarposition(times, latitude, longitude)
# need to add physical tests instead of just functional tests
def test_pres2alt():
    """Smoke test: pres2alt accepts a scalar pressure in pascals."""
    atmosphere.pres2alt(100000)
def test_alt2press():
    """Smoke test: alt2pres accepts a scalar altitude in metres.

    Fixes a copy-paste bug: this test previously called ``pres2alt``
    (already covered by test_pres2alt), leaving ``alt2pres`` untested.
    """
    atmosphere.alt2pres(1000)
@pytest.mark.parametrize("model",
    ['simple', 'kasten1966', 'youngirvine1967', 'kastenyoung1989',
     'gueymard1993', 'young1994', 'pickering2002'])
def test_airmass(model):
    """Each model returns a Series for Series input, ndarray for array input."""
    series_airmass = atmosphere.relativeairmass(ephem_data['zenith'], model)
    assert isinstance(series_airmass, pd.Series)
    array_airmass = atmosphere.relativeairmass(ephem_data['zenith'].values, model)
    assert isinstance(array_airmass, np.ndarray)
def test_airmass_scalar():
    """A scalar zenith above the horizon yields a finite (non-NaN) airmass."""
    airmass = atmosphere.relativeairmass(10)
    assert not np.isnan(airmass)
def test_airmass_scalar_nan():
    """A zenith of 100 degrees (below the horizon) yields NaN."""
    airmass = atmosphere.relativeairmass(100)
    assert np.isnan(airmass)
def test_airmass_invalid():
    """An unknown model name raises ValueError."""
    with pytest.raises(ValueError):
        atmosphere.relativeairmass(ephem_data['zenith'], 'invalid')
def test_absoluteairmass():
    """Smoke test: absolute airmass with default and explicit pressure."""
    relative_am = atmosphere.relativeairmass(ephem_data['zenith'], 'simple')
    for kwargs in ({}, {'pressure': 100000}):
        atmosphere.absoluteairmass(relative_am, **kwargs)
def test_absoluteairmass_numeric():
    """Smoke test: absoluteairmass accepts a plain scalar."""
    atmosphere.absoluteairmass(2)
def test_absoluteairmass_nan():
    """NaN relative airmass propagates to NaN absolute airmass."""
    result = atmosphere.absoluteairmass(np.nan)
    np.testing.assert_equal(np.nan, result)
def test_gueymard94_pw():
    """Precipitable water matches reference values over a temp/RH grid."""
    temp_air = np.array([0, 20, 40])
    relative_humidity = np.array([0, 30, 100])
    grid = np.array(list(itertools.product(temp_air, relative_humidity)))
    pws = atmosphere.gueymard94_pw(grid[:, 0], grid[:, 1])
    expected = np.array(
        [ 0.1       ,  0.33702061,  1.12340202,  0.1       ,
          1.12040963,  3.73469877,  0.1       ,  3.44859767, 11.49532557])
    assert_allclose(pws, expected, atol=0.01)
@pytest.mark.parametrize("module_type,expect", [
    ('cdte', np.array(
        [[ 0.9905102 ,  0.9764032 ,  0.93975028],
         [ 1.02928735,  1.01881074,  0.98578821],
         [ 1.04750335,  1.03814456,  1.00623986]])),
    ('monosi', np.array(
        [[ 0.9776977 ,  1.02043409,  1.03574032],
         [ 0.98630905,  1.03055092,  1.04736262],
         [ 0.98828494,  1.03299036,  1.05026561]])),
    ('polysi', np.array(
        [[ 0.9770408 ,  1.01705849,  1.02613202],
         [ 0.98992828,  1.03173953,  1.04260662],
         [ 0.99352435,  1.03588785,  1.04730718]])),
    ('cigs', np.array(
        [[ 0.9745919 ,  1.02821696,  1.05067895],
         [ 0.97529378,  1.02967497,  1.05289307],
         [ 0.97269159,  1.02730558,  1.05075651]])),
    ('asi', np.array(
        [[ 1.0555275 ,  0.87707583,  0.72243772],
         [ 1.11225204,  0.93665901,  0.78487953],
         [ 1.14555295,  0.97084011,  0.81994083]]))
])
def test_first_solar_spectral_correction(module_type, expect):
    """Spectral correction over an AM/PW meshgrid matches reference values."""
    grid_vals = np.array([1, 3, 5])
    ams, pws = np.meshgrid(grid_vals, grid_vals)
    out = atmosphere.first_solar_spectral_correction(pws, ams, module_type)
    assert_allclose(out, expect, atol=0.001)
def test_first_solar_spectral_correction_supplied():
    """An explicitly supplied coefficient set (CdTe values) is honoured."""
    # use the cdte coeffs
    coeffs = (0.87102, -0.040543, -0.00929202, 0.10052, 0.073062, -0.0034187)
    out = atmosphere.first_solar_spectral_correction(1, 1, coefficients=coeffs)
    assert_allclose(out, 0.99134828, atol=1e-3)
def test_first_solar_spectral_correction_ambiguous():
    """Omitting both module_type and coefficients raises TypeError."""
    with pytest.raises(TypeError):
        atmosphere.first_solar_spectral_correction(1, 1)
def test_kasten96_lt():
    """Test Linke turbidity factor calculated from AOD, Pwat and AM.

    Evaluates kasten96_lt on the full 3x3x3 meshgrid of airmass,
    precipitable water and broadband AOD and compares against
    precomputed reference values.
    """
    amp = np.array([1, 3, 5])
    pwat = np.array([0, 2.5, 5])
    aod_bb = np.array([0, 0.1, 1])
    lt_expected = np.array(
        [[[1.3802, 2.4102, 11.6802],
          [1.16303976, 2.37303976, 13.26303976],
          [1.12101907, 2.51101907, 15.02101907]],

         [[2.95546945, 3.98546945, 13.25546945],
          [2.17435443, 3.38435443, 14.27435443],
          [1.99821967, 3.38821967, 15.89821967]],

         [[3.37410769, 4.40410769, 13.67410769],
          [2.44311797, 3.65311797, 14.54311797],
          [2.23134152, 3.62134152, 16.13134152]]]
    )
    lt = atmosphere.kasten96_lt(*np.meshgrid(amp, pwat, aod_bb))
    assert np.allclose(lt, lt_expected, 1e-3)
    # NOTE: no return value -- pytest treats a non-None return from a test
    # as a mistake (PytestReturnNotNoneWarning); the previous `return lt`
    # served no purpose.
def test_angstrom_aod():
    """Test Angstrom turbidity model functions."""
    # Derive the Angstrom exponent from AOD at two wavelengths (nm), then
    # use it to interpolate AOD at a third wavelength.
    aod550 = 0.15
    aod1240 = 0.05
    alpha = atmosphere.angstrom_alpha(aod550, 550, aod1240, 1240)
    assert np.isclose(alpha, 1.3513924317859232)
    aod700 = atmosphere.angstrom_aod_at_lambda(aod550, 550, alpha)
    assert np.isclose(aod700, 0.10828110997681031)
def test_bird_hulstrom80_aod_bb():
    """Test Bird_Hulstrom broadband AOD."""
    # Broadband AOD estimated from spectral AOD at 380 nm and 500 nm.
    aod380, aod500 = 0.22072480948195175, 0.1614279181106312
    bird_hulstrom = atmosphere.bird_hulstrom80_aod_bb(aod380, aod500)
    assert np.isclose(0.11738229553812768, bird_hulstrom)
|
import m | atplotlib.pyplot as plt
import numpy as np
def logHist(X, N=30,fig=None, noclear=False, pdf=False, **kyw | ds):
'''
Plot logarithmic histogram or probability density function from
sampled data.
Args:
X (numpy.ndarray): 1-D array of sampled values
N (Optional[int]): Number of bins (default 30)
fig (Optional[int]): Figure number (default None)
noclear (Optioanl[bool]): Clear figure (default False)
pdf (Optional[bool]): If True normalize by bin width (default False)
and display as curve instead of bar chart.
Note: results are always normalized by number of samples
**kywds: Arbitrary keyword arguments passed to matplotlib.pyplot.bar
(or matplotlib.pyplot.semilogx if pdf is True)
Returns:
x (ndarray): abscissa values of frequencies
n (ndarray): (normalized) frequency values
'''
x = np.logspace(np.log10(np.min(X)),np.log10(np.max(X)),N+1)
n,x = np.histogram(X,bins=x)
n = n/float(X.size)
plt.figure(fig)
if not noclear: plt.clf()
if pdf:
n /= np.diff(x)
x = x[:-1]+np.diff(x)/2
plt.semilogx(x,n,**kywds)
else:
plt.bar(x[:len(x)-1],n,width=np.diff(x),**kywds)
a = plt.gca()
a.set_xlim(10.**np.floor(np.log10(np.min(X))),10.**np.ceil(np.log10(np.max(X))))
a.set_xscale('log')
plt.axis()
return x,n
|
"""
Create quiz TimeSpecialCase for all students in a particular lab/tutorial section.
Usage will be like:
./manage.py special_case_tutorial 2020su-cmpt-120-d1 q1 D101 '2021-10-07T09:30' '2021-10-07T10:30'
"""
import datetime
from django.core.management.base import BaseCommand
from django.db im | port transaction
from iso8601 import iso8601
from coredata.models import CourseOffering, Member
from quizzes.models im | port Quiz, TimeSpecialCase
def parse_datetime(s: str) -> datetime.datetime:
    """Parse an ISO-8601 string into a naive datetime (tzinfo stripped)."""
    return iso8601.parse_date(s).replace(tzinfo=None)
class Command(BaseCommand):
    """Create quiz TimeSpecialCases for every student in one lab/tutorial section."""

    def add_arguments(self, parser):
        parser.add_argument('offering_slug', type=str, help='CourseOffering slug')
        parser.add_argument('activity_slug', type=str, help='the slug of the Activity with the quiz')
        parser.add_argument('section', type=str, help='lab/tutorial section to modify')
        parser.add_argument('start_time', type=parse_datetime, help='start time for this section')
        parser.add_argument('end_time', type=parse_datetime, help='end time for this section')

    def handle(self, *args, **options):
        offering = CourseOffering.objects.get(slug=options['offering_slug'])
        quiz = Quiz.objects.get(activity__slug=options['activity_slug'],
                                activity__offering=offering)
        students = Member.objects.filter(offering=offering, role='STUD',
                                         labtut_section=options['section'])
        # Create-or-update so the command is safe to re-run for a section.
        with transaction.atomic():
            for member in students:
                TimeSpecialCase.objects.update_or_create(
                    quiz=quiz, student=member,
                    defaults={'start': options['start_time'],
                              'end': options['end_time']},
                )
|
# -*- coding: utf-8 -*-
from django import forms
from django.conf import settings
from django.utils.safestring import mark_safe
from captcha import client
class ReCaptcha(forms.widgets.Widget):
    """Form widget that renders a Google reCAPTCHA challenge.

    Field names depend on whether the "NoCaptcha" variant is enabled via
    the RECAPTCHA_NOCAPTCHA setting (evaluated once, at class definition).
    """
    if getattr(settings, "RECAPTCHA_NOCAPTCHA", False):
        recaptcha_response_name = 'g-recaptcha-response'
        recaptcha_challenge_name = 'g-recaptcha-response'
    else:
        recaptcha_challenge_name = 'recaptcha_challenge_field'
        recaptcha_response_name = 'recaptcha_response_field'

    def __init__(self, public_key=None, use_ssl=None, attrs=None, *args,
                 **kwargs):
        """:param public_key: reCAPTCHA site key; falls back to settings.
        :param use_ssl: serve the widget over HTTPS; falls back to settings.
        :param attrs: extra JS attributes passed through to displayhtml.
        """
        self.public_key = public_key if public_key else \
            settings.RECAPTCHA_PUBLIC_KEY
        self.use_ssl = use_ssl if use_ssl is not None else getattr(
            settings, 'RECAPTCHA_USE_SSL', False)
        # BUGFIX: the old `attrs={}` default was a mutable default argument,
        # shared by every instance created without an explicit attrs dict.
        self.js_attrs = {} if attrs is None else attrs
        super(ReCaptcha, self).__init__(*args, **kwargs)

    def render(self, name, value, attrs=None):
        # Delegate HTML generation to the captcha client library.
        return mark_safe(u'%s' % client.displayhtml(
            self.public_key,
            self.js_attrs, use_ssl=self.use_ssl))

    def value_from_datadict(self, data, files, name):
        # Return [challenge, response] as expected by the captcha field.
        return [
            data.get(self.recaptcha_challenge_name, None),
            data.get(self.recaptcha_response_name, None)
        ]
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import numpy as np
import tvm
from tvm import relay
from tvm.contrib.nvcc import have_fp16
def test_basic_build():
    """Build and run relu(dense(a, b)) + c on LLVM/CPU and check the result."""
    tgt = "llvm"
    ctx = tvm.cpu()
    # func
    a = relay.var("a", dtype="float32", shape=(16, 8))
    b = relay.var("b", dtype="float32", shape=(8, 8))
    c = relay.var("c", dtype="float32", shape=(16, 8))
    x = relay.nn.dense(a, b)
    y = relay.nn.relu(x)
    z = y + c
    func = relay.Function([a, b, c], z)
    A = tvm.nd.array(np.random.uniform(-1, 1, (16, 8)).astype("float32"), ctx=ctx)
    B = tvm.nd.array(np.random.uniform(-1, 1, (8, 8)).astype("float32"), ctx=ctx)
    C = tvm.nd.array(np.random.uniform(-1, 1, (16, 8)).astype("float32"), ctx=ctx)
    # b and c are bound as constant params; only a is a runtime input
    params = {
        "b" : B,
        "c" : C
    }
    # build
    targets = {
        tvm.expr.IntImm("int32", ctx.device_type): tgt
    }
    g_json, mmod, params = relay.build(relay.Module.from_expr(func), targets, "llvm", params=params)
    # test
    rt = tvm.contrib.graph_runtime.create(g_json, mmod, ctx)
    rt.set_input("a", A)
    rt.load_params(relay.save_param_dict(params))
    rt.run()
    out = rt.get_output(0)
    # relay.nn.dense computes a @ b.T, hence the transpose in the reference
    np.testing.assert_allclose(out.asnumpy(), np.maximum(np.dot(A.asnumpy(),
                                                                B.asnumpy().T),
                                                         0) + C.asnumpy(),
                               atol=1e-5, rtol=1e-5)
de | f test_fp16_build():
dtype = "float16"
if not tvm.module.enabled("cuda") or not tvm.gpu(0).exist:
print("skip because cuda is not enabled.")
return
ctx = tvm.gpu(0)
| if dtype == "float16" and not have_fp16(ctx.compute_version):
print("skip because gpu does not support fp16")
return
x = relay.var("x", dtype=dtype, shape=(4, 4))
y = relay.var("y", dtype=dtype, shape=(4, 4))
z = x + y
func = relay.Function([x, y], z)
X = tvm.nd.array(np.random.uniform(-1, 1, (4, 4)).astype(dtype), ctx=ctx)
Y = tvm.nd.array(np.random.uniform(-1, 1, (4, 4)).astype(dtype), ctx=ctx)
params = {
"x": X,
"y": Y,
}
# build
g_json, mmod, params = relay.build(func, "cuda", params=params)
# test
rt = tvm.contrib.graph_runtime.create(g_json, mmod, ctx)
rt.load_params(relay.save_param_dict(params))
rt.run()
out = rt.get_output(0)
np.testing.assert_allclose(out.asnumpy(), X.asnumpy() + Y.asnumpy(),
atol=1e-5, rtol=1e-5)
def test_fp16_conversion():
    """Round-trip casts between float32 and float16 on llvm and cuda."""
    def check_conversion(tgt, ctx):
        # Skip targets that are not compiled in, or GPUs without fp16.
        if not tvm.module.enabled(tgt):
            print("skip because {} is not enabled.".format(tgt))
            return
        elif tgt == "cuda" and ctx.exist and not have_fp16(ctx.compute_version):
            print("skip because gpu does not support fp16")
            return

        n = 10
        # Test the cast in both directions.
        for (src, dst) in [('float32', 'float16'), ('float16', 'float32')]:
            x = relay.var("x", relay.TensorType((n,), src))
            y = x.astype(dst)
            func = relay.Function([x], y)

            # init input
            X = tvm.nd.array(n * np.random.randn(n).astype(src) - n / 2)

            # build
            with relay.build_config(opt_level=1):
                g_json, mmod, params = relay.build(relay.Module.from_expr(func), tgt)

            # test
            rt = tvm.contrib.graph_runtime.create(g_json, mmod, ctx)
            rt.set_input("x", X)
            rt.run()
            out = rt.get_output(0)

            # Reference: numpy's astype performs the same conversion.
            np.testing.assert_allclose(out.asnumpy(), X.asnumpy().astype(dst),
                                       atol=1e-5, rtol=1e-5)

    for target, ctx in [('llvm', tvm.cpu()), ('cuda', tvm.gpu())]:
        check_conversion(target, ctx)
# Allow running the whole suite directly as a script.
if __name__ == "__main__":
    test_basic_build()
    test_fp16_build()
    test_fp16_conversion()
|
"""Support for Hangouts."""
import logging
from hangups.auth import GoogleAuthError
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components.conversation.util import create_matcher
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.core import HomeAssistant
from homeassistant.helpers import dispatcher, intent
import homeassistant.helpers.config_validation as cv
# We need an import from .config_flow, without it .config_flow is never loaded.
from .config_flow import HangoutsFlowHandler # noqa: F401
from .const import (
CONF_BOT,
CONF_DEFAULT_CONVERSATIONS,
CONF_ERROR_SUPPRESSED_CONVERSATIONS,
CONF_INTENTS,
CONF_MATCHERS,
CONF_REFRESH_TOKEN,
CONF_SENTENCES,
DOMAIN,
EVENT_HANGOUTS_CONNECTED,
EVENT_HANGOUTS_CONVERSATIONS_CHANGED,
EVENT_HANGOUTS_CONVERSATIONS_RESOLVED,
INTENT_HELP,
INTENT_SCHEMA,
MESSAGE_SCHEMA,
SERVICE_RECONNECT,
SERVICE_SEND_MESSAGE,
SERVICE_UPDATE,
TARGETS_SCHEMA,
)
from .hangouts_bot import HangoutsBot
from .intents import HelpIntent
_LOGGER = logging.getLogger(__name__)

# YAML configuration schema: optional intents (name -> intent schema),
# default target conversations, and conversations where errors are muted.
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Optional(CONF_INTENTS, default={}): vol.Schema(
                    {cv.string: INTENT_SCHEMA}
                ),
                vol.Optional(CONF_DEFAULT_CONVERSATIONS, default=[]): [TARGETS_SCHEMA],
                vol.Optional(CONF_ERROR_SUPPRESSED_CONVERSATIONS, default=[]): [
                    TARGETS_SCHEMA
                ],
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
    """Set up the Hangouts bot component."""
    domain_config = config.get(DOMAIN)
    if domain_config is None:
        # No YAML config: store empty defaults and stop.
        hass.data[DOMAIN] = {
            CONF_INTENTS: {},
            CONF_DEFAULT_CONVERSATIONS: [],
            CONF_ERROR_SUPPRESSED_CONVERSATIONS: [],
        }
        return True

    hass.data[DOMAIN] = {
        CONF_INTENTS: domain_config[CONF_INTENTS],
        CONF_DEFAULT_CONVERSATIONS: domain_config[CONF_DEFAULT_CONVERSATIONS],
        CONF_ERROR_SUPPRESSED_CONVERSATIONS: domain_config[
            CONF_ERROR_SUPPRESSED_CONVERSATIONS
        ],
    }

    intents = hass.data[DOMAIN][CONF_INTENTS]
    # Add the built-in help intent unless the user defined their own.
    if intents and INTENT_HELP not in intents:
        intents[INTENT_HELP] = {CONF_SENTENCES: ["HELP"]}

    # Pre-compile a matcher for every configured sentence.
    for intent_data in intents.values():
        intent_data[CONF_MATCHERS] = [
            create_matcher(sentence) for sentence in intent_data[CONF_SENTENCES]
        ]

    hass.async_create_task(
        hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_IMPORT}
        )
    )

    return True
async def async_setup_entry(hass: HomeAssistant, config: ConfigEntry) -> bool:
    """Set up a config entry."""
    try:
        # Build the bot from the stored refresh token plus YAML-derived data.
        bot = HangoutsBot(
            hass,
            config.data.get(CONF_REFRESH_TOKEN),
            hass.data[DOMAIN][CONF_INTENTS],
            hass.data[DOMAIN][CONF_DEFAULT_CONVERSATIONS],
            hass.data[DOMAIN][CONF_ERROR_SUPPRESSED_CONVERSATIONS],
        )
        hass.data[DOMAIN][CONF_BOT] = bot
    except GoogleAuthError as exception:
        _LOGGER.error("Hangouts failed to log in: %s", str(exception))
        return False

    # Wire the connected/changed/resolved event chain before connecting.
    dispatcher.async_dispatcher_connect(
        hass, EVENT_HANGOUTS_CONNECTED, bot.async_handle_update_users_and_conversations
    )
    dispatcher.async_dispatcher_connect(
        hass, EVENT_HANGOUTS_CONVERSATIONS_CHANGED, bot.async_resolve_conversations
    )
    dispatcher.async_dispatcher_connect(
        hass,
        EVENT_HANGOUTS_CONVERSATIONS_RESOLVED,
        bot.async_update_conversation_commands,
    )

    # Disconnect cleanly when Home Assistant shuts down.
    config.async_on_unload(
        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, bot.async_handle_hass_stop)
    )

    await bot.async_connect()

    hass.services.async_register(
        DOMAIN,
        SERVICE_SEND_MESSAGE,
        bot.async_handle_send_message,
        schema=MESSAGE_SCHEMA,
    )
    hass.services.async_register(
        DOMAIN,
        SERVICE_UPDATE,
        bot.async_handle_update_users_and_conversations,
        schema=vol.Schema({}),
    )
    hass.services.async_register(
        DOMAIN, SERVICE_RECONNECT, bot.async_handle_reconnect, schema=vol.Schema({})
    )

    intent.async_register(hass, HelpIntent(hass))

    return True
async def async_unload_entry(hass: HomeAssistant, _: ConfigEntry) -> bool:
    """Unload a config entry."""
    # Remove the bot from shared state, then tear down its connection.
    hangouts_bot = hass.data[DOMAIN].pop(CONF_BOT)
    await hangouts_bot.async_disconnect()
    return True
|
from sys import argv
fo | r line in ope | n(argv[1]):
tweet_id, hashtags, hashtags_count, user_id, created_at, followers_count, score, text = line.replace('\n', '').split('\t')
print ','.join([tweet_id, user_id, created_at, followers_count, score, text])
|
delClass(self):
'''reimplemented from :class:`TaurusBaseWidget`'''
return taurus.core.taurusdevice.TaurusDevice
    def onChooseScanDirButtonClicked(self):
        '''Open a directory chooser and, if accepted, store the chosen path
        in the path line edit (emitting textEdited as if typed by the user).'''
        ret = Qt.QFileDialog.getExistingDirectory (self, 'Choose directory for saving files', self.ui.pathLE.text())
        if ret:
            self.ui.pathLE.setText(ret)
            # old-style signal emit so listeners of textEdited are notified
            self.ui.pathLE.emit(Qt.SIGNAL('textEdited (QString)'), ret)
    def onDialogButtonClicked(self, button):
        '''Dispatch the dialog button box: Apply writes the configuration
        to the door, Reset discards local changes and reloads.'''
        role = self.ui.buttonBox.buttonRole(button)
        if role == Qt.QDialogButtonBox.ApplyRole:
            self.writeExperimentConfiguration(ask=False)
        elif role == Qt.QDialogButtonBox.ResetRole:
            self._reloadConf()
    def closeEvent(self, event):
        '''This event handler receives widget close events'''
        # Offer to save unsaved changes before the widget goes away.
        if self.isDataChanged():
            self.writeExperimentConfiguration(ask=True)
        Qt.QWidget.closeEvent(self, event)
    def setModel(self, model):
        '''reimplemented from :class:`TaurusBaseWidget`'''
        TaurusBaseWidget.setModel(self, model)
        # Force a reload: the previous configuration belongs to the old door.
        self._reloadConf(force=True)
        #set the model of some child widgets
        door = self.getModelObj()
        if door is None: return
        tghost = taurus.Database().getNormalName() #@todo: get the tghost from the door model instead
        msname = door.macro_server.getFullName()
        self.ui.taurusModelTree.setModel(tghost)
        self.ui.sardanaElementTree.setModel(msname)
    def _reloadConf(self, force=False):
        '''Discard local changes and re-read the experiment configuration
        from the door.

        :param force: (bool) if False (default), ask for confirmation first
                      when there are unsaved local changes
        '''
        if not force and self.isDataChanged():
            op = Qt.QMessageBox.question(self, "Reload info from door",
                                "If you reload, all current experiment configuration changes will be lost. Reload?",
                                Qt.QMessageBox.Yes | Qt.QMessageBox.Cancel)
            if op != Qt.QMessageBox.Yes:
                return
        door = self.getModelObj()
        if door is None: return
        conf = door.getExperimentConfiguration()
        # Keep a pristine copy so isDataChanged can compare later.
        self._originalConfiguration = copy.deepcopy(conf)
        self.setLocalConfig(conf)
        self._setDirty(False)
        self._dirtyMntGrps = set()
        #set a list of available channels
        avail_channels = {}
        for ch_info in door.macro_server.getExpChannelElements().values():
            avail_channels[ch_info.full_name] = ch_info.getData()
        self.ui.channelEditor.getQModel().setAvailableChannels(avail_channels)
    def _setDirty(self, dirty):
        '''Record the dirty flag and refresh the dialog button state.'''
        self._dirty = dirty
        self._updateButtonBox()
def isDataChanged(self):
"""Tells if the local data has been modified since it was last refreshed
:return: (bool) True if he local data has been modified since it was last refreshed
"""
return bool(self._dirty or self.ui.channelEditor.getQModel().isDataChanged() or self._dirtyMntGrps)
    def _updateButtonBox(self, *args, **kwargs):
        # Apply/Reset buttons are only enabled while there are pending changes.
        self.ui.buttonBox.setEnabled(self.isDataChanged())
    def getLocalConfig(self):
        '''Return the locally edited experiment configuration dictionary.'''
        return self._localConfig
    def setLocalConfig(self, conf):
        '''gets a ExpDescription dictionary and sets up the widget'''
        self._localConfig = conf
        #set the Channel Editor
        activeMntGrpName = self._localConfig['ActiveMntGrp'] or ''
        if activeMntGrpName in self._localConfig['MntGrpConfigs']:
            mgconfig = self._localConfig['MntGrpConfigs'][activeMntGrpName]
            self.ui.channelEditor.getQModel().setDataSource(mgconfig)
        #set the measurement group ComboBox
        self.ui.activeMntGrpCB.clear()
        mntGrpLabels = []
        for _, mntGrpConf in self._localConfig['MntGrpConfigs'].items():
            # get labels to visualize names with lower and upper case
            mntGrpLabels.append(mntGrpConf['label'])
        self.ui.activeMntGrpCB.addItems(sorted(mntGrpLabels))
        idx = self.ui.activeMntGrpCB.findText(activeMntGrpName,
                                              # case insensitive find
                                              Qt.Qt.MatchFixedString)
        self.ui.activeMntGrpCB.setCurrentIndex(idx)
        #set the system snapshot list
        psl = self._localConfig.get('PreScanSnapshot') #I get it before clearing because clear() changes the _localConfig
        # TODO: For Taurus 4 compatibility
        # prefix snapshot names with the tango scheme for Taurus 4 models
        psl_fullname = []
        for name, display in psl:
            psl_fullname.append(("tango://%s" % name, display))
        self.ui.preScanList.clear()
        self.ui.preScanList.addModels(psl_fullname)
        #other settings
        self.ui.filenameLE.setText(", ".join(self._localConfig['ScanFile']))
        self.ui.pathLE.setText(self._localConfig['ScanDir'] or '')
        # +1 maps DataCompressionRank (-1 = no compression) onto combo index 0
        self.ui.compressionCB.setCurrentIndex(self._localConfig['DataCompressionRank'] + 1)
    def writeExperimentConfiguration(self, ask=True):
        '''sends the current local configuration to the door

        :param ask: (bool) If True (default) prompts the user before saving.

        :return: (bool) True if the configuration was written, False if the
                 user cancelled or an empty measurement group was found
        '''
        if ask:
            op = Qt.QMessageBox.question(self, "Save configuration?",
                                        'Do you want to save the current configuration?\n(if not, any changes will be lost)',
                                        Qt.QMessageBox.Yes | Qt.QMessageBox.No)
            if op != Qt.QMessageBox.Yes:
                return False
        conf = self.getLocalConfig()
        #make sure that no empty measurement groups are written
        for mgname, mgconfig in conf.get('MntGrpConfigs', {}).items():
            if mgconfig is not None and not mgconfig.get('controllers'):
                mglabel = mgconfig['label']
                Qt.QMessageBox.information(self, "Empty Measurement group",
                                           "The measurement group '%s' is empty. Fill it (or delete it) before applying" % mglabel,
                                           Qt.QMessageBox.Ok)
                # switch the editor to the offending group so the user can fix it
                self.changeActiveMntGrp(mgname)
                return False
        #check if the currently displayed mntgrp is changed
        if self.ui.channelEditor.getQModel().isDataChanged():
            self._dirtyMntGrps.add(self._localConfig['ActiveMntGrp'])
        door = self.getModelObj()
        door.setExperimentConfiguration(conf, mnt_grps=self._dirtyMntGrps)
        # The written config becomes the new pristine baseline.
        self._originalConfiguration = copy.deepcopy(conf)
        self._dirtyMntGrps = set()
        self.ui.channelEditor.getQModel().setDataChanged(False)
        self._setDirty(False)
        self.emit(Qt.SIGNAL('experimentConfigurationChanged'), copy.deepcopy(conf))
        return True
    def changeActiveMntGrp(self, activeMntGrpName):
        '''Make the given measurement group the active one, updating the
        combo box and the channel editor accordingly.

        :param activeMntGrpName: (str) name of an existing measurement group
        :raises KeyError: if the name is not a known measurement group
        '''
        activeMntGrpName = str(activeMntGrpName)
        if self._localConfig is None:
            return
        if activeMntGrpName == self._localConfig['ActiveMntGrp']:
            return #nothing changed
        if activeMntGrpName not in self._localConfig['MntGrpConfigs']:
            raise KeyError('Unknown measurement group "%s"' % activeMntGrpName)

        #add the previous measurement group to the list of "dirty" groups if something was changed
        if self.ui.channelEditor.getQModel().isDataChanged():
            self._dirtyMntGrps.add(self._localConfig['ActiveMntGrp'])

        self._localConfig['ActiveMntGrp'] = activeMntGrpName

        i = self.ui.activeMntGrpCB.findText(activeMntGrpName,
                                            # case insensitive find
                                            Qt.Qt.MatchFixedString)
        self.ui.activeMntGrpCB.setCurrentIndex(i)
        mgconfig = self._localConfig['MntGrpConfigs'][activeMntGrpName]
        self.ui.channelEditor.getQModel().setDataSource(mgconfig)
        self._setDirty(True)
def createMntGrp(self):
'''creates a new Measurement Group'''
if self._localConfig is None:
return
mntGrpName, ok = Qt.QInputDialog.getText(self, "New Measurement Group",
"Enter a name for the new measurement Group")
if not ok: return
mntGrpName = str(mntGrpName)
#check that the given name is not an existing pool element
ms = self.getModelObj().macro_server
poolElementNames = [v.name for v in |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF | DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Build the Tangible template for the Geonosian reinforcement core
    schematic loot item. Autogenerated; edit only between the markers."""
    result = Tangible()

    result.template = "object/tangible/loot/loot_schematic/shared_geonosian_reinforcement_core_schematic.iff"
    result.attribute_template_id = -1
    result.stfName("craft_weapon_ingredients_n","geonosian_reinforcement_core_schematic")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import
from .decorators import Vectorize, GU | Vectorize, vectorize, guvectorize
from ._internal import PyUFunc_None, PyUFunc_Zero, PyUFunc_One
from . import _internal, array_exprs
# PyUFunc_ReorderableNone only exists when the C extension was built
# against a sufficiently recent NumPy; re-export it if available.
if hasattr(_internal, 'PyUFunc_ReorderableNone'):
    PyUFunc_ReorderableNone = _internal.PyUFunc_ReorderableNone
del _internal, array_exprs


def _init():
    # Register lazy initializers so the CUDA vectorizer machinery is only
    # imported the first time the 'cuda' target is actually requested.

    def init_vectorize():
        from numba.cuda.vectorizers import CUDAVectorize
        return CUDAVectorize

    def init_guvectorize():
        from numba.cuda.vectorizers import CUDAGUFuncVectorize
        return CUDAGUFuncVectorize

    Vectorize.target_registry.ondemand['cuda'] = init_vectorize
    GUVectorize.target_registry.ondemand['cuda'] = init_guvectorize

# Run the registration once at import time, then drop the helper name.
_init()
del _init
|
tion with
objects of type :class:`.MutableComposite`.
Supporting Pickling
--------------------
As is the case with :class:`.Mutable`, the :class:`.MutableComposite` helper
class uses a ``weakref.WeakKeyDictionary`` available via the
:meth:`MutableBase._parents` attribute which isn't picklable. If we need to
pickle instances of ``Point`` or its owning class ``Vertex``, we at least need
to define a ``__getstate__`` that doesn't include the ``_parents`` dictionary.
Below we define both a ``__getstate__`` and a ``__setstate__`` that package up
the minimal form of our ``Point`` class::
class Point(MutableComposite):
# ...
def __getstate__(self):
return self.x, self.y
def __setstate__(self, state):
self.x, self.y = state
As with :class:`.Mutable`, the :class:`.MutableComposite` augments the
pickling process of the parent's object-relational state so that the
:meth:`MutableBase._parents` collection is restored to all ``Point`` objects.
"""
from ..orm.attributes import flag_modified
from .. import event, types
from ..orm import mapper, object_mapper, Mapper
from ..util import memoized_property
import weakref
class MutableBase(object):
    """Common base class to :class:`.Mutable`
    and :class:`.MutableComposite`.

    """

    @memoized_property
    def _parents(self):
        """Dictionary of parent object->attribute name on the parent.

        This attribute is a so-called "memoized" property.  It initializes
        itself with a new ``weakref.WeakKeyDictionary`` the first time
        it is accessed, returning the same object upon subsequent access.

        """
        # WeakKeyDictionary so tracked parents can still be garbage collected.
        return weakref.WeakKeyDictionary()

    @classmethod
    def coerce(cls, key, value):
        """Given a value, coerce it into the target type.

        Can be overridden by custom subclasses to coerce incoming
        data into a particular type.

        By default, raises ``ValueError``.

        This method is called in different scenarios depending on if
        the parent class is of type :class:`.Mutable` or of type
        :class:`.MutableComposite`.  In the case of the former, it is called
        for both attribute-set operations as well as during ORM loading
        operations.  For the latter, it is only called during attribute-set
        operations; the mechanics of the :func:`.composite` construct
        handle coercion during load operations.

        :param key: string name of the ORM-mapped attribute being set.
        :param value: the incoming value.
        :return: the method should return the coerced value, or raise
         ``ValueError`` if the coercion cannot be completed.

        """
        if value is None:
            return None
        msg = "Attribute '%s' does not accept objects of type %s"
        raise ValueError(msg % (key, type(value)))

    @classmethod
    def _listen_on_attribute(cls, attribute, coerce, parent_cls):
        """Establish this type as a mutation listener for the given
        mapped descriptor.

        """
        key = attribute.key
        # Only instrument the class that actually declares the attribute;
        # subclasses are covered via propagate=True on the listeners below.
        if parent_cls is not attribute.class_:
            return

        # rely on "propagate" here
        parent_cls = attribute.class_

        def load(state, *args):
            """Listen for objects loaded or refreshed.

            Wrap the target data member's value with
            ``Mutable``.

            """
            val = state.dict.get(key, None)
            if val is not None:
                if coerce:
                    val = cls.coerce(key, val)
                    state.dict[key] = val
                # register the parent so changed() can flag it as modified
                val._parents[state.obj()] = key

        def set(target, value, oldvalue, initiator):
            """Listen for set/replace events on the target
            data member.

            Establish a weak reference to the parent object
            on the incoming value, remove it for the one
            outgoing.

            """
            if value is oldvalue:
                return value

            if not isinstance(value, cls):
                value = cls.coerce(key, value)
            if value is not None:
                value._parents[target.obj()] = key
            if isinstance(oldvalue, cls):
                oldvalue._parents.pop(target.obj(), None)
            return value

        def pickle(state, state_dict):
            # Stash tracked values so unpickle() can re-register parents;
            # the WeakKeyDictionary itself is not picklable.
            val = state.dict.get(key, None)
            if val is not None:
                if 'ext.mutable.values' not in state_dict:
                    state_dict['ext.mutable.values'] = []
                state_dict['ext.mutable.values'].append(val)

        def unpickle(state, state_dict):
            # Restore the parent links dropped during pickling.
            if 'ext.mutable.values' in state_dict:
                for val in state_dict['ext.mutable.values']:
                    val._parents[state.obj()] = key

        event.listen(parent_cls, 'load', load,
                     raw=True, propagate=True)
        event.listen(parent_cls, 'refresh', load,
                     raw=True, propagate=True)
        event.listen(attribute, 'set', set,
                     raw=True, retval=True, propagate=True)
        event.listen(parent_cls, 'pickle', pickle,
                     raw=True, propagate=True)
        event.listen(parent_cls, 'unpickle', unpickle,
                     raw=True, propagate=True)
class Mutable(MutableBase):
"""Mixin that defines transparent propagation of change
events to a parent object.
See the example in :ref:`mutable_scalars` for usage information.
"""
    def changed(self):
        """Subclasses should call this method whenever change events occur."""
        # Flag every owning parent attribute as modified in the ORM.
        for parent, key in self._parents.items():
            flag_modified(parent, key)
    @classmethod
    def associate_with_attribute(cls, attribute):
        """Establish this type as a mutation listener for the given
        mapped descriptor.

        """
        # coerce=True: values are coerced on both set and load.
        cls._listen_on_attribute(attribute, True, attribute.class_)
    @classmethod
    def associate_with(cls, sqltype):
        """Associate this wrapper with all future mapped columns
        of the given type.

        This is a convenience method that calls
        ``associate_with_attribute`` automatically.

        .. warning::

           The listeners established by this method are *global*
           to all mappers, and are *not* garbage collected.   Only use
           :meth:`.associate_with` for types that are permanent to an
           application, not with ad-hoc types else this will cause unbounded
           growth in memory usage.

        """

        def listen_for_type(mapper, class_):
            # Instrument every column attribute whose type matches sqltype.
            for prop in mapper.column_attrs:
                if isinstance(prop.columns[0].type, sqltype):
                    cls.associate_with_attribute(getattr(class_, prop.key))

        event.listen(mapper, 'mapper_configured', listen_for_type)
@classmethod
def as_mutable(cls, sqltype):
"""Associate a sql type with this mutable Python type.
This establishes listeners that will detect ORM mappings against
the given type, adding mutation event trackers to those mappings.
The type is returned, unconditionally as an instance, so that
:meth:`.as_mutable` can be used inline::
Table('mytable', metadata,
Column('id', Integer, primary_key=True),
Column('data', MyMutableType.as_mutable(PickleType))
)
Note that the returned type is always an instance, even if a class
is given, and that only columns which are declared specifically with
that type instance receive additional instrumentation.
To associate a particular mutable type with all occurrences of a
particular type, use the :meth:`.Mutable.associate_with` classmethod
of the particular :class:`.Mutable` subclass to e | stablish a global
ass | ociation.
.. warning::
The listeners established by this method are *global*
to all mappers, and are *not* garbage collected. Only use
:meth:`.as_mutable` for types that are permanent to an application,
not with ad-hoc types else this will cause unbounded growth
|
#!/usr/bin/env python
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import messagebird
# ACCESS_KEY = ''
# MESSAGE_ID = ''
# Guard against running the example without filling in the constants above.
try:
    ACCESS_KEY
except NameError:
    print('You need to set an ACCESS_KEY constant in this file')
    sys.exit(1)

try:
    MESSAGE_ID
except NameError:
    print('You need to set a MESSAGE_ID constant in this file')
    sys.exit(1)

try:
    # Create a MessageBird client with the specified ACCESS_KEY.
    client = messagebird.Client(ACCESS_KEY)

    # Fetch the VoiceMessage object for the specified MESSAGE_ID.
    vmsg = client.voice_message(MESSAGE_ID)

    # Print the object information.
    print('\nThe following information was returned as a VoiceMessage object:\n')
    print('  id                : %s' % vmsg.id)
    print('  href              : %s' % vmsg.href)
    print('  originator        : %s' % vmsg.originator)
    print('  body              : %s' % vmsg.body)
    print('  reference         : %s' % vmsg.reference)
    print('  language          : %s' % vmsg.language)
    print('  voice             : %s' % vmsg.voice)
    print('  repeat            : %s' % vmsg.repeat)
    print('  ifMachine         : %s' % vmsg.ifMachine)
    print('  scheduledDatetime : %s' % vmsg.scheduledDatetime)
    print('  createdDatetime   : %s' % vmsg.createdDatetime)
    print('  recipients        : %s\n' % vmsg.recipients)

except messagebird.client.ErrorException as e:
    # The API reports one or more structured errors; show them all.
    # BUGFIX: corrected the misspelling "occured" -> "occurred".
    print('\nAn error occurred while requesting a VoiceMessage object:\n')

    for error in e.errors:
        print('  code        : %d' % error.code)
        print('  description : %s' % error.description)
        print('  parameter   : %s\n' % error.parameter)
|
ormat(user._id, token))
res = res.follow()
assert_equal(res.status_code, 302)
assert_in('dashboard', res.location)
assert_equal(len(mock_mail.call_args_list), 1)
session = Session.find(
Q('data.auth_user_id', 'eq', user._id)
).sort(
'-date_modified'
).limit(1)[0]
assert_equal(len(session.data['status']), 1)
    def test_get_user_by_id(self):
        # User.load must find a freshly created user by primary key.
        user = UserFactory()
        assert_equal(User.load(user._id), user)
    def test_get_user_by_email(self):
        # auth.get_user must resolve a user by email/username.
        user = UserFactory()
        assert_equal(auth.get_user(email=user.username), user)
    def test_get_user_with_wrong_password_returns_false(self):
        # A bad password must yield a falsy result, not an exception.
        user = UserFactory.build()
        user.set_password('killerqueen')
        assert_false(
            auth.get_user(email=user.username, password='wrong')
        )
class TestAuthObject(OsfTestCase):
    """Unit tests for the Auth helper object."""

    def test_repr(self):
        # repr must include the associated user.
        auth = AuthFactory()
        rep = repr(auth)
        assert_in(str(auth.user), rep)

    def test_factory(self):
        auth_obj = AuthFactory()
        assert_true(isinstance(auth_obj.user, auth.User))

    def test_from_kwargs(self):
        # from_kwargs pulls the user from kwargs and the view-only key
        # from the request arguments.
        user = UserFactory()
        request_args = {'view_only': 'mykey'}
        kwargs = {'user': user}
        auth_obj = Auth.from_kwargs(request_args, kwargs)
        assert_equal(auth_obj.user, user)
        assert_equal(auth_obj.private_key, request_args['view_only'])

    def test_logged_in(self):
        user = UserFactory()
        auth_obj = Auth(user=user)
        assert_true(auth_obj.logged_in)
        auth2 = Auth(user=None)
        assert_false(auth2.logged_in)
class TestPrivateLink(OsfTestCase):
    """View-only private links on contributor-protected project views."""

    def setUp(self):
        super(TestPrivateLink, self).setUp()
        # Minimal flask app exposing one contributor-protected route.
        self.flaskapp = Flask('testing_private_links')

        @self.flaskapp.route('/project/<pid>/')
        @must_be_contributor
        def project_get(**kwargs):
            return 'success', 200
        self.app = TestApp(self.flaskapp)

        # Private project reachable through a private link.
        self.user = AuthUserFactory()
        self.project = ProjectFactory(is_public=False)
        self.link = PrivateLinkFactory()
        self.link.nodes.append(self.project)
        self.link.save()

    @mock.patch('website.project.decorators.Auth.from_kwargs')
    def test_has_private_link_key(self, mock_from_kwargs):
        # Anonymous request with a valid view_only key gets through.
        mock_from_kwargs.return_value = Auth(user=None)
        res = self.app.get('/project/{0}'.format(self.project._primary_key),
                           {'view_only': self.link.key})
        res = res.follow()
        assert_equal(res.status_code, 200)
        assert_equal(res.body, 'success')

    @mock.patch('website.project.decorators.Auth.from_kwargs')
    def test_does_not_have_key(self, mock_from_kwargs):
        # Without a key, the anonymous request is redirected away.
        mock_from_kwargs.return_value = Auth(user=None)
        res = self.app.get('/project/{0}'.format(self.project._primary_key),
                           {'key': None})
        assert_is_redirect(res)
# Flask app for testing view decorators
decoratorapp = Flask('decorators')


@must_be_contributor
def view_that_needs_contributor(**kwargs):
    # Echo the resolved node (or its parent) so tests can assert on it.
    return kwargs.get('node') or kwargs.get('parent')
class AuthAppTestCase(OsfTestCase):
    """Base class that wraps each test in a flask request context."""

    def setUp(self):
        self.ctx = decoratorapp.test_request_context()
        self.ctx.push()

    def tearDown(self):
        self.ctx.pop()
class TestMustBeContributorDecorator(AuthAppTestCase):
    """Behavior of @must_be_contributor for various user/node combinations."""

    def setUp(self):
        super(TestMustBeContributorDecorator, self).setUp()
        self.contrib = AuthUserFactory()
        self.project = ProjectFactory()
        self.project.add_contributor(self.contrib, auth=Auth(self.project.creator))
        self.project.save()

    def test_must_be_contributor_when_user_is_contributor(self):
        # A contributor reaches the view and gets the project back.
        result = view_that_needs_contributor(
            pid=self.project._primary_key,
            user=self.contrib)
        assert_equal(result, self.project)

    def test_must_be_contributor_when_user_is_not_contributor_raises_error(self):
        non_contributor = AuthUserFactory()
        with assert_raises(HTTPError):
            view_that_needs_contributor(
                pid=self.project._primary_key,
                user=non_contributor
            )

    def test_must_be_contributor_no_user(self):
        # Anonymous users are redirected to the CAS login page.
        res = view_that_needs_contributor(
            pid=self.project._primary_key,
            user=None,
        )
        assert_is_redirect(res)
        # redirects to login url
        redirect_url = res.headers['Location']
        login_url = cas.get_login_url(service_url='http://localhost/')
        assert_equal(redirect_url, login_url)

    def test_must_be_contributor_parent_admin(self):
        # An admin of the parent project may access a child node.
        user = UserFactory()
        node = NodeFactory(parent=self.project, creator=user)
        res = view_that_needs_contributor(
            pid=self.project._id,
            nid=node._id,
            user=self.project.creator,
        )
        assert_equal(res, node)

    def test_must_be_contributor_parent_write(self):
        # Write (non-admin) permission on the parent is NOT enough: 403.
        user = UserFactory()
        node = NodeFactory(parent=self.project, creator=user)
        self.project.set_permissions(self.project.creator, ['read', 'write'])
        self.project.save()
        with assert_raises(HTTPError) as exc_info:
            view_that_needs_contributor(
                pid=self.project._id,
                nid=node._id,
                user=self.project.creator,
            )
        assert_equal(exc_info.exception.code, 403)
@must_be_logged_in
def protected(**kwargs):
    # View stub guarded by must_be_logged_in; used by TestPermissionDecorators.
    return 'open sesame'
@must_have_permission('dance')
def thriller(**kwargs):
    # View stub requiring the 'dance' permission; used by TestPermissionDecorators.
    return 'chiller'
class TestPermissionDecorators(AuthAppTestCase):
    """Behavior of must_be_logged_in and must_have_permission."""

    @mock.patch('framework.auth.decorators.Auth.from_kwargs')
    def test_must_be_logged_in_decorator_with_user(self, mock_from_kwargs):
        mock_from_kwargs.return_value = Auth(user=UserFactory())
        protected()

    @mock.patch('framework.auth.decorators.Auth.from_kwargs')
    def test_must_be_logged_in_decorator_with_no_user(self, mock_from_kwargs):
        mock_from_kwargs.return_value = Auth()
        resp = protected()
        # Anonymous callers receive a redirect to the CAS login page.
        assert_true(isinstance(resp, BaseResponse))
        login_url = cas.get_login_url(service_url='http://localhost/')
        assert_in(login_url, resp.headers.get('location'))

    @mock.patch('website.project.decorators._kwargs_to_nodes')
    @mock.patch('framework.auth.decorators.Auth.from_kwargs')
    def test_must_have_permission_true(self, mock_from_kwargs, mock_to_nodes):
        proj = ProjectFactory()
        proj.add_permission(proj.creator, 'dance')
        mock_from_kwargs.return_value = Auth(user=proj.creator)
        mock_to_nodes.return_value = (None, proj)
        thriller(node=proj)

    @mock.patch('website.project.decorators._kwargs_to_nodes')
    @mock.patch('framework.auth.decorators.Auth.from_kwargs')
    def test_must_have_permission_false(self, mock_from_kwargs, mock_to_nodes):
        proj = ProjectFactory()
        mock_from_kwargs.return_value = Auth(user=proj.creator)
        mock_to_nodes.return_value = (None, proj)
        with assert_raises(HTTPError) as caught:
            thriller(node=proj)
        assert_equal(caught.exception.code, http.FORBIDDEN)

    @mock.patch('website.project.decorators._kwargs_to_nodes')
    @mock.patch('framework.auth.decorators.Auth.from_kwargs')
    def test_must_have_permission_not_logged_in(self, mock_from_kwargs, mock_to_nodes):
        proj = ProjectFactory()
        mock_from_kwargs.return_value = Auth()
        mock_to_nodes.return_value = (None, proj)
        with assert_raises(HTTPError) as caught:
            thriller(node=proj)
        assert_equal(caught.exception.code, http.UNAUTHORIZED)
def needs_addon_view(**kwargs):
    """View stub used by the must_have_addon tests; always succeeds."""
    response = 'openaddon'
    return response
class TestMustHaveAddonDecorator(AuthAppTestCase):
def setUp(self):
    # Fresh project per test so add_addon calls don't leak between tests.
    super(TestMustHaveAddonDecorator, self).setUp()
    self.project = ProjectFactory()
@mock.patch('website.project.decorators._kwargs_to_nodes')
def test_must_have_addon_node_true(self, mock_kwargs_to_nodes):
mock_kwargs_to_nodes.return_value = (None, self.project)
self.project.add_addon('g |
import json
import etcd
from tendrl | .gluster_bridge.atoms.volume.set import Set
class SetVolumeOption(object):
    """Job handler that applies a single gluster volume option via the Set atom.

    ``api_job['attributes']`` must be a UTF-8 encoded JSON document carrying
    ``volname``, ``option_name`` and ``option_value``; ``api_job['request_id']``
    is used as the etcd key when persisting the finished job.
    """

    def __init__(self, api_job):
        super(SetVolumeOption, self).__init__()
        self.api_job = api_job
        # BUG FIX: this previously read ``self.atom = SetVolumeOption``, making
        # ``self.atom()`` in start() re-instantiate this handler itself — which
        # would raise TypeError because __init__ requires ``api_job``. The atom
        # to execute is the imported ``Set``.
        self.atom = Set

    def start(self):
        """Run the Set atom with the job's attributes and mark the job finished."""
        attributes = json.loads(self.api_job['attributes'].decode('utf-8'))
        vol_name = attributes['volname']
        option = attributes['option_name']
        option_value = attributes['option_value']
        self.atom().start(vol_name, option, option_value)
        self.api_job['status'] = "finished"
        # Persist the completed job back to etcd under its request id.
        etcd.Client().write(self.api_job['request_id'],
                            json.dumps(self.api_job))
|
#!/usr/bin/python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language go | verning permissions and
# limitations under the Lic | ense.
"""This example gets a user team association by the user and team ID.
To determine which teams exist, run get_all_teams.py. To determine which users
exist, run get_all_users.py.
Tags: UserTeamAssociationService.getUserTeamAssociation
"""
__author__ = 'api.shamjeff@gmail.com (Jeff Sham)'
# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfpClient
# Initialize client object.
client = DfpClient(path=os.path.join('..', '..', '..', '..'))
# Initialize appropriate service.
# NOTE(review): the API version is pinned to v201308; presumably this matches
# the bundled client library -- confirm before upgrading.
user_team_association_service = client.GetService(
    'UserTeamAssociationService', version='v201308')
# Set the IDs of the user and team to get the association for.
# NOTE(review): these placeholders must be replaced with real IDs before running.
user_id = 'INSERT_USER_ID_HERE'
team_id = 'INSERT_TEAM_ID_HERE'
# Get user team association.
# GetUserTeamAssociation is called as (team_id, user_id) and returns a list;
# indexing [0] assumes at least one association exists -- it raises otherwise.
user_team_association = user_team_association_service.GetUserTeamAssociation(
    team_id, user_id)[0]
# Display results.
print ('User team association between user with ID \'%s\' and team with ID '
       '\'%s\' was found.' % (user_team_association['userId'],
                              user_team_association['teamId']))
|
import re
from ert_gui.ide.keywords.definitions import ArgumentDefinition
class RangeStringArgument(ArgumentDefinition):
    """Argument validator for range strings such as ``1,3-5,9,17``.

    A valid token is a comma-separated list of non-negative integers and
    strictly increasing ``low-high`` ranges. When *max_value* is given,
    every number mentioned must be strictly below it.
    """

    NOT_A_VALID_RANGE_STRING = "The input should be of the type: <b><pre>\n\t1,3-5,9,17\n</pre></b>i.e. integer values separated by commas, and dashes to represent ranges."
    VALUE_NOT_IN_RANGE = "A value must be in the range from 0 to %d."

    # Raw strings so regex escapes such as '\-' reach the regex engine
    # verbatim; non-raw strings with unknown escapes raise DeprecationWarning
    # (SyntaxWarning on modern Python) without changing the pattern.
    PATTERN = re.compile(r"^[0-9\-, \t]+$")
    RANGE_PATTERN = re.compile(r"^[ \t]*([0-9]+)[ \t]*-[ \t]*([0-9]+)[ \t]*$")
    NUMBER_PATTERN = re.compile(r"^[ \t]*([0-9]+)[ \t]*$")

    def __init__(self, max_value=None, **kwargs):
        """*max_value* is the exclusive upper bound for every number, or None."""
        super(RangeStringArgument, self).__init__(**kwargs)
        self.__max_value = max_value

    def validate(self, token):
        """Validate *token*, returning the (possibly failed) validation status."""
        validation_status = super(RangeStringArgument, self).validate(token)
        if not validation_status:
            return validation_status
        else:
            match = RangeStringArgument.PATTERN.match(token)
            if match is None:
                validation_status.setFailed()
                validation_status.addToMessage(RangeStringArgument.NOT_A_VALID_RANGE_STRING)
            else:
                groups = token.split(",")
                for group in groups:
                    range_match = RangeStringArgument.RANGE_PATTERN.match(group)
                    number_match = RangeStringArgument.NUMBER_PATTERN.match(group)
                    if range_match is None and number_match is None:
                        validation_status.setFailed()
                        validation_status.addToMessage(RangeStringArgument.NOT_A_VALID_RANGE_STRING)
                        break
                    if range_match:
                        num_1 = int(range_match.group(1))
                        num_2 = int(range_match.group(2))
                        # Ranges must be strictly increasing: '5-5' and '7-3'
                        # are both rejected.
                        if not num_2 > num_1:
                            validation_status.setFailed()
                            validation_status.addToMessage(RangeStringArgument.NOT_A_VALID_RANGE_STRING)
                            break
                        if self.__max_value is not None and (num_1 >= self.__max_value or num_2 >= self.__max_value):
                            validation_status.setFailed()
                            validation_status.addToMessage(RangeStringArgument.VALUE_NOT_IN_RANGE % (self.__max_value - 1))
                            break
                    if number_match and self.__max_value is not None:
                        num = int(number_match.group(1))
                        if num >= self.__max_value:
                            validation_status.setFailed()
                            validation_status.addToMessage(RangeStringArgument.VALUE_NOT_IN_RANGE % (self.__max_value - 1))
                            break
            validation_status.setValue(token)
            return validation_status
|
seful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""TOC (i.e. AIM) support for Instance Messenger."""
# System Imports
import string, re
# Twisted Imports
from twisted.protocols import toc
from twisted.im.locals import ONLINE, OFFLINE, AWAY
from twisted.internet import defer, reactor, protocol
from twisted.internet.defer import succeed
# Sibling Imports
import basesupport, interfaces, locals
def dehtml(text):
    """Strip markup from *text* and decode common HTML entities.

    Fixes over the original implementation:
    * it used the Python-2-only ``string.replace`` function; ``str.replace``
      behaves identically and works on Python 2 and 3;
    * it decoded ``&amp;`` before ``&nbsp;``/``&quot;``, so input such as
      ``&amp;nbsp;`` was double-decoded into a space. ``&amp;`` is now
      decoded last.
    """
    for variant in ("<br>", "<BR>", "<Br>", "<bR>"):  # XXX make this a regexp
        text = text.replace(variant, "\n")
    text = re.sub('<.*?>', '', text)
    text = text.replace('&gt;', '>')
    text = text.replace('&lt;', '<')
    text = text.replace('&nbsp;', ' ')
    text = text.replace('&quot;', '"')
    # '&amp;' must be decoded last so its result cannot combine with the
    # following characters to form a new entity that is decoded again.
    text = text.replace('&amp;', '&')
    return text
def html(text):
    """Encode *text* for sending over TOC: escape entities, newlines to <br>.

    Fixes the original escaping order, which replaced '"' before '&' and so
    turned every quote into ``&amp;quot;`` (the ampersand of the freshly
    inserted entity was escaped again). '&' is now escaped first. Also uses
    ``str.replace`` instead of the Python-2-only ``string.replace``.
    """
    text = text.replace('&', '&amp;')
    text = text.replace('"', '&quot;')
    text = text.replace('<', '&lt;')
    text = text.replace('>', '&gt;')
    text = text.replace("\n", "<br>")
    return '<font color="#000000" back="#ffffff" size=3>%s</font>' % text
class TOCPerson(basesupport.AbstractPerson):
    """A buddy on the TOC (AIM) service."""

    def isOnline(self):
        return self.status != OFFLINE

    def getStatus(self):
        return self.status

    def getIdleTime(self):
        return str(self.idletime)

    def setStatusAndIdle(self, status, idletime):
        # Status can only change while the owning account is connected.
        if self.account.client is None:
            raise locals.OfflineError
        self.status = status
        self.idletime = idletime
        self.account.client.chat.getContactsList().setContactStatus(self)

    def sendMessage(self, text, meta=None):
        if self.account.client is None:
            raise locals.OfflineError
        if meta and meta.get("style", None) == "emote":
            text = "* " + text + "* "
        self.account.client.say(self.name, html(text))
        return succeed(text)
class TOCGroup(basesupport.AbstractGroup):
    """A chat room on the TOC service."""

    __implements__ = (interfaces.IGroup,)

    def __init__(self, name, tocAccount):
        basesupport.AbstractGroup.__init__(self, name, tocAccount)
        # Numeric room id assigned by the server when the room was joined.
        self.roomID = self.client.roomID[self.name]

    def sendGroupMessage(self, text, meta=None):
        if self.account.client is None:
            raise locals.OfflineError
        if meta and meta.get("style", None) == "emote":
            text = "* " + text + "* "
        self.account.client.chat_say(self.roomID, html(text))
        return succeed(text)

    def leave(self):
        if self.account.client is None:
            raise locals.OfflineError
        self.account.client.chat_leave(self.roomID)
class TOCProto(basesupport.AbstractClientMixin, toc.TOCClient):
    """Protocol adapter bridging TOC (AIM) events onto the generic chat UI.

    Python 2 code: uses print statements and dict.has_key.
    """
    def __init__(self, account, chatui, logonDeferred):
        toc.TOCClient.__init__(self, account.username, account.password)
        basesupport.AbstractClientMixin.__init__(self, account, chatui,
                                                 logonDeferred)
        # Maps room name -> TOC room id, and the reverse.
        self.roomID = {}
        self.roomIDreverse = {}
    def _debug(self, m):
        pass #print '<toc debug>', repr(m)
    def getGroupConversation(self, name, hide=0):
        # Look up (or create) the UI conversation window for room *name*.
        return self.chat.getGroupConversation(
            self.chat.getGroup(name, self), hide)
    def addContact(self, name):
        # Add the buddy server-side, then record it in the 'TwistedIM'
        # buddy-list group and push the updated config to the server.
        self.add_buddy([name])
        if not self._buddylist.has_key('TwistedIM'):
            self._buddylist['TwistedIM'] = []
        if name in self._buddylist['TwistedIM']:
            # whoops, don't add again
            return
        self._buddylist['TwistedIM'].append(name)
        self.set_config(self._config_mode, self._buddylist, self._permit, self._deny)
    def getPerson(self,name):
        return self.chat.getPerson(name, self)
    def onLine(self):
        self.account._isOnline = 1
        #print '$$!&*$&!(@$*& TOC ONLINE *!#@&$(!*%&'
    def gotConfig(self, mode, buddylist, permit, deny):
        """Server sent the stored config: register permit/deny lists and
        buddies (initially marked OFFLINE), then complete sign-on."""
        #print 'got toc config', repr(mode), repr(buddylist), repr(permit), repr(deny)
        self._config_mode = mode
        self._buddylist = buddylist
        self._permit = permit
        self._deny = deny
        if permit:
            self._debug('adding permit')
            self.add_permit(permit)
        if deny:
            self._debug('adding deny')
            self.add_deny(deny)
        clist=[]
        for k in buddylist.keys():
            self.add_buddy(buddylist[k])
            for name in buddylist[k]:
                self.getPerson(name).setStatusAndIdle(OFFLINE, '--')
        self.signon()
    # Screen name assigned by the server; None until the first NICK arrives.
    name = None
    def tocNICK(self,data):
        # First NICK establishes our identity; a second one fires the
        # logon deferred (sign-on is then fully complete).
        if not self.name:
            print 'Waiting for second NICK', data
            self.name=data[0]
            self.accountName = '%s (TOC)' % self.name
            self.chat.getContactsList()
        else:
            print 'reregistering...?', data
            self.name=data[0]
            # self.accountName = "%s (TOC)"%data[0]
            if self._logonDeferred is not None:
                self._logonDeferred.callback(self)
    ### Error Messages
    def hearError(self, code, args):
        print '*** TOC ERROR ***', repr(code), repr(args)
    def hearWarning(self, newamount, username):
        print '*** TOC WARNING ***', repr(newamount), repr(username)
    ### Buddy Messages
    def hearMessage(self,username,message,autoreply):
        if autoreply:
            message='<AUTO-REPLY>: '+message
        self.chat.getConversation(self.getPerson(username)
                                  ).showMessage(dehtml(message))
    def updateBuddy(self,username,online,evilness,signontime,idletime,userclass,away):
        # Map the TOC flags onto the generic AWAY/ONLINE/OFFLINE states.
        if away:
            status=AWAY
        elif online:
            status=ONLINE
        else:
            status=OFFLINE
        self.getPerson(username).setStatusAndIdle(status, idletime)
    ### Group Chat
    def chatJoined(self, roomid, roomname, users):
        self.roomID[roomname]=roomid
        self.roomIDreverse[roomid]=roomname
        self.getGroupConversation(roomname).setGroupMembers(users)
    def chatUpdate(self,roomid,member,inroom):
        group=self.roomIDreverse[roomid]
        if inroom:
            self.getGroupConversation(group).memberJoined(member)
        else:
            self.getGroupConversation(group).memberLeft(member)
    def chatHearMessage(self, roomid, username, message):
        # Skip our own messages echoed back by the server.
        if toc.normalize(username) == toc.normalize(self.name):
            return # ignore the message
        group=self.roomIDreverse[roomid]
        self.getGroupConversation(group).showGroupMessage(username, dehtml(message))
    def chatHearWhisper(self, roomid, username, message):
        print '*** user whispered *** ', roomid, username, message
    def chatInvited(self, roomid, roomname, username, message):
        print '*** user invited us to chat *** ',roomid, roomname, username, message
    def chatLeft(self, roomid):
        # Hide the conversation window and forget both id mappings.
        group=self.roomIDreverse[roomid]
        self.getGroupConversation(group,1)
        del self.roomID[group]
        del self.roomIDreverse[roomid]
    def rvousProposal(self,type,cookie,user,vip,port,**kw):
        print '*** rendezvous. ***', type, cookie, user, vip, port, kw
    def receiveBytes(self, user, file, chunk, sofar, total):
        print '*** File transfer! ***', user, file, chunk, sofar, total
    def joinGroup(self,name):
        # 4 is the TOC exchange number used for public chat rooms.
        self.chat_join(4,toc.normalize(name))
class TOCAccount(basesupport.AbstractAccount):
__implements__ = (interfaces.IAccount,)
gatewayType = "AIM (TOC)"
_groupFactory = TOCGroup
_personFactory = TOCPerson
def _startLogOn(self, chatui):
logonDeferred = defer.Deferred()
cc = protocol.ClientCreator(reactor, TOCProto, self, chatui,
logonDeferred)
d = cc.connectTCP(self.host, self.port)
d.ad |
#!/usr/bin/env python
# -*- coding=utf8 -*-
"""clear the mysql database yjy_archtecture tables in aa.txt"""
import MySQLdb as mdb
# NOTE(review): credentials are hard-coded here; consider loading them from a
# protected config file rather than committing them to source control.
db_conn = mdb.connect("localhost","root","HEkgDDZZ","yjy_human")
cursor = db_conn.cursor()
# One table name per line; whitespace is stripped before use.
with open("/root/scripts/clear_human_sql.tables") as f:
    tables = f.readlines()
print tables
try:
    for table in tables:
        tb = table.strip()
        print tb
        # TRUNCATE empties the table; the SELECT afterwards is a sanity check
        # that the table now returns no rows.
        sql = """TRUNCATE TABLE """+ tb
        cursor.execute(sql)
        data = cursor.fetchall()
        print data
        sql1 = """select * from """+ tb
        cursor.execute(sql1)
        data1 = cursor.fetchall()
        print data1
except mdb.Error, e:
    print e
db_conn.close()
|
#!/usr/bin/env python3
# Copyright (c) 2017-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"" | "Test message sending before handshake completion.
Before receiving a VERACK, a node should not send anything but VERSION/VERACK
and feature negotiation messages (WTXIDRELAY, SENDADDRV2).
This test conn | ects to a node and sends it a few messages, trying to entice it
into sending us something it shouldn't."""
import time
from test_framework.messages import (
msg_getaddr,
msg_ping,
msg_version,
)
from test_framework.p2p import (
P2PInterface,
P2P_SUBVERSION,
P2P_SERVICES,
P2P_VERSION_RELAY,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than_or_equal,
)
PEER_TIMEOUT = 3
class LazyPeer(P2PInterface):
    """Peer that never initiates the version handshake.

    Tracks whether the connection ever opened, whether any disallowed
    message arrived before the handshake, and whether the feature
    negotiation messages (wtxidrelay / sendaddrv2) were received.
    """

    def __init__(self):
        super().__init__()
        self.unexpected_msg = False
        self.ever_connected = False
        self.got_wtxidrelay = False
        self.got_sendaddrv2 = False

    def bad_message(self, message):
        self.unexpected_msg = True
        print("should not have received message: %s" % message.msgtype)

    def on_open(self):
        self.ever_connected = True

    # Does not respond to "version" with "verack"
    def on_version(self, message):
        self.bad_message(message)

    def on_verack(self, message):
        self.bad_message(message)

    def on_inv(self, message):
        self.bad_message(message)

    def on_addr(self, message):
        self.bad_message(message)

    def on_getdata(self, message):
        self.bad_message(message)

    def on_getblocks(self, message):
        self.bad_message(message)

    def on_tx(self, message):
        self.bad_message(message)

    def on_block(self, message):
        self.bad_message(message)

    def on_getaddr(self, message):
        self.bad_message(message)

    def on_headers(self, message):
        self.bad_message(message)

    def on_getheaders(self, message):
        self.bad_message(message)

    def on_ping(self, message):
        self.bad_message(message)

    def on_mempool(self, message):
        self.bad_message(message)

    def on_pong(self, message):
        self.bad_message(message)

    def on_feefilter(self, message):
        self.bad_message(message)

    def on_sendheaders(self, message):
        self.bad_message(message)

    def on_sendcmpct(self, message):
        self.bad_message(message)

    def on_cmpctblock(self, message):
        self.bad_message(message)

    def on_getblocktxn(self, message):
        self.bad_message(message)

    def on_blocktxn(self, message):
        self.bad_message(message)

    def on_wtxidrelay(self, message):
        self.got_wtxidrelay = True

    def on_sendaddrv2(self, message):
        self.got_sendaddrv2 = True
# Peer that sends a version but not a verack.
class NoVerackIdlePeer(LazyPeer):
    """Peer that answers with a version but deliberately withholds the verack."""

    def __init__(self):
        self.version_received = False
        super().__init__()

    def on_verack(self, message):
        pass

    def on_version(self, message):
        # Instead of completing the handshake, probe the node with messages
        # it should refuse to answer before receiving our verack. This is
        # not an exhaustive list!
        self.version_received = True
        self.send_message(msg_ping())
        self.send_message(msg_getaddr())
class P2PVersionStore(P2PInterface):
    """Completes the handshake normally while keeping the node's version msg."""
    # Last version message received from the node, or None.
    version_received = None
    def on_version(self, msg):
        # Responds with an appropriate verack
        super().on_version(msg)
        self.version_received = msg
class P2PLeakTest(BitcoinTestFramework):
    """Check that the node leaks nothing before handshake completion and that
    its version message does not reveal the local address."""
    def set_test_params(self):
        self.num_nodes = 1
        # Short peer timeout so the idle peers below are evicted quickly.
        self.extra_args = [[f"-peertimeout={PEER_TIMEOUT}"]]
    def create_old_version(self, nversion):
        # Build a version message advertising the given (old) protocol version.
        old_version_msg = msg_version()
        old_version_msg.nVersion = nversion
        old_version_msg.strSubVer = P2P_SUBVERSION
        old_version_msg.nServices = P2P_SERVICES
        old_version_msg.relay = P2P_VERSION_RELAY
        return old_version_msg
    def run_test(self):
        self.log.info('Check that the node doesn\'t send unexpected messages before handshake completion')
        # Peer that never sends a version, nor any other messages. It shouldn't receive anything from the node.
        no_version_idle_peer = self.nodes[0].add_p2p_connection(LazyPeer(), send_version=False, wait_for_verack=False)
        # Peer that sends a version but not a verack.
        no_verack_idle_peer = self.nodes[0].add_p2p_connection(NoVerackIdlePeer(), wait_for_verack=False)
        # Pre-wtxidRelay peer that sends a version but not a verack and does not support feature negotiation
        # messages which start at nVersion == 70016
        pre_wtxidrelay_peer = self.nodes[0].add_p2p_connection(NoVerackIdlePeer(), send_version=False, wait_for_verack=False)
        pre_wtxidrelay_peer.send_message(self.create_old_version(70015))
        # Wait until the peer gets the verack in response to the version. Though, don't wait for the node to receive the
        # verack, since the peer never sent one
        no_verack_idle_peer.wait_for_verack()
        pre_wtxidrelay_peer.wait_for_verack()
        no_version_idle_peer.wait_until(lambda: no_version_idle_peer.ever_connected)
        no_verack_idle_peer.wait_until(lambda: no_verack_idle_peer.version_received)
        pre_wtxidrelay_peer.wait_until(lambda: pre_wtxidrelay_peer.version_received)
        # Mine a block and make sure that it's not sent to the connected peers
        self.nodes[0].generate(nblocks=1)
        # Give the node enough time to possibly leak out a message
        time.sleep(PEER_TIMEOUT + 2)
        # Make sure only expected messages came in
        assert not no_version_idle_peer.unexpected_msg
        assert not no_version_idle_peer.got_wtxidrelay
        assert not no_version_idle_peer.got_sendaddrv2
        assert not no_verack_idle_peer.unexpected_msg
        assert no_verack_idle_peer.got_wtxidrelay
        assert no_verack_idle_peer.got_sendaddrv2
        assert not pre_wtxidrelay_peer.unexpected_msg
        assert not pre_wtxidrelay_peer.got_wtxidrelay
        assert not pre_wtxidrelay_peer.got_sendaddrv2
        # Expect peers to be disconnected due to timeout
        assert not no_version_idle_peer.is_connected
        assert not no_verack_idle_peer.is_connected
        assert not pre_wtxidrelay_peer.is_connected
        self.log.info('Check that the version message does not leak the local address of the node')
        p2p_version_store = self.nodes[0].add_p2p_connection(P2PVersionStore())
        ver = p2p_version_store.version_received
        # Check that received time is within one hour of now
        assert_greater_than_or_equal(ver.nTime, time.time() - 3600)
        assert_greater_than_or_equal(time.time() + 3600, ver.nTime)
        assert_equal(ver.addrFrom.port, 0)
        assert_equal(ver.addrFrom.ip, '0.0.0.0')
        assert_equal(ver.nStartingHeight, 201)
        assert_equal(ver.relay, 1)
        self.log.info('Check that old peers are disconnected')
        p2p_old_peer = self.nodes[0].add_p2p_connection(P2PInterface(), send_version=False, wait_for_verack=False)
        with self.nodes[0].assert_debug_log(['peer=4 using obsolete version 31799; disconnecting']):
            p2p_old_peer.send_message(self.create_old_version(31799))
            p2p_old_peer.wait_for_disconnect()
# Script entry point: run the functional test when invoked directly.
if __name__ == '__main__':
    P2PLeakTest().main()
|
#!/usr/bin/python -Wall
# ================================================================
# John Kerl
# kerl.john.r@gmail.com
# 2008-02-06
# ================================================================
from __future__ import division # 1/2 = 0.5, not 0.
import sys
import randc_m # For random complex scalars
import math
from sackmatc_m import *
# ----------------------------------------------------------------
def randmatc(m, n):
    """Return an m x n matrix whose entries are random complex scalars
    with mean square 1 (drawn via randc_m.randc_mean_sq_1)."""
    A = make_zero_matrix(m, n)
    for row in range(0, m):
        for col in range(0, n):
            A[row][col] = randc_m.randc_mean_sq_1()
    return A
# ----------------------------------------------------------------
def randsqmatc(n):
    # Convenience wrapper: square (n x n) random complex matrix.
    return randmatc(n, n)
# ----------------------------------------------------------------
def randgue(n):
    """Return an n x n GUE-style random matrix: random upper triangle,
    lower triangle filled with the conjugate transpose entries.

    NOTE(review): the diagonal entries are complex random values; a strict
    GUE sample has a real diagonal -- confirm callers' expectations.
    """
    A = make_zero_matrix(n, n)
    # Fill the diagonal and upper triangle with independent random entries.
    for row in range(0, n):
        for col in range(row, n):
            A[row][col] = randc_m.randc_mean_sq_1()
    # Mirror conjugates into the lower triangle.
    for row in range(0, n):
        for col in range(0, row):
            A[row][col] = conj(A[col][row])
    return A
# ------- | ---------------------------------------------------------
def randxxt(n):
    # NOTE(review): despite the name 'xxt' (suggesting X * X-transpose), this
    # computes A.transpose() * A, i.e. X^T X. For square X the two share a
    # spectrum, but confirm which convention callers expect.
    A = randmatc(n, n)
    return A.transpose() * A
|
from django.db import | models
from .user import User
from .post import Post
class Medal(models.Model):
    """A ranking medal awarded to a user's post."""

    # Numeric rank of the medal (lower is better, e.g. 1 = first place).
    rank = models.IntegerField()
    # on_delete is a required argument from Django 2.0 on; CASCADE matches the
    # implicit pre-2.0 default, so runtime behavior is unchanged. This also
    # makes the field consistent with the OneToOneField below.
    user = models.ForeignKey(User, on_delete=models.CASCADE, related_name='medals')
    post = models.OneToOneField(Post, on_delete=models.CASCADE, related_name='medal')

    def __str__(self):
        return "%s %s" % (self.post.title, self.rank)
|
.dirpath, self.file])
def __eq__(self, other):
    # Two translatable files are equal when their full paths match.
    # NOTE(review): defining __eq__ without __hash__ makes instances
    # unhashable on Python 3 -- confirm the class is never used in sets/dicts.
    return self.path == other.path
def __lt__(self, other):
    # Order files lexicographically by full path (enables stable sorting).
    return self.path < other.path
@property
def path(self):
    # Full path of the file: directory joined with the bare file name.
    return os.path.join(self.dirpath, self.file)
def process(self, command, domain):
    """
    Extract translatable literals from self.file for :param domain:,
    creating or updating the POT file.
    Uses the xgettext GNU gettext utility.
    """
    from django.utils.translation import templatize
    if command.verbosity > 1:
        command.stdout.write('processing file %s in %s\n' % (self.file, self.dirpath))
    file_ext = os.path.splitext(self.file)[1]
    if domain == 'djangojs':
        orig_file = os.path.join(self.dirpath, self.file)
        work_file = orig_file
        # Old xgettext releases (< 0.18.3) cannot parse JavaScript directly,
        # so the source is first rewritten into C-like syntax ("templatized")
        # and fed to xgettext as C.
        is_templatized = command.gettext_version < (0, 18, 3)
        if is_templatized:
            with io.open(orig_file, 'r', encoding=settings.FILE_CHARSET) as fp:
                src_data = fp.read()
            src_data = prepare_js_for_gettext(src_data)
            work_file = os.path.join(self.dirpath, '%s.c' % self.file)
            with io.open(work_file, "w", encoding='utf-8') as fp:
                fp.write(src_data)
        args = [
            'xgettext',
            '-d', domain,
            '--language=%s' % ('C' if is_templatized else 'JavaScript',),
            '--keyword=gettext_noop',
            '--keyword=gettext_lazy',
            '--keyword=ngettext_lazy:1,2',
            '--keyword=pgettext:1c,2',
            '--keyword=npgettext:1c,2,3',
            '--output=-'
        ] + command.xgettext_options
        args.append(work_file)
    elif domain == 'django':
        orig_file = os.path.join(self.dirpath, self.file)
        work_file = orig_file
        # Non-Python files (templates) are converted to Python source before
        # extraction.
        is_templatized = file_ext != '.py'
        if is_templatized:
            with io.open(orig_file, encoding=settings.FILE_CHARSET) as fp:
                src_data = fp.read()
            content = templatize(src_data, orig_file[2:])
            work_file = os.path.join(self.dirpath, '%s.py' % self.file)
            with io.open(work_file, "w", encoding='utf-8') as fp:
                fp.write(content)
        args = [
            'xgettext',
            '-d', domain,
            '--language=Python',
            '--keyword=gettext_noop',
            '--keyword=gettext_lazy',
            '--keyword=ngettext_lazy:1,2',
            '--keyword=ugettext_noop',
            '--keyword=ugettext_lazy',
            '--keyword=ungettext_lazy:1,2',
            '--keyword=pgettext:1c,2',
            '--keyword=npgettext:1c,2,3',
            '--keyword=pgettext_lazy:1c,2',
            '--keyword=npgettext_lazy:1c,2,3',
            '--output=-'
        ] + command.xgettext_options
        args.append(work_file)
    else:
        # Unknown domain: nothing to extract.
        return
    msgs, errors, status = gettext_popen_wrapper(args)
    if errors:
        if status != STATUS_OK:
            # Clean up the temporary templatized file before failing.
            if is_templatized:
                os.unlink(work_file)
            raise CommandError(
                "errors happened while running xgettext on %s\n%s" %
                (self.file, errors))
        elif command.verbosity > 0:
            # Print warnings
            command.stdout.write(errors)
    if msgs:
        # Write/append messages to pot file
        potfile = os.path.join(self.locale_dir, '%s.pot' % str(domain))
        if is_templatized:
            # Remove '.py' suffix
            if os.name == 'nt':
                # Preserve '.\' prefix on Windows to respect gettext behavior
                old = '#: ' + work_file
                new = '#: ' + orig_file
            else:
                old = '#: ' + work_file[2:]
                new = '#: ' + orig_file[2:]
            msgs = msgs.replace(old, new)
        write_pot_file(potfile, msgs)
    if is_templatized:
        os.unlink(work_file)
def write_pot_file(potfile, msgs):
    """
    Write the :param potfile: POT file with the :param msgs: contents,
    previously making sure its format is valid.
    """
    if not os.path.exists(potfile):
        # First write: fix up the placeholder charset emitted by xgettext.
        header_fixed = msgs.replace('charset=CHARSET', 'charset=UTF-8')
        with io.open(potfile, 'a', encoding='utf-8') as fp:
            fp.write(header_fixed)
        return
    # The file already carries a header; drop the leading non-empty header
    # lines of the new chunk before appending.
    stripped = '\n'.join(dropwhile(len, msgs.split('\n')))
    with io.open(potfile, 'a', encoding='utf-8') as fp:
        fp.write(stripped)
class Command(BaseCommand):
help = ("Runs over the entire source tree of the current directory and "
"pulls out all strings marked for translation. It creates (or updates) a message "
"file in the conf/locale (in the django tree) or locale (for projects and "
"applications) directory.\n\nYou must run this command with one of either the "
"--locale, --exclude or --all options.")
requires_system_checks = False
leave_locale_alone = True
msgmerge_options = ['-q', '--previous']
msguniq_options = ['--to-code=utf-8']
msgattrib_options = ['--no-obsolete']
xgettext_options = ['--from-code=UTF-8', '--add-comments=Translators']
def add_arguments(self, parser):
    """Register makemessages' command-line options on *parser*."""
    parser.add_argument('--locale', '-l', default=[], dest='locale', action='append',
        help='Creates or updates the message files for the given locale(s) (e.g. pt_BR). '
             'Can be used multiple times.')
    parser.add_argument('--exclude', '-x', default=[], dest='exclude', action='append',
        help='Locales to exclude. Default is none. Can be used multiple times.')
    parser.add_argument('--domain', '-d', default='django', dest='domain',
        help='The domain of the message files (default: "django").')
    parser.add_argument('--all', '-a', action='store_true', dest='all',
        default=False, help='Updates the message files for all existing locales.')
    parser.add_argument('--extension', '-e', dest='extensions',
        help='The file extension(s) to examine (default: "html,txt", or "js" '
             'if the domain is "djangojs"). Separate multiple extensions with '
             'commas, or use -e multiple times.',
        action='append')
    parser.add_argument('--symlinks', '-s', action='store_true', dest='symlinks',
        default=False, help='Follows symlinks to directories when examining '
        'source code and templates for translation strings.')
    parser.add_argument('--ignore', '-i', action='append', dest='ignore_patterns',
        default=[], metavar='PATTERN',
        help='Ignore files or directories matching this glob-style pattern. '
             'Use multiple times to ignore more.')
    parser.add_argument('--no-default-ignore', action='store_false', dest='use_default_ignore_patterns',
        default=True, help="Don't ignore the common glob-style patterns 'CVS', '.*', '*~' and '*.pyc'.")
    parser.add_argument('--no-wrap', action='store_true', dest='no_wrap',
        default=False, help="Don't break long message lines into several lines.")
    parser.add_argument('--no-location', action='store_true', dest='no_location',
        default=False, help="Don't write '#: filename:line' lines.")
    parser.add_argument('--no-obsolete', action='store_true', dest='no_obsolete',
        default=False, help="Remove obsolete message strings.")
    parser.add_argument('--keep-pot', action='store_true', dest='keep_pot',
        default=False, help="Keep .pot file after making messages. Useful when debugging.")
def handle(self, *args, **options):
locale = options.get('locale')
exclude = options.get('exclude')
self.domain = options.get('domain')
self.verbosity = options.get('verbosity')
|
from steerclear import app
from steerclear.forms import RideForm
from steerclear.models import *
import unittest, flask
"""
RideFormTestCase
----------------
Test class for the RideForm class
"""
class RideFormTestCase(unittest.TestCase):
    """
    submit_form
    -----------
    helper method to submit a RideForm by faking
    a request context. Returns True is the form
    validated and False if not.
    *payload* is a dictionary of name/value pairs
    of the form data that is being submitted
    """
    def submit_form(self, payload):
        with app.test_request_context():
            return RideForm(data=payload).validate()

    def setUp(self):
        self.payload = {
            u"num_passengers": 4,
            u"start_latitude": 1.1,
            u"start_longitude": 2.2,
            u"end_latitude": 3.3,
            u"end_longitude": 4.4,
        }

    """
    test_ride_form_correct_submit
    -----------------------------
    Tests that a RideForm can be validated correctly
    """
    def test_ride_form_correct_submit(self):
        self.assertTrue(self.submit_form(self.payload))

    """
    test_data_required_fields
    -------------------------
    tests that a RideForm is not valid unless
    all fields are included in the form data
    """
    def test_data_required_fields(self):
        for missing_field in self.payload.keys():
            incomplete = self.payload.copy()
            incomplete.pop(missing_field, None)
            self.assertFalse(self.submit_form(incomplete))

    """
    test_num_passengers_min_range
    -----------------------------
    Tests that a RideForm accepts the correct min
    range value for the 'num_passengers' field
    """
    def test_num_passengers_min_range(self):
        at_minimum = self.payload.copy()
        at_minimum[u'num_passengers'] = 1
        self.assertTrue(self.submit_form(at_minimum))

    """
    test_num_passengers_max_range
    -----------------------------
    Tests that a RideForm accepts the correct max
    range value for the 'num_passengers' field
    """
    def test_num_passengers_max_range(self):
        at_maximum = self.payload.copy()
        at_maximum[u'num_passengers'] = 8
        self.assertTrue(self.submit_form(at_maximum))

    """
    test_num_passengers_bad_range
    -----------------------------
    Tests that a RideForm does not accept values
    for the 'num_passengers' field that are out of range
    """
    def test_num_passengers_bad_range(self):
        for bad_count in (0, -1, -100, 9, 100):
            invalid = self.payload.copy()
            invalid[u'num_passengers'] = bad_count
            self.assertFalse(self.submit_form(invalid))
|
'''
urlresolver XBMC Addon
Copyright (C) 2017
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from urlresolver.plugins.__generic_resolver__ import GenericResolver
class BitPornoResolver(GenericResolver):
    """URL resolver for bitporno.com hosted media.

    Delegates resolution to GenericResolver._default_get_url with a
    host-specific URL template.
    """
    name = 'BitPorno'
    domains = ['bitporno.com']
    # Raw string so the regex backslash escapes are not treated as
    # string escapes.
    pattern = r'(?://|\.)(bitporno\.com)/(?:\?v=|embed/)([a-zA-Z0-9]+)'

    def get_url(self, host, media_id):
        """Return the playback URL for *media_id* on *host*."""
        # Debug prints and the unreachable hard-coded return that followed
        # this statement were removed; the template produces the same URL.
        return self._default_get_url(host, media_id, template='http://{host}/?v={media_id}')

    @classmethod
    def _is_enabled(cls):
        # Always enabled; this resolver has no settings toggle.
        return True
|
from django.conf.urls import url
from . import views
# Application namespace: lets callers reverse URLs as 'persons:<name>'.
app_name = 'persons'
# URL routes for the persons app; each entry maps a path regex to a view
# callable and gives it a name for reversing.
# NOTE(review): django.conf.urls.url is deprecated in modern Django in
# favor of re_path/path — presumably this project pins an older Django;
# confirm before upgrading.
urlpatterns = [
    url(r'^$', views.index, name='index'),
    url(r'^contact/$', views.contact, name='contact'),
    url(r'^thanks/$', views.thanks, name='thanks'),
    url(r'^upload/$', views.upload_file, name='upload_file'),
    url(r'^success/$', views.success, name='success'),
    url(r'^uploadImage/$', views.uploadImage, name='uploadImage'),
]
|
import cStringIO as StringIO
import pytest
import time
from Connection import ConnectionServer
from Conn | ection import Connection
from File import FileServer
@pytest.mark.usefixtures("resetSettings")
@pytest.mark.usefixtures("resetTempSettings")
class TestFileRequest:
    """Integration tests for the file server's network request commands
    ("getFile", "streamFile", "pex"), run over real loopback connections
    between a client on port 1545 and the fixture server on port 1544."""

    def testGetFile(self, file_server, site):
        """"getFile" returns file content and reports errors for invalid
        paths, out-of-range locations, parent-dir traversal and unknown
        site addresses."""
        file_server.ip_incoming = {} # Reset flood protection
        client = ConnectionServer("127.0.0.1", 1545)

        connection = client.getConnection("127.0.0.1", 1544)
        # Register the fixture site so the server can serve its files.
        file_server.sites[site.address] = site
        # Valid request: content.json is signed, so "sign" appears in the body.
        response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0})
        assert "sign" in response["body"]
        # Invalid file
        response = connection.request("getFile", {"site": site.address, "inner_path": "invalid.file", "location": 0})
        assert "File read error" in response["error"]
        # Location over size
        response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024})
        assert "File read error" in response["error"]
        # Stream from parent dir
        response = connection.request("getFile", {"site": site.address, "inner_path": "../users.json", "location": 0})
        assert "File read error" in response["error"]
        # Invalid site
        response = connection.request("getFile", {"site": "", "inner_path": "users.json", "location": 0})
        assert "Unknown site" in response["error"]
        response = connection.request("getFile", {"site": ".", "inner_path": "users.json", "location": 0})
        assert "Unknown site" in response["error"]
        connection.close()
        client.stop()

    def testStreamFile(self, file_server, site):
        """"streamFile" writes the file content into the caller-supplied
        buffer and reports the same error cases as "getFile"."""
        file_server.ip_incoming = {} # Reset flood protection
        client = ConnectionServer("127.0.0.1", 1545)
        connection = client.getConnection("127.0.0.1", 1544)
        file_server.sites[site.address] = site
        buff = StringIO.StringIO()
        # Valid request: streamed bytes land in buff, not in the response body.
        response = connection.request("streamFile", {"site": site.address, "inner_path": "content.json", "location": 0}, buff)
        assert "stream_bytes" in response
        assert "sign" in buff.getvalue()
        # Invalid file
        buff = StringIO.StringIO()
        response = connection.request("streamFile", {"site": site.address, "inner_path": "invalid.file", "location": 0}, buff)
        assert "File read error" in response["error"]
        # Location over size
        buff = StringIO.StringIO()
        response = connection.request(
            "streamFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024}, buff
        )
        assert "File read error" in response["error"]
        # Stream from parent dir
        buff = StringIO.StringIO()
        response = connection.request("streamFile", {"site": site.address, "inner_path": "../users.json", "location": 0}, buff)
        assert "File read error" in response["error"]
        connection.close()
        client.stop()

    def testPex(self, file_server, site, site_temp):
        """Peer exchange: a peer known to the server-side site is
        propagated to the client-side site via "pex"."""
        file_server.sites[site.address] = site
        client = FileServer("127.0.0.1", 1545)
        client.sites[site_temp.address] = site_temp
        site_temp.connection_server = client
        connection = client.getConnection("127.0.0.1", 1544)
        # Add new fake peer to site
        fake_peer = site.addPeer("1.2.3.4", 11337, return_peer=True)
        # Add fake connection to it
        fake_peer.connection = Connection(file_server, "1.2.3.4", 11337)
        # A recent last_recv_time makes the fake peer count as connectable.
        fake_peer.connection.last_recv_time = time.time()
        assert fake_peer in site.getConnectablePeers()
        # Add file_server as peer to client
        peer_file_server = site_temp.addPeer("127.0.0.1", 1544)
        # The fake peer must only be known to site_temp after pex succeeds.
        assert "1.2.3.4:11337" not in site_temp.peers
        assert peer_file_server.pex()
        assert "1.2.3.4:11337" in site_temp.peers
        connection.close()
        client.stop()
|
from gonullu.farm import Farm
f | rom gonullu.docker import Docker
from gonullu.volunteer import Volunt | eer
from gonullu.log import Log
|
# NOTE(review): the lines below are web-page scrape residue (dataset-viewer
# boilerplate), not Python; commented out so the module stays parseable.
# Subsets and Splits
# No community queries yet
# The top public SQL queries from the community will appear here once available.