code
stringlengths 1
199k
|
|---|
from openstack_dashboard.test.integration_tests.pages import basepage
from openstack_dashboard.test.integration_tests.regions import forms
from openstack_dashboard.test.integration_tests.regions import tables
class KeypairsTable(tables.TableRegion):
    """Region object for the key pairs table and its bound actions."""
    name = "keypairs"

    # Fields of the keypair creation form, in display order.
    CREATE_KEY_PAIR_FORM_FIELDS = ('name',)

    @tables.bind_table_action('create')
    def create_keypair(self, create_button):
        """Click the table-level Create action and return the creation form."""
        create_button.click()
        return forms.FormRegion(
            self.driver, self.conf,
            field_mappings=self.CREATE_KEY_PAIR_FORM_FIELDS)

    @tables.bind_row_action('delete')
    def delete_keypair(self, delete_button, row):
        """Click a single row's Delete action and return the confirmation form."""
        delete_button.click()
        return forms.BaseFormRegion(self.driver, self.conf)

    @tables.bind_table_action('delete')
    def delete_keypairs(self, delete_button):
        """Click the table-level (bulk) Delete action and return the confirmation form."""
        delete_button.click()
        return forms.BaseFormRegion(self.driver, self.conf)
class KeypairsPage(basepage.BaseNavigationPage):
    """Page object for the key pairs panel of Access & Security."""

    KEY_PAIRS_TABLE_ACTIONS = ("create", "import", "delete")
    KEY_PAIRS_TABLE_ROW_ACTION = "delete"
    KEY_PAIRS_TABLE_NAME_COLUMN = 'name'

    def __init__(self, driver, conf):
        super(KeypairsPage, self).__init__(driver, conf)
        self._page_title = "Access & Security"

    @property
    def keypairs_table(self):
        """A fresh table region bound to the key pairs table."""
        return KeypairsTable(self.driver, self.conf)

    @property
    def delete_keypair_form(self):
        """The delete confirmation form region."""
        return forms.BaseFormRegion(self.driver, self.conf, None)

    def _get_row_with_keypair_name(self, name):
        # Look the row up by its name column.
        column = self.KEY_PAIRS_TABLE_NAME_COLUMN
        return self.keypairs_table.get_row(column, name)

    def is_keypair_present(self, name):
        """Return whether a row for the given keypair name exists."""
        return bool(self._get_row_with_keypair_name(name))

    def create_keypair(self, keypair_name):
        """Create a new keypair via the table's Create form."""
        form = self.keypairs_table.create_keypair()
        form.name.text = keypair_name
        form.submit()

    def delete_keypair(self, name):
        """Delete a keypair through its row-level Delete action."""
        row = self._get_row_with_keypair_name(name)
        confirmation = self.keypairs_table.delete_keypair(row)
        confirmation.submit()

    def delete_keypairs(self, name):
        """Delete a keypair by marking its row and using the bulk Delete action."""
        row = self._get_row_with_keypair_name(name)
        row.mark()
        confirmation = self.keypairs_table.delete_keypairs()
        confirmation.submit()
|
"""Controllers for simple, mostly-static pages (like About, Forum, etc.)."""
__author__ = 'sll@google.com (Sean Lip)'
import urllib
import urlparse
from core.controllers import base
from core.controllers import editor
from core.domain import config_domain
import feconf
# Runtime-editable site configuration values used by the static pages below.

# Optional YouTube video shown on the About page; empty string means no video.
ABOUT_PAGE_YOUTUBE_VIDEO_ID = config_domain.ConfigProperty(
    'about_page_youtube_video_id', {'type': 'unicode'},
    'The (optional) video id for the About page',
    default_value='')
# Contact address displayed on the About/Participate pages.
ADMIN_EMAIL_ADDRESS = config_domain.ConfigProperty(
    'admin_email_address', {'type': 'unicode'},
    'The admin email address to display on the About pages',
    default_value='ADMIN_EMAIL_ADDRESS')
# Base URL for the Google Group iframe embedded in the Forum page.
EMBEDDED_GOOGLE_GROUP_URL = config_domain.ConfigProperty(
    'embedded_google_group_url', {'type': 'unicode'},
    'The URL for the embedded Google Group in the Forum page',
    default_value=(
        'https://groups.google.com/forum/embed/?place=forum/oppia'))
# External forum URL used for plain links (distinct from the embedded page).
SITE_FORUM_URL = config_domain.ConfigProperty(
    'site_forum_url', {'type': 'unicode'},
    'The site forum URL (for links; the Forum page is configured separately)',
    default_value='https://site/forum/url')
SITE_NAME = config_domain.ConfigProperty(
    'site_name', {'type': 'unicode'}, 'The site name',
    default_value='SITE_NAME')

# Exploration embedded on the About page.
_ABOUT_EXPLORATION_ID = '14'
class AboutPage(base.BaseHandler):
    """Page with information about Oppia."""

    def get(self):
        """Handles GET requests."""
        page_context = {
            'ABOUT_EXPLORATION_ID': _ABOUT_EXPLORATION_ID,
            'ABOUT_PAGE_YOUTUBE_VIDEO_ID': ABOUT_PAGE_YOUTUBE_VIDEO_ID.value,
            'ADMIN_EMAIL_ADDRESS': ADMIN_EMAIL_ADDRESS.value,
            'MODERATOR_REQUEST_FORUM_URL': (
                editor.MODERATOR_REQUEST_FORUM_URL.value),
            'SITE_FORUM_URL': SITE_FORUM_URL.value,
            'SITE_NAME': SITE_NAME.value,
            'nav_mode': feconf.NAV_MODE_ABOUT,
        }
        self.values.update(page_context)
        self.render_template('pages/about.html')
class ParticipatePage(base.BaseHandler):
    """Page with information about participating in Oppia."""

    def get(self):
        """Handles GET requests."""
        page_context = {
            'ADMIN_EMAIL_ADDRESS': ADMIN_EMAIL_ADDRESS.value,
            'MODERATOR_REQUEST_FORUM_URL': (
                editor.MODERATOR_REQUEST_FORUM_URL.value),
            'SITE_FORUM_URL': SITE_FORUM_URL.value,
            'SITE_NAME': SITE_NAME.value,
            'nav_mode': feconf.NAV_MODE_PARTICIPATE,
        }
        self.values.update(page_context)
        self.render_template('pages/participate.html')
class ForumPage(base.BaseHandler):
    """Page with an embedded forum."""

    def get(self):
        """Handles GET requests."""
        if not feconf.SHOW_FORUM_PAGE:
            raise self.PageNotFoundException
        # Note: if you are working in the development environment and
        # are accessing this page at localhost, please replace
        # 'localhost' with '127.0.0.1'.
        _, netloc, _, _, _ = urlparse.urlsplit(self.request.uri)
        parent_url = urllib.quote(self.request.uri, safe='')
        group_url = '%s&showtabs=false&hideforumtitle=true&parenturl=%s' % (
            EMBEDDED_GOOGLE_GROUP_URL.value, parent_url)
        self.values.update({
            'EMBEDDED_GOOGLE_GROUP_URL': group_url,
            'on_localhost': netloc.startswith('localhost'),
        })
        self.render_template('pages/forum.html')
|
"""
Home for functionality that provides context managers, and anything related to
making those context managers function.
"""
import logging
from contextlib import contextmanager
from tools.env import ALLOW_NOISY_LOGGING
@contextmanager
def log_filter(log_id, expected_strings=None):
    """
    Context manager which allows silencing logs until exit.

    Log records matching expected_strings are filtered out of logging for
    the logger identified by log_id. If expected_strings is not provided,
    everything is filtered for that log.
    """
    logger = logging.getLogger(log_id)
    # Note: renamed from ``log_filter`` so the local no longer shadows
    # this function's own name.
    silencer = _make_filter_class(expected_strings)
    logger.addFilter(silencer)
    try:
        yield
    finally:
        # Always detach the filter -- even when the managed block raises --
        # so the logger is not left permanently silenced (the original code
        # skipped removal on exception).
        if silencer.records_silenced > 0:
            print("Logs were filtered to remove messages deemed unimportant, total count: %d" % silencer.records_silenced)
        logger.removeFilter(silencer)
def _make_filter_class(expected_strings):
    """
    Builds an anon-ish filtering class and returns it.

    Returns a logfilter if filtering should take place, otherwise a
    nooplogfilter. We're just using a class here as a one-off object with a
    filter method, for use as a filter object on the desired log.
    """
    class nooplogfilter(object):
        # Pass-through filter: keeps every record.
        records_silenced = 0

        @classmethod
        def filter(cls, record):
            return True

    class logfilter(object):
        # Dropping filter: silences matching records and counts them.
        records_silenced = 0

        @classmethod
        def filter(cls, record):
            silence = expected_strings is None or any(
                s in record.msg or s in record.name
                for s in expected_strings)
            if silence:
                cls.records_silenced += 1
                return False
            return True

    return nooplogfilter if ALLOW_NOISY_LOGGING else logfilter
|
"""Contrib vision utilities."""
from .transforms import *
from .dataloader import *
|
from pyspider.libs.base_handler import *
class Handler(BaseHandler):
    """Example crawler: seed at scrapy.org, follow http links, scrape titles."""

    crawl_config = {
    }

    @every(minutes=24 * 60)
    def on_start(self):
        """Kick off the crawl once a day from the seed URL."""
        self.crawl('http://scrapy.org/', callback=self.index_page)

    @config(age=10 * 24 * 60 * 60)
    def index_page(self, response):
        """Queue every absolute http(s) link found on an index page."""
        for link in response.doc('a[href^="http"]').items():
            self.crawl(link.attr.href, callback=self.detail_page)

    @config(priority=2)
    def detail_page(self, response):
        """Return the page's URL and title as the scrape result."""
        return {
            "url": response.url,
            "title": response.doc('title').text(),
        }
|
import argparse
import code
import sys
import threading
import time
import six
from six.moves.urllib.parse import urlparse
import websocket
try:
    # Optional: enables line editing and history for interactive input.
    import readline
except ImportError:
    pass
def get_encoding():
    """Return stdin's encoding, lower-cased, defaulting to "utf-8".

    Falls back to "utf-8" when sys.stdin has no usable encoding attribute
    (e.g. when stdin has been replaced or piped).
    """
    encoding = getattr(sys.stdin, "encoding", "")
    return encoding.lower() if encoding else "utf-8"
# Frame opcodes that carry user payload (as opposed to control frames).
OPCODE_DATA = (websocket.ABNF.OPCODE_TEXT, websocket.ABNF.OPCODE_BINARY)
# Cached stdin encoding, used when re-encoding console input.
ENCODING = get_encoding()
class VAction(argparse.Action):
    """argparse action turning "-v", "-vv", "-v 2", ... into an int level.

    A bare flag counts as 1; a run of v's counts them plus one; a numeric
    value is used directly.
    """

    def __call__(self, parser, args, values, option_string=None):
        raw = "1" if values is None else values
        try:
            level = int(raw)
        except ValueError:
            level = raw.count("v") + 1
        setattr(args, self.dest, level)
def parse_args():
    """Build the command line parser for the dump tool and parse sys.argv."""
    arg_parser = argparse.ArgumentParser(description="WebSocket Simple Dump Tool")
    arg_parser.add_argument("url", metavar="ws_url",
                            help="websocket url. ex. ws://echo.websocket.org/")
    arg_parser.add_argument("-p", "--proxy",
                            help="proxy url. ex. http://127.0.0.1:8080")
    # Verbosity is cumulative: handled by the custom VAction above.
    arg_parser.add_argument("-v", "--verbose", default=0, nargs='?',
                            action=VAction, dest="verbose",
                            help="set verbose mode. If set to 1, show opcode. "
                                 "If set to 2, enable to trace websocket module")
    arg_parser.add_argument("-n", "--nocert", action='store_true',
                            help="Ignore invalid SSL cert")
    arg_parser.add_argument("-r", "--raw", action="store_true",
                            help="raw output")
    arg_parser.add_argument("-s", "--subprotocols", nargs='*',
                            help="Set subprotocols")
    arg_parser.add_argument("-o", "--origin",
                            help="Set origin")
    arg_parser.add_argument("--eof-wait", default=0, type=int,
                            help="wait time(second) after 'EOF' received.")
    arg_parser.add_argument("-t", "--text",
                            help="Send initial text")
    arg_parser.add_argument("--timings", action="store_true",
                            help="Print timings in seconds")
    arg_parser.add_argument("--headers",
                            help="Set custom headers. Use ',' as separator")
    return arg_parser.parse_args()
class RawInput:
    """Prompted line input normalized to utf-8 bytes (Python 2/3 compatible)."""

    def raw_input(self, prompt):
        if six.PY3:
            line = input(prompt)
        else:
            line = raw_input(prompt)
        # Transcode byte input from a non-utf-8 terminal; encode text input.
        needs_transcode = ENCODING and ENCODING != "utf-8"
        if needs_transcode and not isinstance(line, six.text_type):
            line = line.decode(ENCODING).encode("utf-8")
        elif isinstance(line, six.text_type):
            line = line.encode("utf-8")
        return line
class InteractiveConsole(RawInput, code.InteractiveConsole):
    """Console that prints received frames in color above the input prompt."""

    def write(self, data):
        # Clear the current line, print the message in blue, redraw the prompt.
        out = sys.stdout
        out.write("\033[2K\033[E")
        out.write("\033[34m< " + data + "\033[39m")
        out.write("\n> ")
        out.flush()

    def read(self):
        return self.raw_input("> ")
class NonInteractive(RawInput):
    """Plain line-oriented stdout writer, used for --raw output."""

    def write(self, data):
        stream = sys.stdout
        stream.write(data)
        stream.write("\n")
        stream.flush()

    def read(self):
        return self.raw_input("")
def main():
    """Connect to the websocket URL and bridge it to the console.

    A daemon reader thread prints every received frame while the main
    thread forwards console input to the socket until EOF or Ctrl+C.
    """
    start_time = time.time()
    args = parse_args()
    if args.verbose > 1:
        # Level 2+ also traces the websocket module internals.
        websocket.enableTrace(True)
    options = {}
    if args.proxy:
        p = urlparse(args.proxy)
        options["http_proxy_host"] = p.hostname
        options["http_proxy_port"] = p.port
    if args.origin:
        options["origin"] = args.origin
    if args.subprotocols:
        options["subprotocols"] = args.subprotocols
    opts = {}
    if args.nocert:
        # --nocert: skip certificate and hostname verification.
        opts = {"cert_reqs": websocket.ssl.CERT_NONE, "check_hostname": False}
    if args.headers:
        options['header'] = map(str.strip, args.headers.split(','))
    ws = websocket.create_connection(args.url, sslopt=opts, **options)
    if args.raw:
        console = NonInteractive()
    else:
        console = InteractiveConsole()
        print("Press Ctrl+C to quit")

    def recv():
        # Receive one frame; handle close/ping control frames inline.
        try:
            frame = ws.recv_frame()
        except websocket.WebSocketException:
            # Treat any receive failure as a close so the reader loop exits.
            return websocket.ABNF.OPCODE_CLOSE, None
        if not frame:
            raise websocket.WebSocketException("Not a valid frame %s" % frame)
        elif frame.opcode in OPCODE_DATA:
            return frame.opcode, frame.data
        elif frame.opcode == websocket.ABNF.OPCODE_CLOSE:
            ws.send_close()
            return frame.opcode, None
        elif frame.opcode == websocket.ABNF.OPCODE_PING:
            ws.pong(frame.data)
            return frame.opcode, frame.data
        return frame.opcode, frame.data

    def recv_ws():
        # Reader loop: print each received message until the peer closes.
        while True:
            opcode, data = recv()
            msg = None
            if six.PY3 and opcode == websocket.ABNF.OPCODE_TEXT and isinstance(data, bytes):
                data = str(data, "utf-8")
            if not args.verbose and opcode in OPCODE_DATA:
                msg = data
            elif args.verbose:
                # Verbose mode prefixes each message with its opcode name.
                msg = "%s: %s" % (websocket.ABNF.OPCODE_MAP.get(opcode), data)
            if msg is not None:
                if args.timings:
                    console.write(str(time.time() - start_time) + ": " + msg)
                else:
                    console.write(msg)
            if opcode == websocket.ABNF.OPCODE_CLOSE:
                break

    thread = threading.Thread(target=recv_ws)
    thread.daemon = True
    thread.start()
    if args.text:
        ws.send(args.text)
    # Writer loop: forward console lines to the socket until interrupted.
    while True:
        try:
            message = console.read()
            ws.send(message)
        except KeyboardInterrupt:
            return
        except EOFError:
            # Wait the configured grace period before exiting on EOF.
            time.sleep(args.eof_wait)
            return
if __name__ == "__main__":
    try:
        main()
    except Exception as e:
        # NOTE(review): broad catch prints only the message and swallows the
        # traceback; consider logging or re-raising for easier debugging.
        print(e)
|
from __future__ import division
import numpy
from chainer.training import extension
class ExponentialShift(extension.Extension):
    """Trainer extension that exponentially shifts an optimizer attribute.

    Each invocation multiplies the chosen attribute of the optimizer by
    ``rate``; the typical use case is exponential learning-rate decay
    (or growth when ``rate > 1``). The extension is also called before the
    training loop starts by default.

    Args:
        attr (str): Name of the optimizer attribute to shift.
        rate (float): Multiplicative factor applied per call. Must be
            non-negative.
        init (float): Starting value of the attribute. When ``None``, the
            attribute's value at the first call is captured and used.
        target (float): Value at which the shift stops; once reached, the
            attribute is pinned there.
        optimizer (~chainer.Optimizer): Optimizer whose attribute is
            adjusted. When ``None``, the updater's main optimizer is used.
    """

    def __init__(self, attr, rate, init=None, target=None, optimizer=None):
        if rate < 0:
            raise ValueError('ExponentialShift does not support negative rate')
        self._attr = attr
        self._rate = rate
        self._init = init
        self._target = target
        self._optimizer = optimizer
        self._t = 0
        self._last_value = None

    def initialize(self, trainer):
        optimizer = self._get_optimizer(trainer)
        # Capture the starting value lazily when none was supplied.
        if self._init is None:
            self._init = getattr(optimizer, self._attr)
        if self._last_value is None:
            self._update_value(optimizer, self._init)
        else:
            # Resuming from a snapshot: restore the previously shifted value.
            self._update_value(optimizer, self._last_value)

    def __call__(self, trainer):
        self._t += 1
        optimizer = self._get_optimizer(trainer)
        value = self._init * (self._rate ** self._t)
        if self._target is not None:
            # Clamp at the target. The ratio test (rather than min/max)
            # keeps the comparison correct for negative values too.
            if self._rate > 1:
                passed_target = value / self._target > 1
            else:
                passed_target = value / self._target < 1
            if passed_target:
                value = self._target
        self._update_value(optimizer, value)

    def serialize(self, serializer):
        self._t = serializer('_t', self._t)
        self._last_value = serializer('_last_value', self._last_value)
        # Snapshots may store scalars as 0-d arrays; unbox for arithmetic.
        if isinstance(self._last_value, numpy.ndarray):
            self._last_value = self._last_value.item()

    def _get_optimizer(self, trainer):
        # Fall back to the updater's main optimizer when none was given.
        return self._optimizer or trainer.updater.get_optimizer('main')

    def _update_value(self, optimizer, value):
        setattr(optimizer, self._attr, value)
        self._last_value = value
|
"""Module containing user contributions
"""
|
"""Test block processing.
This reimplements tests from the bitcoinj/FullBlockTestGenerator used
by the pull-tester.
We use the testing framework in which we expect a particular answer from
each test.
"""
from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import *
from test_framework.comptool import TestManager, TestInstance, RejectResult
from test_framework.blocktools import *
import time
from test_framework.key import CECKey
from test_framework.script import *
import struct
import copy
class PreviousSpendableOutput(object):
    """Reference to a spendable output: a transaction plus a vout index."""

    def __init__(self, tx=None, n=-1):
        # The original default was ``tx=CTransaction()``, a mutable default
        # argument evaluated once and shared by every instance constructed
        # without an explicit tx; use a None sentinel instead so each such
        # instance gets its own fresh transaction.
        self.tx = CTransaction() if tx is None else tx
        self.n = n  # the output we're spending
class CBrokenBlock(CBlock):
    """Block whose serialization deliberately mis-encodes the tx count.

    Used to feed the node a malformed block: ``serialize`` emits the block
    header followed by a bogus transaction-count encoding instead of the
    canonical compact-size prefix.
    """

    def __init__(self, header=None):
        super(CBrokenBlock, self).__init__(header)

    def initialize(self, base_block):
        # Deep-copy the transactions from base_block and refresh the merkle
        # root so the header commits to them.
        self.vtx = copy.deepcopy(base_block.vtx)
        self.hashMerkleRoot = self.calc_merkle_root()

    def serialize(self):
        r = b""
        # super(CBlock, self) deliberately skips CBlock's own serialize and
        # invokes the header-level serializer instead.
        r += super(CBlock, self).serialize()
        # Malformed length prefix: 0xFF marker plus an 8-byte count, which is
        # not the canonical encoding for small vtx counts.
        r += struct.pack("<BQ", 255, len(self.vtx))
        for tx in self.vtx:
            r += tx.serialize()
        return r

    def normal_serialize(self):
        # Correctly serialized form, for comparison against the broken one.
        r = b""
        r += super(CBrokenBlock, self).serialize()
        return r
class FullBlockTest(ComparisonTestFramework):
# Can either run this test as 1 node with expected answers, or two and compare them.
# Change the "outcome" variable from each TestInstance object to only do the comparison.
def __init__(self):
    """Set up test state: one node, empty chain bookkeeping, a coinbase key."""
    super().__init__()
    self.num_nodes = 1
    self.tip = None
    self.blocks = {}
    self.block_heights = {}
    # Key used to sign coinbase outputs so that spending them can be tested.
    self.coinbase_key = CECKey()
    self.coinbase_key.set_secretbytes(b"horsebattery")
    self.coinbase_pubkey = self.coinbase_key.get_pubkey()
def add_options(self, parser):
    """Register the test-specific command line options."""
    super().add_options(parser)
    parser.add_option("--runbarelyexpensive", default=True,
                      dest="runbarelyexpensive")
def run_test(self):
    """Drive the comparison framework over all generated test instances."""
    manager = TestManager(self, self.options.tmpdir)
    self.test = manager
    manager.add_all_connections(self.nodes)
    # Network handling runs on its own thread.
    NetworkThread().start()
    manager.run()
def add_transactions_to_block(self, block, tx_list):
    """Append tx_list to block.vtx, refreshing each transaction's hash first.

    Uses a plain loop rather than the original list comprehension, which
    was built (and discarded) purely for its side effects.
    """
    for tx in tx_list:
        tx.rehash()
    block.vtx.extend(tx_list)
# this is a little handier to use than the version in blocktools.py
def create_tx(self, spend_tx, n, value, script=CScript([OP_TRUE])):
    """Convenience wrapper: build a tx spending output n of spend_tx."""
    return create_transaction(spend_tx, n, b"", value, script)
# sign a transaction, using the key we know about
# this signs input 0 in tx, which is assumed to be spending output n in spend_tx
def sign_tx(self, tx, spend_tx, n):
    """Sign input 0 of tx, assumed to spend output n of spend_tx."""
    scriptPubKey = bytearray(spend_tx.vout[n].scriptPubKey)
    # Anyone-can-spend outputs need no signature at all.
    if (scriptPubKey[0] == OP_TRUE): # an anyone-can-spend
        tx.vin[0].scriptSig = CScript()
        return
    (sighash, err) = SignatureHash(spend_tx.vout[n].scriptPubKey, tx, 0, SIGHASH_ALL)
    # Signature is the DER sig with the SIGHASH_ALL byte appended.
    tx.vin[0].scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))])
def create_and_sign_transaction(self, spend_tx, n, value, script=CScript([OP_TRUE])):
    """Build a tx spending output n of spend_tx, sign it, refresh its hash."""
    signed_tx = self.create_tx(spend_tx, n, value, script)
    self.sign_tx(signed_tx, spend_tx, n)
    signed_tx.rehash()
    return signed_tx
def next_block(self, number, spend=None, additional_coinbase_value=0, script=CScript([OP_TRUE]), solve=True):
    """Build (and by default solve) a block on top of the current tip.

    Args:
        number: key under which the block is stored in self.blocks.
        spend: optional PreviousSpendableOutput consumed by an extra tx.
        additional_coinbase_value: extra satoshis added to the coinbase.
        script: scriptPubKey for the spending tx's output.
        solve: whether to grind the proof of work.

    Returns the new block, which also becomes self.tip.
    """
    # Fixed: use ``is None`` (identity) rather than ``== None`` comparisons.
    if self.tip is None:
        base_block_hash = self.genesis_hash
        block_time = int(time.time()) + 1
    else:
        base_block_hash = self.tip.sha256
        block_time = self.tip.nTime + 1
    # First create the coinbase
    height = self.block_heights[base_block_hash] + 1
    coinbase = create_coinbase(height, self.coinbase_pubkey)
    coinbase.vout[0].nValue += additional_coinbase_value
    coinbase.rehash()
    if spend is None:
        block = create_block(base_block_hash, coinbase, block_time)
    else:
        # All but one satoshi of the spent output goes to fees.
        coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1
        coinbase.rehash()
        block = create_block(base_block_hash, coinbase, block_time)
        tx = create_transaction(spend.tx, spend.n, b"", 1, script)  # spend 1 satoshi
        self.sign_tx(tx, spend.tx, spend.n)
        self.add_transactions_to_block(block, [tx])
        block.hashMerkleRoot = block.calc_merkle_root()
    if solve:
        block.solve()
    self.tip = block
    self.block_heights[block.sha256] = height
    # Each block number may only be created once.
    assert number not in self.blocks
    self.blocks[number] = block
    return block
def get_tests(self):
self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
self.block_heights[self.genesis_hash] = 0
spendable_outputs = []
# save the current tip so it can be spent by a later block
def save_spendable_output():
spendable_outputs.append(self.tip)
# get an output that we previously marked as spendable
def get_spendable_output():
return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)
# returns a test case that asserts that the current tip was accepted
def accepted():
return TestInstance([[self.tip, True]])
# returns a test case that asserts that the current tip was rejected
def rejected(reject = None):
if reject is None:
return TestInstance([[self.tip, False]])
else:
return TestInstance([[self.tip, reject]])
# move the tip back to a previous block
def tip(number):
self.tip = self.blocks[number]
# adds transactions to the block and updates state
def update_block(block_number, new_transactions):
block = self.blocks[block_number]
self.add_transactions_to_block(block, new_transactions)
old_sha256 = block.sha256
block.hashMerkleRoot = block.calc_merkle_root()
block.solve()
# Update the internal state just like in next_block
self.tip = block
if block.sha256 != old_sha256:
self.block_heights[block.sha256] = self.block_heights[old_sha256]
del self.block_heights[old_sha256]
self.blocks[block_number] = block
return block
# shorthand for functions
block = self.next_block
create_tx = self.create_tx
create_and_sign_tx = self.create_and_sign_transaction
# these must be updated if consensus changes
MAX_BLOCK_SIGOPS = 20000
# Create a new block
block(0)
save_spendable_output()
yield accepted()
# Now we need that block to mature so we can spend the coinbase.
test = TestInstance(sync_every_block=False)
for i in range(99):
block(5000 + i)
test.blocks_and_transactions.append([self.tip, True])
save_spendable_output()
yield test
# collect spendable outputs now to avoid cluttering the code later on
out = []
for i in range(33):
out.append(get_spendable_output())
# Start by building a couple of blocks on top (which output is spent is
# in parentheses):
# genesis -> b1 (0) -> b2 (1)
block(1, spend=out[0])
save_spendable_output()
yield accepted()
block(2, spend=out[1])
yield accepted()
save_spendable_output()
# so fork like this:
#
# genesis -> b1 (0) -> b2 (1)
# \-> b3 (1)
#
# Nothing should happen at this point. We saw b2 first so it takes priority.
tip(1)
b3 = block(3, spend=out[1])
txout_b3 = PreviousSpendableOutput(b3.vtx[1], 0)
yield rejected()
# Now we add another block to make the alternative chain longer.
#
# genesis -> b1 (0) -> b2 (1)
# \-> b3 (1) -> b4 (2)
block(4, spend=out[2])
yield accepted()
# ... and back to the first chain.
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b3 (1) -> b4 (2)
tip(2)
block(5, spend=out[2])
save_spendable_output()
yield rejected()
block(6, spend=out[3])
yield accepted()
# Try to create a fork that double-spends
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b7 (2) -> b8 (4)
# \-> b3 (1) -> b4 (2)
tip(5)
block(7, spend=out[2])
yield rejected()
block(8, spend=out[4])
yield rejected()
# Try to create a block that has too much fee
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b9 (4)
# \-> b3 (1) -> b4 (2)
tip(6)
block(9, spend=out[4], additional_coinbase_value=1)
yield rejected(RejectResult(16, b'bad-cb-amount'))
# Create a fork that ends in a block with too much fee (the one that causes the reorg)
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b10 (3) -> b11 (4)
# \-> b3 (1) -> b4 (2)
tip(5)
block(10, spend=out[3])
yield rejected()
block(11, spend=out[4], additional_coinbase_value=1)
yield rejected(RejectResult(16, b'bad-cb-amount'))
# Try again, but with a valid fork first
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b14 (5)
# (b12 added last)
# \-> b3 (1) -> b4 (2)
tip(5)
b12 = block(12, spend=out[3])
save_spendable_output()
b13 = block(13, spend=out[4])
# Deliver the block header for b12, and the block b13.
# b13 should be accepted but the tip won't advance until b12 is delivered.
yield TestInstance([[CBlockHeader(b12), None], [b13, False]])
save_spendable_output()
# b14 is invalid, but the node won't know that until it tries to connect
# Tip still can't advance because b12 is missing
block(14, spend=out[5], additional_coinbase_value=1)
yield rejected()
yield TestInstance([[b12, True, b13.sha256]]) # New tip should be b13.
# Add a block with MAX_BLOCK_SIGOPS and one with one more sigop
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5) -> b16 (6)
# \-> b3 (1) -> b4 (2)
# Test that a block with a lot of checksigs is okay
lots_of_checksigs = CScript([OP_CHECKSIG] * (MAX_BLOCK_SIGOPS - 1))
tip(13)
block(15, spend=out[5], script=lots_of_checksigs)
yield accepted()
save_spendable_output()
# Test that a block with too many checksigs is rejected
too_many_checksigs = CScript([OP_CHECKSIG] * (MAX_BLOCK_SIGOPS))
block(16, spend=out[6], script=too_many_checksigs)
yield rejected(RejectResult(16, b'bad-blk-sigops'))
# Attempt to spend a transaction created on a different fork
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5) -> b17 (b3.vtx[1])
# \-> b3 (1) -> b4 (2)
tip(15)
block(17, spend=txout_b3)
yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))
# Attempt to spend a transaction created on a different fork (on a fork this time)
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5)
# \-> b18 (b3.vtx[1]) -> b19 (6)
# \-> b3 (1) -> b4 (2)
tip(13)
block(18, spend=txout_b3)
yield rejected()
block(19, spend=out[6])
yield rejected()
# Attempt to spend a coinbase at depth too low
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5) -> b20 (7)
# \-> b3 (1) -> b4 (2)
tip(15)
block(20, spend=out[7])
yield rejected(RejectResult(16, b'bad-txns-premature-spend-of-coinbase'))
# Attempt to spend a coinbase at depth too low (on a fork this time)
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5)
# \-> b21 (6) -> b22 (5)
# \-> b3 (1) -> b4 (2)
tip(13)
block(21, spend=out[6])
yield rejected()
block(22, spend=out[5])
yield rejected()
# Create a block on either side of MAX_BLOCK_BASE_SIZE and make sure its accepted/rejected
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6)
# \-> b24 (6) -> b25 (7)
# \-> b3 (1) -> b4 (2)
tip(15)
b23 = block(23, spend=out[6])
tx = CTransaction()
script_length = MAX_BLOCK_BASE_SIZE - len(b23.serialize()) - 69
script_output = CScript([b'\x00' * script_length])
tx.vout.append(CTxOut(0, script_output))
tx.vin.append(CTxIn(COutPoint(b23.vtx[1].sha256, 0)))
b23 = update_block(23, [tx])
# Make sure the math above worked out to produce a max-sized block
assert_equal(len(b23.serialize()), MAX_BLOCK_BASE_SIZE)
yield accepted()
save_spendable_output()
# Make the next block one byte bigger and check that it fails
tip(15)
b24 = block(24, spend=out[6])
script_length = MAX_BLOCK_BASE_SIZE - len(b24.serialize()) - 69
script_output = CScript([b'\x00' * (script_length+1)])
tx.vout = [CTxOut(0, script_output)]
b24 = update_block(24, [tx])
assert_equal(len(b24.serialize()), MAX_BLOCK_BASE_SIZE+1)
yield rejected(RejectResult(16, b'bad-blk-length'))
block(25, spend=out[7])
yield rejected()
# Create blocks with a coinbase input script size out of range
# genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
# \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7)
# \-> ... (6) -> ... (7)
# \-> b3 (1) -> b4 (2)
tip(15)
b26 = block(26, spend=out[6])
b26.vtx[0].vin[0].scriptSig = b'\x00'
b26.vtx[0].rehash()
# update_block causes the merkle root to get updated, even with no new
# transactions, and updates the required state.
b26 = update_block(26, [])
yield rejected(RejectResult(16, b'bad-cb-length'))
# Extend the b26 chain to make sure bitcoind isn't accepting b26
b27 = block(27, spend=out[7])
yield rejected(False)
# Now try a too-large-coinbase script
tip(15)
b28 = block(28, spend=out[6])
b28.vtx[0].vin[0].scriptSig = b'\x00' * 101
b28.vtx[0].rehash()
b28 = update_block(28, [])
yield rejected(RejectResult(16, b'bad-cb-length'))
# Extend the b28 chain to make sure bitcoind isn't accepting b28
b29 = block(29, spend=out[7])
yield rejected(False)
# b30 has a max-sized coinbase scriptSig.
tip(23)
b30 = block(30)
b30.vtx[0].vin[0].scriptSig = b'\x00' * 100
b30.vtx[0].rehash()
b30 = update_block(30, [])
yield accepted()
save_spendable_output()
# b31 - b35 - check sigops of OP_CHECKMULTISIG / OP_CHECKMULTISIGVERIFY / OP_CHECKSIGVERIFY
#
# genesis -> ... -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10)
# \-> b36 (11)
# \-> b34 (10)
# \-> b32 (9)
#
# MULTISIG: each op code counts as 20 sigops. To create the edge case, pack another 19 sigops at the end.
lots_of_multisigs = CScript([OP_CHECKMULTISIG] * ((MAX_BLOCK_SIGOPS-1) // 20) + [OP_CHECKSIG] * 19)
b31 = block(31, spend=out[8], script=lots_of_multisigs)
assert_equal(get_legacy_sigopcount_block(b31), MAX_BLOCK_SIGOPS)
yield accepted()
save_spendable_output()
# this goes over the limit because the coinbase has one sigop
too_many_multisigs = CScript([OP_CHECKMULTISIG] * (MAX_BLOCK_SIGOPS // 20))
b32 = block(32, spend=out[9], script=too_many_multisigs)
assert_equal(get_legacy_sigopcount_block(b32), MAX_BLOCK_SIGOPS + 1)
yield rejected(RejectResult(16, b'bad-blk-sigops'))
# CHECKMULTISIGVERIFY
tip(31)
lots_of_multisigs = CScript([OP_CHECKMULTISIGVERIFY] * ((MAX_BLOCK_SIGOPS-1) // 20) + [OP_CHECKSIG] * 19)
block(33, spend=out[9], script=lots_of_multisigs)
yield accepted()
save_spendable_output()
too_many_multisigs = CScript([OP_CHECKMULTISIGVERIFY] * (MAX_BLOCK_SIGOPS // 20))
block(34, spend=out[10], script=too_many_multisigs)
yield rejected(RejectResult(16, b'bad-blk-sigops'))
# CHECKSIGVERIFY
tip(33)
lots_of_checksigs = CScript([OP_CHECKSIGVERIFY] * (MAX_BLOCK_SIGOPS - 1))
b35 = block(35, spend=out[10], script=lots_of_checksigs)
yield accepted()
save_spendable_output()
too_many_checksigs = CScript([OP_CHECKSIGVERIFY] * (MAX_BLOCK_SIGOPS))
block(36, spend=out[11], script=too_many_checksigs)
yield rejected(RejectResult(16, b'bad-blk-sigops'))
# Check spending of a transaction in a block which failed to connect
#
# b6 (3)
# b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10)
# \-> b37 (11)
# \-> b38 (11/37)
#
# save 37's spendable output, but then double-spend out11 to invalidate the block
tip(35)
b37 = block(37, spend=out[11])
txout_b37 = PreviousSpendableOutput(b37.vtx[1], 0)
tx = create_and_sign_tx(out[11].tx, out[11].n, 0)
b37 = update_block(37, [tx])
yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))
# attempt to spend b37's first non-coinbase tx, at which point b37 was still considered valid
tip(35)
block(38, spend=txout_b37)
yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))
# Check P2SH SigOp counting
#
#
# 13 (4) -> b15 (5) -> b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b41 (12)
# \-> b40 (12)
#
# b39 - create some P2SH outputs that will require 6 sigops to spend:
#
# redeem_script = COINBASE_PUBKEY, (OP_2DUP+OP_CHECKSIGVERIFY) * 5, OP_CHECKSIG
# p2sh_script = OP_HASH160, ripemd160(sha256(script)), OP_EQUAL
#
tip(35)
b39 = block(39)
b39_outputs = 0
b39_sigops_per_output = 6
# Build the redeem script, hash it, use hash to create the p2sh script
redeem_script = CScript([self.coinbase_pubkey] + [OP_2DUP, OP_CHECKSIGVERIFY]*5 + [OP_CHECKSIG])
redeem_script_hash = hash160(redeem_script)
p2sh_script = CScript([OP_HASH160, redeem_script_hash, OP_EQUAL])
# Create a transaction that spends one satoshi to the p2sh_script, the rest to OP_TRUE
# This must be signed because it is spending a coinbase
spend = out[11]
tx = create_tx(spend.tx, spend.n, 1, p2sh_script)
tx.vout.append(CTxOut(spend.tx.vout[spend.n].nValue - 1, CScript([OP_TRUE])))
self.sign_tx(tx, spend.tx, spend.n)
tx.rehash()
b39 = update_block(39, [tx])
b39_outputs += 1
# Until block is full, add tx's with 1 satoshi to p2sh_script, the rest to OP_TRUE
tx_new = None
tx_last = tx
total_size=len(b39.serialize())
while(total_size < MAX_BLOCK_BASE_SIZE):
tx_new = create_tx(tx_last, 1, 1, p2sh_script)
tx_new.vout.append(CTxOut(tx_last.vout[1].nValue - 1, CScript([OP_TRUE])))
tx_new.rehash()
total_size += len(tx_new.serialize())
if total_size >= MAX_BLOCK_BASE_SIZE:
break
b39.vtx.append(tx_new) # add tx to block
tx_last = tx_new
b39_outputs += 1
b39 = update_block(39, [])
yield accepted()
save_spendable_output()
# Test sigops in P2SH redeem scripts
#
# b40 creates 3333 tx's spending the 6-sigop P2SH outputs from b39 for a total of 19998 sigops.
# The first tx has one sigop and then at the end we add 2 more to put us just over the max.
#
# b41 does the same, less one, so it has the maximum sigops permitted.
#
tip(39)
b40 = block(40, spend=out[12])
sigops = get_legacy_sigopcount_block(b40)
numTxes = (MAX_BLOCK_SIGOPS - sigops) // b39_sigops_per_output
assert_equal(numTxes <= b39_outputs, True)
lastOutpoint = COutPoint(b40.vtx[1].sha256, 0)
new_txs = []
for i in range(1, numTxes+1):
tx = CTransaction()
tx.vout.append(CTxOut(1, CScript([OP_TRUE])))
tx.vin.append(CTxIn(lastOutpoint, b''))
# second input is corresponding P2SH output from b39
tx.vin.append(CTxIn(COutPoint(b39.vtx[i].sha256, 0), b''))
# Note: must pass the redeem_script (not p2sh_script) to the signature hash function
(sighash, err) = SignatureHash(redeem_script, tx, 1, SIGHASH_ALL)
sig = self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))
scriptSig = CScript([sig, redeem_script])
tx.vin[1].scriptSig = scriptSig
tx.rehash()
new_txs.append(tx)
lastOutpoint = COutPoint(tx.sha256, 0)
b40_sigops_to_fill = MAX_BLOCK_SIGOPS - (numTxes * b39_sigops_per_output + sigops) + 1
tx = CTransaction()
tx.vin.append(CTxIn(lastOutpoint, b''))
tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b40_sigops_to_fill)))
tx.rehash()
new_txs.append(tx)
update_block(40, new_txs)
yield rejected(RejectResult(16, b'bad-blk-sigops'))
# same as b40, but one less sigop
tip(39)
b41 = block(41, spend=None)
update_block(41, b40.vtx[1:-1])
b41_sigops_to_fill = b40_sigops_to_fill - 1
tx = CTransaction()
tx.vin.append(CTxIn(lastOutpoint, b''))
tx.vout.append(CTxOut(1, CScript([OP_CHECKSIG] * b41_sigops_to_fill)))
tx.rehash()
update_block(41, [tx])
yield accepted()
# Fork off of b39 to create a constant base again
#
# b23 (6) -> b30 (7) -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13)
# \-> b41 (12)
#
tip(39)
block(42, spend=out[12])
yield rejected()
save_spendable_output()
block(43, spend=out[13])
yield accepted()
save_spendable_output()
# Test a number of really invalid scenarios
#
# -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b44 (14)
# \-> ??? (15)
# The next few blocks are going to be created "by hand" since they'll do funky things, such as having
# the first transaction be non-coinbase, etc. The purpose of b44 is to make sure this works.
height = self.block_heights[self.tip.sha256] + 1
coinbase = create_coinbase(height, self.coinbase_pubkey)
b44 = CBlock()
b44.nTime = self.tip.nTime + 1
b44.hashPrevBlock = self.tip.sha256
b44.nBits = 0x207fffff
b44.vtx.append(coinbase)
b44.hashMerkleRoot = b44.calc_merkle_root()
b44.solve()
self.tip = b44
self.block_heights[b44.sha256] = height
self.blocks[44] = b44
yield accepted()
# A block with a non-coinbase as the first tx
non_coinbase = create_tx(out[15].tx, out[15].n, 1)
b45 = CBlock()
b45.nTime = self.tip.nTime + 1
b45.hashPrevBlock = self.tip.sha256
b45.nBits = 0x207fffff
b45.vtx.append(non_coinbase)
b45.hashMerkleRoot = b45.calc_merkle_root()
b45.calc_sha256()
b45.solve()
self.block_heights[b45.sha256] = self.block_heights[self.tip.sha256]+1
self.tip = b45
self.blocks[45] = b45
yield rejected(RejectResult(16, b'bad-cb-missing'))
# A block with no txns
tip(44)
b46 = CBlock()
b46.nTime = b44.nTime+1
b46.hashPrevBlock = b44.sha256
b46.nBits = 0x207fffff
b46.vtx = []
b46.hashMerkleRoot = 0
b46.solve()
self.block_heights[b46.sha256] = self.block_heights[b44.sha256]+1
self.tip = b46
assert 46 not in self.blocks
self.blocks[46] = b46
s = ser_uint256(b46.hashMerkleRoot)
yield rejected(RejectResult(16, b'bad-blk-length'))
# A block with invalid work
tip(44)
b47 = block(47, solve=False)
target = uint256_from_compact(b47.nBits)
while b47.scrypt256 < target: #changed > to <
b47.nNonce += 1
b47.rehash()
yield rejected(RejectResult(16, b'high-hash'))
# A block with timestamp > 2 hrs in the future
tip(44)
b48 = block(48, solve=False)
b48.nTime = int(time.time()) + 60 * 60 * 3
b48.solve()
yield rejected(RejectResult(16, b'time-too-new'))
# A block with an invalid merkle hash
tip(44)
b49 = block(49)
b49.hashMerkleRoot += 1
b49.solve()
yield rejected(RejectResult(16, b'bad-txnmrklroot'))
# A block with an incorrect POW limit
tip(44)
b50 = block(50)
b50.nBits = b50.nBits - 1
b50.solve()
yield rejected(RejectResult(16, b'bad-diffbits'))
# A block with two coinbase txns
tip(44)
b51 = block(51)
cb2 = create_coinbase(51, self.coinbase_pubkey)
b51 = update_block(51, [cb2])
yield rejected(RejectResult(16, b'bad-cb-multiple'))
# A block w/ duplicate txns
# Note: txns have to be in the right position in the merkle tree to trigger this error
tip(44)
b52 = block(52, spend=out[15])
tx = create_tx(b52.vtx[1], 0, 1)
b52 = update_block(52, [tx, tx])
yield rejected(RejectResult(16, b'bad-txns-duplicate'))
# Test block timestamps
# -> b31 (8) -> b33 (9) -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15)
# \-> b54 (15)
#
tip(43)
block(53, spend=out[14])
yield rejected() # rejected since b44 is at same height
save_spendable_output()
# invalid timestamp (b35 is 5 blocks back, so its time is MedianTimePast)
b54 = block(54, spend=out[15])
b54.nTime = b35.nTime - 1
b54.solve()
yield rejected(RejectResult(16, b'time-too-old'))
# valid timestamp
tip(53)
b55 = block(55, spend=out[15])
b55.nTime = b35.nTime
update_block(55, [])
yield accepted()
save_spendable_output()
# Test CVE-2012-2459
#
# -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57p2 (16)
# \-> b57 (16)
# \-> b56p2 (16)
# \-> b56 (16)
#
# Merkle tree malleability (CVE-2012-2459): repeating sequences of transactions in a block without
# affecting the merkle root of a block, while still invalidating it.
# See: src/consensus/merkle.h
#
# b57 has three txns: coinbase, tx, tx1. The merkle root computation will duplicate tx.
# Result: OK
#
# b56 copies b57 but duplicates tx1 and does not recalculate the block hash. So it has a valid merkle
# root but duplicate transactions.
# Result: Fails
#
# b57p2 has six transactions in its merkle tree:
# - coinbase, tx, tx1, tx2, tx3, tx4
# Merkle root calculation will duplicate as necessary.
# Result: OK.
#
# b56p2 copies b57p2 but adds both tx3 and tx4. The purpose of the test is to make sure the code catches
# duplicate txns that are not next to one another with the "bad-txns-duplicate" error (which indicates
# that the error was caught early, avoiding a DOS vulnerability.)
# b57 - a good block with 2 txs, don't submit until end
tip(55)
b57 = block(57)
tx = create_and_sign_tx(out[16].tx, out[16].n, 1)
tx1 = create_tx(tx, 0, 1)
b57 = update_block(57, [tx, tx1])
# b56 - copy b57, add a duplicate tx
tip(55)
b56 = copy.deepcopy(b57)
self.blocks[56] = b56
assert_equal(len(b56.vtx),3)
b56 = update_block(56, [tx1])
assert_equal(b56.hash, b57.hash)
yield rejected(RejectResult(16, b'bad-txns-duplicate'))
# b57p2 - a good block with 6 tx'es, don't submit until end
tip(55)
b57p2 = block("57p2")
tx = create_and_sign_tx(out[16].tx, out[16].n, 1)
tx1 = create_tx(tx, 0, 1)
tx2 = create_tx(tx1, 0, 1)
tx3 = create_tx(tx2, 0, 1)
tx4 = create_tx(tx3, 0, 1)
b57p2 = update_block("57p2", [tx, tx1, tx2, tx3, tx4])
# b56p2 - copy b57p2, duplicate two non-consecutive tx's
tip(55)
b56p2 = copy.deepcopy(b57p2)
self.blocks["b56p2"] = b56p2
assert_equal(b56p2.hash, b57p2.hash)
assert_equal(len(b56p2.vtx),6)
b56p2 = update_block("b56p2", [tx3, tx4])
yield rejected(RejectResult(16, b'bad-txns-duplicate'))
tip("57p2")
yield accepted()
tip(57)
yield rejected() #rejected because 57p2 seen first
save_spendable_output()
# Test a few invalid tx types
#
# -> b35 (10) -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17)
# \-> ??? (17)
#
# tx with prevout.n out of range
tip(57)
b58 = block(58, spend=out[17])
tx = CTransaction()
assert(len(out[17].tx.vout) < 42)
tx.vin.append(CTxIn(COutPoint(out[17].tx.sha256, 42), CScript([OP_TRUE]), 0xffffffff))
tx.vout.append(CTxOut(0, b""))
tx.calc_sha256()
b58 = update_block(58, [tx])
yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))
# tx with output value > input value out of range
tip(57)
b59 = block(59)
tx = create_and_sign_tx(out[17].tx, out[17].n, 51*COIN)
b59 = update_block(59, [tx])
yield rejected(RejectResult(16, b'bad-txns-in-belowout'))
# reset to good chain
tip(57)
b60 = block(60, spend=out[17])
yield accepted()
save_spendable_output()
# Test BIP30
#
# -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17)
# \-> b61 (18)
#
# Blocks are not allowed to contain a transaction whose id matches that of an earlier,
# not-fully-spent transaction in the same chain. To test, make identical coinbases;
# the second one should be rejected.
#
tip(60)
b61 = block(61, spend=out[18])
b61.vtx[0].vin[0].scriptSig = b60.vtx[0].vin[0].scriptSig #equalize the coinbases
b61.vtx[0].rehash()
b61 = update_block(61, [])
assert_equal(b60.vtx[0].serialize(), b61.vtx[0].serialize())
yield rejected(RejectResult(16, b'bad-txns-BIP30'))
# Test tx.isFinal is properly rejected (not an exhaustive tx.isFinal test, that should be in data-driven transaction tests)
#
# -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17)
# \-> b62 (18)
#
tip(60)
b62 = block(62)
tx = CTransaction()
tx.nLockTime = 0xffffffff #this locktime is non-final
assert(out[18].n < len(out[18].tx.vout))
tx.vin.append(CTxIn(COutPoint(out[18].tx.sha256, out[18].n))) # don't set nSequence
tx.vout.append(CTxOut(0, CScript([OP_TRUE])))
assert(tx.vin[0].nSequence < 0xffffffff)
tx.calc_sha256()
b62 = update_block(62, [tx])
yield rejected(RejectResult(16, b'bad-txns-nonfinal'))
# Test a non-final coinbase is also rejected
#
# -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17)
# \-> b63 (-)
#
tip(60)
b63 = block(63)
b63.vtx[0].nLockTime = 0xffffffff
b63.vtx[0].vin[0].nSequence = 0xDEADBEEF
b63.vtx[0].rehash()
b63 = update_block(63, [])
yield rejected(RejectResult(16, b'bad-txns-nonfinal'))
# This checks that a block with a bloated VARINT between the block_header and the array of tx such that
# the block is > MAX_BLOCK_BASE_SIZE with the bloated varint, but <= MAX_BLOCK_BASE_SIZE without the bloated varint,
# does not cause a subsequent, identical block with canonical encoding to be rejected. The test does not
# care whether the bloated block is accepted or rejected; it only cares that the second block is accepted.
#
# What matters is that the receiving node should not reject the bloated block, and then reject the canonical
# block on the basis that it's the same as an already-rejected block (which would be a consensus failure.)
#
# -> b39 (11) -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18)
# \
# b64a (18)
# b64a is a bloated block (non-canonical varint)
# b64 is a good block (same as b64 but w/ canonical varint)
#
tip(60)
regular_block = block("64a", spend=out[18])
# make it a "broken_block," with non-canonical serialization
b64a = CBrokenBlock(regular_block)
b64a.initialize(regular_block)
self.blocks["64a"] = b64a
self.tip = b64a
tx = CTransaction()
# use canonical serialization to calculate size
script_length = MAX_BLOCK_BASE_SIZE - len(b64a.normal_serialize()) - 69
script_output = CScript([b'\x00' * script_length])
tx.vout.append(CTxOut(0, script_output))
tx.vin.append(CTxIn(COutPoint(b64a.vtx[1].sha256, 0)))
b64a = update_block("64a", [tx])
assert_equal(len(b64a.serialize()), MAX_BLOCK_BASE_SIZE + 8)
yield TestInstance([[self.tip, None]])
# comptool workaround: to make sure b64 is delivered, manually erase b64a from blockstore
self.test.block_store.erase(b64a.sha256)
tip(60)
b64 = CBlock(b64a)
b64.vtx = copy.deepcopy(b64a.vtx)
assert_equal(b64.hash, b64a.hash)
assert_equal(len(b64.serialize()), MAX_BLOCK_BASE_SIZE)
self.blocks[64] = b64
update_block(64, [])
yield accepted()
save_spendable_output()
# Spend an output created in the block itself
#
# -> b42 (12) -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19)
#
tip(64)
b65 = block(65)
tx1 = create_and_sign_tx(out[19].tx, out[19].n, out[19].tx.vout[0].nValue)
tx2 = create_and_sign_tx(tx1, 0, 0)
update_block(65, [tx1, tx2])
yield accepted()
save_spendable_output()
# Attempt to spend an output created later in the same block
#
# -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19)
# \-> b66 (20)
tip(65)
b66 = block(66)
tx1 = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue)
tx2 = create_and_sign_tx(tx1, 0, 1)
update_block(66, [tx2, tx1])
yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))
# Attempt to double-spend a transaction created in a block
#
# -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19)
# \-> b67 (20)
#
#
tip(65)
b67 = block(67)
tx1 = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue)
tx2 = create_and_sign_tx(tx1, 0, 1)
tx3 = create_and_sign_tx(tx1, 0, 2)
update_block(67, [tx1, tx2, tx3])
yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))
# More tests of block subsidy
#
# -> b43 (13) -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20)
# \-> b68 (20)
#
# b68 - coinbase with an extra 10 satoshis,
# creates a tx that has 9 satoshis from out[20] go to fees
# this fails because the coinbase is trying to claim 1 satoshi too much in fees
#
# b69 - coinbase with extra 10 satoshis, and a tx that gives a 10 satoshi fee
# this succeeds
#
tip(65)
b68 = block(68, additional_coinbase_value=10)
tx = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue-9)
update_block(68, [tx])
yield rejected(RejectResult(16, b'bad-cb-amount'))
tip(65)
b69 = block(69, additional_coinbase_value=10)
tx = create_and_sign_tx(out[20].tx, out[20].n, out[20].tx.vout[0].nValue-10)
update_block(69, [tx])
yield accepted()
save_spendable_output()
# Test spending the outpoint of a non-existent transaction
#
# -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20)
# \-> b70 (21)
#
tip(69)
block(70, spend=out[21])
bogus_tx = CTransaction()
bogus_tx.sha256 = uint256_from_str(b"23c70ed7c0506e9178fc1a987f40a33946d4ad4c962b5ae3a52546da53af0c5c")
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(bogus_tx.sha256, 0), b"", 0xffffffff))
tx.vout.append(CTxOut(1, b""))
update_block(70, [tx])
yield rejected(RejectResult(16, b'bad-txns-inputs-missingorspent'))
# Test accepting an invalid block which has the same hash as a valid one (via merkle tree tricks)
#
# -> b53 (14) -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) -> b72 (21)
# \-> b71 (21)
#
# b72 is a good block.
# b71 is a copy of 72, but re-adds one of its transactions. However, it has the same hash as b71.
#
tip(69)
b72 = block(72)
tx1 = create_and_sign_tx(out[21].tx, out[21].n, 2)
tx2 = create_and_sign_tx(tx1, 0, 1)
b72 = update_block(72, [tx1, tx2]) # now tip is 72
b71 = copy.deepcopy(b72)
b71.vtx.append(tx2) # add duplicate tx2
self.block_heights[b71.sha256] = self.block_heights[b69.sha256] + 1 # b71 builds off b69
self.blocks[71] = b71
assert_equal(len(b71.vtx), 4)
assert_equal(len(b72.vtx), 3)
assert_equal(b72.sha256, b71.sha256)
tip(71)
yield rejected(RejectResult(16, b'bad-txns-duplicate'))
tip(72)
yield accepted()
save_spendable_output()
# Test some invalid scripts and MAX_BLOCK_SIGOPS
#
# -> b55 (15) -> b57 (16) -> b60 (17) -> b64 (18) -> b65 (19) -> b69 (20) -> b72 (21)
# \-> b** (22)
#
# b73 - tx with excessive sigops that are placed after an excessively large script element.
# The purpose of the test is to make sure those sigops are counted.
#
# script is a bytearray of size 20,526
#
# bytearray[0-19,998] : OP_CHECKSIG
# bytearray[19,999] : OP_PUSHDATA4
# bytearray[20,000-20,003]: 521 (max_script_element_size+1, in little-endian format)
# bytearray[20,004-20,525]: unread data (script_element)
# bytearray[20,526] : OP_CHECKSIG (this puts us over the limit)
#
tip(72)
b73 = block(73)
size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 1 + 5 + 1
a = bytearray([OP_CHECKSIG] * size)
a[MAX_BLOCK_SIGOPS - 1] = int("4e",16) # OP_PUSHDATA4
element_size = MAX_SCRIPT_ELEMENT_SIZE + 1
a[MAX_BLOCK_SIGOPS] = element_size % 256
a[MAX_BLOCK_SIGOPS+1] = element_size // 256
a[MAX_BLOCK_SIGOPS+2] = 0
a[MAX_BLOCK_SIGOPS+3] = 0
tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a))
b73 = update_block(73, [tx])
assert_equal(get_legacy_sigopcount_block(b73), MAX_BLOCK_SIGOPS+1)
yield rejected(RejectResult(16, b'bad-blk-sigops'))
# b74/75 - if we push an invalid script element, all prevous sigops are counted,
# but sigops after the element are not counted.
#
# The invalid script element is that the push_data indicates that
# there will be a large amount of data (0xffffff bytes), but we only
# provide a much smaller number. These bytes are CHECKSIGS so they would
# cause b75 to fail for excessive sigops, if those bytes were counted.
#
# b74 fails because we put MAX_BLOCK_SIGOPS+1 before the element
# b75 succeeds because we put MAX_BLOCK_SIGOPS before the element
#
#
tip(72)
b74 = block(74)
size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 42 # total = 20,561
a = bytearray([OP_CHECKSIG] * size)
a[MAX_BLOCK_SIGOPS] = 0x4e
a[MAX_BLOCK_SIGOPS+1] = 0xfe
a[MAX_BLOCK_SIGOPS+2] = 0xff
a[MAX_BLOCK_SIGOPS+3] = 0xff
a[MAX_BLOCK_SIGOPS+4] = 0xff
tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a))
b74 = update_block(74, [tx])
yield rejected(RejectResult(16, b'bad-blk-sigops'))
tip(72)
b75 = block(75)
size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 42
a = bytearray([OP_CHECKSIG] * size)
a[MAX_BLOCK_SIGOPS-1] = 0x4e
a[MAX_BLOCK_SIGOPS] = 0xff
a[MAX_BLOCK_SIGOPS+1] = 0xff
a[MAX_BLOCK_SIGOPS+2] = 0xff
a[MAX_BLOCK_SIGOPS+3] = 0xff
tx = create_and_sign_tx(out[22].tx, 0, 1, CScript(a))
b75 = update_block(75, [tx])
yield accepted()
save_spendable_output()
# Check that if we push an element filled with CHECKSIGs, they are not counted
tip(75)
b76 = block(76)
size = MAX_BLOCK_SIGOPS - 1 + MAX_SCRIPT_ELEMENT_SIZE + 1 + 5
a = bytearray([OP_CHECKSIG] * size)
a[MAX_BLOCK_SIGOPS-1] = 0x4e # PUSHDATA4, but leave the following bytes as just checksigs
tx = create_and_sign_tx(out[23].tx, 0, 1, CScript(a))
b76 = update_block(76, [tx])
yield accepted()
save_spendable_output()
# Test transaction resurrection
#
# -> b77 (24) -> b78 (25) -> b79 (26)
# \-> b80 (25) -> b81 (26) -> b82 (27)
#
# b78 creates a tx, which is spent in b79. After b82, both should be in mempool
#
# The tx'es must be unsigned and pass the node's mempool policy. It is unsigned for the
# rather obscure reason that the Python signature code does not distinguish between
# Low-S and High-S values (whereas the bitcoin code has custom code which does so);
# as a result of which, the odds are 50% that the python code will use the right
# value and the transaction will be accepted into the mempool. Until we modify the
# test framework to support low-S signing, we are out of luck.
#
# To get around this issue, we construct transactions which are not signed and which
# spend to OP_TRUE. If the standard-ness rules change, this test would need to be
# updated. (Perhaps to spend to a P2SH OP_TRUE script)
#
tip(76)
block(77)
tx77 = create_and_sign_tx(out[24].tx, out[24].n, 10*COIN)
update_block(77, [tx77])
yield accepted()
save_spendable_output()
block(78)
tx78 = create_tx(tx77, 0, 9*COIN)
update_block(78, [tx78])
yield accepted()
block(79)
tx79 = create_tx(tx78, 0, 8*COIN)
update_block(79, [tx79])
yield accepted()
# mempool should be empty
assert_equal(len(self.nodes[0].getrawmempool()), 0)
tip(77)
block(80, spend=out[25])
yield rejected()
save_spendable_output()
block(81, spend=out[26])
yield rejected() # other chain is same length
save_spendable_output()
block(82, spend=out[27])
yield accepted() # now this chain is longer, triggers re-org
save_spendable_output()
# now check that tx78 and tx79 have been put back into the peer's mempool
mempool = self.nodes[0].getrawmempool()
assert_equal(len(mempool), 2)
assert(tx78.hash in mempool)
assert(tx79.hash in mempool)
# Test invalid opcodes in dead execution paths.
#
# -> b81 (26) -> b82 (27) -> b83 (28)
#
b83 = block(83)
op_codes = [OP_IF, OP_INVALIDOPCODE, OP_ELSE, OP_TRUE, OP_ENDIF]
script = CScript(op_codes)
tx1 = create_and_sign_tx(out[28].tx, out[28].n, out[28].tx.vout[0].nValue, script)
tx2 = create_and_sign_tx(tx1, 0, 0, CScript([OP_TRUE]))
tx2.vin[0].scriptSig = CScript([OP_FALSE])
tx2.rehash()
update_block(83, [tx1, tx2])
yield accepted()
save_spendable_output()
# Reorg on/off blocks that have OP_RETURN in them (and try to spend them)
#
# -> b81 (26) -> b82 (27) -> b83 (28) -> b84 (29) -> b87 (30) -> b88 (31)
# \-> b85 (29) -> b86 (30) \-> b89a (32)
#
#
b84 = block(84)
tx1 = create_tx(out[29].tx, out[29].n, 0, CScript([OP_RETURN]))
tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
tx1.vout.append(CTxOut(0, CScript([OP_TRUE])))
tx1.calc_sha256()
self.sign_tx(tx1, out[29].tx, out[29].n)
tx1.rehash()
tx2 = create_tx(tx1, 1, 0, CScript([OP_RETURN]))
tx2.vout.append(CTxOut(0, CScript([OP_RETURN])))
tx3 = create_tx(tx1, 2, 0, CScript([OP_RETURN]))
tx3.vout.append(CTxOut(0, CScript([OP_TRUE])))
tx4 = create_tx(tx1, 3, 0, CScript([OP_TRUE]))
tx4.vout.append(CTxOut(0, CScript([OP_RETURN])))
tx5 = create_tx(tx1, 4, 0, CScript([OP_RETURN]))
update_block(84, [tx1,tx2,tx3,tx4,tx5])
yield accepted()
save_spendable_output()
tip(83)
block(85, spend=out[29])
yield rejected()
block(86, spend=out[30])
yield accepted()
tip(84)
block(87, spend=out[30])
yield rejected()
save_spendable_output()
block(88, spend=out[31])
yield accepted()
save_spendable_output()
# trying to spend the OP_RETURN output is rejected
block("89a", spend=out[32])
tx = create_tx(tx1, 0, 0, CScript([OP_TRUE]))
update_block("89a", [tx])
yield rejected()
# Test re-org of a week's worth of blocks (1088 blocks)
# This test takes a minute or two and can be accomplished in memory
#
if self.options.runbarelyexpensive:
tip(88)
LARGE_REORG_SIZE = 1088
test1 = TestInstance(sync_every_block=False)
spend=out[32]
for i in range(89, LARGE_REORG_SIZE + 89):
b = block(i, spend)
tx = CTransaction()
script_length = MAX_BLOCK_BASE_SIZE - len(b.serialize()) - 69
script_output = CScript([b'\x00' * script_length])
tx.vout.append(CTxOut(0, script_output))
tx.vin.append(CTxIn(COutPoint(b.vtx[1].sha256, 0)))
b = update_block(i, [tx])
assert_equal(len(b.serialize()), MAX_BLOCK_BASE_SIZE)
test1.blocks_and_transactions.append([self.tip, True])
save_spendable_output()
spend = get_spendable_output()
yield test1
chain1_tip = i
# now create alt chain of same length
tip(88)
test2 = TestInstance(sync_every_block=False)
for i in range(89, LARGE_REORG_SIZE + 89):
block("alt"+str(i))
test2.blocks_and_transactions.append([self.tip, False])
yield test2
# extend alt chain to trigger re-org
block("alt" + str(chain1_tip + 1))
yield accepted()
# ... and re-org back to the first chain
tip(chain1_tip)
block(chain1_tip + 1)
yield rejected()
block(chain1_tip + 2)
yield accepted()
chain1_tip += 2
# Script entry point: run the full-block regression test suite directly.
if __name__ == '__main__':
    FullBlockTest().main()
|
import git
import sys
from collections import defaultdict
from textwrap import wrap
from email.Utils import formatdate
repo = git.Repo('.')
changelog = defaultdict(list)
for id in repo.iter_commits('%s..HEAD' % sys.argv[1]):
commit = repo.commit(id)
changelog[commit.author.name].append(commit.summary)
print 'bash-completion (X.Y)'
print
for author in sorted(changelog.keys()):
print " [ %s ]" % author
for log in changelog[author]:
print '\n'.join(wrap(log, initial_indent=' * ', subsequent_indent=' '))
print
print ' -- David Paleino <d.paleino@gmail.com> ', formatdate(localtime=True)
|
"""
Unittests for exporting to git via management command.
"""
import copy
import os
import shutil
import StringIO
import subprocess
import unittest
from uuid import uuid4
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test.utils import override_settings
from contentstore.tests.utils import CourseTestCase
import contentstore.git_export_utils as git_export_utils
from contentstore.git_export_utils import GitExportError
# Copy of the platform feature flags with git export switched on, used to
# exercise the export code path via override_settings below.
FEATURES_WITH_EXPORT_GIT = settings.FEATURES.copy()
FEATURES_WITH_EXPORT_GIT['ENABLE_EXPORT_GIT'] = True
# Isolated contentstore config: point the doc store at a uniquely named test
# database so concurrent test runs cannot collide with each other.
TEST_DATA_CONTENTSTORE = copy.deepcopy(settings.CONTENTSTORE)
TEST_DATA_CONTENTSTORE['DOC_STORE_CONFIG']['db'] = 'test_xcontent_%s' % uuid4().hex
@override_settings(CONTENTSTORE=TEST_DATA_CONTENTSTORE)
@override_settings(FEATURES=FEATURES_WITH_EXPORT_GIT)
class TestGitExport(CourseTestCase):
    """
    Exercise the git_export django management command with various inputs.
    """
    def setUp(self):
        """
        Create/reinitialize bare repo and folders needed.

        Both the export working directory and the bare destination repo
        are removed again via addCleanup so each test starts clean.
        """
        super(TestGitExport, self).setUp()
        if not os.path.isdir(git_export_utils.GIT_REPO_EXPORT_DIR):
            os.mkdir(git_export_utils.GIT_REPO_EXPORT_DIR)
        self.addCleanup(shutil.rmtree, git_export_utils.GIT_REPO_EXPORT_DIR)
        self.bare_repo_dir = '{0}/data/test_bare.git'.format(
            os.path.abspath(settings.TEST_ROOT))
        if not os.path.isdir(self.bare_repo_dir):
            os.mkdir(self.bare_repo_dir)
        self.addCleanup(shutil.rmtree, self.bare_repo_dir)
        # Initialize a bare git repository to act as the export target.
        subprocess.check_output(['git', '--bare', 'init'],
                                cwd=self.bare_repo_dir)

    def test_command(self):
        """
        Test that the command interface works. Ignore stderr for clean
        test output.
        """
        # Too many positional arguments: command errors and exits with code 1.
        with self.assertRaises(SystemExit) as ex:
            with self.assertRaisesRegexp(CommandError, 'This script requires.*'):
                call_command('git_export', 'blah', 'blah', 'blah',
                             stderr=StringIO.StringIO())
        self.assertEqual(ex.exception.code, 1)
        # Too few arguments: same failure mode.
        with self.assertRaises(SystemExit) as ex:
            with self.assertRaisesRegexp(CommandError, 'This script requires.*'):
                call_command('git_export', stderr=StringIO.StringIO())
        self.assertEqual(ex.exception.code, 1)
        # Send bad url to get course not exported
        with self.assertRaises(SystemExit) as ex:
            with self.assertRaisesRegexp(CommandError, GitExportError.URL_BAD):
                call_command('git_export', 'foo', 'silly',
                             stderr=StringIO.StringIO())
        self.assertEqual(ex.exception.code, 1)

    def test_bad_git_url(self):
        """
        Test several bad URLs for validation
        """
        with self.assertRaisesRegexp(GitExportError, str(GitExportError.URL_BAD)):
            git_export_utils.export_to_git('', 'Sillyness')
        with self.assertRaisesRegexp(GitExportError, str(GitExportError.URL_BAD)):
            git_export_utils.export_to_git('', 'example.com:edx/notreal')
        # An http URL without embedded auth is rejected with URL_NO_AUTH.
        with self.assertRaisesRegexp(GitExportError,
                                     str(GitExportError.URL_NO_AUTH)):
            git_export_utils.export_to_git('', 'http://blah')

    def test_bad_git_repos(self):
        """
        Test invalid git repos
        """
        test_repo_path = '{}/test_repo'.format(git_export_utils.GIT_REPO_EXPORT_DIR)
        self.assertFalse(os.path.isdir(test_repo_path))
        # Test bad clones
        with self.assertRaisesRegexp(GitExportError,
                                     str(GitExportError.CANNOT_PULL)):
            git_export_utils.export_to_git(
                'foo/blah/100',
                'https://user:blah@example.com/test_repo.git')
        # A failed clone must not leave a partial working directory behind.
        self.assertFalse(os.path.isdir(test_repo_path))
        # Setup good repo with bad course to test xml export
        with self.assertRaisesRegexp(GitExportError,
                                     str(GitExportError.XML_EXPORT_FAIL)):
            git_export_utils.export_to_git(
                'foo/blah/100',
                'file://{0}'.format(self.bare_repo_dir))
        # Test bad git remote after successful clone
        with self.assertRaisesRegexp(GitExportError,
                                     str(GitExportError.CANNOT_PULL)):
            git_export_utils.export_to_git(
                'foo/blah/100',
                'https://user:blah@example.com/r.git')

    def test_bad_course_id(self):
        """
        Test valid git url, but bad course.
        """
        with self.assertRaisesRegexp(GitExportError, str(GitExportError.BAD_COURSE)):
            git_export_utils.export_to_git(
                '', 'file://{0}'.format(self.bare_repo_dir), '', '/blah')

    @unittest.skipIf(os.environ.get('GIT_CONFIG') or
                     os.environ.get('GIT_AUTHOR_EMAIL') or
                     os.environ.get('GIT_AUTHOR_NAME') or
                     os.environ.get('GIT_COMMITTER_EMAIL') or
                     os.environ.get('GIT_COMMITTER_NAME'),
                     'Global git override set')
    def test_git_ident(self):
        """
        Test valid course with and without user specified.
        Test skipped if git global config override environment variable GIT_CONFIG
        is set.
        """
        # Export with a username that has no matching user; the assertion below
        # expects the default export identity to be used for the commit.
        git_export_utils.export_to_git(
            self.course.id,
            'file://{0}'.format(self.bare_repo_dir),
            'enigma'
        )
        expect_string = '{0}|{1}\n'.format(
            git_export_utils.GIT_EXPORT_DEFAULT_IDENT['name'],
            git_export_utils.GIT_EXPORT_DEFAULT_IDENT['email']
        )
        cwd = os.path.abspath(git_export_utils.GIT_REPO_EXPORT_DIR / 'test_bare')
        git_log = subprocess.check_output(['git', 'log', '-1',
                                           '--format=%an|%ae'], cwd=cwd)
        self.assertEqual(expect_string, git_log)
        # Make changes to course so there is something to commit
        self.populate_course()
        git_export_utils.export_to_git(
            self.course.id,
            'file://{0}'.format(self.bare_repo_dir),
            self.user.username
        )
        # With a real username supplied, the commit is attributed to that user.
        expect_string = '{0}|{1}\n'.format(
            self.user.username,
            self.user.email,
        )
        git_log = subprocess.check_output(
            ['git', 'log', '-1', '--format=%an|%ae'], cwd=cwd)
        self.assertEqual(expect_string, git_log)

    def test_no_change(self):
        """
        Test response if there are no changes
        """
        git_export_utils.export_to_git(
            'i4x://{0}'.format(self.course.id),
            'file://{0}'.format(self.bare_repo_dir)
        )
        # A second export with nothing new to commit raises CANNOT_COMMIT.
        with self.assertRaisesRegexp(GitExportError,
                                     str(GitExportError.CANNOT_COMMIT)):
            git_export_utils.export_to_git(
                self.course.id, 'file://{0}'.format(self.bare_repo_dir))
|
"""
Implementation of DOM Level 2 CDATASection interface
WWW: http://4suite.com/4DOM e-mail: support@4suite.com
Copyright (c) 2000 Fourthought Inc, USA. All Rights Reserved.
See http://4suite.com/COPYRIGHT for license and copyright information
"""
from xml.dom import Node
from Text import Text
class CDATASection(Text):
    """DOM Level 2 CDATASection node, implemented as a Text subclass."""
    nodeType = Node.CDATA_SECTION_NODE

    def __init__(self, ownerDocument, data):
        Text.__init__(self, ownerDocument, data)
        # Assign through __dict__ so the key is literally '__nodeName'
        # (no class-private name mangling) and any __setattr__ hook on the
        # base class is bypassed.
        self.__dict__['__nodeName'] = "#cdata-section"
|
import mxnet as mx
from mxnet.test_utils import *
from data import get_avazu_data
from linear_model import *
import argparse
import os
# Command-line interface for the sparse linear classification training script.
parser = argparse.ArgumentParser(description="Run sparse linear classification " \
                                 "with distributed kvstore",
                                 formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--num-epoch', type=int, default=5,
                    help='number of epochs to train')
parser.add_argument('--batch-size', type=int, default=8192,
                    help='number of examples per batch')
parser.add_argument('--kvstore', type=str, default=None,
                    help='what kvstore to use',
                    choices=["dist_async", "local"])
parser.add_argument('--optimizer', type=str, default='ftrl',
                    help='what optimizer to use',
                    choices=["ftrl", "sgd", "adam"])

# Avazu dataset file names, download location, and feature-vector width
# (consumed by get_avazu_data() and the LibSVMIter construction below).
AVAZU = {
    'train': 'avazu-app',
    'test': 'avazu-app.t',
    'url': "https://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/binary/",
    # 1000000 + 1 since LibSVMIter uses zero-based indexing
    'num_features': 1000001,
}
if __name__ == '__main__':
    import logging
    head = '%(asctime)-15s %(message)s'
    logging.basicConfig(level=logging.INFO, format=head)
    # arg parser
    args = parser.parse_args()
    logging.info(args)
    num_epoch = args.num_epoch
    kvstore = args.kvstore
    batch_size = args.batch_size
    optimizer = args.optimizer
    # create kvstore (None means single-process, non-distributed training)
    kv = mx.kvstore.create(kvstore) if kvstore else None
    rank = kv.rank if kv else 0
    num_worker = kv.num_workers if kv else 1
    # dataset: download train/test files if not already present
    num_features = AVAZU['num_features']
    data_dir = os.path.join(os.getcwd(), 'data')
    train_data = os.path.join(data_dir, AVAZU['train'])
    val_data = os.path.join(data_dir, AVAZU['test'])
    get_avazu_data(data_dir, AVAZU['train'], AVAZU['url'])
    get_avazu_data(data_dir, AVAZU['test'], AVAZU['url'])
    # data iterator: each worker reads its own shard (num_parts/part_index)
    train_data = mx.io.LibSVMIter(data_libsvm=train_data, data_shape=(num_features,),
                                  batch_size=batch_size, num_parts=num_worker,
                                  part_index=rank)
    eval_data = mx.io.LibSVMIter(data_libsvm=val_data, data_shape=(num_features,),
                                 batch_size=batch_size)
    # model
    # The positive class weight, says how much more we should upweight the importance of
    # positive instances in the objective function.
    # This is used to combat the extreme class imbalance.
    positive_class_weight = 2
    model = linear_model(num_features, positive_class_weight)
    # module
    mod = mx.mod.Module(symbol=model, data_names=['data'], label_names=['softmax_label'])
    mod.bind(data_shapes=train_data.provide_data, label_shapes=train_data.provide_label)
    mod.init_params()
    # rescale_grad averages gradients over the global batch across workers
    optim = mx.optimizer.create(optimizer, learning_rate=0.01, rescale_grad=1.0/batch_size/num_worker)
    mod.init_optimizer(optimizer=optim, kvstore=kv)
    # use negative log-likelihood as the metric
    metric = mx.metric.create(['nll_loss'])
    # get the sparse weight parameter
    weight_index = mod._exec_group.param_names.index('weight')
    weight_param = mod._exec_group.param_arrays[weight_index]
    all_row_ids = mx.nd.arange(0, num_features, dtype='int64')
    speedometer = mx.callback.Speedometer(batch_size, 100)
    logging.info('Training started ...')
    for epoch in range(num_epoch):
        nbatch = 0
        metric.reset()
        for batch in train_data:
            nbatch += 1
            # for distributed training, we need to manually pull sparse weights from kvstore
            if kv:
                row_ids = batch.data[0].indices
                kv.row_sparse_pull('weight', weight_param, row_ids=[row_ids],
                                   priority=-weight_index)
            mod.forward_backward(batch)
            # update all parameters (including the weight parameter)
            mod.update()
            # update training metric
            mod.update_metric(metric, batch.label)
            speedometer_param = mx.model.BatchEndParam(epoch=epoch, nbatch=nbatch,
                                                       eval_metric=metric, locals=locals())
            speedometer(speedometer_param)
        # pull all rows before making a checkpoint
        if kv:
            kv.row_sparse_pull('weight', weight_param, row_ids=[all_row_ids],
                               priority=-weight_index)
        # evaluate metric on validation dataset
        score = mod.score(eval_data, ['nll_loss'])
        logging.info('epoch %d, eval nll = %s ' % (epoch, score[0][1]))
        # optimizer state is only saved locally when not using a dist kvstore
        save_optimizer_states = 'dist' not in kv.type if kv else True
        mod.save_checkpoint("checkpoint", epoch, save_optimizer_states=save_optimizer_states)
        # reset the iterator for next pass of data
        train_data.reset()
        eval_data.reset()
    logging.info('Training completed.')
|
# Public API of the package: submodule names, dispatcher entry points,
# plugin helpers, and the sentinel/signal classes re-exported by the
# imports that follow this list.
__all__ = [
    'dispatcher',
    'error',
    'plugin',
    'robustapply',
    'saferef',
    'sender',
    'signal',
    'version',
    'connect',
    'disconnect',
    'get_all_receivers',
    'reset',
    'send',
    'send_exact',
    'send_minimal',
    'send_robust',
    'install_plugin',
    'remove_plugin',
    'Plugin',
    'QtWidgetPlugin',
    'TwistedDispatchPlugin',
    'Anonymous',
    'Any',
    'All',
    'Signal',
    ]
import louie.dispatcher, louie.error, louie.plugin, louie.robustapply, \
louie.saferef, louie.sender, louie.signal, louie.version
from louie.dispatcher import \
connect, disconnect, get_all_receivers, reset, \
send, send_exact, send_minimal, send_robust
from louie.plugin import \
install_plugin, remove_plugin, Plugin, \
QtWidgetPlugin, TwistedDispatchPlugin
from louie.sender import Anonymous, Any
from louie.signal import All, Signal
|
from datetime import datetime
from boto.resultset import ResultSet
class Stack:
    """A CloudFormation stack, populated incrementally by boto's SAX parser.

    ``startElement``/``endElement`` are callbacks invoked while parsing a
    ``DescribeStacks`` response; elements without an explicit mapping are
    stored verbatim via ``setattr``.
    """

    def __init__(self, connection=None):
        self.connection = connection
        self.creation_time = None
        self.description = None
        self.disable_rollback = None
        self.notification_arns = []
        self.outputs = []
        self.parameters = []
        self.stack_id = None
        self.stack_status = None
        self.stack_name = None
        # Bug fix: endElement() fills in stack_status_reason, but this
        # attribute was previously misspelled here as ``stack_name_reason``,
        # leaving stack_status_reason undefined until the parser ran.
        self.stack_status_reason = None
        # Kept so code that read the old (misspelled) attribute still works.
        self.stack_name_reason = None
        self.timeout_in_minutes = None

    def startElement(self, name, attrs, connection):
        # Container elements get a ResultSet so their <member> children are
        # parsed into Parameter/Output instances.
        if name == "Parameters":
            self.parameters = ResultSet([('member', Parameter)])
            return self.parameters
        elif name == "Outputs":
            self.outputs = ResultSet([('member', Output)])
            return self.outputs
        else:
            return None

    def endElement(self, name, value, connection):
        if name == 'CreationTime':
            self.creation_time = datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ')
        elif name == "Description":
            self.description = value
        elif name == "DisableRollback":
            # Bug fix: the API sends the strings 'true'/'false'; bool(value)
            # was True for ANY non-empty string, including 'false'.
            self.disable_rollback = str(value).lower() == 'true'
        elif name == "NotificationARNs":
            self.notification_arns = value
        elif name == 'StackId':
            self.stack_id = value
        elif name == 'StackName':
            self.stack_name = value
        elif name == 'StackStatus':
            self.stack_status = value
        elif name == "StackStatusReason":
            self.stack_status_reason = value
        elif name == "TimeoutInMinutes":
            self.timeout_in_minutes = int(value)
        elif name == "member":
            pass
        else:
            setattr(self, name, value)

    def delete(self):
        """Delete this stack via the service connection."""
        return self.connection.delete_stack(stack_name_or_id=self.stack_id)

    def describe_events(self, next_token=None):
        """Return the event history for this stack."""
        return self.connection.describe_stack_events(
            stack_name_or_id=self.stack_id,
            next_token=next_token
        )

    def describe_resource(self, logical_resource_id):
        """Describe a single resource of this stack by its logical id."""
        return self.connection.describe_stack_resource(
            stack_name_or_id=self.stack_id,
            logical_resource_id=logical_resource_id
        )

    def describe_resources(self, logical_resource_id=None,
                           physical_resource_id=None):
        """Describe this stack's resources, optionally filtered by id."""
        return self.connection.describe_stack_resources(
            stack_name_or_id=self.stack_id,
            logical_resource_id=logical_resource_id,
            physical_resource_id=physical_resource_id
        )

    def list_resources(self, next_token=None):
        """List this stack's resources (paginated via next_token)."""
        return self.connection.list_stack_resources(
            stack_name_or_id=self.stack_id,
            next_token=next_token
        )

    def update(self):
        """Refresh this object in place from a DescribeStacks call."""
        rs = self.connection.describe_stacks(self.stack_id)
        if len(rs) == 1 and rs[0].stack_id == self.stack_id:
            self.__dict__.update(rs[0].__dict__)
        else:
            raise ValueError("%s is not a valid Stack ID or Name" %
                             self.stack_id)

    def get_template(self):
        """Return the template body for this stack."""
        return self.connection.get_template(stack_name_or_id=self.stack_id)
class StackSummary:
    """Summary information about a stack, as returned by ListStacks."""

    # XML element name -> attribute name for plain string fields.
    _STRING_FIELDS = {
        'StackId': 'stack_id',
        'StackStatus': 'stack_status',
        'StackName': 'stack_name',
        'TemplateDescription': 'template_description',
    }
    # XML element name -> attribute name for ISO-8601 timestamp fields.
    _TIME_FIELDS = {
        'CreationTime': 'creation_time',
        'DeletionTime': 'deletion_time',
    }

    def __init__(self, connection=None):
        self.connection = connection
        self.stack_id = None
        self.stack_status = None
        self.stack_name = None
        self.creation_time = None
        self.deletion_time = None
        self.template_description = None

    def startElement(self, name, attrs, connection):
        # A stack summary has no nested container elements.
        return None

    def endElement(self, name, value, connection):
        if name in self._STRING_FIELDS:
            setattr(self, self._STRING_FIELDS[name], value)
        elif name in self._TIME_FIELDS:
            parsed = datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ')
            setattr(self, self._TIME_FIELDS[name], parsed)
        elif name == 'member':
            pass
        else:
            setattr(self, name, value)
class Parameter:
    """A stack template parameter (ParameterKey/ParameterValue pair)."""

    def __init__(self, connection=None):
        # Bug fix: the ``connection`` argument was previously discarded
        # (``self.connection = None``), unlike every other class in this file.
        self.connection = connection
        self.key = None
        self.value = None

    def startElement(self, name, attrs, connection):
        # Parameters contain no nested container elements.
        return None

    def endElement(self, name, value, connection):
        if name == "ParameterKey":
            self.key = value
        elif name == "ParameterValue":
            self.value = value
        else:
            setattr(self, name, value)

    def __repr__(self):
        return "Parameter:\"%s\"=\"%s\"" % (self.key, self.value)
class Output:
    """A single stack output (OutputKey/OutputValue pair plus description)."""

    # XML element name -> attribute name; unknown names fall through as-is.
    _FIELDS = {
        'Description': 'description',
        'OutputKey': 'key',
        'OutputValue': 'value',
    }

    def __init__(self, connection=None):
        self.connection = connection
        self.description = None
        self.key = None
        self.value = None

    def startElement(self, name, attrs, connection):
        # Outputs contain no nested container elements.
        return None

    def endElement(self, name, value, connection):
        setattr(self, self._FIELDS.get(name, name), value)

    def __repr__(self):
        return 'Output:"%s"="%s"' % (self.key, self.value)
class StackResource:
    """A single resource belonging to a CloudFormation stack."""

    # XML element name -> attribute name for plain string fields.
    _FIELDS = {
        'Description': 'description',
        'LogicalResourceId': 'logical_resource_id',
        'PhysicalResourceId': 'physical_resource_id',
        'ResourceStatus': 'resource_status',
        'ResourceStatusReason': 'resource_status_reason',
        'ResourceType': 'resource_type',
        'StackId': 'stack_id',
        'StackName': 'stack_name',
    }

    def __init__(self, connection=None):
        self.connection = connection
        self.description = None
        self.logical_resource_id = None
        self.physical_resource_id = None
        self.resource_status = None
        self.resource_status_reason = None
        self.resource_type = None
        self.stack_id = None
        self.stack_name = None
        self.timestamp = None

    def startElement(self, name, attrs, connection):
        # Stack resources contain no nested container elements.
        return None

    def endElement(self, name, value, connection):
        if name == 'Timestamp':
            # Timestamps arrive as ISO-8601 strings with a literal 'Z'.
            self.timestamp = datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ')
        else:
            setattr(self, self._FIELDS.get(name, name), value)

    def __repr__(self):
        return 'StackResource:%s (%s)' % (self.logical_resource_id,
                                          self.resource_type)
class StackResourceSummary:
    """Summary of a stack resource, as returned by ListStackResources."""

    def __init__(self, connection=None):
        self.connection = connection
        self.last_updated_timestamp = None
        self.logical_resource_id = None
        self.physical_resource_id = None
        self.resource_status = None
        self.resource_status_reason = None
        self.resource_type = None

    def startElement(self, name, attrs, connection):
        # Resource summaries contain no nested container elements.
        return None

    def endElement(self, name, value, connection):
        if name == "LastUpdatedTimestamp":
            # Bug fix: this previously assigned a misspelled attribute
            # (``last_updated_timestampe``), so ``last_updated_timestamp``
            # was never populated from the response.
            self.last_updated_timestamp = datetime.strptime(
                value, '%Y-%m-%dT%H:%M:%SZ')
        elif name == "LogicalResourceId":
            self.logical_resource_id = value
        elif name == "PhysicalResourceId":
            self.physical_resource_id = value
        elif name == "ResourceStatus":
            self.resource_status = value
        elif name == "ResourceStatusReason":
            self.resource_status_reason = value
        elif name == "ResourceType":
            self.resource_type = value
        else:
            setattr(self, name, value)

    def __repr__(self):
        return "StackResourceSummary:%s (%s)" % (self.logical_resource_id,
                                                 self.resource_type)
class StackEvent:
    """One event from a stack's event stream (DescribeStackEvents)."""

    valid_states = ("CREATE_IN_PROGRESS", "CREATE_FAILED", "CREATE_COMPLETE",
            "DELETE_IN_PROGRESS", "DELETE_FAILED", "DELETE_COMPLETE")

    # XML element name -> attribute name for plain string fields.
    _FIELDS = {
        'EventId': 'event_id',
        'LogicalResourceId': 'logical_resource_id',
        'PhysicalResourceId': 'physical_resource_id',
        'ResourceProperties': 'resource_properties',
        'ResourceStatus': 'resource_status',
        'ResourceStatusReason': 'resource_status_reason',
        'ResourceType': 'resource_type',
        'StackId': 'stack_id',
        'StackName': 'stack_name',
    }

    def __init__(self, connection=None):
        self.connection = connection
        self.event_id = None
        self.logical_resource_id = None
        self.physical_resource_id = None
        self.resource_properties = None
        self.resource_status = None
        self.resource_status_reason = None
        self.resource_type = None
        self.stack_id = None
        self.stack_name = None
        self.timestamp = None

    def startElement(self, name, attrs, connection):
        # Events contain no nested container elements.
        return None

    def endElement(self, name, value, connection):
        if name == 'Timestamp':
            # Timestamps arrive as ISO-8601 strings with a literal 'Z'.
            self.timestamp = datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ')
        else:
            setattr(self, self._FIELDS.get(name, name), value)

    def __repr__(self):
        return 'StackEvent %s %s %s' % (self.resource_type,
                                        self.logical_resource_id,
                                        self.resource_status)
|
import datetime
import decimal
import enum
import functools
import math
import os
import re
import uuid
from unittest import mock
import custom_migration_operations.more_operations
import custom_migration_operations.operations
from django import get_version
from django.conf import SettingsReference, settings
from django.core.validators import EmailValidator, RegexValidator
from django.db import migrations, models
from django.db.migrations.serializer import BaseSerializer
from django.db.migrations.writer import MigrationWriter, OperationWriter
from django.test import SimpleTestCase
from django.utils.deconstruct import deconstructible
from django.utils.functional import SimpleLazyObject
from django.utils.timezone import get_default_timezone, get_fixed_timezone, utc
from django.utils.translation import gettext_lazy as _
from .models import FoodManager, FoodQuerySet
class Money(decimal.Decimal):
    """Decimal subclass used to exercise serialization of custom types.

    ``deconstruct`` follows Django's deconstruction protocol: it returns the
    dotted import path of the class, the positional args, and the keyword
    args needed to recreate the value.
    """

    def deconstruct(self):
        klass = self.__class__
        dotted_path = '{}.{}'.format(klass.__module__, klass.__name__)
        return (dotted_path, [str(self)], {})
class TestModel1:
    # Fixture used by WriterTests.test_serialize_unbound_method_reference and
    # test_register_non_serializer; the class and method names are part of the
    # expected serialized output, so they must not be renamed.
    def upload_to(self):
        return '/somewhere/dynamic/'
    thing = models.FileField(upload_to=upload_to)
class OperationWriterTests(SimpleTestCase):
    """
    Tests OperationWriter: serializing a single migration operation (with
    positional, keyword, and nested arguments) into indented source code plus
    the set of import statements that code requires.
    """
    def test_empty_signature(self):
        operation = custom_migration_operations.operations.TestOperation()
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {'import custom_migration_operations.operations'})
        self.assertEqual(
            buff,
            'custom_migration_operations.operations.TestOperation(\n'
            '),'
        )
    def test_args_signature(self):
        operation = custom_migration_operations.operations.ArgsOperation(1, 2)
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {'import custom_migration_operations.operations'})
        self.assertEqual(
            buff,
            'custom_migration_operations.operations.ArgsOperation(\n'
            '    arg1=1,\n'
            '    arg2=2,\n'
            '),'
        )
    def test_kwargs_signature(self):
        operation = custom_migration_operations.operations.KwargsOperation(kwarg1=1)
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {'import custom_migration_operations.operations'})
        self.assertEqual(
            buff,
            'custom_migration_operations.operations.KwargsOperation(\n'
            '    kwarg1=1,\n'
            '),'
        )
    def test_args_kwargs_signature(self):
        operation = custom_migration_operations.operations.ArgsKwargsOperation(1, 2, kwarg2=4)
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {'import custom_migration_operations.operations'})
        self.assertEqual(
            buff,
            'custom_migration_operations.operations.ArgsKwargsOperation(\n'
            '    arg1=1,\n'
            '    arg2=2,\n'
            '    kwarg2=4,\n'
            '),'
        )
    def test_nested_args_signature(self):
        # Operations passed as arguments to other operations must be expanded
        # recursively with one extra level of indentation.
        operation = custom_migration_operations.operations.ArgsOperation(
            custom_migration_operations.operations.ArgsOperation(1, 2),
            custom_migration_operations.operations.KwargsOperation(kwarg1=3, kwarg2=4)
        )
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {'import custom_migration_operations.operations'})
        self.assertEqual(
            buff,
            'custom_migration_operations.operations.ArgsOperation(\n'
            '    arg1=custom_migration_operations.operations.ArgsOperation(\n'
            '        arg1=1,\n'
            '        arg2=2,\n'
            '    ),\n'
            '    arg2=custom_migration_operations.operations.KwargsOperation(\n'
            '        kwarg1=3,\n'
            '        kwarg2=4,\n'
            '    ),\n'
            '),'
        )
    def test_multiline_args_signature(self):
        # Embedded newlines must be escaped, not written literally.
        operation = custom_migration_operations.operations.ArgsOperation("test\n    arg1", "test\narg2")
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {'import custom_migration_operations.operations'})
        self.assertEqual(
            buff,
            "custom_migration_operations.operations.ArgsOperation(\n"
            "    arg1='test\\n    arg1',\n"
            "    arg2='test\\narg2',\n"
            "),"
        )
    def test_expand_args_signature(self):
        operation = custom_migration_operations.operations.ExpandArgsOperation([1, 2])
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {'import custom_migration_operations.operations'})
        self.assertEqual(
            buff,
            'custom_migration_operations.operations.ExpandArgsOperation(\n'
            '    arg=[\n'
            '        1,\n'
            '        2,\n'
            '    ],\n'
            '),'
        )
    def test_nested_operation_expand_args_signature(self):
        operation = custom_migration_operations.operations.ExpandArgsOperation(
            arg=[
                custom_migration_operations.operations.KwargsOperation(
                    kwarg1=1,
                    kwarg2=2,
                ),
            ]
        )
        buff, imports = OperationWriter(operation, indentation=0).serialize()
        self.assertEqual(imports, {'import custom_migration_operations.operations'})
        self.assertEqual(
            buff,
            'custom_migration_operations.operations.ExpandArgsOperation(\n'
            '    arg=[\n'
            '        custom_migration_operations.operations.KwargsOperation(\n'
            '            kwarg1=1,\n'
            '            kwarg2=2,\n'
            '        ),\n'
            '    ],\n'
            '),'
        )
class WriterTests(SimpleTestCase):
    """
    Tests the migration writer (makes migration files from Migration instances)
    """
    def safe_exec(self, string, value=None):
        # Execute generated migration source; turn any error into a test
        # failure with a message that includes the offending code.
        d = {}
        try:
            exec(string, globals(), d)
        except Exception as e:
            if value:
                self.fail("Could not exec %r (from value %r): %s" % (string.strip(), value, e))
            else:
                self.fail("Could not exec %r: %s" % (string.strip(), e))
        return d
    def serialize_round_trip(self, value):
        # Serialize `value`, then exec the serialized form (with its imports)
        # to obtain an equivalent object back.
        string, imports = MigrationWriter.serialize(value)
        return self.safe_exec("%s\ntest_value_result = %s" % ("\n".join(imports), string), value)['test_value_result']
    def assertSerializedEqual(self, value):
        self.assertEqual(self.serialize_round_trip(value), value)
    def assertSerializedResultEqual(self, value, target):
        # `target` is the exact (string, imports-set) pair expected.
        self.assertEqual(MigrationWriter.serialize(value), target)
    def assertSerializedFieldEqual(self, value):
        new_value = self.serialize_round_trip(value)
        self.assertEqual(value.__class__, new_value.__class__)
        self.assertEqual(value.max_length, new_value.max_length)
        self.assertEqual(value.null, new_value.null)
        self.assertEqual(value.unique, new_value.unique)
    def test_serialize_numbers(self):
        self.assertSerializedEqual(1)
        self.assertSerializedEqual(1.2)
        self.assertTrue(math.isinf(self.serialize_round_trip(float("inf"))))
        self.assertTrue(math.isinf(self.serialize_round_trip(float("-inf"))))
        self.assertTrue(math.isnan(self.serialize_round_trip(float("nan"))))
        self.assertSerializedEqual(decimal.Decimal('1.3'))
        self.assertSerializedResultEqual(
            decimal.Decimal('1.3'),
            ("Decimal('1.3')", {'from decimal import Decimal'})
        )
        self.assertSerializedEqual(Money('1.3'))
        self.assertSerializedResultEqual(
            Money('1.3'),
            ("migrations.test_writer.Money('1.3')", {'import migrations.test_writer'})
        )
    def test_serialize_constants(self):
        self.assertSerializedEqual(None)
        self.assertSerializedEqual(True)
        self.assertSerializedEqual(False)
    def test_serialize_strings(self):
        self.assertSerializedEqual(b"foobar")
        string, imports = MigrationWriter.serialize(b"foobar")
        self.assertEqual(string, "b'foobar'")
        self.assertSerializedEqual("föobár")
        string, imports = MigrationWriter.serialize("foobar")
        self.assertEqual(string, "'foobar'")
    def test_serialize_multiline_strings(self):
        self.assertSerializedEqual(b"foo\nbar")
        string, imports = MigrationWriter.serialize(b"foo\nbar")
        self.assertEqual(string, "b'foo\\nbar'")
        self.assertSerializedEqual("föo\nbár")
        string, imports = MigrationWriter.serialize("foo\nbar")
        self.assertEqual(string, "'foo\\nbar'")
    def test_serialize_collections(self):
        self.assertSerializedEqual({1: 2})
        self.assertSerializedEqual(["a", 2, True, None])
        self.assertSerializedEqual({2, 3, "eighty"})
        self.assertSerializedEqual({"lalalala": ["yeah", "no", "maybe"]})
        self.assertSerializedEqual(_('Hello'))
    def test_serialize_builtin_types(self):
        self.assertSerializedEqual([list, tuple, dict, set, frozenset])
        self.assertSerializedResultEqual(
            [list, tuple, dict, set, frozenset],
            ("[list, tuple, dict, set, frozenset]", set())
        )
    def test_serialize_lazy_objects(self):
        pattern = re.compile(r'^foo$')
        lazy_pattern = SimpleLazyObject(lambda: pattern)
        self.assertEqual(self.serialize_round_trip(lazy_pattern), pattern)
    def test_serialize_enums(self):
        class TextEnum(enum.Enum):
            A = 'a-value'
            B = 'value-b'
        class BinaryEnum(enum.Enum):
            A = b'a-value'
            B = b'value-b'
        class IntEnum(enum.IntEnum):
            A = 1
            B = 2
        self.assertSerializedResultEqual(
            TextEnum.A,
            ("migrations.test_writer.TextEnum('a-value')", {'import migrations.test_writer'})
        )
        self.assertSerializedResultEqual(
            BinaryEnum.A,
            ("migrations.test_writer.BinaryEnum(b'a-value')", {'import migrations.test_writer'})
        )
        self.assertSerializedResultEqual(
            IntEnum.B,
            ("migrations.test_writer.IntEnum(2)", {'import migrations.test_writer'})
        )
        field = models.CharField(default=TextEnum.B, choices=[(m.value, m) for m in TextEnum])
        string = MigrationWriter.serialize(field)[0]
        self.assertEqual(
            string,
            "models.CharField(choices=["
            "('a-value', migrations.test_writer.TextEnum('a-value')), "
            "('value-b', migrations.test_writer.TextEnum('value-b'))], "
            "default=migrations.test_writer.TextEnum('value-b'))"
        )
        field = models.CharField(default=BinaryEnum.B, choices=[(m.value, m) for m in BinaryEnum])
        string = MigrationWriter.serialize(field)[0]
        self.assertEqual(
            string,
            "models.CharField(choices=["
            "(b'a-value', migrations.test_writer.BinaryEnum(b'a-value')), "
            "(b'value-b', migrations.test_writer.BinaryEnum(b'value-b'))], "
            "default=migrations.test_writer.BinaryEnum(b'value-b'))"
        )
        field = models.IntegerField(default=IntEnum.A, choices=[(m.value, m) for m in IntEnum])
        string = MigrationWriter.serialize(field)[0]
        self.assertEqual(
            string,
            "models.IntegerField(choices=["
            "(1, migrations.test_writer.IntEnum(1)), "
            "(2, migrations.test_writer.IntEnum(2))], "
            "default=migrations.test_writer.IntEnum(1))"
        )
    def test_serialize_uuid(self):
        self.assertSerializedEqual(uuid.uuid1())
        self.assertSerializedEqual(uuid.uuid4())
        uuid_a = uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8')
        uuid_b = uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2')
        self.assertSerializedResultEqual(
            uuid_a,
            ("uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8')", {'import uuid'})
        )
        self.assertSerializedResultEqual(
            uuid_b,
            ("uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2')", {'import uuid'})
        )
        field = models.UUIDField(choices=((uuid_a, 'UUID A'), (uuid_b, 'UUID B')), default=uuid_a)
        string = MigrationWriter.serialize(field)[0]
        self.assertEqual(
            string,
            "models.UUIDField(choices=["
            "(uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'), 'UUID A'), "
            "(uuid.UUID('c7853ec1-2ea3-4359-b02d-b54e8f1bcee2'), 'UUID B')], "
            "default=uuid.UUID('5c859437-d061-4847-b3f7-e6b78852f8c8'))"
        )
    def test_serialize_functions(self):
        with self.assertRaisesMessage(ValueError, 'Cannot serialize function: lambda'):
            self.assertSerializedEqual(lambda x: 42)
        self.assertSerializedEqual(models.SET_NULL)
        string, imports = MigrationWriter.serialize(models.SET(42))
        self.assertEqual(string, 'models.SET(42)')
        self.serialize_round_trip(models.SET(42))
    def test_serialize_datetime(self):
        self.assertSerializedEqual(datetime.datetime.utcnow())
        self.assertSerializedEqual(datetime.datetime.utcnow)
        self.assertSerializedEqual(datetime.datetime.today())
        self.assertSerializedEqual(datetime.datetime.today)
        self.assertSerializedEqual(datetime.date.today())
        self.assertSerializedEqual(datetime.date.today)
        self.assertSerializedEqual(datetime.datetime.now().time())
        self.assertSerializedEqual(datetime.datetime(2014, 1, 1, 1, 1, tzinfo=get_default_timezone()))
        self.assertSerializedEqual(datetime.datetime(2013, 12, 31, 22, 1, tzinfo=get_fixed_timezone(180)))
        self.assertSerializedResultEqual(
            datetime.datetime(2014, 1, 1, 1, 1),
            ("datetime.datetime(2014, 1, 1, 1, 1)", {'import datetime'})
        )
        self.assertSerializedResultEqual(
            datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc),
            (
                "datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc)",
                {'import datetime', 'from django.utils.timezone import utc'},
            )
        )
    def test_serialize_fields(self):
        self.assertSerializedFieldEqual(models.CharField(max_length=255))
        self.assertSerializedResultEqual(
            models.CharField(max_length=255),
            ("models.CharField(max_length=255)", {"from django.db import models"})
        )
        self.assertSerializedFieldEqual(models.TextField(null=True, blank=True))
        self.assertSerializedResultEqual(
            models.TextField(null=True, blank=True),
            ("models.TextField(blank=True, null=True)", {'from django.db import models'})
        )
    def test_serialize_settings(self):
        self.assertSerializedEqual(SettingsReference(settings.AUTH_USER_MODEL, "AUTH_USER_MODEL"))
        self.assertSerializedResultEqual(
            SettingsReference("someapp.model", "AUTH_USER_MODEL"),
            ("settings.AUTH_USER_MODEL", {"from django.conf import settings"})
        )
    def test_serialize_iterators(self):
        self.assertSerializedResultEqual(
            ((x, x * x) for x in range(3)),
            ("((0, 0), (1, 1), (2, 4))", set())
        )
    def test_serialize_compiled_regex(self):
        """
        Make sure compiled regex can be serialized.
        """
        regex = re.compile(r'^\w+$')
        self.assertSerializedEqual(regex)
    def test_serialize_class_based_validators(self):
        """
        Ticket #22943: Test serialization of class-based validators, including
        compiled regexes.
        """
        validator = RegexValidator(message="hello")
        string = MigrationWriter.serialize(validator)[0]
        self.assertEqual(string, "django.core.validators.RegexValidator(message='hello')")
        self.serialize_round_trip(validator)
        # Test with a compiled regex.
        validator = RegexValidator(regex=re.compile(r'^\w+$'))
        string = MigrationWriter.serialize(validator)[0]
        self.assertEqual(string, "django.core.validators.RegexValidator(regex=re.compile('^\\\\w+$'))")
        self.serialize_round_trip(validator)
        # Test a string regex with flag
        validator = RegexValidator(r'^[0-9]+$', flags=re.S)
        string = MigrationWriter.serialize(validator)[0]
        self.assertEqual(string, "django.core.validators.RegexValidator('^[0-9]+$', flags=re.RegexFlag(16))")
        self.serialize_round_trip(validator)
        # Test message and code
        validator = RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', 'invalid')
        string = MigrationWriter.serialize(validator)[0]
        self.assertEqual(string, "django.core.validators.RegexValidator('^[-a-zA-Z0-9_]+$', 'Invalid', 'invalid')")
        self.serialize_round_trip(validator)
        # Test with a subclass.
        validator = EmailValidator(message="hello")
        string = MigrationWriter.serialize(validator)[0]
        self.assertEqual(string, "django.core.validators.EmailValidator(message='hello')")
        self.serialize_round_trip(validator)
        validator = deconstructible(path="migrations.test_writer.EmailValidator")(EmailValidator)(message="hello")
        string = MigrationWriter.serialize(validator)[0]
        self.assertEqual(string, "migrations.test_writer.EmailValidator(message='hello')")
        validator = deconstructible(path="custom.EmailValidator")(EmailValidator)(message="hello")
        with self.assertRaisesMessage(ImportError, "No module named 'custom'"):
            MigrationWriter.serialize(validator)
        validator = deconstructible(path="django.core.validators.EmailValidator2")(EmailValidator)(message="hello")
        with self.assertRaisesMessage(ValueError, "Could not find object EmailValidator2 in django.core.validators."):
            MigrationWriter.serialize(validator)
    def test_serialize_empty_nonempty_tuple(self):
        """
        Ticket #22679: makemigrations generates invalid code for (an empty
        tuple) default_permissions = ()
        """
        empty_tuple = ()
        one_item_tuple = ('a',)
        many_items_tuple = ('a', 'b', 'c')
        self.assertSerializedEqual(empty_tuple)
        self.assertSerializedEqual(one_item_tuple)
        self.assertSerializedEqual(many_items_tuple)
    def test_serialize_builtins(self):
        string, imports = MigrationWriter.serialize(range)
        self.assertEqual(string, 'range')
        self.assertEqual(imports, set())
    def test_serialize_unbound_method_reference(self):
        """An unbound method used within a class body can be serialized."""
        self.serialize_round_trip(TestModel1.thing)
    def test_serialize_local_function_reference(self):
        """A reference in a local scope can't be serialized."""
        class TestModel2:
            def upload_to(self):
                return "somewhere dynamic"
            thing = models.FileField(upload_to=upload_to)
        with self.assertRaisesMessage(ValueError, 'Could not find function upload_to in migrations.test_writer'):
            self.serialize_round_trip(TestModel2.thing)
    def test_serialize_managers(self):
        self.assertSerializedEqual(models.Manager())
        self.assertSerializedResultEqual(
            FoodQuerySet.as_manager(),
            ('migrations.models.FoodQuerySet.as_manager()', {'import migrations.models'})
        )
        self.assertSerializedEqual(FoodManager('a', 'b'))
        self.assertSerializedEqual(FoodManager('x', 'y', c=3, d=4))
    def test_serialize_frozensets(self):
        self.assertSerializedEqual(frozenset())
        self.assertSerializedEqual(frozenset("let it go"))
    def test_serialize_set(self):
        self.assertSerializedEqual(set())
        self.assertSerializedResultEqual(set(), ('set()', set()))
        self.assertSerializedEqual({'a'})
        self.assertSerializedResultEqual({'a'}, ("{'a'}", set()))
    def test_serialize_timedelta(self):
        self.assertSerializedEqual(datetime.timedelta())
        self.assertSerializedEqual(datetime.timedelta(minutes=42))
    def test_serialize_functools_partial(self):
        value = functools.partial(datetime.timedelta, 1, seconds=2)
        result = self.serialize_round_trip(value)
        self.assertEqual(result.func, value.func)
        self.assertEqual(result.args, value.args)
        self.assertEqual(result.keywords, value.keywords)
    def test_serialize_functools_partialmethod(self):
        value = functools.partialmethod(datetime.timedelta, 1, seconds=2)
        result = self.serialize_round_trip(value)
        self.assertIsInstance(result, functools.partialmethod)
        self.assertEqual(result.func, value.func)
        self.assertEqual(result.args, value.args)
        self.assertEqual(result.keywords, value.keywords)
    def test_serialize_type_none(self):
        self.assertSerializedEqual(type(None))
    def test_simple_migration(self):
        """
        Tests serializing a simple migration.
        """
        fields = {
            'charfield': models.DateTimeField(default=datetime.datetime.utcnow),
            'datetimefield': models.DateTimeField(default=datetime.datetime.utcnow),
        }
        options = {
            'verbose_name': 'My model',
            'verbose_name_plural': 'My models',
        }
        migration = type("Migration", (migrations.Migration,), {
            "operations": [
                migrations.CreateModel("MyModel", tuple(fields.items()), options, (models.Model,)),
                migrations.CreateModel("MyModel2", tuple(fields.items()), bases=(models.Model,)),
                migrations.CreateModel(
                    name="MyModel3", fields=tuple(fields.items()), options=options, bases=(models.Model,)
                ),
                migrations.DeleteModel("MyModel"),
                migrations.AddField("OtherModel", "datetimefield", fields["datetimefield"]),
            ],
            "dependencies": [("testapp", "some_other_one")],
        })
        writer = MigrationWriter(migration)
        output = writer.as_string()
        # We don't test the output formatting - that's too fragile.
        # Just make sure it runs for now, and that things look alright.
        result = self.safe_exec(output)
        self.assertIn("Migration", result)
    def test_migration_path(self):
        # The writer must locate the right migrations/ directory whether or
        # not the app package has an __init__.py or a package-style model.
        test_apps = [
            'migrations.migrations_test_apps.normal',
            'migrations.migrations_test_apps.with_package_model',
            'migrations.migrations_test_apps.without_init_file',
        ]
        base_dir = os.path.dirname(os.path.dirname(__file__))
        for app in test_apps:
            with self.modify_settings(INSTALLED_APPS={'append': app}):
                migration = migrations.Migration('0001_initial', app.split('.')[-1])
                expected_path = os.path.join(base_dir, *(app.split('.') + ['migrations', '0001_initial.py']))
                writer = MigrationWriter(migration)
                self.assertEqual(writer.path, expected_path)
    def test_custom_operation(self):
        migration = type("Migration", (migrations.Migration,), {
            "operations": [
                custom_migration_operations.operations.TestOperation(),
                custom_migration_operations.operations.CreateModel(),
                migrations.CreateModel("MyModel", (), {}, (models.Model,)),
                custom_migration_operations.more_operations.TestOperation()
            ],
            "dependencies": []
        })
        writer = MigrationWriter(migration)
        output = writer.as_string()
        result = self.safe_exec(output)
        self.assertIn("custom_migration_operations", result)
        self.assertNotEqual(
            result['custom_migration_operations'].operations.TestOperation,
            result['custom_migration_operations'].more_operations.TestOperation
        )
    def test_sorted_imports(self):
        """
        #24155 - Tests ordering of imports.
        """
        migration = type("Migration", (migrations.Migration,), {
            "operations": [
                migrations.AddField("mymodel", "myfield", models.DateTimeField(
                    default=datetime.datetime(2012, 1, 1, 1, 1, tzinfo=utc),
                )),
            ]
        })
        writer = MigrationWriter(migration)
        output = writer.as_string()
        self.assertIn(
            "import datetime\n"
            "from django.db import migrations, models\n"
            "from django.utils.timezone import utc\n",
            output
        )
    def test_migration_file_header_comments(self):
        """
        Test comments at top of file.
        """
        migration = type("Migration", (migrations.Migration,), {
            "operations": []
        })
        dt = datetime.datetime(2015, 7, 31, 4, 40, 0, 0, tzinfo=utc)
        with mock.patch('django.db.migrations.writer.now', lambda: dt):
            for include_header in (True, False):
                with self.subTest(include_header=include_header):
                    writer = MigrationWriter(migration, include_header)
                    output = writer.as_string()
                    self.assertEqual(
                        include_header,
                        output.startswith(
                            "# Generated by Django %s on 2015-07-31 04:40\n\n" % get_version()
                        )
                    )
                    if not include_header:
                        # Make sure the output starts with something that's not
                        # a comment or indentation or blank line
                        self.assertRegex(output.splitlines(keepends=True)[0], r"^[^#\s]+")
    def test_models_import_omitted(self):
        """
        django.db.models shouldn't be imported if unused.
        """
        migration = type("Migration", (migrations.Migration,), {
            "operations": [
                migrations.AlterModelOptions(
                    name='model',
                    options={'verbose_name': 'model', 'verbose_name_plural': 'models'},
                ),
            ]
        })
        writer = MigrationWriter(migration)
        output = writer.as_string()
        self.assertIn("from django.db import migrations\n", output)
    def test_deconstruct_class_arguments(self):
        # Yes, it doesn't make sense to use a class as a default for a
        # CharField. It does make sense for custom fields though, for example
        # an enumfield that takes the enum class as an argument.
        class DeconstructibleInstances:
            def deconstruct(self):
                return ('DeconstructibleInstances', [], {})
        string = MigrationWriter.serialize(models.CharField(default=DeconstructibleInstances))[0]
        self.assertEqual(string, "models.CharField(default=migrations.test_writer.DeconstructibleInstances)")
    def test_register_serializer(self):
        class ComplexSerializer(BaseSerializer):
            def serialize(self):
                return 'complex(%r)' % self.value, {}
        MigrationWriter.register_serializer(complex, ComplexSerializer)
        self.assertSerializedEqual(complex(1, 2))
        MigrationWriter.unregister_serializer(complex)
        with self.assertRaisesMessage(ValueError, 'Cannot serialize: (1+2j)'):
            self.assertSerializedEqual(complex(1, 2))
    def test_register_non_serializer(self):
        with self.assertRaisesMessage(ValueError, "'TestModel1' must inherit from 'BaseSerializer'."):
            MigrationWriter.register_serializer(complex, TestModel1)
|
"""
Support for ZoneMinder.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/zoneminder/
"""
import logging
import json
from urllib.parse import urljoin
import requests
import voluptuous as vol
from homeassistant.const import (
CONF_PATH, CONF_HOST, CONF_SSL, CONF_PASSWORD, CONF_USERNAME)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
# Default URL path under which the ZoneMinder web UI/API is served.
DEFAULT_PATH = '/zm/'
DEFAULT_SSL = False
# Timeout (seconds) applied to every HTTP request to the ZoneMinder server.
DEFAULT_TIMEOUT = 10
DOMAIN = 'zoneminder'
# Number of GET/POST attempts before giving up (re-login happens in between).
LOGIN_RETRIES = 2
# Module-level connection state (url/username/password/cookies),
# populated by setup() and login().
ZM = {}
CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        vol.Required(CONF_HOST): cv.string,
        vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
        vol.Optional(CONF_PATH, default=DEFAULT_PATH): cv.string,
        vol.Optional(CONF_USERNAME): cv.string,
        vol.Optional(CONF_PASSWORD): cv.string
    })
}, extra=vol.ALLOW_EXTRA)
def setup(hass, config):
    """Set up the ZoneMinder component.

    Stores the server URL and credentials in the module-level ``ZM`` dict,
    then attempts an initial login; returns the login result.
    """
    global ZM
    ZM = {}
    conf = config[DOMAIN]
    scheme = 'https' if conf[CONF_SSL] else 'http'
    server_origin = '{}://{}'.format(scheme, conf[CONF_HOST])
    ZM['url'] = urljoin(server_origin, conf[CONF_PATH])
    ZM['username'] = conf.get(CONF_USERNAME)
    ZM['password'] = conf.get(CONF_PASSWORD)
    return login()
def login():
    """Login to the ZoneMinder API.

    Stores the session cookies in the module-level ZM dict and returns
    True when a follow-up API call succeeds, False otherwise.
    """
    _LOGGER.debug("Attempting to login to ZoneMinder")
    login_post = {'view': 'console', 'action': 'login'}
    if ZM['username']:
        login_post['username'] = ZM['username']
    if ZM['password']:
        login_post['password'] = ZM['password']
    # Bug fix: a missing timeout here could block Home Assistant setup
    # forever on an unreachable host; the GET below already had one.
    # NOTE(review): ZM['url'] ends with CONF_PATH ('/zm/' by default), so
    # this yields '.../zm//index.php'; servers tolerate the double slash.
    req = requests.post(ZM['url'] + '/index.php', data=login_post,
                        timeout=DEFAULT_TIMEOUT)
    ZM['cookies'] = req.cookies
    # The login call returns a 200 response on both failure and success.
    # The only way to tell if we logged in correctly is to issue an API call.
    req = requests.get(
        ZM['url'] + 'api/host/getVersion.json', cookies=ZM['cookies'],
        timeout=DEFAULT_TIMEOUT)
    if req.status_code != requests.codes.ok:
        _LOGGER.error("Connection error logging into ZoneMinder")
        return False
    return True
def get_state(api_url):
    """Get a state from the ZoneMinder API service.

    Since API sessions expire, a non-200 response triggers a re-login
    and one retry before giving up.
    """
    for _ in range(LOGIN_RETRIES):
        req = requests.get(urljoin(ZM['url'], api_url), cookies=ZM['cookies'],
                           timeout=DEFAULT_TIMEOUT)
        if req.status_code != requests.codes.ok:
            # Session probably expired: re-authenticate and retry.
            login()
        else:
            break
    else:
        # Bug fix: use error(), not exception() — there is no active
        # traceback here, so exception() logged a bogus "NoneType: None".
        _LOGGER.error("Unable to get API response from ZoneMinder")
    return json.loads(req.text)
def change_state(api_url, post_data):
    """Update a state using the ZoneMinder API.

    Re-authenticates and retries once when the session has expired.
    """
    for _ in range(LOGIN_RETRIES):
        req = requests.post(
            urljoin(ZM['url'], api_url), data=post_data, cookies=ZM['cookies'],
            timeout=DEFAULT_TIMEOUT)
        if req.status_code != requests.codes.ok:
            # Session probably expired: re-authenticate and retry.
            login()
        else:
            break
    else:
        # Bug fix: error(), not exception() — no traceback is active here.
        _LOGGER.error("Unable to get API response from ZoneMinder")
    return json.loads(req.text)
|
from datetime import datetime
from . import base
class Countdown(base.InLoopPollText):
    """
    A simple countdown timer text widget.
    """
    orientations = base.ORIENTATION_HORIZONTAL
    defaults = [
        # Typo fixes in the user-facing help strings below:
        # '{S} == seconds' (was '{S} seconds') and 'end' (was 'endo').
        ('format', '{D}d {H}h {M}m {S}s',
         'Format of the displayed text. Available variables:'
         '{D} == days, {H} == hours, {M} == minutes, {S} == seconds.'),
        ('update_interval', 1., 'Update interval in seconds for the clock'),
        # NOTE(review): this default is evaluated once at import time,
        # so it is "now" at import, not at widget creation — confirm
        # that is intended.
        ('date', datetime.now(), "The datetime for the end of the countdown"),
    ]

    def __init__(self, **config):
        base.InLoopPollText.__init__(self, **config)
        self.add_defaults(Countdown.defaults)

    def poll(self):
        """Return the remaining time rendered with ``self.format``.

        Once ``self.date`` has passed, every field clamps to zero
        instead of going negative.
        """
        now = datetime.now()
        days = hours = minutes = seconds = 0
        if not self.date < now:
            delta = self.date - now
            days = delta.days
            hours, rem = divmod(delta.seconds, 3600)
            minutes, seconds = divmod(rem, 60)
        data = {"D": "%02d" % days,
                "H": "%02d" % hours,
                "M": "%02d" % minutes,
                "S": "%02d" % seconds}
        return self.format.format(**data)
|
"""engine.SCons.Variables.PackageVariable
This file defines the option type for SCons implementing 'package
activation'.
To be used whenever a 'package' may be enabled/disabled and the
package path may be specified.
Usage example:
Examples:
x11=no (disables X11 support)
x11=yes (will search for the package installation dir)
    x11=/usr/local/X11 (will check this path for existence)
To replace autoconf's --with-xxx=yyy
opts = Variables()
opts.Add(PackageVariable('x11',
                           'use X11 installed here (yes = search some places)',
'yes'))
...
if env['x11'] == True:
dir = ... search X11 in some standard places ...
env['x11'] = dir
if env['x11']:
... build with x11 ...
"""
__revision__ = "src/engine/SCons/Variables/PackageVariable.py 2013/03/03 09:48:35 garyo"
__all__ = ['PackageVariable',]
import SCons.Errors
# Spellings _converter() recognizes as boolean enable/disable values;
# anything else is treated as an installation path.
__enable_strings = ('1', 'yes', 'true', 'on', 'enable', 'search')
__disable_strings = ('0', 'no', 'false', 'off', 'disable')
def _converter(val):
    """
    Convert a package variable's string value.

    Recognized enable/disable spellings (case-insensitive) become
    True/False; any other value — e.g. an installation path — is
    passed through unchanged.
    """
    lval = val.lower()
    if lval in __enable_strings: return True
    if lval in __disable_strings: return False
    #raise ValueError("Invalid value for boolean option: %s" % val)
    return val
def _validator(key, val, env, searchfunc):
    # NB: searchfunc is currently undocumented and unsupported
    """Validate a package variable's converted value.

    A value of exactly True invokes the optional search hook to resolve
    the installation directory; any other truthy value is treated as a
    filesystem path and must exist.
    """
    # todo: write validator, check for path
    import os
    value = env[key]
    if value is True:
        if searchfunc:
            env[key] = searchfunc(key, val)
        return
    if value and not os.path.exists(val):
        raise SCons.Errors.UserError(
            'Path does not exist for option %s: %s' % (key, val))
def PackageVariable(key, help, default, searchfunc=None):
    # NB: searchfunc is currently undocumented and unsupported
    """
    Describe a 'package' option: the input parameters are returned as a
    tuple with the correct converter and validator appended.  The result
    is usable as input to opts.Add().

    The value may be a true-ish string (package enabled; the validator
    may then invoke searchfunc to locate it), a false-ish string
    (disabled), or an explicit installation path that must exist.
    """
    help = '\n    '.join(
        (help, '( yes | no | /path/to/%s )' % key))
    return (key, help, default,
            lambda k, v, e: _validator(k,v,e,searchfunc),
            _converter)
|
from cms.cache.permissions import clear_permission_cache
from cms.exceptions import NoHomeFound
from cms.signals.apphook import apphook_post_delete_page_checker, apphook_post_page_checker
from cms.signals.title import update_title, update_title_paths
from django.core.exceptions import ObjectDoesNotExist
from cms.models import Page
from menus.menu_pool import menu_pool
def pre_save_page(instance, **kwargs):
    """Snapshot the stored page state and invalidate caches before save."""
    try:
        previous = Page.objects.get(pk=instance.pk)
    except ObjectDoesNotExist:
        # New page: nothing stored yet.
        previous = None
    instance.old_page = previous
    menu_pool.clear(instance.site_id)
    clear_permission_cache()
def post_save_page(instance, **kwargs):
    """Propagate a page save: refresh titles, home flag and apphooks.

    Relies on instance.old_page, the pre-save snapshot taken by
    pre_save_page().
    """
    if not kwargs.get('raw'):
        # 'raw' is set during fixture loading; skip placeholder rescans then.
        instance.rescan_placeholders()
    update_home(instance)
    # Refresh every descendant title when the page is new, was re-parented,
    # or its home flag changed.
    if instance.old_page is None or instance.old_page.parent_id != instance.parent_id or instance.is_home != instance.old_page.is_home:
        for page in instance.get_descendants(include_self=True):
            for title in page.title_set.all().select_related('page'):
                update_title(title)
                title._publisher_keep_state = True
                title.save()
    # Apphook configuration newly added or changed?
    if (instance.old_page is None and instance.application_urls) or (instance.old_page and (
            instance.old_page.application_urls != instance.application_urls or instance.old_page.application_namespace != instance.application_namespace)):
        if instance.publisher_public_id and instance.publisher_is_draft:
            # this was breaking load data
            try:
                # Mirror the apphook settings onto the public version.
                public = instance.publisher_public
                public._publisher_keep_state = True
                public.application_urls = instance.application_urls
                public.application_namespace = instance.application_namespace
                public.save()
            except ObjectDoesNotExist:
                pass
        elif not instance.publisher_is_draft:
            apphook_post_page_checker(instance)
def pre_delete_page(instance, **kwargs):
    """Tear down the page's placeholders and plugins, then clear caches."""
    menu_pool.clear(instance.site_id)
    for slot in instance.placeholders.all():
        for plugin in slot.cmsplugin_set.all():
            # No point reordering the plugin tree: it is all going away.
            plugin._no_reorder = True
            plugin.delete()
        slot.delete()
    clear_permission_cache()
def post_delete_page(instance, **kwargs):
    """Refresh the home flag, run apphook checks and clear the page cache."""
    update_home(instance, **kwargs)
    apphook_post_delete_page_checker(instance)
    # Imported lazily here rather than at module level — presumably to
    # avoid a circular import with cms.views.
    from cms.views import invalidate_cms_page_cache
    invalidate_cms_page_cache()
def post_moved_page(instance, **kwargs):
    """Recompute title paths and the home flag after a page move."""
    update_title_paths(instance, **kwargs)
    update_home(instance, **kwargs)
def update_home(instance, **kwargs):
    """
    Updates the is_home flag of page instances after they are saved or moved.

    Only root pages (no parent) are candidates; at most one page per site
    keeps is_home=True and every other root claiming it is reset.  Pages
    saved here are flagged _home_checked so re-entrant signal calls
    return early.

    :param instance: Page instance
    :param kwargs: unused signal kwargs
    :return: None
    """
    # Recursion guard: the save() calls below re-trigger the page signals.
    if getattr(instance, '_home_checked', False):
        return
    if not instance.parent_id or (getattr(instance, 'old_page', False) and not instance.old_page.parent_id):
        # Work within the matching tree (drafts vs public pages).
        if instance.publisher_is_draft:
            qs = Page.objects.drafts()
        else:
            qs = Page.objects.public()
        try:
            home_pk = qs.filter(title_set__published=True).distinct().get_home(instance.site_id).pk
        except NoHomeFound:
            if instance.publisher_is_draft and instance.title_set.filter(published=True,
                                                                         publisher_public__published=True).count():
                return
            # No published home exists yet: this page becomes the home.
            home_pk = instance.pk
        # Clear the flag on every other page of this site that claims it.
        for page in qs.filter(site=instance.site_id, is_home=True).exclude(pk=home_pk):
            if instance.pk == page.pk:
                # Keep the in-memory instance in sync with the saved row.
                instance.is_home = False
            page.is_home = False
            page._publisher_keep_state = True
            page._home_checked = True
            page.save()
        try:
            page = qs.get(pk=home_pk, site=instance.site_id)
        except Page.DoesNotExist:
            return
        page.is_home = True
        if instance.pk == home_pk:
            instance.is_home = True
        page._publisher_keep_state = True
        page._home_checked = True
        page.save()
|
from dionaea.core import *
import datetime
import traceback
import logging
import binascii
import os
import tempfile
from dionaea.smb.include.smbfields import *
from dionaea.smb.include.packet import Raw
from .include.tds import *
logger = logging.getLogger('MSSQL')
class mssqld(connection):
    """Low-interaction MSSQL (TDS protocol) honeypot service.

    Reassembles TDS packets from the wire, parses pre-login / login /
    query requests, reports credentials and SQL commands as dionaea
    incidents, and answers with just enough of a response to keep
    clients talking.
    """
    def __init__ (self):
        connection.__init__(self,"tcp")
        # Reassembly buffer for TDS payloads spanning multiple packets.
        self.buf = b''
    def handle_established(self):
        """Arm the idle timeout and protocol processors for a new client."""
        self.timeouts.idle = 120
        self.processors()
        if False:
            # FIXME SESSIONDUMP remove at some point
            # useful to dump sessions which are _large_
            self.session = tempfile.NamedTemporaryFile(delete=False, prefix='mssql_session-', dir="/tmp/")
        else:
            self.session = None
    def handle_io_in(self, data):
        """Consume TDS packets from *data*; return the number of bytes used.

        Packet payloads accumulate in self.buf until a packet with the
        end-of-message status arrives, then the whole request is parsed
        and dispatched to process().
        """
        l=0
        size = 0
        chunk = b''
        while len(data) > l:
            p = None
            try:
                if len(data) - l < 8: # length of TDS_Header
                    logger.warn("Incomplete TDS_Header")
                    return l
                p = TDS_Header(data[l:l+8])
                p.show()
                if p.Length == 0:
                    logger.warn("Bad TDS Header, Length = 0")
                    return l
                if len(data[l:]) < p.Length:
                    # Packet not fully received yet; keep the consumed count.
                    return l
                chunk = data[l:l+p.Length]
                p = TDS_Header(chunk)
                l+=p.Length
                # Strip the 8-byte header; keep only the payload.
                self.buf += chunk[8:]
                self.pendingPacketType = p.Type
                if p.Status != TDS_STATUS_EOM:
                    # Command spans multiple packets TDS_ things
                    # this is not the last packet
                    continue
            except:
                # NOTE(review): bare except also swallows coding errors.
                t = traceback.format_exc()
                logger.critical(t)
                return l
            # NOTE(review): if pendingPacketType matches none of these
            # branches, 'x' stays unbound and x.show() below raises
            # NameError (caught nowhere) — confirm all types are covered.
            if self.pendingPacketType == TDS_TYPES_PRE_LOGIN:
                x = TDS_Prelogin_Request(self.buf)
            elif self.pendingPacketType == TDS_TYPES_TDS7_LOGIN:
                x = TDS_Login7_Request(self.buf)
            elif self.pendingPacketType == TDS_TYPES_SQL_BATCH:
                x = TDS_SQLBatchData(self.buf)
            elif self.pendingPacketType == TDS_TYPES_PRETDS7_LOGIN:
                x = TDS_PreTDS7_Login_Request(self.buf)
            elif self.pendingPacketType == TDS_TYPES_TDS5_QUERY:
                x = TDS_TDS5_Query_Request(self.buf)
            self.buf = b''
            x.show()
            r = None
            r = self.process( self.pendingPacketType, x, chunk)
            if r:
                # Wrap the reply in a fresh header echoing the client's
                # packet id / SPID.
                mssqlheader = TDS_Header(Tokens=[])
                mssqlheader.Status = TDS_STATUS_EOM
                mssqlheader.PacketID = p.getlayer(TDS_Header).PacketID
                mssqlheader.SPID = p.getlayer(TDS_Header).SPID
                if type(r) == list:
                    # I'm pretty sure only TDS_Tokens have TDS_TYPES_TABULAR_RESULT
                    mssqlheader.Type = TDS_TYPES_TABULAR_RESULT
                    mssqlheader.Tokens = r
                    rp = mssqlheader
                else:
                    mssqlheader.Type = r.tds_type
                    rp = mssqlheader/r
                rp.Length = len(rp)
                rp.show()
                self.send(rp.build())
        return l
    def decode_password(self, password):
        """Reverse the TDS7 password obfuscation (XOR 0xA5 + nibble swap)."""
        decoded = ""
        for p in password:
            j = ord(p)
            j = j^0xa5
            k = ((j&0x0F) << 4)| ((j&0xF0) >> 4)
            decoded += chr(k)
        return decoded
    def process(self, PacketType, p, data):
        """Build the reply for one fully reassembled TDS request.

        Returns a response packet, a list of TDS tokens, or ''/None when
        there is nothing to send.  *data* is the raw packet (header
        included) and is only needed for the TDS7 login field extraction.
        """
        r =''
        rp = None
        if PacketType == TDS_TYPES_PRE_LOGIN:
            r = TDS_Prelogin_Response()
            #FIXME: any better way to initialise this?
            r.VersionToken.TokenType = 0x00
            r.VersionToken.Offset = 26
            r.VersionToken.Len = 6
            r.EncryptionToken.TokenType = 0x01
            r.EncryptionToken.Offset = 32
            r.EncryptionToken.Len = 1
            r.InstanceToken.TokenType = 0x02
            r.InstanceToken.Offset = 33
            r.InstanceToken.Len = 1
            r.ThreadIDToken.TokenType = 0x03
            r.ThreadIDToken.Offset = 34
            r.ThreadIDToken.Len = 0
            r.MARSToken.TokenType = 0x04
            r.MARSToken.Offset = 34
            r.MARSToken.Len = 1
        elif PacketType == TDS_TYPES_TDS7_LOGIN:
            # another layers TDS_Token_EnvChange, TDS_Token_Info() can be added
            # example : r = TDS_Token_EnvChange()/TDS_Token_Info()/TDS_Token_LoginACK()/TDS_Token_Done()
            # for the moment, only these 2 layers have binded
            l = p.getlayer(TDS_Login7_Request)
            # we can gather some values from the client, maybe use for fingerprinting clients
            fields = {}
            for i in ["HostName","UserName", "Password","AppName","ServerName", "CltIntName", "Language", "Database"]:
                # ib<i>/cch<i> carry the field's offset (past the 8-byte
                # header) and its length in UTF-16 code units.
                ib = 8 + l.getfieldval("ib" + i)
                cch = l.getfieldval("cch" + i)*2
                field = data[ib:ib+cch]
                xfield = field.decode('utf-16')
                if i == "Password":
                    xfield = self.decode_password(xfield)
                fields[i] = xfield
            # Report the harvested credentials as an incident.
            i = incident("dionaea.modules.python.mssql.login")
            i.con = self
            i.username = fields['UserName']
            i.password = fields['Password']
            i.cltintname = fields['CltIntName']
            i.hostname = fields['HostName']
            i.appname = fields['AppName']
            i.report()
            r = [TDS_Token()/TDS_Token_LoginACK(),TDS_Token()/TDS_Token_Done()]
        elif PacketType == TDS_TYPES_PRETDS7_LOGIN:
            r = [TDS_Token()/TDS_Token_LoginACK(),TDS_Token()/TDS_Token_Done()]
        elif PacketType == TDS_TYPES_SQL_BATCH:
            l = p.getlayer(TDS_SQLBatchData)
            cmd = l.SQLBatchData
            if cmd[1] == 0x00:
                # we got unicode, hopefully there is a way to detect this besides using this ugly hack
                cmd = cmd.decode('utf-16')
                cmd = cmd.encode()
            # limit to 1024
            logger.debug("SQL BATCH : {:.1024s}".format(cmd))
            # FIXME SESSIONDUMP remove at some point
            if self.session != None:
                self.session.write(b"COMMAND:\n")
                self.session.write(cmd)
                self.session.write(b"\n")
            # Report the SQL command as an incident.
            i = incident("dionaea.modules.python.mssql.cmd")
            i.con = self
            i.status = "complete"
            i.cmd = cmd
            i.report()
            # FIXME this reply is wrong too
            # proper replies require parsing the SQLBatchData into statement and compiling a TDS_Token per statement
            r = [TDS_Token()/TDS_Token_ColMetaData(),TDS_Token()/TDS_Token_Row(),TDS_Token()/TDS_Token_ReturnStatus(),TDS_Token()/TDS_Token_DoneProc()]
        elif PacketType == TDS_TYPES_TDS5_QUERY:
            # FIXME the reply is wrong,
            # /opt/freetds/bin/tsql -H 127.0.0.1 -p 1433 -U sa -v -D test
            # dies with
            # Msg 20020, Level 9, State -1, Server OpenClient, Line -1
            # Bad token from the server: Datastream processing out of sync
            r = [TDS_Token()/TDS_Token_ColMetaData(),TDS_Token()/TDS_Token_Row(),TDS_Token()/TDS_Token_ReturnStatus(),TDS_Token()/TDS_Token_DoneProc()]
        else:
            logger.warn("UNKNOWN PACKET TYPE FOR MSSQL {}".format(PacketType))
        return r
    def handle_timeout_idle(self):
        # Presumably returning False tells dionaea to drop the idle
        # connection — confirm against the connection API.
        return False
    def handle_disconnect(self):
        """Flush any partial request at disconnect (session-dump debugging)."""
        # FIXME SESSIONDUMP remove at some point
        if self.session != None:
            if len(self.buf) > 0:
                if self.pendingPacketType == TDS_TYPES_PRE_LOGIN:
                    x = TDS_Prelogin_Request(self.buf)
                elif self.pendingPacketType == TDS_TYPES_TDS7_LOGIN:
                    x = TDS_Login7_Request(self.buf)
                elif self.pendingPacketType == TDS_TYPES_SQL_BATCH:
                    x = TDS_SQLBatchData(self.buf)
                elif self.pendingPacketType == TDS_TYPES_PRETDS7_LOGIN:
                    x = TDS_PreTDS7_Login_Request(self.buf)
                elif self.pendingPacketType == TDS_TYPES_TDS5_QUERY:
                    x = TDS_TDS5_Query_Request(self.buf)
                self.buf = b''
                x.show()
                r = None
                # NOTE(review): self.buf was cleared two lines above, so
                # self.buf[9:] is always empty here — likely intended to
                # slice before clearing, as handle_io_in does with 'chunk'.
                r = self.process( self.pendingPacketType, x, self.buf[9:])
            self.session.close()
        return False
|
from botocore.docs.method import document_model_driven_method
from botocore.docs.waiter import document_wait_method
from botocore.docs.paginator import document_paginate_method
from botocore.docs.bcdoc.restdoc import DocumentStructure
class LazyLoadedDocstring(str):
    """Used for lazily loading docstrings
    You can instantiate this class and assign it to a __doc__ value.
    The docstring will not be generated till accessed via __doc__ or
    help(). Note that all docstring classes **must** subclass from
    this class. It cannot be used directly as a docstring.
    """
    def __init__(self, *args, **kwargs):
        """
        The args and kwargs are the same as the underlying document
        generation function. These just get proxied to the underlying
        function.
        """
        super(LazyLoadedDocstring, self).__init__()
        self._gen_args = args
        self._gen_kwargs = kwargs
        # Cache for the rendered docstring; filled on first access.
        self._docstring = None
    def __new__(cls, *args, **kwargs):
        # Needed in order to sub class from str with args and kwargs
        return super(LazyLoadedDocstring, cls).__new__(cls)
    def _write_docstring(self, *args, **kwargs):
        # Subclass hook: render the docs into the passed structure.
        raise NotImplementedError(
            '_write_docstring is not implemented. Please subclass from '
            'this class and provide your own _write_docstring method'
        )
    def expandtabs(self, tabsize=8):
        """Expands tabs to spaces
        So this is a big hack in order to get lazy loaded docstring work
        for the ``help()``. In the ``help()`` function, ``pydoc`` and
        ``inspect`` are used. At some point the ``inspect.cleandoc``
        method is called. To clean the docs ``expandtabs`` is called
        and that is where we override the method to generate and return the
        docstrings.
        """
        if self._docstring is None:
            self._generate()
        return self._docstring.expandtabs(tabsize)
    def __str__(self):
        return self._generate()
    # __doc__ of target will use either __repr__ or __str__ of this class.
    __repr__ = __str__
    def _generate(self):
        # Generate the docstring if it is not already cached.
        if self._docstring is None:
            self._docstring = self._create_docstring()
        return self._docstring
    def _create_docstring(self):
        """Render the docstring via _write_docstring() and return it as text."""
        docstring_structure = DocumentStructure('docstring', target='html')
        # Call the document method function with the args and kwargs
        # passed to the class.
        self._write_docstring(
            docstring_structure, *self._gen_args,
            **self._gen_kwargs)
        return docstring_structure.flush_structure().decode('utf-8')
class ClientMethodDocstring(LazyLoadedDocstring):
    """Lazily rendered docstring for a model-driven client method."""
    def _write_docstring(self, *args, **kwargs):
        document_model_driven_method(*args, **kwargs)
class WaiterDocstring(LazyLoadedDocstring):
    """Lazily rendered docstring for a waiter's wait method."""
    def _write_docstring(self, *args, **kwargs):
        document_wait_method(*args, **kwargs)
class PaginatorDocstring(LazyLoadedDocstring):
    """Lazily rendered docstring for a paginator's paginate method."""
    def _write_docstring(self, *args, **kwargs):
        document_paginate_method(*args, **kwargs)
|
{
'name': 'Belgium - Payroll with Accounting',
'category': 'Localization',
'author': 'OpenERP SA',
'depends': ['l10n_be_hr_payroll', 'hr_payroll_account', 'l10n_be'],
'version': '1.0',
'description': """
Accounting Data for Belgian Payroll Rules.
==========================================
""",
'auto_install': True,
'website': 'https://www.odoo.com/page/accounting',
'demo': [],
'data':[
'l10n_be_wizard.yml',
'l10n_be_hr_payroll_account_data.xml',
'data/hr.salary.rule.csv',
],
'installable': False
}
|
from odoo.tests.common import TransactionCase
class TestProductTemplate(TransactionCase):
    def test_name_search(self):
        """Searching 'VOB2' with the seller's partner in context must yield
        exactly one result labelled '[VOB2A] Rubber Duck'.

        The supplier's product_code ('VOB2a') and the template's
        default_code ('VOB2A') differ only in case.
        """
        partner = self.env['res.partner'].create({
            'name': 'Azure Interior',
        })
        # On product.supplierinfo, 'name' is the supplier partner id.
        seller = self.env['product.supplierinfo'].create({
            'name': partner.id,
            'price': 12.0,
            'delay': 1,
            'product_code': 'VOB2a',
        })
        product_tmpl = self.env['product.template'].create({
            'name': 'Rubber Duck',
            'type': 'product',
            'default_code': 'VOB2A',
            'seller_ids': [seller.id],
            'purchase_ok': True,
        })
        ns = self.env['product.template'].with_context(partner_id=partner.id).name_search('VOB2', [['purchase_ok', '=', True]])
        self.assertEqual(len(ns), 1, "name_search should have 1 item")
        self.assertEqual(ns[0][1], '[VOB2A] Rubber Duck', "name_search should return the expected result")
|
"""Auto-generated file, do not edit by hand. SA metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
# Short-code metadata only (short_data=True below): country_code and
# international_prefix are None here.
PHONE_METADATA_SA = PhoneMetadata(id='SA', country_code=None, international_prefix=None,
    general_desc=PhoneNumberDesc(national_number_pattern='[19]\\d{2,5}', possible_number_pattern='\\d{3,6}'),
    toll_free=PhoneNumberDesc(national_number_pattern='116111|937|998', possible_number_pattern='\\d{3,6}', example_number='116111'),
    premium_rate=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
    emergency=PhoneNumberDesc(national_number_pattern='112|9(?:11|9[79])', possible_number_pattern='\\d{3}', example_number='999'),
    short_code=PhoneNumberDesc(national_number_pattern='1(?:1(?:00|2|6111)|410|9(?:00|1[89]|9(?:099|22|91)))|9(?:0[24-79]|11|3[379]|40|66|8[5-9]|9[02-9])', possible_number_pattern='\\d{3,6}', example_number='937'),
    standard_rate=PhoneNumberDesc(national_number_pattern='1410', possible_number_pattern='\\d{4}', example_number='1410'),
    carrier_specific=PhoneNumberDesc(national_number_pattern='1(?:100|410)|90[24679]', possible_number_pattern='\\d{3,4}', example_number='902'),
    short_data=True)
|
import math
import operator
import netaddr
from neutron.api.v2 import attributes
from neutron.common import constants
from neutron.common import exceptions as n_exc
from neutron.db import models_v2
import neutron.ipam as ipam
from neutron.ipam import driver
from neutron.openstack.common import uuidutils
class SubnetAllocator(driver.Pool):
    """Class for handling allocation of subnet prefixes from a subnet pool.

    This class leverages the pluggable IPAM interface where possible to
    make merging into IPAM framework easier in future cycles.
    """

    def __init__(self, subnetpool):
        self._subnetpool = subnetpool
        self._sp_helper = SubnetPoolHelper()

    def _get_allocated_cidrs(self, session):
        # Row-lock the pool's subnets so concurrent allocations serialize.
        query = session.query(
            models_v2.Subnet).with_lockmode('update')
        subnets = query.filter_by(subnetpool_id=self._subnetpool['id'])
        return (x.cidr for x in subnets)

    def _get_available_prefix_list(self, session):
        """Return the pool's unallocated CIDRs, largest prefixlen first
        (i.e. smallest blocks first — presumably to limit fragmentation).
        """
        prefixes = (x.cidr for x in self._subnetpool.prefixes)
        allocations = self._get_allocated_cidrs(session)
        prefix_set = netaddr.IPSet(iterable=prefixes)
        allocation_set = netaddr.IPSet(iterable=allocations)
        available_set = prefix_set.difference(allocation_set)
        available_set.compact()
        return sorted(available_set.iter_cidrs(),
                      key=operator.attrgetter('prefixlen'),
                      reverse=True)

    def _num_quota_units_in_prefixlen(self, prefixlen, quota_unit):
        # e.g. an IPv4 /24 is 2**(32-24) == 256 units of /32.
        return math.pow(2, quota_unit - prefixlen)

    def _allocations_used_by_tenant(self, session, quota_unit):
        """Sum the quota units this tenant already consumes in the pool."""
        subnetpool_id = self._subnetpool['id']
        tenant_id = self._subnetpool['tenant_id']
        with session.begin(subtransactions=True):
            qry = session.query(
                models_v2.Subnet).with_lockmode('update')
            allocations = qry.filter_by(subnetpool_id=subnetpool_id,
                                        tenant_id=tenant_id)
            value = 0
            for allocation in allocations:
                prefixlen = netaddr.IPNetwork(allocation.cidr).prefixlen
                value += self._num_quota_units_in_prefixlen(prefixlen,
                                                            quota_unit)
            return value

    def _check_subnetpool_tenant_quota(self, session, tenant_id, prefixlen):
        """Raise SubnetPoolQuotaExceeded when the request would bust quota."""
        quota_unit = self._sp_helper.ip_version_subnetpool_quota_unit(
            self._subnetpool['ip_version'])
        quota = self._subnetpool.get('default_quota')
        if quota:
            used = self._allocations_used_by_tenant(session, quota_unit)
            requested_units = self._num_quota_units_in_prefixlen(prefixlen,
                                                                 quota_unit)
            if used + requested_units > quota:
                raise n_exc.SubnetPoolQuotaExceeded()

    def _allocate_any_subnet(self, session, request):
        """Carve a subnet of the requested prefixlen from any free prefix."""
        with session.begin(subtransactions=True):
            self._check_subnetpool_tenant_quota(session,
                                                request.tenant_id,
                                                request.prefixlen)
            prefix_pool = self._get_available_prefix_list(session)
            for prefix in prefix_pool:
                if request.prefixlen >= prefix.prefixlen:
                    # Portability fix: use the next() builtin instead of
                    # the Python 2-only generator .next() method.
                    subnet = next(prefix.subnet(request.prefixlen))
                    gateway_ip = request.gateway_ip
                    if not gateway_ip:
                        # Default gateway: first address after the network.
                        gateway_ip = subnet.network + 1
                    return IpamSubnet(request.tenant_id,
                                      request.subnet_id,
                                      subnet.cidr,
                                      gateway_ip=gateway_ip,
                                      allocation_pools=None)
            msg = _("Insufficient prefix space to allocate subnet size /%s")
            raise n_exc.SubnetAllocationError(reason=msg %
                                              str(request.prefixlen))

    def _allocate_specific_subnet(self, session, request):
        """Allocate exactly request.subnet if it lies in a free prefix."""
        with session.begin(subtransactions=True):
            self._check_subnetpool_tenant_quota(session,
                                                request.tenant_id,
                                                request.prefixlen)
            subnet = request.subnet
            available = self._get_available_prefix_list(session)
            matched = netaddr.all_matching_cidrs(subnet, available)
            # Bug fix: compare with ==, not the identity operator 'is',
            # which only happens to work for small ints in CPython.
            if len(matched) == 1 and matched[0].prefixlen <= subnet.prefixlen:
                return IpamSubnet(request.tenant_id,
                                  request.subnet_id,
                                  subnet.cidr,
                                  gateway_ip=request.gateway_ip,
                                  allocation_pools=request.allocation_pools)
            msg = _("Cannot allocate requested subnet from the available "
                    "set of prefixes")
            raise n_exc.SubnetAllocationError(reason=msg)

    def allocate_subnet(self, session, request):
        """Allocate a subnet for *request*, enforcing the pool's
        min/max prefixlen bounds.

        :raises: MaxPrefixSubnetAllocationError,
            MinPrefixSubnetAllocationError, SubnetAllocationError,
            SubnetPoolQuotaExceeded
        """
        max_prefixlen = int(self._subnetpool['max_prefixlen'])
        min_prefixlen = int(self._subnetpool['min_prefixlen'])
        if request.prefixlen > max_prefixlen:
            raise n_exc.MaxPrefixSubnetAllocationError(
                prefixlen=request.prefixlen,
                max_prefixlen=max_prefixlen)
        if request.prefixlen < min_prefixlen:
            raise n_exc.MinPrefixSubnetAllocationError(
                prefixlen=request.prefixlen,
                min_prefixlen=min_prefixlen)
        if isinstance(request, ipam.AnySubnetRequest):
            return self._allocate_any_subnet(session, request)
        elif isinstance(request, ipam.SpecificSubnetRequest):
            return self._allocate_specific_subnet(session, request)
        else:
            msg = _("Unsupported request type")
            raise n_exc.SubnetAllocationError(reason=msg)

    def get_subnet(self, subnet, subnet_id):
        raise NotImplementedError()

    def update_subnet(self, request):
        raise NotImplementedError()

    def remove_subnet(self, subnet, subnet_id):
        raise NotImplementedError()
class IpamSubnet(driver.Subnet):
    """Read-only Subnet implementation wrapping a SpecificSubnetRequest.

    Address (de)allocation is not supported; only get_details() works.
    """

    def __init__(self,
                 tenant_id,
                 subnet_id,
                 cidr,
                 gateway_ip=None,
                 allocation_pools=None):
        # Bug fix: forward allocation_pools instead of hard-coding None,
        # so pools supplied by the caller are not silently dropped.
        self._req = ipam.SpecificSubnetRequest(
            tenant_id,
            subnet_id,
            cidr,
            gateway_ip=gateway_ip,
            allocation_pools=allocation_pools)

    def allocate(self, address_request):
        raise NotImplementedError()

    def deallocate(self, address):
        raise NotImplementedError()

    def get_details(self):
        """Return the wrapped SpecificSubnetRequest."""
        return self._req
class SubnetPoolReader(object):
    '''Class to assist with reading a subnetpool, loading defaults, and
    inferring IP version from prefix list. Provides a common way of
    reading a stored model or a create request with defaultable attributes.
    '''
    # Prefix-bound kinds resolved by _read_prefix_bound().
    MIN_PREFIX_TYPE = 'min'
    MAX_PREFIX_TYPE = 'max'
    DEFAULT_PREFIX_TYPE = 'default'
    _sp_helper = None
    def __init__(self, subnetpool):
        # Order matters: _read_prefix_info() sets self.ip_version, which
        # _read_prefix_bounds() relies on.
        self._read_prefix_info(subnetpool)
        self._sp_helper = SubnetPoolHelper()
        self._read_id(subnetpool)
        self._read_prefix_bounds(subnetpool)
        self._read_attrs(subnetpool,
                         ['tenant_id', 'name', 'shared'])
        # Normalized dict representation consumed by callers.
        self.subnetpool = {'id': self.id,
                           'name': self.name,
                           'tenant_id': self.tenant_id,
                           'prefixes': self.prefixes,
                           'min_prefix': self.min_prefix,
                           'min_prefixlen': self.min_prefixlen,
                           'max_prefix': self.max_prefix,
                           'max_prefixlen': self.max_prefixlen,
                           'default_prefix': self.default_prefix,
                           'default_prefixlen': self.default_prefixlen,
                           'default_quota': self.default_quota,
                           'shared': self.shared}
    def _read_attrs(self, subnetpool, keys):
        # Copy the listed keys verbatim onto this reader instance.
        for key in keys:
            setattr(self, key, subnetpool[key])
    def _ip_version_from_cidr(self, cidr):
        return netaddr.IPNetwork(cidr).version
    def _prefixlen_from_cidr(self, cidr):
        return netaddr.IPNetwork(cidr).prefixlen
    def _read_id(self, subnetpool):
        # Generate a fresh UUID when the request carried no id.
        id = subnetpool.get('id', attributes.ATTR_NOT_SPECIFIED)
        if id is attributes.ATTR_NOT_SPECIFIED:
            id = uuidutils.generate_uuid()
        self.id = id
    def _read_prefix_bounds(self, subnetpool):
        """Resolve min/max/default prefix bounds, then cross-validate them."""
        ip_version = self.ip_version
        default_min = self._sp_helper.default_min_prefixlen(ip_version)
        default_max = self._sp_helper.default_max_prefixlen(ip_version)
        self._read_prefix_bound(self.MIN_PREFIX_TYPE,
                                subnetpool,
                                default_min)
        self._read_prefix_bound(self.MAX_PREFIX_TYPE,
                                subnetpool,
                                default_max)
        # The default bound falls back to the just-resolved minimum.
        self._read_prefix_bound(self.DEFAULT_PREFIX_TYPE,
                                subnetpool,
                                self.min_prefixlen)
        self._sp_helper.validate_min_prefixlen(self.min_prefixlen,
                                               self.max_prefixlen)
        self._sp_helper.validate_max_prefixlen(self.max_prefixlen,
                                               ip_version)
        self._sp_helper.validate_default_prefixlen(self.min_prefixlen,
                                                   self.max_prefixlen,
                                                   self.default_prefixlen)
    def _read_prefix_bound(self, type, subnetpool, default_bound=None):
        """Set self.<type>_prefixlen and self.<type>_prefix (wildcard CIDR)."""
        prefixlen_attr = type + '_prefixlen'
        prefix_attr = type + '_prefix'
        prefixlen = subnetpool.get(prefixlen_attr,
                                   attributes.ATTR_NOT_SPECIFIED)
        wildcard = self._sp_helper.wildcard(self.ip_version)
        if prefixlen is attributes.ATTR_NOT_SPECIFIED and default_bound:
            prefixlen = default_bound
        if prefixlen is not attributes.ATTR_NOT_SPECIFIED:
            prefix_cidr = '/'.join((wildcard,
                                    str(prefixlen)))
            setattr(self, prefix_attr, prefix_cidr)
            setattr(self, prefixlen_attr, prefixlen)
    def _read_prefix_info(self, subnetpool):
        """Infer ip_version from the prefixes and compact the prefix list."""
        prefix_list = subnetpool['prefixes']
        if not prefix_list:
            raise n_exc.EmptySubnetPoolPrefixList()
        ip_version = None
        for prefix in prefix_list:
            # All prefixes must share a single IP version.
            if not ip_version:
                ip_version = netaddr.IPNetwork(prefix).version
            elif netaddr.IPNetwork(prefix).version != ip_version:
                raise n_exc.PrefixVersionMismatch()
        self.default_quota = subnetpool.get('default_quota')
        if self.default_quota is attributes.ATTR_NOT_SPECIFIED:
            self.default_quota = None
        self.ip_version = ip_version
        self.prefixes = self._compact_subnetpool_prefix_list(prefix_list)
    def _compact_subnetpool_prefix_list(self, prefix_list):
        """Compact any overlapping prefixes in prefix_list and return the
        result
        """
        ip_set = netaddr.IPSet()
        for prefix in prefix_list:
            ip_set.add(netaddr.IPNetwork(prefix))
        ip_set.compact()
        return [str(x.cidr) for x in ip_set.iter_cidrs()]
class SubnetPoolHelper(object):
    """Per-IP-version constants and prefixlen validation for subnet pools."""

    _PREFIX_VERSION_INFO = {4: {'max_prefixlen': constants.IPv4_BITS,
                                'wildcard': '0.0.0.0',
                                'default_min_prefixlen': 8,
                                # IPv4 quota measured in units of /32
                                'quota_units': 32},
                            6: {'max_prefixlen': constants.IPv6_BITS,
                                'wildcard': '::',
                                'default_min_prefixlen': 64,
                                # IPv6 quota measured in units of /64
                                'quota_units': 64}}

    def _info(self, ip_version):
        """Constant table for the given IP version (4 or 6)."""
        return self._PREFIX_VERSION_INFO[ip_version]

    def _bounds_error(self, prefix_type, prefixlen, base_type, base_len):
        """Raise the standard out-of-bounds prefixlen error."""
        raise n_exc.IllegalSubnetPoolPrefixBounds(
            prefix_type=prefix_type,
            prefixlen=prefixlen,
            base_prefix_type=base_type,
            base_prefixlen=base_len)

    def validate_min_prefixlen(self, min_prefixlen, max_prefixlen):
        if min_prefixlen < 0:
            raise n_exc.UnsupportedMinSubnetPoolPrefix(prefix=min_prefixlen,
                                                       version=4)
        if min_prefixlen > max_prefixlen:
            self._bounds_error('min_prefixlen', min_prefixlen,
                               'max_prefixlen', max_prefixlen)

    def validate_max_prefixlen(self, prefixlen, ip_version):
        version_max = self._info(ip_version)['max_prefixlen']
        if prefixlen > version_max:
            self._bounds_error('max_prefixlen', prefixlen,
                               'ip_version_max', version_max)

    def validate_default_prefixlen(self,
                                   min_prefixlen,
                                   max_prefixlen,
                                   default_prefixlen):
        if default_prefixlen < min_prefixlen:
            self._bounds_error('default_prefixlen', default_prefixlen,
                               'min_prefixlen', min_prefixlen)
        if default_prefixlen > max_prefixlen:
            self._bounds_error('default_prefixlen', default_prefixlen,
                               'max_prefixlen', max_prefixlen)

    def wildcard(self, ip_version):
        return self._info(ip_version)['wildcard']

    def default_max_prefixlen(self, ip_version):
        return self._info(ip_version)['max_prefixlen']

    def default_min_prefixlen(self, ip_version):
        return self._info(ip_version)['default_min_prefixlen']

    def ip_version_subnetpool_quota_unit(self, ip_version):
        return self._info(ip_version)['quota_units']
|
"""
PostgreSQL database backend for Django.
Requires psycopg 1: http://initd.org/projects/psycopg1
"""
import sys
from django.db import utils
from django.db.backends import *
from django.db.backends.signals import connection_created
from django.db.backends.postgresql.client import DatabaseClient
from django.db.backends.postgresql.creation import DatabaseCreation
from django.db.backends.postgresql.introspection import DatabaseIntrospection
from django.db.backends.postgresql.operations import DatabaseOperations
from django.db.backends.postgresql.version import get_version
from django.utils.encoding import smart_str, smart_unicode
# Fail fast with a configuration error if the psycopg 1 driver is absent.
try:
    import psycopg as Database
except ImportError, e:
    from django.core.exceptions import ImproperlyConfigured
    raise ImproperlyConfigured("Error loading psycopg module: %s" % e)
# Re-export the driver's exception classes for backend callers.
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError
class UnicodeCursorWrapper(object):
    """
    A thin wrapper around psycopg cursors that allows them to accept Unicode
    strings as params.
    This is necessary because psycopg doesn't apply any DB quoting to
    parameters that are Unicode strings. If a param is Unicode, this will
    convert it to a bytestring using database client's encoding before passing
    it to psycopg.
    All results retrieved from the database are converted into Unicode strings
    before being returned to the caller.
    """
    def __init__(self, cursor, charset):
        self.cursor = cursor
        self.charset = charset
    def format_params(self, params):
        # Encode every parameter (dict or sequence) with the client charset.
        if isinstance(params, dict):
            result = {}
            charset = self.charset
            for key, value in params.items():
                result[smart_str(key, charset)] = smart_str(value, charset)
            return result
        else:
            return tuple([smart_str(p, self.charset, True) for p in params])
    def execute(self, sql, params=()):
        # Re-raise driver errors as Django's wrappers while preserving the
        # original traceback (Python 2 three-expression raise).
        try:
            return self.cursor.execute(smart_str(sql, self.charset), self.format_params(params))
        except Database.IntegrityError, e:
            raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
        except Database.DatabaseError, e:
            raise utils.DatabaseError, utils.DatabaseError(*tuple(e)), sys.exc_info()[2]
    def executemany(self, sql, param_list):
        # Same error translation as execute(), applied to batch execution.
        try:
            new_param_list = [self.format_params(params) for params in param_list]
            return self.cursor.executemany(sql, new_param_list)
        except Database.IntegrityError, e:
            raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
        except Database.DatabaseError, e:
            raise utils.DatabaseError, utils.DatabaseError(*tuple(e)), sys.exc_info()[2]
    def __getattr__(self, attr):
        # Delegate anything not defined here to the wrapped cursor.
        if attr in self.__dict__:
            return self.__dict__[attr]
        else:
            return getattr(self.cursor, attr)
    def __iter__(self):
        return iter(self.cursor.fetchall())
class DatabaseFeatures(BaseDatabaseFeatures):
    # Capability flags for PostgreSQL. Savepoint support is switched off at
    # connection time for servers older than 8.0 (see DatabaseWrapper._cursor).
    uses_savepoints = True
    requires_rollback_on_dirty_transaction = True
    has_real_datatype = True
    can_defer_constraint_checks = True
class DatabaseWrapper(BaseDatabaseWrapper):
    """
    PostgreSQL database wrapper built on the psycopg 1 driver.

    Deprecated in favour of the "postgresql_psycopg2" backend; __init__
    emits a DeprecationWarning saying so.
    """
    vendor = 'postgresql'
    # Mapping of queryset lookup types to SQL operator templates; %s is
    # substituted with the (already prepared) right-hand-side expression.
    operators = {
        'exact': '= %s',
        'iexact': '= UPPER(%s)',
        'contains': 'LIKE %s',
        'icontains': 'LIKE UPPER(%s)',
        'regex': '~ %s',
        'iregex': '~* %s',
        'gt': '> %s',
        'gte': '>= %s',
        'lt': '< %s',
        'lte': '<= %s',
        'startswith': 'LIKE %s',
        'endswith': 'LIKE %s',
        'istartswith': 'LIKE UPPER(%s)',
        'iendswith': 'LIKE UPPER(%s)',
    }

    def __init__(self, *args, **kwargs):
        super(DatabaseWrapper, self).__init__(*args, **kwargs)
        # Warn on every instantiation: this whole backend is deprecated.
        import warnings
        warnings.warn(
            'The "postgresql" backend has been deprecated. Use "postgresql_psycopg2" instead.',
            DeprecationWarning
        )
        self.features = DatabaseFeatures(self)
        self.ops = DatabaseOperations(self)
        self.client = DatabaseClient(self)
        self.creation = DatabaseCreation(self)
        self.introspection = DatabaseIntrospection(self)
        self.validation = BaseDatabaseValidation(self)

    def _cursor(self):
        """
        Return a UnicodeCursorWrapper, opening the connection on first use.

        On a brand-new connection this also sets the session time zone (if
        configured), caches the server version on the class, disables
        savepoints for pre-8.0 servers and forces UNICODE client encoding.
        """
        new_connection = False
        set_tz = False
        settings_dict = self.settings_dict
        if self.connection is None:
            new_connection = True
            set_tz = settings_dict.get('TIME_ZONE')
            if settings_dict['NAME'] == '':
                from django.core.exceptions import ImproperlyConfigured
                raise ImproperlyConfigured("You need to specify NAME in your Django settings file.")
            conn_string = "dbname=%s" % settings_dict['NAME']
            if settings_dict['USER']:
                conn_string = "user=%s %s" % (settings_dict['USER'], conn_string)
            if settings_dict['PASSWORD']:
                # NOTE(review): the password is single-quoted but not escaped;
                # a password containing "'" would corrupt the conn string.
                conn_string += " password='%s'" % settings_dict['PASSWORD']
            if settings_dict['HOST']:
                conn_string += " host=%s" % settings_dict['HOST']
            if settings_dict['PORT']:
                conn_string += " port=%s" % settings_dict['PORT']
            self.connection = Database.connect(conn_string, **settings_dict['OPTIONS'])
            # make transactions transparent to all cursors
            self.connection.set_isolation_level(1)
            connection_created.send(sender=self.__class__, connection=self)
        cursor = self.connection.cursor()
        if new_connection:
            if set_tz:
                cursor.execute("SET TIME ZONE %s", [settings_dict['TIME_ZONE']])
            if not hasattr(self, '_version'):
                # Cache the parsed server version on the class so later
                # connections skip the version query.
                self.__class__._version = get_version(cursor)
            if self._version[0:2] < (8, 0):
                # No savepoint support for earlier version of PostgreSQL.
                self.features.uses_savepoints = False
            cursor.execute("SET client_encoding to 'UNICODE'")
        return UnicodeCursorWrapper(cursor, 'utf-8')

    def _commit(self):
        # Commit, translating the driver's IntegrityError into Django's
        # wrapped exception while preserving the traceback (Python 2 idiom).
        if self.connection is not None:
            try:
                return self.connection.commit()
            except Database.IntegrityError, e:
                raise utils.IntegrityError, utils.IntegrityError(*tuple(e)), sys.exc_info()[2]
def typecast_string(s):
    """
    Cast all returned strings to unicode strings.

    Falsy values that are not plain bytestrings (e.g. None) are passed
    through untouched; everything else is coerced to unicode.
    """
    if s or isinstance(s, str):
        return smart_unicode(s)
    return s
# Register typecasters so psycopg 1 hands back the Python types Django
# expects. The numeric tuples are PostgreSQL built-in type OIDs
# (e.g. 1082 = date, 16 = boolean, 1700 = numeric).
try:
    Database.register_type(Database.new_type((1082,), "DATE", util.typecast_date))
except AttributeError:
    # psycopg2 has no new_type at this location; tell the user to switch.
    raise Exception("You appear to be using psycopg version 2. Set your DATABASES.ENGINE to 'postgresql_psycopg2' instead of 'postgresql'.")
Database.register_type(Database.new_type((1083,1266), "TIME", util.typecast_time))
Database.register_type(Database.new_type((1114,1184), "TIMESTAMP", util.typecast_timestamp))
Database.register_type(Database.new_type((16,), "BOOLEAN", util.typecast_boolean))
Database.register_type(Database.new_type((1700,), "NUMERIC", util.typecast_decimal))
# VARCHAR and friends are decoded through typecast_string defined above.
Database.register_type(Database.new_type(Database.types[1043].values, 'STRING', typecast_string))
|
"""
Boolean geometry sphere.
"""
from __future__ import absolute_import
import __init__
from fabmetheus_utilities.geometry.creation import solid
from fabmetheus_utilities.geometry.geometry_utilities import evaluate
from fabmetheus_utilities.geometry.solids import cube
from fabmetheus_utilities.geometry.solids import triangle_mesh
from fabmetheus_utilities.vector3 import Vector3
from fabmetheus_utilities import euclidean
import math
__author__ = 'Enrique Perez (perez_enrique@yahoo.com)'
__credits__ = 'Nophead <http://hydraraptor.blogspot.com/>\nArt of Illusion <http://www.artofillusion.org/>'
__date__ = '$Date: 2008/21/04 $'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
def addSphere(elementNode, faces, radius, vertexes):
    'Add sphere by radius.'
    # Slice the ellipsoid into horizontal rings between the poles and let the
    # triangle-mesh pillar builder stitch consecutive rings together.
    bottom = -radius.z
    sideCount = evaluate.getSidesMinimumThreeBasedOnPrecision(elementNode, max(radius.x, radius.y, radius.z))
    sliceCount = max(sideCount / 2, 2)
    equator = euclidean.getComplexPolygonByComplexRadius(complex(radius.x, radius.y), sideCount)
    # Begin with a degenerate ring (a single point) at the south pole.
    polygons = [triangle_mesh.getAddIndexedLoop([complex()], vertexes, bottom)]
    zStep = (radius.z + radius.z) / float(sliceCount)
    z = bottom
    for unusedSliceIndex in xrange(1, sliceCount):
        z += zStep
        zFraction = abs(z) / radius.z
        # Scale the equator down by the circle equation to get this ring.
        scaledRing = euclidean.getComplexPathByMultiplier(math.sqrt(1.0 - zFraction * zFraction), equator)
        polygons.append(triangle_mesh.getAddIndexedLoop(scaledRing, vertexes, z))
    # Close the mesh with the north-pole point.
    polygons.append(triangle_mesh.getAddIndexedLoop([complex()], vertexes, radius.z))
    triangle_mesh.addPillarByLoops(faces, polygons)
def getGeometryOutput(elementNode, radius):
    'Get triangle mesh from attribute dictionary.'
    faceList = []
    vertexList = []
    addSphere(elementNode, faceList, radius, vertexList)
    # Package the populated mesh in the standard trianglemesh dictionary form.
    return {'trianglemesh' : {'vertex' : vertexList, 'face' : faceList}}
def getNewDerivation(elementNode):
    'Get new derivation.'
    # Derivations bundle the evaluated attribute values for this shape.
    return SphereDerivation(elementNode)
def processElementNode(elementNode):
    'Process the xml element.'
    # Delegate to the generic archivable processor with the Sphere class.
    evaluate.processArchivable(Sphere, elementNode)
class Sphere(cube.Cube):
    'A sphere object.'

    def createShape(self):
        'Create the shape.'
        addSphere(self.elementNode, self.faces, self.radius, self.vertexes)

    def setToElementNode(self, elementNode):
        'Set to elementNode.'
        self.elementNode = elementNode
        self.radius = SphereDerivation(elementNode).radius
        # Replace any scalar 'radius' attribute with explicit per-axis values.
        attributeTable = elementNode.attributes
        if 'radius' in attributeTable:
            del attributeTable['radius']
        attributeTable['radius.x'] = self.radius.x
        attributeTable['radius.y'] = self.radius.y
        attributeTable['radius.z'] = self.radius.z
        self.createShape()
        solid.processArchiveRemoveSolid(elementNode, self.getGeometryOutput())
class SphereDerivation:
    "Class to hold sphere variables."

    def __init__(self, elementNode):
        'Set defaults.'
        defaultRadius = Vector3(1.0, 1.0, 1.0)
        # 'demisize'/'radius' give the radius directly; 'diameter'/'size' are
        # full widths, so they are applied with a multiplier of two.
        self.radius = evaluate.getVector3ByPrefixes(elementNode, ['demisize', 'radius'], defaultRadius)
        self.radius = evaluate.getVector3ByMultiplierPrefixes(elementNode, 2.0, ['diameter', 'size'], self.radius)
|
from dMainFrame import *
|
from django.db import models
from django.db import connection
class Square(models.Model):
    """Test model mapping an integer root to its square."""
    root = models.IntegerField()
    square = models.PositiveIntegerField()

    def __unicode__(self):
        return "%s ** 2 == %s" % (self.root, self.square)
class Person(models.Model):
    """Test model with two short name fields."""
    first_name = models.CharField(max_length=20)
    last_name = models.CharField(max_length=20)

    def __unicode__(self):
        return u'%s %s' % (self.first_name, self.last_name)
# Helpers for the raw-SQL doctests below. t_convert normalizes a table name
# for backends whose identifier handling is case-insensitive (presumably such
# backends fold unquoted identifiers to upper case -- confirm per backend);
# idiom fix: use def instead of assigning a lambda to a name (PEP 8 E731).
if connection.features.uses_case_insensitive_names:
    def t_convert(x):
        return x.upper()
else:
    def t_convert(x):
        return x

# Backend-specific identifier quoting for column names.
qn = connection.ops.quote_name
__test__ = {'API_TESTS': """
>>> from django.db import connection
>>> cursor = connection.cursor()
>>> opts = Square._meta
>>> f1, f2 = opts.get_field('root'), opts.get_field('square')
>>> query = ('INSERT INTO %s (%s, %s) VALUES (%%s, %%s)'
... % (t_convert(opts.db_table), qn(f1.column), qn(f2.column)))
>>> cursor.executemany(query, [(i, i**2) for i in range(-5, 6)]) and None or None
>>> Square.objects.order_by('root')
[<Square: -5 ** 2 == 25>, <Square: -4 ** 2 == 16>, <Square: -3 ** 2 == 9>, <Square: -2 ** 2 == 4>, <Square: -1 ** 2 == 1>, <Square: 0 ** 2 == 0>, <Square: 1 ** 2 == 1>, <Square: 2 ** 2 == 4>, <Square: 3 ** 2 == 9>, <Square: 4 ** 2 == 16>, <Square: 5 ** 2 == 25>]
>>> cursor.executemany(query, []) and None or None
>>> Square.objects.count()
11
>>> Person(first_name="John", last_name="Doe").save()
>>> Person(first_name="Jane", last_name="Doe").save()
>>> Person(first_name="Mary", last_name="Agnelline").save()
>>> Person(first_name="Peter", last_name="Parker").save()
>>> Person(first_name="Clark", last_name="Kent").save()
>>> opts2 = Person._meta
>>> f3, f4 = opts2.get_field('first_name'), opts2.get_field('last_name')
>>> query2 = ('SELECT %s, %s FROM %s ORDER BY %s'
... % (qn(f3.column), qn(f4.column), t_convert(opts2.db_table),
... qn(f3.column)))
>>> cursor.execute(query2) and None or None
>>> cursor.fetchone()
(u'Clark', u'Kent')
>>> list(cursor.fetchmany(2))
[(u'Jane', u'Doe'), (u'John', u'Doe')]
>>> list(cursor.fetchall())
[(u'Mary', u'Agnelline'), (u'Peter', u'Parker')]
"""}
|
"""
Interfaces for L{twisted.mail}.
@since: 16.5
"""
from __future__ import absolute_import, division
from zope.interface import Interface
class IClientAuthentication(Interface):
    """
    An interface for a client-side authentication scheme.
    """

    def getName():
        """
        Return an identifier associated with this authentication scheme.

        @rtype: L{bytes}
        """

    def challengeResponse(secret, challenge):
        """
        Generate a challenge response string.
        """
class IServerFactoryPOP3(Interface):
    """
    An interface for querying capabilities of a POP3 server.

    Any cap_* method may raise L{NotImplementedError} if the particular
    capability is not supported. If L{cap_EXPIRE()} does not raise
    L{NotImplementedError}, L{perUserExpiration()} must be implemented,
    otherwise they are optional. If L{cap_LOGIN_DELAY()} is implemented,
    L{perUserLoginDelay()} must be implemented, otherwise they are optional.

    @type challengers: L{dict} of L{bytes} -> L{IUsernameHashedPassword
        <cred.credentials.IUsernameHashedPassword>}
    @ivar challengers: A mapping of challenger names to
        L{IUsernameHashedPassword <cred.credentials.IUsernameHashedPassword>}
        provider.
    """

    def cap_IMPLEMENTATION():
        """
        Return a string describing the POP3 server implementation.

        @rtype: L{bytes}
        @return: Server implementation information.
        """

    def cap_EXPIRE():
        """
        Return the minimum number of days messages are retained.

        @rtype: L{int} or L{None}
        @return: The minimum number of days messages are retained or L{None},
            if the server never deletes messages.
        """

    def perUserExpiration():
        """
        Indicate whether the message expiration policy differs per user.

        @rtype: L{bool}
        @return: C{True} when the message expiration policy differs per user,
            C{False} otherwise.
        """

    def cap_LOGIN_DELAY():
        """
        Return the minimum number of seconds between client logins.

        @rtype: L{int}
        @return: The minimum number of seconds between client logins.
        """

    def perUserLoginDelay():
        """
        Indicate whether the login delay period differs per user.

        @rtype: L{bool}
        @return: C{True} when the login delay differs per user, C{False}
            otherwise.
        """
class IMailboxPOP3(Interface):
    """
    An interface for mailbox access.

    Message indices are 0-based.

    @type loginDelay: L{int}
    @ivar loginDelay: The number of seconds between allowed logins for the
        user associated with this mailbox.

    @type messageExpiration: L{int}
    @ivar messageExpiration: The number of days messages in this mailbox will
        remain on the server before being deleted.
    """

    # Deletion is two-phase: deleteMessage() only marks; sync() commits the
    # deletions and undeleteMessages() rolls the marks back.

    def listMessages(index=None):
        """
        Retrieve the size of a message, or, if none is specified, the size of
        each message in the mailbox.

        @type index: L{int} or L{None}
        @param index: The 0-based index of the message.

        @rtype: L{int}, sequence of L{int}, or L{Deferred <defer.Deferred>}
        @return: The number of octets in the specified message, or, if an
            index is not specified, a sequence of the number of octets for
            all messages in the mailbox or a deferred which fires with
            one of those. Any value which corresponds to a deleted message
            is set to 0.

        @raise ValueError or IndexError: When the index does not correspond to
            a message in the mailbox. The use of ValueError is preferred.
        """

    def getMessage(index):
        """
        Retrieve a file containing the contents of a message.

        @type index: L{int}
        @param index: The 0-based index of a message.

        @rtype: file-like object
        @return: A file containing the message.

        @raise ValueError or IndexError: When the index does not correspond to
            a message in the mailbox. The use of ValueError is preferred.
        """

    def getUidl(index):
        """
        Get a unique identifier for a message.

        @type index: L{int}
        @param index: The 0-based index of a message.

        @rtype: L{bytes}
        @return: A string of printable characters uniquely identifying the
            message for all time.

        @raise ValueError or IndexError: When the index does not correspond to
            a message in the mailbox. The use of ValueError is preferred.
        """

    def deleteMessage(index):
        """
        Mark a message for deletion.

        This must not change the number of messages in this mailbox. Further
        requests for the size of the deleted message should return 0. Further
        requests for the message itself may raise an exception.

        @type index: L{int}
        @param index: The 0-based index of a message.

        @raise ValueError or IndexError: When the index does not correspond to
            a message in the mailbox. The use of ValueError is preferred.
        """

    def undeleteMessages():
        """
        Undelete all messages marked for deletion.

        Any message which can be undeleted should be returned to its original
        position in the message sequence and retain its original UID.
        """

    def sync():
        """
        Discard the contents of any message marked for deletion.
        """
class IDomain(Interface):
    """
    An interface for email domains.
    """

    # See also IAliasableDomain below, which extends this interface with
    # alias resolution.

    def exists(user):
        """
        Check whether a user exists in this domain.

        @type user: L{User}
        @param user: A user.

        @rtype: no-argument callable which returns L{IMessageSMTP} provider
        @return: A function which takes no arguments and returns a message
            receiver for the user.

        @raise SMTPBadRcpt: When the given user does not exist in this domain.
        """

    def addUser(user, password):
        """
        Add a user to this domain.

        @type user: L{bytes}
        @param user: A username.

        @type password: L{bytes}
        @param password: A password.
        """

    def getCredentialsCheckers():
        """
        Return credentials checkers for this domain.

        @rtype: L{list} of L{ICredentialsChecker
            <twisted.cred.checkers.ICredentialsChecker>} provider
        @return: Credentials checkers for this domain.
        """
class IAlias(Interface):
    """
    An interface for aliases.

    An alias provides its own message receiver in place of a real user's.
    """

    def createMessageReceiver():
        """
        Create a message receiver.

        @rtype: L{IMessageSMTP} provider
        @return: A message receiver.
        """
class IAliasableDomain(IDomain):
    """
    An interface for email domains which can be aliased to other domains.
    """

    def setAliasGroup(aliases):
        """
        Set the group of defined aliases for this domain.

        @type aliases: L{dict} of L{bytes} -> L{IAlias} provider
        @param aliases: A mapping of domain name to alias.
        """

    # Extends IDomain.exists with a memo argument used during alias
    # resolution.
    def exists(user, memo=None):
        """
        Check whether a user exists in this domain or an alias of it.

        @type user: L{User}
        @param user: A user.

        @type memo: L{None} or L{dict} of
            L{AliasBase <twisted.mail.alias.AliasBase>}
        @param memo: A record of the addresses already considered while
            resolving aliases. The default value should be used by all external
            code.

        @rtype: no-argument callable which returns L{IMessageSMTP} provider
        @return: A function which takes no arguments and returns a message
            receiver for the user.

        @raise SMTPBadRcpt: When the given user does not exist in this domain
            or an alias of it.
        """
class IMessageDelivery(Interface):
    """
    An interface for handling the delivery of incoming SMTP messages:
    validating the sender and recipients and generating the Received header.
    """

    def receivedHeader(helo, origin, recipients):
        """
        Generate the Received header for a message.

        @type helo: 2-L{tuple} of L{bytes} and L{bytes}.
        @param helo: The argument to the HELO command and the client's IP
            address.

        @type origin: L{Address}
        @param origin: The address the message is from

        @type recipients: L{list} of L{User}
        @param recipients: A list of the addresses for which this message
            is bound.

        @rtype: L{bytes}
        @return: The full C{"Received"} header string.
        """

    def validateTo(user):
        """
        Validate the address for which the message is destined.

        @type user: L{User}
        @param user: The address to validate.

        @rtype: no-argument callable
        @return: A L{Deferred} which becomes, or a callable which takes no
            arguments and returns an object implementing L{IMessageSMTP}. This
            will be called and the returned object used to deliver the message
            when it arrives.

        @raise SMTPBadRcpt: Raised if messages to the address are not to be
            accepted.
        """

    def validateFrom(helo, origin):
        """
        Validate the address from which the message originates.

        @type helo: 2-L{tuple} of L{bytes} and L{bytes}.
        @param helo: The argument to the HELO command and the client's IP
            address.

        @type origin: L{Address}
        @param origin: The address the message is from

        @rtype: L{Deferred} or L{Address}
        @return: C{origin} or a L{Deferred} whose callback will be
            passed C{origin}.

        @raise SMTPBadSender: Raised of messages from this address are
            not to be accepted.
        """
class IMessageDeliveryFactory(Interface):
    """
    An alternate interface to implement for handling message delivery.

    It is useful to implement this interface instead of L{IMessageDelivery}
    directly because it allows the implementor to distinguish between
    different message deliveries over the same connection. This can be used
    to optimize delivery of a single message to multiple recipients,
    something which cannot be done by L{IMessageDelivery} implementors due to
    their lack of information.
    """

    def getMessageDelivery():
        """
        Return an L{IMessageDelivery} object.

        This will be called once per message.
        """
class IMessageSMTP(Interface):
    """
    Interface definition for messages that can be sent via SMTP.
    """

    def lineReceived(line):
        """
        Handle another line.
        """

    def eomReceived():
        """
        Handle end of message.

        Return a deferred. The deferred should be called with either:
        callback(string) or errback(error)

        @rtype: L{Deferred}
        """

    def connectionLost():
        """
        Handle message truncated.

        Semantics should be to discard the message.
        """
class IMessageIMAPPart(Interface):
    """
    An interface for a part of an IMAP message: either a whole message or
    one of its MIME sub-parts (see L{getSubPart}).
    """

    def getHeaders(negate, *names):
        """
        Retrieve a group of message headers.

        @type names: L{tuple} of L{bytes}
        @param names: The names of the headers to retrieve or omit.

        @type negate: L{bool}
        @param negate: If True, indicates that the headers listed in C{names}
            should be omitted from the return value, rather than included.

        @rtype: L{dict}
        @return: A mapping of header field names to header field values
        """

    def getBodyFile():
        """
        Retrieve a file object containing only the body of this message.

        @return: A file-like object opened for reading the body.
        """

    def getSize():
        """
        Retrieve the total size, in octets, of this message.

        @rtype: L{int}
        """

    def isMultipart():
        """
        Indicate whether this message has subparts.

        @rtype: L{bool}
        """

    def getSubPart(part):
        """
        Retrieve a MIME sub-message

        @type part: L{int}
        @param part: The number of the part to retrieve, indexed from 0.

        @raise IndexError: Raised if the specified part does not exist.
        @raise TypeError: Raised if this message is not multipart.

        @rtype: Any object implementing L{IMessageIMAPPart}.
        @return: The specified sub-part.
        """
class IMessageIMAP(IMessageIMAPPart):
    """
    An interface for a complete message in an IMAP mailbox, adding UID,
    flags and internal-date access to L{IMessageIMAPPart}.
    """

    def getUID():
        """
        Retrieve the unique identifier associated with this message.
        """

    def getFlags():
        """
        Retrieve the flags associated with this message.

        @rtype: C{iterable}
        @return: The flags, represented as strings.
        """

    def getInternalDate():
        """
        Retrieve the date internally associated with this message.

        @rtype: L{bytes}
        @return: An RFC822-formatted date string.
        """
class IMessageIMAPFile(Interface):
    """
    Optional message interface for representing messages as files.

    If provided by message objects, this interface will be used instead of
    the more complex MIME-based interface.
    """

    def open():
        """
        Return a file-like object opened for reading.

        Reading from the returned file will return all the bytes of which this
        message consists.
        """
class ISearchableIMAPMailbox(Interface):
    """
    An optional interface for mailboxes that can perform message searches
    themselves, rather than relying on the generic fetch-based fallback.
    """

    def search(query, uid):
        """
        Search for messages that meet the given query criteria.

        If this interface is not implemented by the mailbox,
        L{IMailboxIMAP.fetch} and various methods of L{IMessageIMAP} will be
        used instead.

        Implementations which wish to offer better performance than the default
        implementation should implement this interface.

        @type query: L{list}
        @param query: The search criteria

        @type uid: L{bool}
        @param uid: If true, the IDs specified in the query are UIDs; otherwise
            they are message sequence IDs.

        @rtype: L{list} or L{Deferred}
        @return: A list of message sequence numbers or message UIDs which match
            the search criteria or a L{Deferred} whose callback will be invoked
            with such a list.

        @raise IllegalQueryError: Raised when query is not valid.
        """
class IMailboxIMAPListener(Interface):
    """
    Interface for objects interested in mailbox events
    """

    def modeChanged(writeable):
        """
        Indicates that the write status of a mailbox has changed.

        @type writeable: L{bool}
        @param writeable: A true value if write is now allowed, false
            otherwise.
        """

    def flagsChanged(newFlags):
        """
        Indicates that the flags of one or more messages have changed.

        @type newFlags: L{dict}
        @param newFlags: A mapping of message identifiers to tuples of flags
            now set on that message.
        """

    def newMessages(exists, recent):
        """
        Indicates that the number of messages in a mailbox has changed.

        @type exists: L{int} or L{None}
        @param exists: The total number of messages now in this mailbox. If the
            total number of messages has not changed, this should be L{None}.

        @type recent: L{int} or L{None}
        @param recent: The number of messages now flagged C{\\Recent}. If the
            number of recent messages has not changed, this should be L{None}.
        """
class IMessageIMAPCopier(Interface):
    """
    An optional interface for mailboxes that can copy messages into
    themselves more efficiently than the generic addMessage fallback.
    """

    def copy(messageObject):
        """
        Copy the given message object into this mailbox.

        The message object will be one which was previously returned by
        L{IMailboxIMAP.fetch}.

        Implementations which wish to offer better performance than the default
        implementation should implement this interface.

        If this interface is not implemented by the mailbox,
        L{IMailboxIMAP.addMessage} will be used instead.

        @rtype: L{Deferred} or L{int}
        @return: Either the UID of the message or a Deferred which fires with
            the UID when the copy finishes.
        """
class IMailboxIMAPInfo(Interface):
    """
    Interface specifying only the methods required for C{listMailboxes}.

    Implementations can return objects implementing only these methods for
    return to C{listMailboxes} if it can allow them to operate more
    efficiently.
    """

    def getFlags():
        """
        Return the flags defined in this mailbox

        Flags with the \\ prefix are reserved for use as system flags.

        @rtype: L{list} of L{bytes}
        @return: A list of the flags that can be set on messages in this
            mailbox.
        """

    def getHierarchicalDelimiter():
        """
        Get the character which delimits namespaces in this mailbox.

        @rtype: L{bytes}
        """
class IMailboxIMAP(IMailboxIMAPInfo):
    """
    An interface for an IMAP mailbox: UID bookkeeping, message counts,
    listener management, and message storage, retrieval and expunging.
    """

    def getUIDValidity():
        """
        Return the unique validity identifier for this mailbox.

        @rtype: L{int}
        """

    def getUIDNext():
        """
        Return the likely UID for the next message added to this mailbox.

        @rtype: L{int}
        """

    def getUID(message):
        """
        Return the UID of a message in the mailbox

        @type message: L{int}
        @param message: The message sequence number

        @rtype: L{int}
        @return: The UID of the message.
        """

    def getMessageCount():
        """
        Return the number of messages in this mailbox.

        @rtype: L{int}
        """

    def getRecentCount():
        """
        Return the number of messages with the 'Recent' flag.

        @rtype: L{int}
        """

    def getUnseenCount():
        """
        Return the number of messages with the 'Unseen' flag.

        @rtype: L{int}
        """

    def isWriteable():
        """
        Get the read/write status of the mailbox.

        @rtype: L{int}
        @return: A true value if write permission is allowed, a false value
            otherwise.
        """

    def destroy():
        """
        Called before this mailbox is deleted, permanently.

        If necessary, all resources held by this mailbox should be cleaned up
        here. This function _must_ set the \\Noselect flag on this mailbox.
        """

    def requestStatus(names):
        """
        Return status information about this mailbox.

        For mailboxes which do not intend to do any special processing to
        generate the return value, C{statusRequestHelper} can be used to build
        the dictionary by calling the other interface methods which return the
        data for each name.

        @type names: Any iterable
        @param names: The status names to return information regarding. The
            possible values for each name are: MESSAGES, RECENT, UIDNEXT,
            UIDVALIDITY, UNSEEN.

        @rtype: L{dict} or L{Deferred}
        @return: A dictionary containing status information about the requested
            names is returned. If the process of looking this information up
            would be costly, a deferred whose callback will eventually be
            passed this dictionary is returned instead.
        """

    def addListener(listener):
        """
        Add a mailbox change listener

        @type listener: Any object which implements C{IMailboxIMAPListener}
        @param listener: An object to add to the set of those which will be
            notified when the contents of this mailbox change.
        """

    def removeListener(listener):
        """
        Remove a mailbox change listener

        @type listener: Any object previously added to and not removed from
            this mailbox as a listener.
        @param listener: The object to remove from the set of listeners.

        @raise ValueError: Raised when the given object is not a listener for
            this mailbox.
        """

    def addMessage(message, flags=(), date=None):
        """
        Add the given message to this mailbox.

        @type message: A file-like object
        @param message: The RFC822 formatted message

        @type flags: Any iterable of L{bytes}
        @param flags: The flags to associate with this message

        @type date: L{bytes}
        @param date: If specified, the date to associate with this message.

        @rtype: L{Deferred}
        @return: A deferred whose callback is invoked with the message id if
            the message is added successfully and whose errback is invoked
            otherwise.

        @raise ReadOnlyMailbox: Raised if this Mailbox is not open for
            read-write.
        """

    def expunge():
        """
        Remove all messages flagged \\Deleted.

        @rtype: L{list} or L{Deferred}
        @return: The list of message sequence numbers which were deleted, or a
            L{Deferred} whose callback will be invoked with such a list.

        @raise ReadOnlyMailbox: Raised if this Mailbox is not open for
            read-write.
        """

    def fetch(messages, uid):
        """
        Retrieve one or more messages.

        @type messages: C{MessageSet}
        @param messages: The identifiers of messages to retrieve information
            about

        @type uid: L{bool}
        @param uid: If true, the IDs specified in the query are UIDs; otherwise
            they are message sequence IDs.

        @rtype: Any iterable of two-tuples of message sequence numbers and
            implementors of C{IMessageIMAP}.
        """

    def store(messages, flags, mode, uid):
        """
        Set the flags of one or more messages.

        @type messages: A MessageSet object with the list of messages requested
        @param messages: The identifiers of the messages to set the flags of.

        @type flags: sequence of L{bytes}
        @param flags: The flags to set, unset, or add.

        @type mode: -1, 0, or 1
        @param mode: If mode is -1, these flags should be removed from the
            specified messages. If mode is 1, these flags should be added to
            the specified messages. If mode is 0, all existing flags should be
            cleared and these flags should be added.

        @type uid: L{bool}
        @param uid: If true, the IDs specified in the query are UIDs; otherwise
            they are message sequence IDs.

        @rtype: L{dict} or L{Deferred}
        @return: A L{dict} mapping message sequence numbers to sequences of
            L{bytes} representing the flags set on the message after this
            operation has been performed, or a L{Deferred} whose callback will
            be invoked with such a L{dict}.

        @raise ReadOnlyMailbox: Raised if this mailbox is not open for
            read-write.
        """
class ICloseableMailboxIMAP(Interface):
    """
    A supplementary interface for mailboxes which require cleanup on close.

    Implementing this interface is optional. If it is implemented, the protocol
    code will call the close method defined whenever a mailbox is closed.
    """

    def close():
        """
        Close this mailbox.

        @return: A L{Deferred} which fires when this mailbox has been closed,
            or L{None} if the mailbox can be closed immediately.
        """
class IAccountIMAP(Interface):
    """
    Interface for Account classes

    Implementors of this interface should consider implementing
    C{INamespacePresenter}.
    """

    def addMailbox(name, mbox=None):
        """
        Add a new mailbox to this account

        @type name: L{bytes}
        @param name: The name associated with this mailbox. It may not contain
            multiple hierarchical parts.

        @type mbox: An object implementing C{IMailboxIMAP}
        @param mbox: The mailbox to associate with this name. If L{None}, a
            suitable default is created and used.

        @rtype: L{Deferred} or L{bool}
        @return: A true value if the creation succeeds, or a deferred whose
            callback will be invoked when the creation succeeds.

        @raise MailboxException: Raised if this mailbox cannot be added for
            some reason. This may also be raised asynchronously, if a
            L{Deferred} is returned.
        """

    def create(pathspec):
        """
        Create a new mailbox from the given hierarchical name.

        @type pathspec: L{bytes}
        @param pathspec: The full hierarchical name of a new mailbox to create.
            If any of the inferior hierarchical names to this one do not exist,
            they are created as well.

        @rtype: L{Deferred} or L{bool}
        @return: A true value if the creation succeeds, or a deferred whose
            callback will be invoked when the creation succeeds.

        @raise MailboxException: Raised if this mailbox cannot be added. This
            may also be raised asynchronously, if a L{Deferred} is returned.
        """

    def select(name, rw=True):
        """
        Acquire a mailbox, given its name.

        @type name: L{bytes}
        @param name: The mailbox to acquire

        @type rw: L{bool}
        @param rw: If a true value, request a read-write version of this
            mailbox. If a false value, request a read-only version.

        @rtype: Any object implementing C{IMailboxIMAP} or L{Deferred}
        @return: The mailbox object, or a L{Deferred} whose callback will be
            invoked with the mailbox object. L{None} may be returned if the
            specified mailbox may not be selected for any reason.
        """

    def delete(name):
        """
        Delete the mailbox with the specified name.

        @type name: L{bytes}
        @param name: The mailbox to delete.

        @rtype: L{Deferred} or L{bool}
        @return: A true value if the mailbox is successfully deleted, or a
            L{Deferred} whose callback will be invoked when the deletion
            completes.

        @raise MailboxException: Raised if this mailbox cannot be deleted. This
            may also be raised asynchronously, if a L{Deferred} is returned.
        """

    def rename(oldname, newname):
        """
        Rename a mailbox

        @type oldname: L{bytes}
        @param oldname: The current name of the mailbox to rename.

        @type newname: L{bytes}
        @param newname: The new name to associate with the mailbox.

        @rtype: L{Deferred} or L{bool}
        @return: A true value if the mailbox is successfully renamed, or a
            L{Deferred} whose callback will be invoked when the rename
            operation is completed.

        @raise MailboxException: Raised if this mailbox cannot be renamed. This
            may also be raised asynchronously, if a L{Deferred} is returned.
        """

    def isSubscribed(name):
        """
        Check the subscription status of a mailbox

        @type name: L{bytes}
        @param name: The name of the mailbox to check

        @rtype: L{Deferred} or L{bool}
        @return: A true value if the given mailbox is currently subscribed to,
            a false value otherwise. A L{Deferred} may also be returned whose
            callback will be invoked with one of these values.
        """

    def subscribe(name):
        """
        Subscribe to a mailbox

        @type name: L{bytes}
        @param name: The name of the mailbox to subscribe to

        @rtype: L{Deferred} or L{bool}
        @return: A true value if the mailbox is subscribed to successfully, or
            a Deferred whose callback will be invoked with this value when the
            subscription is successful.

        @raise MailboxException: Raised if this mailbox cannot be subscribed
            to. This may also be raised asynchronously, if a L{Deferred} is
            returned.
        """

    def unsubscribe(name):
        """
        Unsubscribe from a mailbox

        @type name: L{bytes}
        @param name: The name of the mailbox to unsubscribe from

        @rtype: L{Deferred} or L{bool}
        @return: A true value if the mailbox is unsubscribed from successfully,
            or a Deferred whose callback will be invoked with this value when
            the unsubscription is successful.

        @raise MailboxException: Raised if this mailbox cannot be unsubscribed
            from. This may also be raised asynchronously, if a L{Deferred} is
            returned.
        """

    def listMailboxes(ref, wildcard):
        """
        List all the mailboxes that meet a certain criteria

        @type ref: L{bytes}
        @param ref: The context in which to apply the wildcard

        @type wildcard: L{bytes}
        @param wildcard: An expression against which to match mailbox names.
            '*' matches any number of characters in a mailbox name, and '%'
            matches similarly, but will not match across hierarchical
            boundaries.

        @rtype: L{list} of L{tuple}
        @return: A list of C{(mailboxName, mailboxObject)} which meet the given
            criteria. C{mailboxObject} should implement either
            C{IMailboxIMAPInfo} or C{IMailboxIMAP}. A Deferred may also be
            returned.
        """
class INamespacePresenter(Interface):
    """
    Interface for reporting the IMAP NAMESPACE groups (personal, shared
    and other-user namespaces) to a client.
    """
    def getPersonalNamespaces():
        """
        Report the available personal namespaces.

        Typically there should be only one personal namespace. A common name
        for it is C{\"\"}, and its hierarchical delimiter is usually C{\"/\"}.

        @rtype: iterable of two-tuples of strings
        @return: The personal namespaces and their hierarchical delimiters. If
            no namespaces of this type exist, None should be returned.
        """
    def getSharedNamespaces():
        """
        Report the available shared namespaces.

        Shared namespaces do not belong to any individual user but are usually
        to one or more of them. Examples of shared namespaces might be
        C{\"#news\"} for a usenet gateway.

        @rtype: iterable of two-tuples of strings
        @return: The shared namespaces and their hierarchical delimiters. If no
            namespaces of this type exist, None should be returned.
        """
    def getUserNamespaces():
        """
        Report the available user namespaces.

        These are namespaces that contain folders belonging to other users
        access to which this account has been granted.

        @rtype: iterable of two-tuples of strings
        @return: The user namespaces and their hierarchical delimiters. If no
            namespaces of this type exist, None should be returned.
        """
# Public API of this interfaces module, grouped by mail protocol.
__all__ = [
    # IMAP
    'IAccountIMAP', 'ICloseableMailboxIMAP', 'IMailboxIMAP',
    'IMailboxIMAPInfo', 'IMailboxIMAPListener', 'IMessageIMAP',
    'IMessageIMAPCopier', 'IMessageIMAPFile', 'IMessageIMAPPart',
    'ISearchableIMAPMailbox', 'INamespacePresenter',
    # SMTP
    'IMessageDelivery', 'IMessageDeliveryFactory', 'IMessageSMTP',
    # Domains and aliases
    'IDomain', 'IAlias', 'IAliasableDomain',
    # POP3
    'IMailboxPOP3', 'IServerFactoryPOP3',
    # Authentication
    'IClientAuthentication',
]
|
from django import forms
from ckeditor import fields
from ckeditor_uploader import widgets
class RichTextUploadingField(fields.RichTextField):
    """Model field variant of RichTextField that uses the uploading form
    field (and therefore the CKEditor widget with upload support)."""
    @staticmethod
    def _get_form_class():
        # NOTE(review): presumably consumed by the base RichTextField when
        # building its form field — confirm against ckeditor.fields.
        return RichTextUploadingFormField
class RichTextUploadingFormField(forms.fields.CharField):
    """CharField whose widget is always the CKEditor uploading widget."""
    def __init__(self, config_name='default', extra_plugins=None, external_plugin_resources=None, *args, **kwargs):
        # Force the uploading widget, overriding any caller-supplied one
        # (same behaviour as the original kwargs.update()).
        kwargs['widget'] = widgets.CKEditorUploadingWidget(
            config_name=config_name,
            extra_plugins=extra_plugins,
            external_plugin_resources=external_plugin_resources,
        )
        super(RichTextUploadingFormField, self).__init__(*args, **kwargs)
try:
    from south.modelsinspector import add_introspection_rules
    # Teach (legacy) South migrations about this field. Raw string avoids
    # the invalid "\." escape sequences of the original plain string, which
    # emit DeprecationWarning and will become a SyntaxError.
    add_introspection_rules([], [r"^ckeditor_uploader\.fields\.RichTextUploadingField"])
except ImportError:
    # South is optional; without it Django's native migrations apply.
    pass
|
from ctypes import *
import sys
import unittest
class SizesTestCase(unittest.TestCase):
    """Verify that the fixed-width ctypes integer aliases have the
    advertised sizes, and that size_t/ssize_t match the pointer width."""

    def _assert_both(self, nbytes, signed_type, unsigned_type):
        # Both the signed and unsigned variant must occupy *nbytes*.
        self.assertEqual(nbytes, sizeof(signed_type))
        self.assertEqual(nbytes, sizeof(unsigned_type))

    def test_8(self):
        self._assert_both(1, c_int8, c_uint8)

    def test_16(self):
        self._assert_both(2, c_int16, c_uint16)

    def test_32(self):
        self._assert_both(4, c_int32, c_uint32)

    def test_64(self):
        self._assert_both(8, c_int64, c_uint64)

    def test_size_t(self):
        # size_t is pointer-sized on all supported platforms.
        self.assertEqual(sizeof(c_void_p), sizeof(c_size_t))

    def test_ssize_t(self):
        # ssize_t mirrors size_t, signed.
        self.assertEqual(sizeof(c_void_p), sizeof(c_ssize_t))
# Allow running this module directly as a test script.
if __name__ == "__main__":
    unittest.main()
|
import os
import sys
import re
import optparse
import textwrap
import shutil
import time
from optparse import OptionParser
def main():
    """Distribute the lines of a range file across a server cluster.

    Command-line options:
      -f/--file    input range file (one tablet range per line)
      -o/--output  output file receiving this server's share of lines
      -i/--index   zero-based index of the current server
      -c/--count   total number of servers in the cluster

    Lines are assigned round-robin: server k receives every line whose
    zero-based position is congruent to k modulo the server count.
    Exits with status 1 (after printing help) when an option is missing.
    """
    parser = OptionParser()
    parser.add_option("-f", "--file", action="store", dest = "range_file", help = "input range file")
    parser.add_option("-o", "--output", action = "store", dest = "output", help = "output file")
    parser.add_option("-i", "--index", action="store", dest = "svr_idx", help = "server index (of current server [0, count-1]")
    parser.add_option("-c", "--count", action="store", dest = "svr_count", help="number of server in this cluster")
    (options, args) = parser.parse_args(sys.argv)
    if not options.range_file or not options.svr_idx or not options.svr_count or not options.output:
        parser.print_help()
        sys.exit(1)
    svr_count = int(options.svr_count)
    svr_idx = int(options.svr_idx)
    # NOTE(review): the original also computed a contiguous [begin, end)
    # tablet range from a first pass over the file, but never used it —
    # that dead code (and the extra pass) is removed here. The with-blocks
    # also close both files, which the original never did.
    with open(options.range_file, "r") as range_input:
        with open(options.output, "w") as output:
            for idx, line in enumerate(range_input):
                if idx % svr_count == svr_idx:
                    output.write(line)
# Script entry point.
if __name__ == "__main__":
    main()
|
import hashlib
from gettext import gettext as _
from gi.repository import Gtk
from gi.repository import Gdk
import dbus
from sugar3.graphics import style
from jarabe.model import network
# WEP authentication algorithm identifiers (values of the 'auth-alg'
# wireless-security setting).
IW_AUTH_ALG_OPEN_SYSTEM = 'open'
IW_AUTH_ALG_SHARED_KEY = 'shared'
# WEP key entry modes offered in the key-type combo box.
WEP_PASSPHRASE = 1
WEP_HEX = 2
WEP_ASCII = 3
def string_is_hex(key):
    """Return True when every character of *key* is a hex digit.

    An empty string is (vacuously) considered hex, matching the
    historical behaviour of this helper.
    """
    # all() short-circuits on the first non-hex character, unlike the
    # original flag-based loop which always scanned the whole string.
    return all('a' <= c.lower() <= 'f' or '0' <= c <= '9' for c in key)
def string_is_ascii(string):
    """Return True when *string* contains only ASCII characters."""
    # EAFP: let the codec decide instead of inspecting code points.
    try:
        string.encode('ascii')
    except UnicodeEncodeError:
        return False
    return True
def string_to_hex(passphrase):
    """Return *passphrase* as a lower-case hex string, two digits per
    character."""
    # ''.join builds the result in one pass instead of the original
    # quadratic string concatenation loop.
    return ''.join('%02x' % ord(c) for c in passphrase)
def hash_passphrase(passphrase):
    """Derive a 104-bit WEP key (26 hex chars) from a passphrase.

    De-facto "MD5 passphrase" WEP derivation: the passphrase is
    repeated/truncated to exactly 64 bytes, hashed with MD5, and the
    first 13 digest bytes are returned as a hex string.
    """
    # passphrase must have a length of 64
    if len(passphrase) > 64:
        passphrase = passphrase[:64]
    elif len(passphrase) < 64:
        # Repeat the passphrase until it fills 64 bytes.
        # NOTE(review): an empty passphrase would loop forever here; the
        # WEP dialog's sensitivity check (len(key) > 0) keeps that case
        # from reaching us — confirm no other caller exists.
        while len(passphrase) < 64:
            passphrase += passphrase[:64 - len(passphrase)]
    passphrase = hashlib.md5(passphrase).digest()
    return string_to_hex(passphrase)[:26]
class CanceledKeyRequestError(dbus.DBusException):
    """D-Bus error sent back to NetworkManager when the user cancels the
    key dialog instead of supplying a secret."""
    def __init__(self):
        dbus.DBusException.__init__(self)
        # Error name NM recognizes as a user-canceled secrets request.
        self._dbus_error_name = network.NM_SETTINGS_IFACE + '.CanceledError'
class KeyDialog(Gtk.Dialog):
    """Base modal dialog prompting for a wireless network key.

    Subclasses add their protocol-specific widgets and implement
    create_security() to turn the entered key into NetworkManager
    wireless-security settings.
    """
    def __init__(self, ssid, flags, wpa_flags, rsn_flags, dev_caps, response):
        # response: object used to hand secrets (or an error) back to
        # NetworkManager once the dialog is answered.
        Gtk.Dialog.__init__(self, flags=Gtk.DialogFlags.MODAL)
        self.set_title('Wireless Key Required')
        self._response = response
        self._entry = None
        self._ssid = ssid
        self._flags = flags
        self._wpa_flags = wpa_flags
        self._rsn_flags = rsn_flags
        self._dev_caps = dev_caps
        display_name = network.ssid_to_display_name(ssid)
        label = Gtk.Label(label=_("A wireless encryption key is required for\n"
                                  " the wireless network '%s'.")
                          % (display_name, ))
        self.vbox.pack_start(label, True, True, 0)
        self.add_buttons(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
                         Gtk.STOCK_OK, Gtk.ResponseType.OK)
        self.set_default_response(Gtk.ResponseType.OK)
    def add_key_entry(self):
        # Shared single-line key entry; OK sensitivity tracks its content.
        self._entry = Gtk.Entry()
        self._entry.connect('changed', self._update_response_sensitivity)
        self._entry.connect('activate', self._entry_activate_cb)
        self.vbox.pack_start(self._entry, True, True, 0)
        self.vbox.set_spacing(6)
        self.vbox.show_all()
        self._update_response_sensitivity()
        self._entry.grab_focus()
    def _entry_activate_cb(self, entry):
        # Enter in the entry behaves like pressing OK.
        self.response(Gtk.ResponseType.OK)
    def create_security(self):
        # Subclass responsibility: build the NM security settings dict.
        raise NotImplementedError
    def get_response_object(self):
        return self._response
class WEPKeyDialog(KeyDialog):
def __init__(self, ssid, flags, wpa_flags, rsn_flags, dev_caps, response):
KeyDialog.__init__(self, ssid, flags, wpa_flags, rsn_flags,
dev_caps, response)
# WEP key type
self.key_store = Gtk.ListStore(str, int)
self.key_store.append(['Passphrase (128-bit)', WEP_PASSPHRASE])
self.key_store.append(['Hex (40/128-bit)', WEP_HEX])
self.key_store.append(['ASCII (40/128-bit)', WEP_ASCII])
self.key_combo = Gtk.ComboBox(model=self.key_store)
cell = Gtk.CellRendererText()
self.key_combo.pack_start(cell, True)
self.key_combo.add_attribute(cell, 'text', 0)
self.key_combo.set_active(0)
self.key_combo.connect('changed', self._key_combo_changed_cb)
hbox = Gtk.HBox()
hbox.pack_start(Gtk.Label(_('Key Type:')), True, True, 0)
hbox.pack_start(self.key_combo, True, True, 0)
hbox.show_all()
self.vbox.pack_start(hbox, True, True, 0)
# Key entry field
self.add_key_entry()
# WEP authentication mode
self.auth_store = Gtk.ListStore(str, str)
self.auth_store.append(['Open System', IW_AUTH_ALG_OPEN_SYSTEM])
self.auth_store.append(['Shared Key', IW_AUTH_ALG_SHARED_KEY])
self.auth_combo = Gtk.ComboBox(model=self.auth_store)
cell = Gtk.CellRendererText()
self.auth_combo.pack_start(cell, True)
self.auth_combo.add_attribute(cell, 'text', 0)
self.auth_combo.set_active(0)
hbox = Gtk.HBox()
hbox.pack_start(Gtk.Label(_('Authentication Type:')), True, True, 0)
hbox.pack_start(self.auth_combo, True, True, 0)
hbox.show_all()
self.vbox.pack_start(hbox, True, True, 0)
def _key_combo_changed_cb(self, widget):
self._update_response_sensitivity()
def _get_security(self):
key = self._entry.get_text()
it = self.key_combo.get_active_iter()
(key_type, ) = self.key_store.get(it, 1)
if key_type == WEP_PASSPHRASE:
key = hash_passphrase(key)
elif key_type == WEP_ASCII:
key = string_to_hex(key)
it = self.auth_combo.get_active_iter()
(auth_alg, ) = self.auth_store.get(it, 1)
return (key, auth_alg)
def print_security(self):
(key, auth_alg) = self._get_security()
print 'Key: %s' % key
print 'Auth: %d' % auth_alg
def create_security(self):
(key, auth_alg) = self._get_security()
wsec = {'wep-key0': key, 'auth-alg': auth_alg}
return {'802-11-wireless-security': wsec}
def _update_response_sensitivity(self, ignored=None):
key = self._entry.get_text()
it = self.key_combo.get_active_iter()
(key_type, ) = self.key_store.get(it, 1)
valid = False
if key_type == WEP_PASSPHRASE:
# As the md5 passphrase can be of any length and has no indicator,
# we cannot check for the validity of the input.
if len(key) > 0:
valid = True
elif key_type == WEP_ASCII:
if len(key) == 5 or len(key) == 13:
valid = string_is_ascii(key)
elif key_type == WEP_HEX:
if len(key) == 10 or len(key) == 26:
valid = string_is_hex(key)
self.set_response_sensitive(Gtk.ResponseType.OK, valid)
class WPAKeyDialog(KeyDialog):
    """Key dialog for WPA/WPA2-Personal networks: a single PSK entry."""
    def __init__(self, ssid, flags, wpa_flags, rsn_flags, dev_caps, response):
        KeyDialog.__init__(self, ssid, flags, wpa_flags, rsn_flags,
                           dev_caps, response)
        self.add_key_entry()
        # Informational combo with the single supported security mode.
        self.store = Gtk.ListStore(str)
        self.store.append([_('WPA & WPA2 Personal')])
        self.combo = Gtk.ComboBox(model=self.store)
        cell = Gtk.CellRendererText()
        self.combo.pack_start(cell, True)
        self.combo.add_attribute(cell, 'text', 0)
        self.combo.set_active(0)
        self.hbox = Gtk.HBox()
        self.hbox.pack_start(Gtk.Label(_('Wireless Security:')), True, True, 0)
        self.hbox.pack_start(self.combo, True, True, 0)
        self.hbox.show_all()
        self.vbox.pack_start(self.hbox, True, True, 0)
    def _get_security(self):
        # The PSK is used verbatim (64-char hex key or passphrase).
        return self._entry.get_text()
    def print_security(self):
        # Debug helper.
        key = self._get_security()
        print 'Key: %s' % key
    def create_security(self):
        """Build the NM 802-11-wireless-security settings for WPA-PSK."""
        wsec = {'psk': self._get_security()}
        return {'802-11-wireless-security': wsec}
    def _update_response_sensitivity(self, ignored=None):
        """Enable OK for a 64-char hex key or an 8..63-char passphrase."""
        key = self._entry.get_text()
        is_hex = string_is_hex(key)
        valid = False
        if len(key) == 64 and is_hex:
            # hex key
            valid = True
        elif len(key) >= 8 and len(key) <= 63:
            # passphrase
            valid = True
        self.set_response_sensitive(Gtk.ResponseType.OK, valid)
        # Return value is ignored by the GTK 'changed' signal.
        return False
def create(ssid, flags, wpa_flags, rsn_flags, dev_caps, response):
    """Show the key dialog matching the AP's advertised security.

    WEP is assumed when neither WPA nor RSN capabilities are present;
    otherwise the WPA dialog is used. The dialog is centered near the
    top of the screen and reports back through *response*.
    """
    if wpa_flags == network.NM_802_11_AP_SEC_NONE and \
            rsn_flags == network.NM_802_11_AP_SEC_NONE:
        key_dialog = WEPKeyDialog(ssid, flags, wpa_flags, rsn_flags,
                                  dev_caps, response)
    else:
        key_dialog = WPAKeyDialog(ssid, flags, wpa_flags, rsn_flags,
                                  dev_caps, response)
    key_dialog.connect('response', _key_dialog_response_cb)
    key_dialog.show_all()
    width, height = key_dialog.get_size()
    key_dialog.move(Gdk.Screen.width() / 2 - width / 2,
                    style.GRID_CELL_SIZE * 2)
def _key_dialog_response_cb(key_dialog, response_id):
    """Forward the dialog result to NetworkManager and destroy the dialog.

    OK sends the secrets built by the dialog; cancel/close sends a
    CanceledKeyRequestError back instead.
    """
    response = key_dialog.get_response_object()
    secrets = None
    if response_id == Gtk.ResponseType.OK:
        secrets = key_dialog.create_security()
    if response_id in [Gtk.ResponseType.CANCEL, Gtk.ResponseType.NONE,
                       Gtk.ResponseType.DELETE_EVENT]:
        # key dialog dialog was canceled; send the error back to NM
        response.set_error(CanceledKeyRequestError())
    elif response_id == Gtk.ResponseType.OK:
        if not secrets:
            raise RuntimeError('Invalid security arguments.')
        response.set_secrets(secrets)
    else:
        raise RuntimeError('Unhandled key dialog response %d' % response_id)
    key_dialog.destroy()
|
from __future__ import unicode_literals
import frappe
from frappe.utils import cstr, cint, getdate
from frappe import msgprint, _
from calendar import monthrange
def execute(filters=None):
    """Return (columns, data) for the monthly attendance report.

    One row per employee having attendance: identity columns, one status
    letter per day of the selected month, then present/absent totals.
    """
    filters = filters or {}
    conditions, filters = get_conditions(filters)
    columns = get_columns(filters)
    att_map = get_attendance_list(conditions, filters)
    emp_map = get_employee_details()
    status_abbr = {"Present": "P", "Absent": "A", "Half Day": "H", "None": ""}
    data = []
    for emp in sorted(att_map):
        details = emp_map.get(emp)
        if not details:
            # Attendance exists for an employee with no master record; skip.
            continue
        row = [emp, details.employee_name, details.branch,
            details.department, details.designation, details.company]
        present = absent = 0.0
        day_status = att_map[emp]
        for day in range(1, filters["total_days_in_month"] + 1):
            status = day_status.get(day, "None")
            row.append(status_abbr[status])
            if status == "Present":
                present += 1
            elif status == "Absent":
                absent += 1
            elif status == "Half Day":
                present += 0.5
                absent += 0.5
        row += [present, absent]
        data.append(row)
    return columns, data
def get_columns(filters):
    """Return the report column definitions for the selected month."""
    identity_columns = [
        _("Employee") + ":Link/Employee:120",
        _("Employee Name") + "::140",
        _("Branch") + ":Link/Branch:120",
        _("Department") + ":Link/Department:120",
        _("Designation") + ":Link/Designation:120",
        _("Company") + ":Link/Company:120",
    ]
    # One narrow column per day of the month, labeled 1..N.
    day_columns = [cstr(day + 1) + "::20"
        for day in range(filters["total_days_in_month"])]
    total_columns = [_("Total Present") + ":Float:80",
        _("Total Absent") + ":Float:80"]
    return identity_columns + day_columns + total_columns
def get_attendance_list(conditions, filters):
    """Map employee -> {day_of_month: status} for submitted attendance."""
    records = frappe.db.sql("""select employee, day(att_date) as day_of_month,
        status from tabAttendance where docstatus = 1 %s order by employee, att_date""" %
        conditions, filters, as_dict=1)
    att_map = {}
    for rec in records:
        # Later records for the same day overwrite earlier ones, as before.
        att_map.setdefault(rec.employee, frappe._dict())[rec.day_of_month] = rec.status
    return att_map
def get_conditions(filters):
    """Validate filters and build the SQL condition suffix.

    Mutates *filters*: replaces ``month`` with its 1-based number and
    adds ``total_days_in_month``. Raises (via msgprint) when month or
    year is missing.
    """
    if not (filters.get("month") and filters.get("year")):
        msgprint(_("Please select month and year"), raise_exception=1)
    month_names = ["Jan", "Feb", "Mar", "Apr", "May", "Jun",
        "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
    filters["month"] = month_names.index(filters.month) + 1
    filters["total_days_in_month"] = monthrange(cint(filters.year), filters.month)[1]
    conditions = " and month(att_date) = %(month)s and year(att_date) = %(year)s"
    # Optional filters become parameterized equality conditions.
    for key in ("company", "employee"):
        if filters.get(key):
            conditions += " and %s = %%(%s)s" % (key, key)
    return conditions, filters
def get_employee_details():
    """Map employee id -> employee master details (name, designation, ...)."""
    rows = frappe.db.sql("""select name, employee_name, designation,
        department, branch, company
        from tabEmployee""", as_dict=1)
    emp_map = frappe._dict()
    for row in rows:
        emp_map.setdefault(row.name, row)
    return emp_map
@frappe.whitelist()
def get_attendance_years():
    """Return the distinct years having attendance records, newest first,
    as a newline-separated string (defaults to the current year)."""
    years = frappe.db.sql_list("""select distinct YEAR(att_date) from tabAttendance ORDER BY YEAR(att_date) DESC""") or [getdate().year]
    return "\n".join(str(year) for year in years)
|
from .common import KARMA, TestForumCommon
from ..models.forum import KarmaError
from odoo.exceptions import UserError, AccessError
from odoo.tools import mute_logger
class TestForum(TestForumCommon):
    """Functional tests for forum posts: the karma thresholds gating each
    action and the karma rewards/penalties those actions generate."""
    @mute_logger('odoo.addons.base.models.ir_model', 'odoo.models')
    def test_ask(self):
        """Question creation: access rights, karma gates, karma gain."""
        Post = self.env['forum.post']
        # Public user asks a question: not allowed
        with self.assertRaises(AccessError):
            Post.sudo(self.user_public).create({
                'name': " Question ?",
                'forum_id': self.forum.id,
            })
        # Portal user asks a question with tags: not allowed, insufficient karma
        with self.assertRaises(KarmaError):
            Post.sudo(self.user_portal).create({
                'name': " Q_0",
                'forum_id': self.forum.id,
                'tag_ids': [(0, 0, {'name': 'Tag0', 'forum_id': self.forum.id})]
            })
        # Portal user asks a question with tags: ok if enough karma
        self.user_portal.karma = KARMA['tag_create']
        Post.sudo(self.user_portal).create({
            'name': " Q0",
            'forum_id': self.forum.id,
            'tag_ids': [(0, 0, {'name': 'Tag1', 'forum_id': self.forum.id})]
        })
        self.assertEqual(self.user_portal.karma, KARMA['tag_create'], 'website_forum: wrong karma generation when asking question')
        self.user_portal.karma = KARMA['post']
        Post.sudo(self.user_portal).create({
            'name': " Q0",
            'forum_id': self.forum.id,
            'tag_ids': [(0, 0, {'name': 'Tag42', 'forum_id': self.forum.id})]
        })
        self.assertEqual(self.user_portal.karma, KARMA['post'] + KARMA['gen_que_new'], 'website_forum: wrong karma generation when asking question')
    @mute_logger('odoo.addons.base.models.ir_model', 'odoo.models')
    def test_answer(self):
        """Answering: karma gate and karma gain for the answerer."""
        Post = self.env['forum.post']
        # Answers its own question: not allowed, insufficient karma
        with self.assertRaises(KarmaError):
            Post.sudo(self.user_employee).create({
                'name': " A0",
                'forum_id': self.forum.id,
                'parent_id': self.post.id,
            })
        # Answers on question: ok if enough karma
        self.user_employee.karma = KARMA['ans']
        Post.sudo(self.user_employee).create({
            'name': " A0",
            'forum_id': self.forum.id,
            'parent_id': self.post.id,
        })
        self.assertEqual(self.user_employee.karma, KARMA['ans'], 'website_forum: wrong karma generation when answering question')
    @mute_logger('odoo.addons.base.models.ir_model', 'odoo.models')
    def test_vote_crash(self):
        """Upvote rejection paths: own post, insufficient karma."""
        Post = self.env['forum.post']
        self.user_employee.karma = KARMA['ans']
        emp_answer = Post.sudo(self.user_employee).create({
            'name': 'TestAnswer',
            'forum_id': self.forum.id,
            'parent_id': self.post.id})
        # upvote its own post
        with self.assertRaises(UserError):
            emp_answer.vote(upvote=True)
        # not enough karma
        with self.assertRaises(KarmaError):
            self.post.sudo(self.user_portal).vote(upvote=True)
    def test_vote(self):
        """Upvoting grants karma to the question author."""
        self.post.create_uid.karma = KARMA['ask']
        self.user_portal.karma = KARMA['upv']
        self.post.sudo(self.user_portal).vote(upvote=True)
        self.assertEqual(self.post.create_uid.karma, KARMA['ask'] + KARMA['gen_que_upv'], 'website_forum: wrong karma generation of upvoted question author')
    @mute_logger('odoo.addons.base.models.ir_model', 'odoo.models')
    def test_downvote_crash(self):
        """Downvote rejection paths: own post, insufficient karma."""
        Post = self.env['forum.post']
        self.user_employee.karma = KARMA['ans']
        emp_answer = Post.sudo(self.user_employee).create({
            'name': 'TestAnswer',
            'forum_id': self.forum.id,
            'parent_id': self.post.id})
        # downvote its own post
        with self.assertRaises(UserError):
            emp_answer.vote(upvote=False)
        # not enough karma
        with self.assertRaises(KarmaError):
            self.post.sudo(self.user_portal).vote(upvote=False)
    def test_downvote(self):
        """Downvoting applies the (negative) karma delta to the author."""
        self.post.create_uid.karma = 50
        self.user_portal.karma = KARMA['dwv']
        self.post.sudo(self.user_portal).vote(upvote=False)
        self.assertEqual(self.post.create_uid.karma, 50 + KARMA['gen_que_dwv'], 'website_forum: wrong karma generation of downvoted question author')
    def test_comment_crash(self):
        """Commenting without the required karma raises."""
        with self.assertRaises(KarmaError):
            self.post.sudo(self.user_portal).message_post(body='Should crash', message_type='comment')
    def test_comment(self):
        """Commenting works with enough karma; notifications are unrestricted."""
        self.post.sudo(self.user_employee).message_post(body='Test0', message_type='notification')
        self.user_employee.karma = KARMA['com_all']
        self.post.sudo(self.user_employee).message_post(body='Test1', message_type='comment')
        self.assertEqual(len(self.post.message_ids), 4, 'website_forum: wrong behavior of message_post')
    def test_flag_a_post(self):
        """Flagging: karma gate, then post state becomes 'flagged'."""
        Post = self.env['forum.post']
        self.user_portal.karma = KARMA['ask']
        post = Post.sudo(self.user_portal).create({
            'name': "Q0",
            'forum_id': self.forum.id,
        })
        # portal user flags a post: not allowed, insufficient karma
        with self.assertRaises(KarmaError):
            post.sudo(self.user_portal).flag()
        # portal user flags a post: ok if enough karma
        self.user_portal.karma = KARMA['flag']
        post.state = 'active'
        post.sudo(self.user_portal).flag()
        self.assertEqual(post.state, 'flagged', 'website_forum: wrong state when flagging a post')
    def test_validate_a_post(self):
        """Moderation validate(): from pending/flagged/offensive to active."""
        Post = self.env['forum.post']
        self.user_portal.karma = KARMA['ask']
        post = Post.sudo(self.user_portal).create({
            'name': "Q0",
            'forum_id': self.forum.id,
        })
        # portal user validate a post: not allowed, insufficient karma
        with self.assertRaises(KarmaError):
            post.sudo(self.user_portal).validate()
        # portal user validate a pending post
        self.user_portal.karma = KARMA['moderate']
        post.state = 'pending'
        init_karma = post.create_uid.karma
        post.sudo(self.user_portal).validate()
        self.assertEqual(post.state, 'active', 'website_forum: wrong state when validate a post after pending')
        self.assertEqual(post.create_uid.karma, init_karma + KARMA['gen_que_new'], 'website_forum: wrong karma when validate a post after pending')
        # portal user validate a flagged post: ok if enough karma
        self.user_portal.karma = KARMA['moderate']
        post.state = 'flagged'
        post.sudo(self.user_portal).validate()
        self.assertEqual(post.state, 'active', 'website_forum: wrong state when validate a post after flagged')
        # portal user validate an offensive post: ok if enough karma
        self.user_portal.karma = KARMA['moderate']
        post.state = 'offensive'
        init_karma = post.create_uid.karma
        post.sudo(self.user_portal).validate()
        self.assertEqual(post.state, 'active', 'website_forum: wrong state when validate a post after offensive')
    def test_refuse_a_post(self):
        """Moderation refuse(): records the moderator, no karma change."""
        Post = self.env['forum.post']
        self.user_portal.karma = KARMA['ask']
        post = Post.sudo(self.user_portal).create({
            'name': "Q0",
            'forum_id': self.forum.id,
        })
        # portal user validate a post: not allowed, insufficient karma
        with self.assertRaises(KarmaError):
            post.sudo(self.user_portal).refuse()
        # portal user validate a pending post
        self.user_portal.karma = KARMA['moderate']
        post.state = 'pending'
        init_karma = post.create_uid.karma
        post.sudo(self.user_portal).refuse()
        self.assertEqual(post.moderator_id, self.user_portal, 'website_forum: wrong moderator_id when refusing')
        self.assertEqual(post.create_uid.karma, init_karma, 'website_forum: wrong karma when refusing a post')
    def test_mark_a_post_as_offensive(self):
        """Marking offensive: karma gate, state change and author penalty."""
        Post = self.env['forum.post']
        self.user_portal.karma = KARMA['ask']
        post = Post.sudo(self.user_portal).create({
            'name': "Q0",
            'forum_id': self.forum.id,
        })
        # portal user mark a post as offensive: not allowed, insufficient karma
        with self.assertRaises(KarmaError):
            post.sudo(self.user_portal).mark_as_offensive(12)
        # portal user mark a post as offensive
        self.user_portal.karma = KARMA['moderate']
        post.state = 'flagged'
        init_karma = post.create_uid.karma
        post.sudo(self.user_portal).mark_as_offensive(12)
        self.assertEqual(post.state, 'offensive', 'website_forum: wrong state when marking a post as offensive')
        self.assertEqual(post.create_uid.karma, init_karma + KARMA['gen_ans_flag'], 'website_forum: wrong karma when marking a post as offensive')
    def test_convert_answer_to_comment_crash(self):
        """Conversion: no-op on questions, karma gate on answers."""
        Post = self.env['forum.post']
        # converting a question does nothing
        new_msg = self.post.sudo(self.user_portal).convert_answer_to_comment()
        self.assertEqual(new_msg.id, False, 'website_forum: question to comment conversion failed')
        self.assertEqual(Post.search([('name', '=', 'TestQuestion')])[0].forum_id.name, 'TestForum', 'website_forum: question to comment conversion failed')
        with self.assertRaises(KarmaError):
            self.answer.sudo(self.user_portal).convert_answer_to_comment()
    def test_convert_answer_to_comment(self):
        """Conversion keeps the author and body of the original answer."""
        self.user_portal.karma = KARMA['com_conv_all']
        post_author = self.answer.create_uid.partner_id
        new_msg = self.answer.sudo(self.user_portal).convert_answer_to_comment()
        self.assertEqual(len(new_msg), 1, 'website_forum: wrong answer to comment conversion')
        self.assertEqual(new_msg.author_id, post_author, 'website_forum: wrong answer to comment conversion')
        self.assertIn('I am an anteater', new_msg.body, 'website_forum: wrong answer to comment conversion')
    def test_edit_post_crash(self):
        """Editing without karma raises."""
        with self.assertRaises(KarmaError):
            self.post.sudo(self.user_portal).write({'name': 'I am not your father.'})
    def test_edit_post(self):
        """Editing own post and any post, each with the matching karma."""
        self.post.create_uid.karma = KARMA['edit_own']
        self.post.write({'name': 'Actually I am your dog.'})
        self.user_portal.karma = KARMA['edit_all']
        self.post.sudo(self.user_portal).write({'name': 'Actually I am your cat.'})
    def test_close_post_crash(self):
        """Closing without karma raises."""
        with self.assertRaises(KarmaError):
            self.post.sudo(self.user_portal).close(None)
    def test_close_post_own(self):
        """Author can close own post with 'close_own' karma."""
        self.post.create_uid.karma = KARMA['close_own']
        self.post.close(None)
    def test_close_post_all(self):
        """Any user can close posts with 'close_all' karma."""
        self.user_portal.karma = KARMA['close_all']
        self.post.sudo(self.user_portal).close(None)
    def test_deactivate_post_crash(self):
        """Deactivating without karma raises."""
        with self.assertRaises(KarmaError):
            self.post.sudo(self.user_portal).write({'active': False})
    def test_deactivate_post_own(self):
        """Author can deactivate own post with 'unlink_own' karma."""
        self.post.create_uid.karma = KARMA['unlink_own']
        self.post.write({'active': False})
    def test_deactivate_post_all(self):
        """Any user can deactivate posts with 'unlink_all' karma."""
        self.user_portal.karma = KARMA['unlink_all']
        self.post.sudo(self.user_portal).write({'active': False})
    def test_unlink_post_crash(self):
        """Unlinking without karma raises."""
        with self.assertRaises(KarmaError):
            self.post.sudo(self.user_portal).unlink()
    def test_unlink_post_own(self):
        """Author can unlink own post with 'unlink_own' karma."""
        self.post.create_uid.karma = KARMA['unlink_own']
        self.post.unlink()
    def test_unlink_post_all(self):
        """Any user can unlink posts with 'unlink_all' karma."""
        self.user_portal.karma = KARMA['unlink_all']
        self.post.sudo(self.user_portal).unlink()
|
from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import (
compat_b64decode,
compat_urllib_parse_unquote,
)
from ..utils import int_or_none
class MangomoloBaseIE(InfoExtractor):
    """Shared extraction logic for Mangomolo video and live embeds.

    Subclasses set _TYPE ('video'/'live') and _IS_LIVE, and may override
    _get_real_id() to decode the raw id from the URL.
    """
    _BASE_REGEX = r'https?://(?:admin\.mangomolo\.com/analytics/index\.php/customers/embed/|player\.mangomolo\.com/v1/)'
    def _get_real_id(self, page_id):
        # Identity by default; the live subclass base64-decodes it.
        return page_id
    def _real_extract(self, url):
        page_id = self._get_real_id(self._match_id(url))
        # Fetch the canonical player page, forwarding the original query
        # string after the '?'.
        webpage = self._download_webpage(
            'https://player.mangomolo.com/v1/%s?%s' % (self._TYPE, url.split('?')[1]), page_id)
        hidden_inputs = self._hidden_inputs(webpage)
        m3u8_entry_protocol = 'm3u8' if self._IS_LIVE else 'm3u8_native'
        # The stream URL appears either as a JS file/src property or as an
        # rtsp:// anchor.
        format_url = self._html_search_regex(
            [
                r'(?:file|src)\s*:\s*"(https?://[^"]+?/playlist\.m3u8)',
                r'<a[^>]+href="(rtsp://[^"]+)"'
            ], webpage, 'format url')
        formats = self._extract_wowza_formats(
            format_url, page_id, m3u8_entry_protocol, ['smil'])
        self._sort_formats(formats)
        return {
            'id': page_id,
            'title': self._live_title(page_id) if self._IS_LIVE else page_id,
            'uploader_id': hidden_inputs.get('userid'),
            'duration': int_or_none(hidden_inputs.get('duration')),
            'is_live': self._IS_LIVE,
            'formats': formats,
        }
class MangomoloVideoIE(MangomoloBaseIE):
    # On-demand video embeds: the id is a plain numeric video id.
    _TYPE = 'video'
    IE_NAME = 'mangomolo:' + _TYPE
    _VALID_URL = MangomoloBaseIE._BASE_REGEX + r'video\?.*?\bid=(?P<id>\d+)'
    _IS_LIVE = False
class MangomoloLiveIE(MangomoloBaseIE):
    """Live channel embeds.

    The channel id arrives base64-encoded (possibly percent-encoded on
    top); _get_real_id() undoes both layers.
    """
    _TYPE = 'live'
    IE_NAME = 'mangomolo:' + _TYPE
    # (?:live|index) made non-capturing for consistency with _BASE_REGEX:
    # only the named <id> group is meant to capture (_match_id uses it).
    _VALID_URL = MangomoloBaseIE._BASE_REGEX + r'(?:live|index)\?.*?\bchannelid=(?P<id>(?:[A-Za-z0-9+/=]|%2B|%2F|%3D)+)'
    _IS_LIVE = True
    def _get_real_id(self, page_id):
        # Undo percent-encoding first, then base64, yielding the real id.
        return compat_b64decode(compat_urllib_parse_unquote(page_id)).decode()
|
'''
---
module: openshift_logging_facts
version_added: ""
short_description: Gather facts about the OpenShift logging stack
description:
- Determine the current facts about the OpenShift logging stack (e.g. cluster size)
options:
author: Red Hat, Inc
'''
import copy
import json
from subprocess import * # noqa: F402,F403
from ansible.module_utils.basic import * # noqa: F402,F403
import yaml
EXAMPLES = """
- action: openshift_logging_facts
"""
RETURN = """
"""
# Always ask `oc` for JSON so its output can be parsed with json.loads().
DEFAULT_OC_OPTIONS = ["-o", "json"]
COMPONENT_KEY = "component"
LOGGING_INFRA_KEY = "logging-infra"
# Label selectors used to locate logging resources in a namespace.
DS_FLUENTD_SELECTOR = LOGGING_INFRA_KEY + "=" + "fluentd"
LOGGING_SELECTOR = LOGGING_INFRA_KEY + "=" + "support"
ROUTE_SELECTOR = "component=support,logging-infra=support,provider=openshift"
# Logging stack components facts are gathered for.
COMPONENTS = ["kibana", "curator", "elasticsearch", "fluentd", "kibana_ops", "curator_ops", "elasticsearch_ops", "mux", "eventrouter"]
class OCBaseCommand(object):
    ''' The base class used to query openshift via the `oc` CLI '''
    def __init__(self, binary, kubeconfig, namespace):
        ''' the init method of OCBaseCommand class '''
        self.binary = binary
        self.kubeconfig = kubeconfig
        self.user = self.get_system_admin(self.kubeconfig)
        self.namespace = namespace
    # pylint: disable=no-self-use
    def get_system_admin(self, kubeconfig):
        ''' Retrieves the system:admin user name from a kubeconfig file '''
        with open(kubeconfig, 'r') as kubeconfig_file:
            # safe_load: a kubeconfig needs no custom YAML tags, and plain
            # yaml.load() without an explicit Loader is unsafe on untrusted
            # input and rejected outright by PyYAML >= 6.
            config = yaml.safe_load(kubeconfig_file)
            for user in config["users"]:
                if user["name"].startswith("system:admin"):
                    return user["name"]
        raise Exception("Unable to find system:admin in: " + kubeconfig)
    # pylint: disable=too-many-arguments, dangerous-default-value
    def oc_command(self, sub, kind, namespace=None, name=None, add_options=None):
        ''' Wrapper method for the "oc" command.

        Returns the parsed JSON output; "not found" / "No resources found"
        errors are normalized to an empty item list. Any other stderr
        output or execution failure is raised as an Exception carrying
        the full command line.
        '''
        cmd = [self.binary, sub, kind]
        if name is not None:
            cmd = cmd + [name]
        if namespace is not None:
            cmd = cmd + ["-n", namespace]
        if add_options is None:
            add_options = []
        cmd = cmd + ["--user=" + self.user, "--config=" + self.kubeconfig] + DEFAULT_OC_OPTIONS + add_options
        try:
            process = Popen(cmd, stdout=PIPE, stderr=PIPE)  # noqa: F405
            # stdin is not a pipe, so communicate() takes no input; the
            # original passed `cmd` here, which was silently ignored.
            out, err = process.communicate()
            err = err.decode(encoding='utf8', errors='replace')
            if len(err) > 0:
                if 'not found' in err:
                    return {'items': []}
                if 'No resources found' in err:
                    return {'items': []}
                raise Exception(err)
        except Exception as excp:
            # Note: the raise above is deliberately re-wrapped here so every
            # failure carries the command line (original behaviour).
            err = "There was an exception trying to run the command '" + " ".join(cmd) + "' " + str(excp)
            raise Exception(err)
        return json.loads(out)
class OpenshiftLoggingFacts(OCBaseCommand):
''' The class structure for holding the OpenshiftLogging Facts'''
name = "facts"
    def __init__(self, logger, binary, kubeconfig, namespace):
        ''' The init method for OpenshiftLoggingFacts '''
        super(OpenshiftLoggingFacts, self).__init__(binary, kubeconfig, namespace)
        self.logger = logger
        # Facts accumulate here as {component: {kind: {name: facts}}}.
        self.facts = dict()
    def default_keys_for(self, kind):
        ''' Sets the default key values for kind '''
        # Ensure every known component has an (empty) entry for this kind
        # so consumers can rely on the keys existing.
        for comp in COMPONENTS:
            self.add_facts_for(comp, kind)
def add_facts_for(self, comp, kind, name=None, facts=None):
''' Add facts for the provided kind '''
if comp not in self.facts:
self.facts[comp] = dict()
if kind not in self.facts[comp]:
self.facts[comp][kind] = dict()
if name:
self.facts[comp][kind][name] = facts
def facts_for_routes(self, namespace):
''' Gathers facts for Routes in logging namespace '''
self.default_keys_for("routes")
route_list = self.oc_command("get", "routes", namespace=namespace, add_options=["-l", ROUTE_SELECTOR])
if len(route_list["items"]) == 0:
return None
for route in route_list["items"]:
name = route["metadata"]["name"]
comp = self.comp(name)
if comp is not None:
self.add_facts_for(comp, "routes", name, dict(host=route["spec"]["host"]))
self.facts["agl_namespace"] = namespace
def facts_for_daemonsets(self, namespace):
''' Gathers facts for Daemonsets in logging namespace '''
self.default_keys_for("daemonsets")
ds_list = self.oc_command("get", "daemonsets", namespace=namespace,
add_options=["-l", LOGGING_INFRA_KEY + "=fluentd"])
if len(ds_list["items"]) == 0:
return
for ds_item in ds_list["items"]:
name = ds_item["metadata"]["name"]
comp = self.comp(name)
spec = ds_item["spec"]["template"]["spec"]
result = dict(
selector=ds_item["spec"]["selector"],
containers=dict(),
nodeSelector=spec["nodeSelector"],
serviceAccount=spec["serviceAccount"],
terminationGracePeriodSeconds=spec["terminationGracePeriodSeconds"]
)
for container in spec["containers"]:
result["containers"][container["name"]] = container
self.add_facts_for(comp, "daemonsets", name, result)
def facts_for_pvcs(self, namespace):
''' Gathers facts for PVCS in logging namespace'''
self.default_keys_for("pvcs")
pvclist = self.oc_command("get", "pvc", namespace=namespace, add_options=["-l", LOGGING_INFRA_KEY])
if len(pvclist["items"]) == 0:
return
for pvc in pvclist["items"]:
name = pvc["metadata"]["name"]
comp = self.comp(name)
self.add_facts_for(comp, "pvcs", name, dict())
    def facts_for_deploymentconfigs(self, namespace):
        ''' Gathers facts for DeploymentConfigs in logging namespace '''
        self.default_keys_for("deploymentconfigs")
        dclist = self.oc_command("get", "deploymentconfigs", namespace=namespace, add_options=["-l", LOGGING_INFRA_KEY])
        if len(dclist["items"]) == 0:
            return
        dcs = dclist["items"]
        for dc_item in dcs:
            name = dc_item["metadata"]["name"]
            comp = self.comp(name)
            if comp is not None:
                spec = dc_item["spec"]["template"]["spec"]
                # core deployment facts; containers and volumes are indexed below
                facts = dict(
                    name=name,
                    selector=dc_item["spec"]["selector"],
                    replicas=dc_item["spec"]["replicas"],
                    serviceAccount=spec["serviceAccount"],
                    containers=dict(),
                    volumes=dict()
                )
                # optional pod-spec fields, recorded only when present
                if "nodeSelector" in spec:
                    facts["nodeSelector"] = spec["nodeSelector"]
                if "supplementalGroups" in spec["securityContext"]:
                    facts["storageGroups"] = spec["securityContext"]["supplementalGroups"]
                facts["spec"] = spec
                if "volumes" in spec:
                    for vol in spec["volumes"]:
                        # index volumes by name; drop the redundant name key
                        clone = copy.deepcopy(vol)
                        clone.pop("name", None)
                        facts["volumes"][vol["name"]] = clone
                # index the pod's containers by their name
                for container in spec["containers"]:
                    facts["containers"][container["name"]] = container
                self.add_facts_for(comp, "deploymentconfigs", name, facts)
def facts_for_services(self, namespace):
''' Gathers facts for services in logging namespace '''
self.default_keys_for("services")
servicelist = self.oc_command("get", "services", namespace=namespace, add_options=["-l", LOGGING_SELECTOR])
if len(servicelist["items"]) == 0:
return
for service in servicelist["items"]:
name = service["metadata"]["name"]
comp = self.comp(name)
if comp is not None:
self.add_facts_for(comp, "services", name, dict())
# pylint: disable=too-many-arguments
def facts_from_configmap(self, comp, kind, name, config_key, yaml_file=None):
'''Extracts facts in logging namespace from configmap'''
if yaml_file is not None:
if config_key.endswith(".yml") or config_key.endswith(".yaml"):
config_facts = yaml.load(yaml_file)
self.facts[comp][kind][name][config_key] = config_facts
self.facts[comp][kind][name][config_key]["raw"] = yaml_file
    def facts_for_configmaps(self, namespace):
        ''' Gathers facts for configmaps in logging namespace '''
        self.default_keys_for("configmaps")
        # no label selector here: every configmap in the namespace is scanned
        a_list = self.oc_command("get", "configmaps", namespace=namespace)
        if len(a_list["items"]) == 0:
            return
        for item in a_list["items"]:
            name = item["metadata"]["name"]
            comp = self.comp(name)
            if comp is not None:
                self.add_facts_for(comp, "configmaps", name, dict(item["data"]))
                # Elasticsearch configmaps may embed YAML documents; parse
                # those entries into structured facts as well
                if comp in ["elasticsearch", "elasticsearch_ops"]:
                    for config_key in item["data"]:
                        self.facts_from_configmap(comp, "configmaps", name, config_key, item["data"][config_key])
def facts_for_oauthclients(self, namespace):
''' Gathers facts for oauthclients used with logging '''
self.default_keys_for("oauthclients")
a_list = self.oc_command("get", "oauthclients", namespace=namespace, add_options=["-l", LOGGING_SELECTOR])
if len(a_list["items"]) == 0:
return
for item in a_list["items"]:
name = item["metadata"]["name"]
comp = self.comp(name)
if comp is not None:
result = dict(
redirectURIs=item["redirectURIs"]
)
self.add_facts_for(comp, "oauthclients", name, result)
def facts_for_secrets(self, namespace):
''' Gathers facts for secrets in the logging namespace '''
self.default_keys_for("secrets")
a_list = self.oc_command("get", "secrets", namespace=namespace)
if len(a_list["items"]) == 0:
return
for item in a_list["items"]:
name = item["metadata"]["name"]
comp = self.comp(name)
if comp is not None and item["type"] == "Opaque":
result = dict(
keys=item["data"].keys()
)
self.add_facts_for(comp, "secrets", name, result)
def facts_for_sccs(self):
''' Gathers facts for SCCs used with logging '''
self.default_keys_for("sccs")
scc = self.oc_command("get", "securitycontextconstraints.v1.security.openshift.io", name="privileged")
if len(scc["users"]) == 0:
return
for item in scc["users"]:
comp = self.comp(item)
if comp is not None:
self.add_facts_for(comp, "sccs", "privileged", dict())
def facts_for_clusterrolebindings(self, namespace):
''' Gathers ClusterRoleBindings used with logging '''
self.default_keys_for("clusterrolebindings")
role = self.oc_command("get", "clusterrolebindings", name="cluster-readers")
if "subjects" not in role or len(role["subjects"]) == 0:
return
for item in role["subjects"]:
comp = self.comp(item["name"])
if comp is not None and namespace == item.get("namespace"):
self.add_facts_for(comp, "clusterrolebindings", "cluster-readers", dict())
def facts_for_rolebindings(self, namespace):
''' Gathers facts for RoleBindings used with logging '''
self.default_keys_for("rolebindings")
role = self.oc_command("get", "rolebindings", namespace=namespace, name="logging-elasticsearch-view-role")
if "subjects" not in role or len(role["subjects"]) == 0:
return
for item in role["subjects"]:
comp = self.comp(item["name"])
if comp is not None and namespace == item.get("namespace"):
self.add_facts_for(comp, "rolebindings", "logging-elasticsearch-view-role", dict())
# pylint: disable=no-self-use, too-many-return-statements
def comp(self, name):
''' Does a comparison to evaluate the logging component '''
if name.startswith("logging-curator-ops"):
return "curator_ops"
elif name.startswith("logging-kibana-ops") or name.startswith("kibana-ops"):
return "kibana_ops"
elif name.startswith("logging-es-ops") or name.startswith("logging-elasticsearch-ops"):
return "elasticsearch_ops"
elif name.startswith("logging-curator"):
return "curator"
elif name.startswith("logging-kibana") or name.startswith("kibana"):
return "kibana"
elif name.startswith("logging-es") or name.startswith("logging-elasticsearch"):
return "elasticsearch"
elif name.startswith("logging-fluentd") or name.endswith("aggregated-logging-fluentd"):
return "fluentd"
elif name.startswith("logging-mux"):
return "mux"
elif name.startswith("logging-eventrouter"):
return "eventrouter"
else:
return None
    def build_facts(self):
        ''' Builds the logging facts and returns them '''
        # each gatherer fills self.facts[<component>][<kind>]; the SCC
        # gatherer is cluster-scoped and takes no namespace
        self.facts_for_routes(self.namespace)
        self.facts_for_daemonsets(self.namespace)
        self.facts_for_deploymentconfigs(self.namespace)
        self.facts_for_services(self.namespace)
        self.facts_for_configmaps(self.namespace)
        self.facts_for_sccs()
        self.facts_for_oauthclients(self.namespace)
        self.facts_for_clusterrolebindings(self.namespace)
        self.facts_for_rolebindings(self.namespace)
        self.facts_for_secrets(self.namespace)
        self.facts_for_pvcs(self.namespace)
        return self.facts
def main():
    ''' The main method '''
    # module interface: path to oc, the admin kubeconfig, and the
    # namespace the logging stack is deployed in
    module = AnsibleModule( # noqa: F405
        argument_spec=dict(
            admin_kubeconfig={"default": "/etc/origin/master/admin.kubeconfig", "type": "str"},
            oc_bin={"required": True, "type": "str"},
            openshift_logging_namespace={"required": True, "type": "str"}
        ),
        supports_check_mode=False
    )
    try:
        cmd = OpenshiftLoggingFacts(module, module.params['oc_bin'], module.params['admin_kubeconfig'],
                                    module.params['openshift_logging_namespace'])
        # expose the gathered facts as openshift_logging_facts
        module.exit_json(
            ansible_facts={"openshift_logging_facts": cmd.build_facts()}
        )
    # ignore broad-except error to avoid stack trace to ansible user
    # pylint: disable=broad-except
    except Exception as error:
        module.fail_json(msg=str(error))
if __name__ == '__main__':
    main()
|
"""Extensions module. Each extension is initialized in the app factory located
in app.py
"""
from flask.ext.bcrypt import Bcrypt
bcrypt = Bcrypt()
from flask.ext.login import LoginManager
login_manager = LoginManager()
from flask.ext.sqlalchemy import SQLAlchemy
db = SQLAlchemy()
from flask.ext.migrate import Migrate
migrate = Migrate()
from flask.ext.cache import Cache
cache = Cache()
from flask.ext.debugtoolbar import DebugToolbarExtension
debug_toolbar = DebugToolbarExtension()
|
# Maximum number of seed addresses to emit.
NSEEDS = 600

import re
import sys
from subprocess import check_output  # NOTE(review): imported but unused here


def main():
    ''' Read "ip:9999" lines from stdin and print packed seed IPs.

    Each matching IPv4 address is packed into a little-endian 32-bit
    integer; up to NSEEDS results are printed eight per line as the body
    of a C-style hex array.
    '''
    lines = sys.stdin.readlines()
    ips = []
    pattern = re.compile(r"^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3}):9999")
    for line in lines:
        m = pattern.match(line)
        if m is None:
            continue
        # pack the four octets little-endian into one integer
        ip = 0
        for i in range(0, 4):
            ip = ip + (int(m.group(i + 1)) << (8 * i))
        if ip == 0:
            # 0.0.0.0 is not a usable seed
            continue
        ips.append(ip)
    # sys.stdout.write instead of the Python 2 print statement so the
    # script runs unmodified under both Python 2 and Python 3
    out = sys.stdout
    for row in range(0, min(NSEEDS, len(ips)), 8):
        out.write(" " + ", ".join(["0x%08x" % i for i in ips[row:row + 8]]) + ",\n")


if __name__ == '__main__':
    main()
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Drop the obsolete ``activate`` field from the ``need`` model."""
    dependencies = [
        ('scheduler', '0013_auto_20150912_1334'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='need',
            name='activate',
        ),
    ]
|
import abc
import numpy as np
import GPy
class RegressionMethod(object):
    """Base class for regression models with optional input normalization.

    Subclasses implement _fit/_predict; fit() and predict() wrap them with
    zero-mean, unit-variance preprocessing when ``self.preprocess`` is True.
    """
    # NOTE(review): __metaclass__ is the Python 2 spelling; under Python 3
    # this attribute is ignored and the class is not actually abstract.
    __metaclass__ = abc.ABCMeta
    def __init__(self):
        # normalize inputs and labels by default
        self.preprocess = True
    def _preprocess(self, data, train):
        """Zero-mean, unit-variance normalization by default"""
        if train:
            # training: data is an (inputs, labels) pair; remember the
            # statistics so test inputs/predictions can be transformed later
            inputs, labels = data
            self.data_mean = inputs.mean(axis=0)
            self.data_std = inputs.std(axis=0)
            self.labels_mean = labels.mean(axis=0)
            self.labels_std = labels.std(axis=0)
            return ((inputs-self.data_mean)/self.data_std, (labels-self.labels_mean)/self.labels_std)
        else:
            # test time: only the inputs are given
            return (data-self.data_mean)/self.data_std
    def _reverse_trans_labels(self, labels):
        # undo the label normalization applied during training
        return labels*self.labels_std+self.labels_mean
    def fit(self, train_data):
        if self.preprocess:
            train_data = self._preprocess(train_data, True)
        return self._fit(train_data)
    def predict(self, test_data):
        if self.preprocess:
            test_data = self._preprocess(test_data, False)
        labels = self._predict(test_data)
        if self.preprocess:
            # map normalized predictions back to the original label scale
            labels = self._reverse_trans_labels(labels)
        return labels
    @abc.abstractmethod
    def _fit(self, train_data):
        """Fit the model. Return True if successful"""
        return True
    @abc.abstractmethod
    def _predict(self, test_data):
        """Predict on test data"""
        return None
class GP_RBF(RegressionMethod):
    """Exact GP regression with an ARD RBF + linear kernel."""
    name = 'GP_RBF'

    def _fit(self, train_data):
        """Fit a GPy GPRegression model on (inputs, labels)."""
        inputs, labels = train_data
        kernel = GPy.kern.RBF(inputs.shape[-1], ARD=True) + GPy.kern.Linear(inputs.shape[1], ARD=True)
        self.model = GPy.models.GPRegression(inputs, labels, kernel=kernel)
        # start with a small noise level relative to the label variance
        self.model.likelihood.variance[:] = labels.var() * 0.01
        self.model.optimize()
        return True

    def _predict(self, test_data):
        """Return the posterior mean at the test inputs."""
        return self.model.predict(test_data)[0]
class SparseGP_RBF(RegressionMethod):
    """Sparse GP regression (100 inducing points) with ARD RBF + linear kernel."""
    name = 'SparseGP_RBF'

    def _fit(self, train_data):
        """Fit a GPy SparseGPRegression model on (inputs, labels)."""
        inputs, labels = train_data
        kernel = GPy.kern.RBF(inputs.shape[-1], ARD=True) + GPy.kern.Linear(inputs.shape[1], ARD=True)
        self.model = GPy.models.SparseGPRegression(inputs, labels, kernel=kernel, num_inducing=100)
        # start with a small noise level relative to the label variance
        self.model.likelihood.variance[:] = labels.var() * 0.01
        self.model.optimize()
        return True

    def _predict(self, test_data):
        """Return the posterior mean at the test inputs."""
        return self.model.predict(test_data)[0]
class SVIGP_RBF(RegressionMethod):
    """Stochastic variational GP with a Student-t likelihood."""
    name = 'SVIGP_RBF'

    def _fit(self, train_data):
        """Fit an SVGP model using 100 randomly chosen inducing points."""
        X, Y = train_data
        # pick 100 random training points as the initial inducing inputs
        Z = X[np.random.permutation(X.shape[0])[:100]]
        k = GPy.kern.RBF(X.shape[1], ARD=True) + GPy.kern.Linear(X.shape[1], ARD=True) + GPy.kern.White(X.shape[1], 0.01)
        lik = GPy.likelihoods.StudentT(deg_free=3.)
        self.model = GPy.core.SVGP(X, Y, Z=Z, kernel=k, likelihood=lik)
        # several short SCG runs to get into a good region, then a long BFGS
        # polish; a plain loop instead of a throwaway list comprehension
        for _ in range(10):
            self.model.optimize('scg', max_iters=40, gtol=0, messages=0, xtol=0, ftol=0)
        self.model.optimize('bfgs', max_iters=1000, gtol=0, messages=0)
        return True

    def _predict(self, test_data):
        """Return the posterior mean at the test inputs."""
        return self.model.predict(test_data)[0]
|
import pkgutil
import unittest
def all_names():
    """Yield the dotted names of the test modules in this package."""
    prefix = 'stripe.test.'
    for _, module_name, _ in pkgutil.iter_modules(__path__):
        if module_name.startswith('test_'):
            yield prefix + module_name
def all():
    """Return a suite containing every test module in the package."""
    names = list(all_names())
    return unittest.defaultTestLoader.loadTestsFromNames(names)
def unit():
    """Return a suite with the non-integration test modules only."""
    unit_names = [n for n in all_names() if 'integration' not in n]
    return unittest.defaultTestLoader.loadTestsFromNames(unit_names)
|
from pylab import figure,pcolor,scatter,contour,colorbar,show,subplot,connect,axis
from numpy import concatenate
from numpy.random import randn
from modshogun import *
from modshogun import *
from modshogun import *
import util
util.set_title('Multiple SVMS')
num_svms=6
width=0.5
svmList = [None]*num_svms
trainfeatList = [None]*num_svms
traindatList = [None]*num_svms
trainlabList = [None]*num_svms
trainlabsList = [None]*num_svms
kernelList = [None]*num_svms
for i in range(num_svms):
pos=util.get_realdata(True)
neg=util.get_realdata(False)
traindatList[i] = concatenate((pos, neg), axis=1)
trainfeatList[i] = util.get_realfeatures(pos, neg)
trainlabsList[i] = util.get_labels(True)
trainlabList[i] = util.get_labels()
kernelList[i] = GaussianKernel(trainfeatList[i], trainfeatList[i], width)
svmList[i] = LibSVM(10, kernelList[i], trainlabList[i])
for i in range(num_svms):
print "Training svm nr. %d" % (i)
currentSVM = svmList[i]
currentSVM.train()
print currentSVM.get_num_support_vectors()
print "Done."
x, y, z=util.compute_output_plot_isolines(
currentSVM, kernelList[i], trainfeatList[i])
subplot(num_svms/2, 2, i+1)
pcolor(x, y, z, shading='interp')
contour(x, y, z, linewidths=1, colors='black', hold=True)
scatter(traindatList[i][0,:],traindatList[i][1,:], s=20, marker='o', c=trainlabsList[i], hold=True)
axis('tight')
connect('key_press_event', util.quit)
show()
|
# Number of hex digits embedded after a COLOR_ADDR tag: 16 on 64-bit IDA
# (where BADADDR is 2**64-1), 8 on 32-bit.
COLOR_ADDR_SIZE = 16 if _idaapi.BADADDR == 0xFFFFFFFFFFFFFFFFL else 8
SCOLOR_FG_MAX = '\x28' # Max color number
# Operand colors are COLOR_ADDR plus the 1-based operand index
SCOLOR_OPND1 = chr(cvar.COLOR_ADDR+1) # Instruction operand 1
SCOLOR_OPND2 = chr(cvar.COLOR_ADDR+2) # Instruction operand 2
SCOLOR_OPND3 = chr(cvar.COLOR_ADDR+3) # Instruction operand 3
SCOLOR_OPND4 = chr(cvar.COLOR_ADDR+4) # Instruction operand 4
SCOLOR_OPND5 = chr(cvar.COLOR_ADDR+5) # Instruction operand 5
SCOLOR_OPND6 = chr(cvar.COLOR_ADDR+6) # Instruction operand 6
SCOLOR_UTF8 = chr(cvar.COLOR_ADDR+10) # Following text is UTF-8 encoded
# Total number of palette entries (foreground + background colors)
PALETTE_SIZE = (cvar.COLOR_FG_MAX+_idaapi.COLOR_BG_MAX)
def requires_color_esc(c):
    """
    Checks if the given character requires escaping

    Returns True when the character is one of the color control codes in
    the COLOR_ON..COLOR_INV range, which must be escaped with COLOR_ESC.

    @param c: character (string of one char)
    @return: Boolean
    """
    # chained comparison on the character itself; the previous ord()
    # conversion was computed but never used
    return COLOR_ON <= c <= COLOR_INV
def COLSTR(str, tag):
    """
    Utility function to create a colored line

    @param str: The string
    @param tag: Color tag constant. One of SCOLOR_XXXX
    @return: the input wrapped in SCOLOR_ON <tag> ... SCOLOR_OFF <tag>,
             the on/off escape pair IDA expects for inline coloring
    """
    # NOTE(review): the parameter name `str` shadows the builtin; kept for
    # backward compatibility with existing keyword callers.
    return SCOLOR_ON + tag + str + SCOLOR_OFF + tag
|
from ajenti import apis
from ajenti.com import *
from ajenti.ui import *
class SquidBindings(Plugin):
    """Squid plugin tab for managing HTTP/HTTPS port bindings.

    Renders the configured http_port/https_port entries as tables and
    handles the add/delete UI events, persisting changes via the config
    object passed to init().
    """
    implements(apis.squid.IPluginPart)
    weight = 15
    title = 'Bindings'
    tab = 0
    cfg = 0
    parent = None
    def init(self, parent, cfg, tab):
        """Remember the owning plugin, config object and our tab index."""
        self.parent = parent
        self.cfg = cfg
        self.tab = tab
        # flags tracking whether an add-binding dialog is currently open
        parent._adding_http_binding = False
        parent._adding_https_binding = False
    def get_ui(self):
        """Build the HTTP and HTTPS binding tables plus the add dialog."""
        t1 = UI.DT()
        t1.append(UI.DTR(UI.DTH(UI.Label(text='Host')), UI.DTH(UI.Label(text='Port')), UI.DTH(), header=True))
        # one row per configured HTTP binding: host, port, delete icon
        for a in self.cfg.http_port:
            t1.append(
                UI.DTR(
                    UI.Label(text=a[0]),
                    UI.Label(text=a[1]),
                    UI.TipIcon(
                        icon='/dl/core/ui/stock/delete.png',
                        text='Delete', id='del_http_binding/' + a[0] + '/' + a[1]
                    )
                )
            )
        t2 = UI.DT()
        t2.append(UI.DTR(UI.DTH(UI.Label(text='Host')), UI.DTH(UI.Label(text='Port')), UI.DTH(), header=True))
        for a in self.cfg.https_port:
            t2.append(
                UI.DTR(
                    UI.Label(text=a[0]),
                    UI.Label(text=a[1]),
                    UI.TipIcon(
                        icon='/dl/core/ui/stock/delete.png',
                        text='Delete', id='del_https_binding/' + a[0] + '/' + a[1]
                    )
                )
            )
        v1 = UI.VContainer(UI.Label(text='HTTP', size=3), t1, UI.Button(text='Add new', id='add_http_binding'))
        v2 = UI.VContainer(UI.Label(text='HTTPS', size=3), t2, UI.Button(text='Add new', id='add_https_binding'))
        c = UI.HContainer(v1, UI.Spacer(width=20), v2)
        # show the add-binding dialog while either add flow is active
        if self.parent._adding_http_binding or self.parent._adding_https_binding:
            c.append(self.get_ui_add())
        return c
    def get_ui_add(self):
        """Dialog asking for host and port of a new binding."""
        c = UI.HContainer(
            UI.LT(
                UI.LTR(
                    UI.Label(text='Host:'),
                    UI.TextInput(name='host')
                ),
                UI.LTR(
                    UI.Label(text='Port:'),
                    UI.TextInput(name='port')
                )
            )
        )
        return UI.DialogBox(c, title='Add binding', id='dlgAddBinding')
    def on_click(self, event, params, vars=None):
        """Handle add/delete icon clicks; deletions persist immediately."""
        if params[0] == 'add_http_binding':
            self.parent._tab = self.tab
            self.parent._adding_http_binding = True
        if params[0] == 'add_https_binding':
            self.parent._tab = self.tab
            self.parent._adding_https_binding = True
        if params[0] == 'del_http_binding':
            self.parent._tab = self.tab
            # params[1:] carry host and port of the row to remove
            self.cfg.http_port.remove((params[1], params[2]))
            self.cfg.save()
        if params[0] == 'del_https_binding':
            self.parent._tab = self.tab
            self.cfg.https_port.remove((params[1], params[2]))
            self.cfg.save()
    def on_submit(self, event, params, vars=None):
        """Handle the add-binding dialog; OK appends the binding and saves."""
        if params[0] == 'dlgAddBinding':
            self.parent._tab = self.tab
            if vars.getvalue('action', '') == 'OK':
                h = vars.getvalue('host', '')
                p = vars.getvalue('port', '')
                if self.parent._adding_http_binding:
                    self.cfg.http_port.append((h, p))
                if self.parent._adding_https_binding:
                    self.cfg.https_port.append((h, p))
                self.cfg.save()
            # leave add mode regardless of the dialog outcome
            self.parent._adding_http_binding = False
            self.parent._adding_https_binding = False
|
import os
import sys
import tempfile
import unittest
# Make the parent tools/ directory importable so v8_presubmit resolves.
TOOLS_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(TOOLS_ROOT)
from v8_presubmit import FileContentsCache, CacheableSourceFileProcessor
class FakeCachedProcessor(CacheableSourceFileProcessor):
    """Minimal CacheableSourceFileProcessor used to exercise cache logic."""
    def __init__(self, cache_file_path):
        super(FakeCachedProcessor, self).__init__(
            use_cache=True, cache_file_path=cache_file_path, file_type='.test')
    def GetProcessorWorker(self):
        # any placeholder will do; presumably the worker is never invoked
        # by the cached code path exercised in these tests
        return object
    def GetProcessorScript(self):
        return "echo", []
    def DetectUnformattedFiles(_, cmd, worker, files):
        # must not be reached in these tests
        raise NotImplementedError
class FileContentsCacheTest(unittest.TestCase):
    """Tests FileContentsCache and the cache-aware processor wrapper."""
    def setUp(self):
        # fresh cache file plus two temp files whose contents get cached
        _, self.cache_file_path = tempfile.mkstemp()
        cache = FileContentsCache(self.cache_file_path)
        cache.Load()
        def generate_file():
            # each temp file's content is its own file name
            _, file_name = tempfile.mkstemp()
            with open(file_name, "w") as f:
                f.write(file_name)
            return file_name
        self.target_files = [generate_file() for _ in range(2)]
        # first pass: both files are unseen, so both count as changed
        unchanged_files = cache.FilterUnchangedFiles(self.target_files)
        self.assertEqual(len(unchanged_files), 2)
        cache.Save()
    def tearDown(self):
        for file in [self.cache_file_path] + self.target_files:
            os.remove(file)
    def testCachesFiles(self):
        cache = FileContentsCache(self.cache_file_path)
        cache.Load()
        # nothing changed since setUp, so the filter reports no files
        changed_files = cache.FilterUnchangedFiles(self.target_files)
        self.assertListEqual(changed_files, [])
        modified_file = self.target_files[0]
        with open(modified_file, "w") as f:
            f.write("modification")
        # only the rewritten file should be reported as changed
        changed_files = cache.FilterUnchangedFiles(self.target_files)
        self.assertListEqual(changed_files, [modified_file])
    def testCacheableSourceFileProcessor(self):
        # with a warm cache the processor should see no files at all
        class CachedProcessor(FakeCachedProcessor):
            def DetectFilesToChange(_, files):
                self.assertListEqual(files, [])
                return []
        cached_processor = CachedProcessor(cache_file_path=self.cache_file_path)
        cached_processor.ProcessFiles(self.target_files)
    def testCacheableSourceFileProcessorWithModifications(self):
        modified_file = self.target_files[0]
        with open(modified_file, "w") as f:
            f.write("modification")
        # only the modified file should be handed to the processor
        class CachedProcessor(FakeCachedProcessor):
            def DetectFilesToChange(_, files):
                self.assertListEqual(files, [modified_file])
                return []
        cached_processor = CachedProcessor(
            cache_file_path=self.cache_file_path,
        )
        cached_processor.ProcessFiles(self.target_files)
# Allow running this test file directly, without an external runner.
if __name__ == '__main__':
    unittest.main()
|
import requests
from cloudbot import hook
from cloudbot.util import web
class APIError(Exception):
    """Raised when the Google geocode API returns an error status."""
    pass
# Google Maps geocoding endpoint used to turn free-form text into lat/lng.
google_base = 'https://maps.googleapis.com/maps/api/'
geocode_api = google_base + 'geocode/json'
# Placeholders: Weather Underground API key, then a "lat,lng" location.
wunder_api = "http://api.wunderground.com/api/{}/forecast/geolookup/conditions/q/{}.json"
# Optional region code used to bias geocoding results; unset by default.
bias = None
def check_status(status):
    """
    A little helper function that checks an API error code and returns a nice message.
    Returns None if no errors found
    """
    messages = {
        'REQUEST_DENIED': 'The geocode API is off in the Google Developers Console.',
        'ZERO_RESULTS': 'No results found.',
        'OVER_QUERY_LIMIT': 'The geocode API quota has run out.',
        'UNKNOWN_ERROR': 'Unknown Error.',
        'INVALID_REQUEST': 'Invalid Request.',
    }
    # 'OK' (and anything unrecognised) maps to None, meaning "no error"
    return messages.get(status)
def find_location(location):
    """
    Takes a location as a string, and returns a dict of data

    Raises APIError when the geocode API reports an error status.

    :param location: string
    :return: dict with 'lat' and 'lng' keys for the best match
    """
    # dev_key is populated by on_start(); bias optionally restricts
    # geocoding to a region
    params = {"address": location, "key": dev_key}
    if bias:
        params['region'] = bias
    json = requests.get(geocode_api, params=params).json()
    error = check_status(json['status'])
    if error:
        raise APIError(error)
    # lat/lng of the first (best) geocoding match
    return json['results'][0]['geometry']['location']
@hook.on_start
def on_start(bot):
    """ Loads API keys """
    # module-level keys consumed by find_location() and weather()
    global dev_key, wunder_key
    dev_key = bot.config.get("api_keys", {}).get("google_dev_key", None)
    wunder_key = bot.config.get("api_keys", {}).get("wunderground", None)
@hook.command("weather", "we")
def weather(text, reply):
"""weather <location> -- Gets weather data for <location>."""
if not wunder_key:
return "This command requires a Weather Underground API key."
if not dev_key:
return "This command requires a Google Developers Console API key."
# use find_location to get location data from the user input
try:
location_data = find_location(text)
except APIError as e:
return e
formatted_location = "{lat},{lng}".format(**location_data)
url = wunder_api.format(wunder_key, formatted_location)
response = requests.get(url).json()
if response['response'].get('error'):
return "{}".format(response['response']['error']['description'])
forecast_today = response["forecast"]["simpleforecast"]["forecastday"][0]
forecast_tomorrow = response["forecast"]["simpleforecast"]["forecastday"][1]
# put all the stuff we want to use in a dictionary for easy formatting of the output
weather_data = {
"place": response['current_observation']['display_location']['full'],
"conditions": response['current_observation']['weather'],
"temp_f": response['current_observation']['temp_f'],
"temp_c": response['current_observation']['temp_c'],
"humidity": response['current_observation']['relative_humidity'],
"wind_kph": response['current_observation']['wind_kph'],
"wind_mph": response['current_observation']['wind_mph'],
"wind_direction": response['current_observation']['wind_dir'],
"today_conditions": forecast_today['conditions'],
"today_high_f": forecast_today['high']['fahrenheit'],
"today_high_c": forecast_today['high']['celsius'],
"today_low_f": forecast_today['low']['fahrenheit'],
"today_low_c": forecast_today['low']['celsius'],
"tomorrow_conditions": forecast_tomorrow['conditions'],
"tomorrow_high_f": forecast_tomorrow['high']['fahrenheit'],
"tomorrow_high_c": forecast_tomorrow['high']['celsius'],
"tomorrow_low_f": forecast_tomorrow['low']['fahrenheit'],
"tomorrow_low_c": forecast_tomorrow['low']['celsius']
}
# Get the more accurate URL if available, if not, get the generic one.
if "?query=," in response["current_observation"]['ob_url']:
weather_data['url'] = web.shorten(response["current_observation"]['forecast_url'])
else:
weather_data['url'] = web.shorten(response["current_observation"]['ob_url'])
reply("{place} - \x02Current:\x02 {conditions}, {temp_f}F/{temp_c}C, {humidity}, "
"Wind: {wind_mph}MPH/{wind_kph}KPH {wind_direction}, \x02Today:\x02 {today_conditions}, "
"High: {today_high_f}F/{today_high_c}C, Low: {today_low_f}F/{today_low_c}C. "
"\x02Tomorrow:\x02 {tomorrow_conditions}, High: {tomorrow_high_f}F/{tomorrow_high_c}C, "
"Low: {tomorrow_low_f}F/{tomorrow_low_c}C - {url}".format(**weather_data))
|
"""Manipulators that can edit SON objects as they enter and exit a database.
New manipulators should be defined as subclasses of SONManipulator and can be
installed on a database by calling
`pymongo.database.Database.add_son_manipulator`."""
from bson.dbref import DBRef
from bson.objectid import ObjectId
from bson.son import SON
class SONManipulator(object):
    """A base son manipulator.

    This manipulator just saves and restores objects without changing them.
    """

    def will_copy(self):
        """Will this SON manipulator make a copy of the incoming document?

        Derived classes that do need to make a copy should override this
        method, returning True instead of False. All non-copying manipulators
        will be applied first (so that the user's document will be updated
        appropriately), followed by copying manipulators.
        """
        return False

    def _maybe_copy(self, son):
        """Return a SON copy of *son* when this manipulator copies, else *son*."""
        if self.will_copy():
            return SON(son)
        return son

    def transform_incoming(self, son, collection):
        """Manipulate an incoming SON object.

        :Parameters:
          - `son`: the SON object to be inserted into the database
          - `collection`: the collection the object is being inserted into
        """
        return self._maybe_copy(son)

    def transform_outgoing(self, son, collection):
        """Manipulate an outgoing SON object.

        :Parameters:
          - `son`: the SON object being retrieved from the database
          - `collection`: the collection this object was stored in
        """
        return self._maybe_copy(son)
class ObjectIdInjector(SONManipulator):
    """A son manipulator that adds the _id field if it is missing.

    .. versionchanged:: 2.7
       ObjectIdInjector is no longer used by PyMongo, but remains in this
       module for backwards compatibility.
    """

    def transform_incoming(self, son, collection):
        """Add an _id field if it is missing."""
        # idiomatic membership test ("x not in d" rather than "not x in d")
        if "_id" not in son:
            son["_id"] = ObjectId()
        return son
class ObjectIdShuffler(SONManipulator):
    """A son manipulator that moves _id to the first position.
    """
    def will_copy(self):
        """We need to copy to be sure that we are dealing with SON, not a dict.
        """
        return True
    def transform_incoming(self, son, collection):
        """Move _id to the front if it's there.
        """
        # idiomatic membership test ("x not in d" rather than "not x in d")
        if "_id" not in son:
            return son
        # build a new SON starting with _id, then append everything else;
        # update() skips _id since the key already exists
        transformed = SON({"_id": son["_id"]})
        transformed.update(son)
        return transformed
class NamespaceInjector(SONManipulator):
    """A son manipulator that stamps each incoming document with its
    collection name in an ``_ns`` field.
    """

    def transform_incoming(self, son, collection):
        """Record the target collection's name under ``_ns``."""
        son["_ns"] = collection.name
        return son
class AutoReference(SONManipulator):
    """Transparently reference and de-reference already saved embedded objects.

    This manipulator should probably only be used when the NamespaceInjector is
    also being used, otherwise it doesn't make too much sense - documents can
    only be auto-referenced if they have an *_ns* field.

    NOTE: this will behave poorly if you have a circular reference.

    TODO: this only works for documents that are in the same database. To fix
    this we'll need to add a DatabaseInjector that adds *_db* and then make
    use of the optional *database* support for DBRefs.
    """

    def __init__(self, db):
        self.database = db

    def will_copy(self):
        """We need to copy so the user's document doesn't get transformed refs.
        """
        return True

    def transform_incoming(self, son, collection):
        """Replace embedded documents with DBRefs.
        """
        def transform_value(value):
            if isinstance(value, dict):
                # an already-saved document (has _id and _ns) becomes a DBRef
                if "_id" in value and "_ns" in value:
                    return DBRef(value["_ns"], transform_value(value["_id"]))
                else:
                    return transform_dict(SON(value))
            elif isinstance(value, list):
                return [transform_value(v) for v in value]
            return value

        # parameter renamed from `object`, which shadowed the builtin
        def transform_dict(doc):
            for (key, value) in doc.items():
                doc[key] = transform_value(value)
            return doc

        return transform_dict(SON(son))

    def transform_outgoing(self, son, collection):
        """Replace DBRefs with embedded documents.
        """
        def transform_value(value):
            if isinstance(value, DBRef):
                # fetch the referenced document from this database
                return self.database.dereference(value)
            elif isinstance(value, list):
                return [transform_value(v) for v in value]
            elif isinstance(value, dict):
                return transform_dict(SON(value))
            return value

        # parameter renamed from `object`, which shadowed the builtin
        def transform_dict(doc):
            for (key, value) in doc.items():
                doc[key] = transform_value(value)
            return doc

        return transform_dict(SON(son))
|
import errno
import fcntl
import os
import socket
import stat
import sys
import time
from gunicorn import util
from gunicorn.six import string_types
# systemd passes activated sockets starting at fd 3 (see sd_listen_fds(3))
SD_LISTEN_FDS_START = 3
class BaseSocket(object):
    """Base wrapper around a listening socket.

    Subclasses define FAMILY and may override bind()/set_options().  The
    wrapped socket is either created fresh or adopted from an inherited
    file descriptor (systemd socket activation / GUNICORN_FD).
    """

    def __init__(self, address, conf, log, fd=None):
        self.log = log
        self.conf = conf
        self.cfg_addr = address
        if fd is None:
            sock = socket.socket(self.FAMILY, socket.SOCK_STREAM)
        else:
            # adopt an already-bound fd; skip binding in set_options
            sock = socket.fromfd(fd, self.FAMILY, socket.SOCK_STREAM)
        self.sock = self.set_options(sock, bound=(fd is not None))

    def __str__(self):
        # BUGFIX: __str__ must take only self; the stray `name` parameter
        # made str(sock) raise TypeError.
        return "<socket %d>" % self.sock.fileno()

    def __getattr__(self, name):
        # delegate any other attribute access to the wrapped socket
        return getattr(self.sock, name)

    def set_options(self, sock, bound=False):
        """Configure the socket and start listening; returns the socket."""
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        if not bound:
            self.bind(sock)
        sock.setblocking(0)
        # make sure that the socket can be inherited
        if hasattr(sock, "set_inheritable"):
            sock.set_inheritable(True)
        sock.listen(self.conf.backlog)
        return sock

    def bind(self, sock):
        sock.bind(self.cfg_addr)

    def close(self):
        """Close the wrapped socket once; later calls are no-ops."""
        if self.sock is None:
            return
        try:
            self.sock.close()
        except socket.error as e:
            self.log.info("Error while closing socket %s", str(e))
        self.sock = None
class TCPSocket(BaseSocket):
    """IPv4 TCP listener."""

    FAMILY = socket.AF_INET

    def __str__(self):
        # scheme follows the SSL configuration of this listener
        scheme = "https" if self.conf.is_ssl else "http"
        host, port = self.sock.getsockname()[:2]
        return "%s://%s:%d" % (scheme, host, port)

    def set_options(self, sock, bound=False):
        # disable Nagle's algorithm before the generic options are applied
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        return super(TCPSocket, self).set_options(sock, bound=bound)
class TCP6Socket(TCPSocket):
    """IPv6 TCP listener; renders the host in brackets."""

    FAMILY = socket.AF_INET6

    def __str__(self):
        # getsockname() returns (host, port, flowinfo, scope_id); only the
        # first two matter for display
        host, port = self.sock.getsockname()[:2]
        return "http://[%s]:%d" % (host, port)
class UnixSocket(BaseSocket):
    """Unix-domain listener backed by a socket file on disk."""
    FAMILY = socket.AF_UNIX
    def __init__(self, addr, conf, log, fd=None):
        if fd is None:
            try:
                st = os.stat(addr)
            except OSError as e:
                # a missing path is fine (nothing to clean up)
                if e.args[0] != errno.ENOENT:
                    raise
            else:
                # remove a stale socket file, but refuse to clobber a
                # path that is not a socket
                if stat.S_ISSOCK(st.st_mode):
                    os.remove(addr)
                else:
                    raise ValueError("%r is not a socket" % addr)
        super(UnixSocket, self).__init__(addr, conf, log, fd=fd)
    def __str__(self):
        return "unix:%s" % self.cfg_addr
    def bind(self, sock):
        # apply the configured umask/uid/gid to the created socket file
        old_umask = os.umask(self.conf.umask)
        sock.bind(self.cfg_addr)
        util.chown(self.cfg_addr, self.conf.uid, self.conf.gid)
        os.umask(old_umask)
    def close(self):
        # NOTE(review): unlinks unconditionally — raises if the path is
        # already gone; presumably acceptable during shutdown — verify.
        os.unlink(self.cfg_addr)
        super(UnixSocket, self).close()
def _sock_type(addr):
    """Return the listener class matching an address specification.

    Tuples map to TCP (v6 when the host is an IPv6 literal), strings map
    to Unix sockets; anything else is rejected.
    """
    if isinstance(addr, tuple):
        return TCP6Socket if util.is_ipv6(addr[0]) else TCPSocket
    if isinstance(addr, string_types):
        return UnixSocket
    raise TypeError("Unable to create socket from: %r" % addr)
def create_sockets(conf, log):
    """
    Create a new socket for the given address. If the
    address is a tuple, a TCP socket is created. If it
    is a string, a Unix socket is created. Otherwise
    a TypeError is raised.
    """
    # Systemd support, use the sockets managed by systemd and passed to
    # gunicorn.
    # http://www.freedesktop.org/software/systemd/man/systemd.socket.html
    listeners = []
    if ('LISTEN_PID' in os.environ
            and int(os.environ.get('LISTEN_PID')) == os.getpid()):
        for i in range(int(os.environ.get('LISTEN_FDS', 0))):
            fd = i + SD_LISTEN_FDS_START
            try:
                # probe the inherited fd's address to pick the socket class:
                # a path string means Unix, 2-tuple means IPv4, 4-tuple IPv6
                sock = socket.fromfd(fd, socket.AF_UNIX, socket.SOCK_STREAM)
                sockname = sock.getsockname()
                if isinstance(sockname, str) and sockname.startswith('/'):
                    listeners.append(UnixSocket(sockname, conf, log, fd=fd))
                elif len(sockname) == 2 and '.' in sockname[0]:
                    listeners.append(TCPSocket("%s:%s" % sockname, conf, log,
                                               fd=fd))
                elif len(sockname) == 4 and ':' in sockname[0]:
                    listeners.append(TCP6Socket("[%s]:%s" % sockname[:2], conf,
                                                log, fd=fd))
            except socket.error:
                pass
        # consume the activation variables so children don't re-use them
        del os.environ['LISTEN_PID'], os.environ['LISTEN_FDS']

        if listeners:
            log.debug('Socket activation sockets: %s',
                      ",".join([str(l) for l in listeners]))
            return listeners

    # get it only once
    laddr = conf.address

    # check ssl config early to raise the error on startup
    # only the certfile is needed since it can contains the keyfile
    if conf.certfile and not os.path.exists(conf.certfile):
        raise ValueError('certfile "%s" does not exist' % conf.certfile)

    if conf.keyfile and not os.path.exists(conf.keyfile):
        raise ValueError('keyfile "%s" does not exist' % conf.keyfile)

    # sockets are already bound
    if 'GUNICORN_FD' in os.environ:
        fds = os.environ.pop('GUNICORN_FD').split(',')
        for i, fd in enumerate(fds):
            fd = int(fd)
            addr = laddr[i]
            sock_type = _sock_type(addr)
            try:
                listeners.append(sock_type(addr, conf, log, fd=fd))
            except socket.error as e:
                if e.args[0] == errno.ENOTCONN:
                    log.error("GUNICORN_FD should refer to an open socket.")
                else:
                    raise
        return listeners

    # no sockets is bound, first initialization of gunicorn in this env.
    for addr in laddr:
        sock_type = _sock_type(addr)
        # If we fail to create a socket from GUNICORN_FD
        # we fall through and try and open the socket
        # normally.
        sock = None
        for i in range(5):
            try:
                sock = sock_type(addr, conf, log)
            except socket.error as e:
                if e.args[0] == errno.EADDRINUSE:
                    log.error("Connection in use: %s", str(addr))
                if e.args[0] == errno.EADDRNOTAVAIL:
                    log.error("Invalid address: %s", str(addr))
                # NOTE(review): i is at most 4 here, so this branch is
                # always taken and the process sleeps after every failure,
                # including the final one — looks like an off-by-one.
                if i < 5:
                    msg = "connection to {addr} failed: {error}"
                    log.debug(msg.format(addr=str(addr), error=str(e)))
                    log.error("Retrying in 1 second.")
                    time.sleep(1)
            else:
                # socket created successfully; stop retrying
                break

        if sock is None:
            log.error("Can't connect to %s", str(addr))
            sys.exit(1)

        listeners.append(sock)

    return listeners
|
"""Support for switches that can be controlled using the RaspyRFM rc module."""
from raspyrfm_client import RaspyRFMClient
from raspyrfm_client.device_implementations.controlunit.actions import Action
from raspyrfm_client.device_implementations.controlunit.controlunit_constants import (
ControlUnitModel,
)
from raspyrfm_client.device_implementations.gateway.manufacturer.gateway_constants import (
GatewayModel,
)
from raspyrfm_client.device_implementations.manufacturer_constants import Manufacturer
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PORT,
CONF_SWITCHES,
DEVICE_DEFAULT_NAME,
)
import homeassistant.helpers.config_validation as cv
# Configuration keys specific to the raspyrfm platform.
CONF_GATEWAY_MANUFACTURER = "gateway_manufacturer"
CONF_GATEWAY_MODEL = "gateway_model"
CONF_CONTROLUNIT_MANUFACTURER = "controlunit_manufacturer"
CONF_CONTROLUNIT_MODEL = "controlunit_model"
CONF_CHANNEL_CONFIG = "channel_config"
# Default to a RaspyRFM gateway running on the local host.
DEFAULT_HOST = "127.0.0.1"
# Platform schema: one optional gateway definition plus a required list of
# switches, each naming the control unit it drives and its channel setup.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
        vol.Optional(CONF_PORT): cv.port,
        vol.Optional(CONF_GATEWAY_MANUFACTURER): cv.string,
        vol.Optional(CONF_GATEWAY_MODEL): cv.string,
        vol.Required(CONF_SWITCHES): vol.Schema(
            [
                {
                    vol.Optional(CONF_NAME, default=DEVICE_DEFAULT_NAME): cv.string,
                    vol.Required(CONF_CONTROLUNIT_MANUFACTURER): cv.string,
                    vol.Required(CONF_CONTROLUNIT_MODEL): cv.string,
                    vol.Required(CONF_CHANNEL_CONFIG): {cv.string: cv.match_all},
                }
            ]
        ),
    },
    extra=vol.ALLOW_EXTRA,
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the RaspyRFM switch."""
    # Fall back to the Seegel Systeme RaspyRFM gateway when the user did
    # not configure a gateway explicitly.
    gateway_manufacturer = config.get(
        CONF_GATEWAY_MANUFACTURER, Manufacturer.SEEGEL_SYSTEME.value
    )
    gateway_model = config.get(CONF_GATEWAY_MODEL, GatewayModel.RASPYRFM.value)
    host = config[CONF_HOST]
    port = config.get(CONF_PORT)
    client = RaspyRFMClient()
    gateway = client.get_gateway(
        Manufacturer(gateway_manufacturer), GatewayModel(gateway_model), host, port
    )
    # Build one entity per configured switch, each bound to its control unit.
    entities = []
    for switch_conf in config[CONF_SWITCHES]:
        controlunit = client.get_controlunit(
            Manufacturer(switch_conf[CONF_CONTROLUNIT_MANUFACTURER]),
            ControlUnitModel(switch_conf[CONF_CONTROLUNIT_MODEL]),
        )
        controlunit.set_channel_config(**switch_conf[CONF_CHANNEL_CONFIG])
        entities.append(
            RaspyRFMSwitch(client, switch_conf[CONF_NAME], gateway, controlunit)
        )
    add_entities(entities)
class RaspyRFMSwitch(SwitchEntity):
    """A single RF switch driven through a RaspyRFM gateway."""

    def __init__(self, raspyrfm_client, name: str, gateway, controlunit):
        """Initialize the switch."""
        self._client = raspyrfm_client
        self._name = name
        self._gateway = gateway
        self._unit = controlunit
        # Last commanded state; unknown (None) until a command is sent.
        self._commanded_on = None

    @property
    def name(self):
        """Return the name of the device if any."""
        return self._name

    @property
    def should_poll(self):
        """Disable polling: the RF device cannot report its state."""
        return False

    @property
    def assumed_state(self):
        """Return True when the current state can not be queried."""
        return True

    @property
    def is_on(self):
        """Return true if switch is on."""
        return self._commanded_on

    def turn_on(self, **kwargs):
        """Turn the switch on."""
        self._client.send(self._gateway, self._unit, Action.ON)
        self._commanded_on = True
        self.schedule_update_ha_state()

    def turn_off(self, **kwargs):
        """Turn the switch off."""
        # Some control units lack a discrete OFF action and toggle off
        # with a second ON instead.
        supported = self._unit.get_supported_actions()
        action = Action.OFF if Action.OFF in supported else Action.ON
        self._client.send(self._gateway, self._unit, action)
        self._commanded_on = False
        self.schedule_update_ha_state()
|
# Demo (Python 2): rank a list of search terms by how strongly each
# co-occurs with a context word ("dangerous") in web search results.
import os, sys; sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", ".."))
from pattern.web import GOOGLE, YAHOO, BING, sort
results = sort(
    terms = [
        "arnold schwarzenegger",
        "chuck norris",
        "dolph lundgren",
        "steven seagal",
        "sylvester stallone",
        "mickey mouse",
    ],
    context = "dangerous", # Term used for sorting.
    service = BING, # GOOGLE, YAHOO, BING, ...
    license = None, # You should supply your own API license key for the given service.
    strict = True, # Wraps the query in quotes, i.e. 'mac sweet'.
    reverse = True, # Reverses term and context: 'sweet mac' instead of 'mac sweet'.
    cached = True)
# results are (weight, term) pairs; weights are normalized to sum to 1.
for weight, term in results:
    print "%5.2f" % (weight * 100) + "%", term
|
import uwsgi
def send_request(env, client):
    """Send an HTTP/1.0 GET for google.it's logo and stream the response.

    Generator protocol: yields uwsgi wait markers (wait_fd_read) and then
    raw response chunks as they arrive on the socket.
    """
    uwsgi.send(client, b"GET /intl/it_it/images/logo.gif HTTP/1.0\r\n")
    # test for suspend/resume
    uwsgi.suspend()
    uwsgi.send(client, b"Host: www.google.it\r\n\r\n")
    while 1:
        # Wait (up to 2s) for the socket to become readable.
        yield uwsgi.wait_fd_read(client, 2)
        if env['x-wsgiorg.fdevent.timeout']:
            return
        buf = uwsgi.recv(client, 4096)
        if buf:
            yield buf
        else:
            # Peer closed the connection: end of the response body.
            break
def application(env, start_response):
    """Async WSGI app: fetch Google's logo over a non-blocking connection.

    Yields uwsgi event markers and response chunks; closes the socket in
    every exit path that reaches it.
    """
    c = uwsgi.async_connect('74.125.232.115:80')
    # wait (up to 2 seconds) for the connection to become writable
    yield uwsgi.wait_fd_write(c, 2)
    if env['x-wsgiorg.fdevent.timeout']:
        uwsgi.close(c)
        # PEP 479: "raise StopIteration" inside a generator becomes a
        # RuntimeError on Python 3.7+; a plain return ends the generator.
        return
    if uwsgi.is_connected(c):
        for r in send_request(env, c):
            yield r
    else:
        start_response('500 Internal Server Error', [('Content-Type', 'text/html')])
        yield "Internal Server Error"
    uwsgi.close(c)
|
import pytest
from tests.support.asserts import assert_dialog_handled, assert_error, assert_success
def element_send_keys(session, element, text):
    """Issue an Element Send Keys command for *element* over the session."""
    endpoint = "/session/{session_id}/element/{element_id}/value".format(
        session_id=session.session_id, element_id=element.id)
    return session.transport.send("POST", endpoint, {"text": text})
@pytest.fixture
def check_user_prompt_closed_without_exception(session, create_dialog, inline):
    """Fixture: the prompt is auto-closed and send-keys still succeeds."""
    def check_user_prompt_closed_without_exception(dialog_type, retval):
        session.url = inline("<input type=text>")
        element = session.find.css("input", all=False)
        create_dialog(dialog_type, text=dialog_type)
        response = element_send_keys(session, element, "foo")
        assert_success(response)
        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
        # Keys were actually typed: the input's value reflects them.
        assert element.property("value") == "foo"
    return check_user_prompt_closed_without_exception
@pytest.fixture
def check_user_prompt_closed_with_exception(session, create_dialog, inline):
    """Fixture: the prompt is auto-closed but the command is rejected."""
    def check_user_prompt_closed_with_exception(dialog_type, retval):
        session.url = inline("<input type=text>")
        element = session.find.css("input", all=False)
        create_dialog(dialog_type, text=dialog_type)
        response = element_send_keys(session, element, "foo")
        assert_error(response, "unexpected alert open")
        assert_dialog_handled(session, expected_text=dialog_type, expected_retval=retval)
        # The command never ran: the input's value is unchanged.
        assert element.property("value") == ""
    return check_user_prompt_closed_with_exception
@pytest.fixture
def check_user_prompt_not_closed_but_exception(session, create_dialog, inline):
    """Fixture: the prompt stays open and the command is rejected."""
    def check_user_prompt_not_closed_but_exception(dialog_type):
        session.url = inline("<input type=text>")
        element = session.find.css("input", all=False)
        create_dialog(dialog_type, text=dialog_type)
        response = element_send_keys(session, element, "foo")
        assert_error(response, "unexpected alert open")
        # The prompt is still present; dismiss it manually so later
        # assertions can talk to the page.
        assert session.alert.text == dialog_type
        session.alert.dismiss()
        assert element.property("value") == ""
    return check_user_prompt_not_closed_but_exception
@pytest.mark.capabilities({"unhandledPromptBehavior": "accept"})
@pytest.mark.parametrize("dialog_type, retval", [
    ("alert", None),
    ("confirm", True),
    ("prompt", ""),
])
def test_accept(check_user_prompt_closed_without_exception, dialog_type, retval):
    """"accept": the prompt is accepted and the command proceeds."""
    check_user_prompt_closed_without_exception(dialog_type, retval)
@pytest.mark.capabilities({"unhandledPromptBehavior": "accept and notify"})
@pytest.mark.parametrize("dialog_type, retval", [
    ("alert", None),
    ("confirm", True),
    ("prompt", ""),
])
def test_accept_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
    """"accept and notify": the prompt is accepted but the command errors."""
    check_user_prompt_closed_with_exception(dialog_type, retval)
@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss"})
@pytest.mark.parametrize("dialog_type, retval", [
    ("alert", None),
    ("confirm", False),
    ("prompt", None),
])
def test_dismiss(check_user_prompt_closed_without_exception, dialog_type, retval):
    """"dismiss": the prompt is dismissed and the command proceeds."""
    check_user_prompt_closed_without_exception(dialog_type, retval)
@pytest.mark.capabilities({"unhandledPromptBehavior": "dismiss and notify"})
@pytest.mark.parametrize("dialog_type, retval", [
    ("alert", None),
    ("confirm", False),
    ("prompt", None),
])
def test_dismiss_and_notify(check_user_prompt_closed_with_exception, dialog_type, retval):
    """"dismiss and notify": the prompt is dismissed but the command errors."""
    check_user_prompt_closed_with_exception(dialog_type, retval)
@pytest.mark.capabilities({"unhandledPromptBehavior": "ignore"})
@pytest.mark.parametrize("dialog_type", ["alert", "confirm", "prompt"])
def test_ignore(check_user_prompt_not_closed_but_exception, dialog_type):
    """"ignore": the prompt stays open and the command errors."""
    check_user_prompt_not_closed_but_exception(dialog_type)
@pytest.mark.parametrize("dialog_type, retval", [
    ("alert", None),
    ("confirm", False),
    ("prompt", None),
])
def test_default(check_user_prompt_closed_with_exception, dialog_type, retval):
    """Default behavior matches "dismiss and notify"."""
    check_user_prompt_closed_with_exception(dialog_type, retval)
|
"""Implementing support for MySQL Authentication Plugins"""
from hashlib import sha1
import struct
from . import errors
from .catch23 import PY2, isstr
class BaseAuthPlugin(object):
    """Base class for authentication plugins.

    Subclasses implement prepare_password(); auth_response() is the public
    entry point, which first enforces the plugin's SSL requirement and then
    delegates to prepare_password().  Only auth_data is required when
    instantiating; username, password, database and ssl_enabled are
    optional.
    """

    # Whether this plugin refuses to run over a non-SSL connection.
    requires_ssl = False
    # Name under which the server requests this plugin.
    plugin_name = ''

    def __init__(self, auth_data, username=None, password=None, database=None,
                 ssl_enabled=False):
        """Store the server-provided auth data and optional credentials."""
        self._auth_data = auth_data
        self._username = username
        self._password = password
        self._database = database
        self._ssl_enabled = ssl_enabled

    def prepare_password(self):
        """Prepare and return the password to be sent to MySQL.

        Subclasses must override this; the base implementation always
        raises NotImplementedError.
        """
        raise NotImplementedError

    def auth_response(self):
        """Return the prepared password to send to MySQL.

        Raises InterfaceError when the plugin requires SSL but the
        connection is not SSL-enabled.
        """
        if not self._ssl_enabled and self.requires_ssl:
            raise errors.InterfaceError("{name} requires SSL".format(
                name=self.plugin_name))
        return self.prepare_password()
class MySQLNativePasswordAuthPlugin(BaseAuthPlugin):
    """Class implementing the MySQL Native Password authentication plugin"""
    requires_ssl = False
    plugin_name = 'mysql_native_password'
    def prepare_password(self):
        """Prepares and returns password as native MySQL 4.1+ password

        Scrambles the password with the 20-byte seed the server sent in
        auth_data.  Raises InterfaceError when the seed is missing or the
        scrambling fails; returns b'' for an empty password.
        """
        if not self._auth_data:
            raise errors.InterfaceError("Missing authentication data (seed)")
        if not self._password:
            return b''
        password = self._password
        # Normalize text passwords to UTF-8 bytes.
        if isstr(self._password):
            password = self._password.encode('utf-8')
        else:
            password = self._password
        if PY2:
            # Python 2: wrap in buffer() so sha1() accepts the data.
            password = buffer(password)  # pylint: disable=E0602
            try:
                auth_data = buffer(self._auth_data)  # pylint: disable=E0602
            except TypeError:
                raise errors.InterfaceError("Authentication data incorrect")
        else:
            password = password
            auth_data = self._auth_data
        hash4 = None
        try:
            # Native scramble: SHA1(password) XOR SHA1(seed + SHA1(SHA1(password)))
            hash1 = sha1(password).digest()
            hash2 = sha1(hash1).digest()
            hash3 = sha1(auth_data + hash2).digest()
            if PY2:
                xored = [ord(h1) ^ ord(h3) for (h1, h3) in zip(hash1, hash3)]
            else:
                xored = [h1 ^ h3 for (h1, h3) in zip(hash1, hash3)]
            hash4 = struct.pack('20B', *xored)
        except Exception as exc:
            raise errors.InterfaceError(
                "Failed scrambling password; {0}".format(exc))
        return hash4
class MySQLClearPasswordAuthPlugin(BaseAuthPlugin):
    """Class implementing the MySQL Clear Password authentication plugin"""
    # Sending the password in the clear is only acceptable over SSL.
    requires_ssl = True
    plugin_name = 'mysql_clear_password'
    def prepare_password(self):
        """Returns the password as clear text, null-terminated"""
        if not self._password:
            return b'\x00'
        password = self._password
        # Encode text passwords to UTF-8 bytes on both Python 2 and 3.
        if PY2:
            if isinstance(password, unicode):  # pylint: disable=E0602
                password = password.encode('utf8')
        elif isinstance(password, str):
            password = password.encode('utf8')
        return password + b'\x00'
class MySQLSHA256PasswordAuthPlugin(BaseAuthPlugin):
    """Class implementing the MySQL SHA256 authentication plugin

    Note that encrypting using RSA is not supported since the Python
    Standard Library does not provide this OpenSSL functionality.
    Because of that the password is sent in the clear, so SSL is required.
    """
    requires_ssl = True
    plugin_name = 'sha256_password'
    def prepare_password(self):
        """Returns the password as clear text, null-terminated"""
        if not self._password:
            return b'\x00'
        password = self._password
        # Encode text passwords to UTF-8 bytes on both Python 2 and 3.
        if PY2:
            if isinstance(password, unicode):  # pylint: disable=E0602
                password = password.encode('utf8')
        elif isinstance(password, str):
            password = password.encode('utf8')
        return password + b'\x00'
def get_auth_plugin(plugin_name):
    """Return authentication class based on plugin name

    Looks through the direct subclasses of BaseAuthPlugin for the one
    whose plugin_name matches and returns the class itself (not an
    instance).

    Raises errors.NotSupportedError when plugin_name is not supported.
    """
    for candidate in BaseAuthPlugin.__subclasses__():  # pylint: disable=E1101
        if candidate.plugin_name != plugin_name:
            continue
        return candidate
    raise errors.NotSupportedError(
        "Authentication plugin '{0}' is not supported".format(plugin_name))
|
"""Serializer tests for the GitHub addon."""
import mock
from nose.tools import * # noqa (PEP8 asserts)
from website.addons.base.testing.serializers import StorageAddonSerializerTestSuiteMixin
from website.addons.github.api import GitHubClient
from website.addons.github.tests.factories import GitHubAccountFactory
from website.addons.github.serializer import GitHubSerializer
from tests.base import OsfTestCase
class TestGitHubSerializer(StorageAddonSerializerTestSuiteMixin, OsfTestCase):
    """Serializer test case for the GitHub storage addon."""

    addon_short_name = 'github'
    Serializer = GitHubSerializer
    ExternalAccountFactory = GitHubAccountFactory
    client = GitHubClient()

    def set_provider_id(self, pid):
        # The GitHub addon stores its provider id as the repo name.
        self.node_settings.repo = pid

    ## Overrides ##

    def setUp(self):
        super(TestGitHubSerializer, self).setUp()
        self.mock_api_user = mock.patch("website.addons.github.api.GitHubClient.user")
        # Bug fix: start() returns the mock that actually replaces
        # GitHubClient.user; the return_value must be configured on that
        # mock, not on the patcher object (where it was silently ignored).
        self.mock_user = self.mock_api_user.start()
        self.mock_user.return_value = mock.Mock()

    def tearDown(self):
        self.mock_api_user.stop()
        super(TestGitHubSerializer, self).tearDown()
|
from qgis._networkanalysis import *
|
from lxml import etree
from nova.api.openstack import common
from nova.api.openstack import xmlutil
from nova.openstack.common import log as logging
from nova.tests.integrated import integrated_helpers
LOG = logging.getLogger(__name__)
class XmlTests(integrated_helpers._IntegratedTestBase):
    """Some basic XML sanity checks."""
    # Bug fix: the docstring above used four quotes (""""...), which put a
    # stray '"' at the start of the docstring text.

    _api_version = 'v2'

    def test_namespace_limits(self):
        """/limits responses must use the common v1.0 XML namespace."""
        headers = {'Accept': 'application/xml'}
        response = self.api.api_request('/limits', headers=headers)
        data = response.read()
        # Pass args lazily so formatting happens only when DEBUG is enabled.
        LOG.debug("data: %s", data)
        root = etree.XML(data)
        self.assertEqual(root.nsmap.get(None), xmlutil.XMLNS_COMMON_V10)

    def test_namespace_servers(self):
        # /servers should have v1.1 namespace (has changed in 1.1).
        headers = {'Accept': 'application/xml'}
        response = self.api.api_request('/servers', headers=headers)
        data = response.read()
        LOG.debug("data: %s", data)
        root = etree.XML(data)
        self.assertEqual(root.nsmap.get(None), common.XML_NS_V11)
|
from __future__ import with_statement
__license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal kovid@kovidgoyal.net'
__docformat__ = 'restructuredtext en'
'''
Convert an ODT file into a Open Ebook
'''
from calibre.customize.conversion import InputFormatPlugin
class ODTInput(InputFormatPlugin):
    """Input plugin converting ODT (OpenOffice) documents to HTML."""

    name = 'ODT Input'
    author = 'Kovid Goyal'
    description = 'Convert ODT (OpenOffice) files to HTML'
    file_types = set(['odt'])

    def convert(self, stream, options, file_ext, log,
                accelerators):
        # Imported lazily so listing the plugin does not pull in the
        # ODT extraction machinery.
        from calibre.ebooks.odt.input import Extract
        extractor = Extract()
        return extractor(stream, '.', log)
|
"""
Keystone In-Memory Dogpile.cache backend implementation.
"""
import copy
from dogpile.cache import api
NO_VALUE = api.NO_VALUE
class MemoryBackend(api.CacheBackend):
    """A backend that uses a plain dictionary.

    There is no size management; values stay in the dictionary until
    explicitly removed.  Dogpile expires items based on timestamps, so
    expiration never removes them from this cache.  Values are deep-copied
    on both set and get so callers can never mutate cached state in place.

    E.g.::

        from dogpile.cache import make_region

        region = make_region().configure(
            'keystone.common.kvs.Memory'
        )
    """
    def __init__(self, arguments):
        # ``arguments`` comes from region configuration and is unused here.
        self._db = {}

    def _isolate_value(self, value):
        # Pass the NO_VALUE marker through untouched; deep-copy real values.
        if value is NO_VALUE:
            return value
        return copy.deepcopy(value)

    def get(self, key):
        return self._isolate_value(self._db.get(key, NO_VALUE))

    def get_multi(self, keys):
        return [self.get(k) for k in keys]

    def set(self, key, value):
        self._db[key] = self._isolate_value(value)

    def set_multi(self, mapping):
        for k, v in mapping.items():
            self.set(k, v)

    def delete(self, key):
        self._db.pop(key, None)

    def delete_multi(self, keys):
        for k in keys:
            self.delete(k)
|
from __future__ import with_statement
from contextlib import contextmanager
from fabric.api import hide, puts
@contextmanager
def msg(txt):
    """Print "<txt>..." on entry, run the body with all fabric output
    hidden, then print "done." on the same line on exit."""
    puts(txt + "...", end='', flush=True)
    with hide('everything'):
        yield
    puts("done.", show_prefix=False, flush=True)
|
# Exercise unary and binary integer operators around MicroPython's
# small-int boundaries; stdout is diffed against CPython as a golden
# reference, so the expressions themselves must not change.
print(+1)
print(+100)
print(-1)
print(-(-1))
print(-0x3fffffff) # 32-bit edge case
print(-0x3fffffffffffffff) # 64-bit edge case
print(-(-0x3fffffff - 1)) # 32-bit edge case
print(-(-0x3fffffffffffffff - 1)) # 64-bit edge case
# Bitwise NOT around the same boundaries (~x == -x - 1).
print(~0)
print(~1)
print(~-1)
print(~0x3fffffff) # 32-bit edge case
print(~0x3fffffffffffffff) # 64-bit edge case
print(~(-0x3fffffff - 1)) # 32-bit edge case
print(~(-0x3fffffffffffffff - 1)) # 64-bit edge case
# Basic arithmetic.
print(1 + 2)
print(1 - 2)
print(2 - 1)
print(1 * 2)
print(123 * 456)
# Floor division and modulo: Python's result sign follows the divisor.
print(123 // 7, 123 % 7)
print(-123 // 7, -123 % 7)
print(123 // -7, 123 % -7)
print(-123 // -7, -123 % -7)
|
"""Test that the set of gen-* files is the same as the generated files."""
import errno
import fnmatch
import logging
import os
import sys

import generate
UPDATE_TIP = 'To update the generated tests, run:\n' \
'$ python third_party/WebKit/LayoutTests/bluetooth/generate.py'
def main():
    """Check that the gen-* files on disk match the generator's output.

    Returns -1 on any mismatch, missing file, or stale generated file;
    returns 0 when everything is in sync.
    """
    logging.basicConfig(level=logging.INFO)
    logging.info(UPDATE_TIP)
    generated_files = set()
    # Tests data in gen-* files is the same as the data generated.
    for generated_test in generate.GetGeneratedTests():
        generated_files.add(generated_test.path)
        try:
            # Read bytes and decode explicitly so this behaves the same
            # under Python 2 and Python 3 (str.decode is Python 2 only).
            with open(generated_test.path, 'rb') as f:
                data = f.read().decode('utf-8')
            if data != generated_test.data:
                logging.error('%s does not match template', generated_test.path)
                return -1
        except IOError as e:
            # Use the symbolic constant instead of the magic number 2.
            if e.errno == errno.ENOENT:
                logging.error('Missing generated test:\n%s\nFor template:\n%s',
                              generated_test.path,
                              generated_test.template)
                return -1
            # Any other I/O failure is unexpected; don't swallow it.
            raise
    # Tests that there are no obsolete generated files.
    previous_generated_files = set()
    current_path = os.path.dirname(os.path.realpath(__file__))
    for root, _, filenames in os.walk(current_path):
        for filename in fnmatch.filter(filenames, 'gen-*.https.window.js'):
            previous_generated_files.add(os.path.join(root, filename))
    if previous_generated_files != generated_files:
        logging.error('There are extra generated tests. Please remove them.')
        for test_path in previous_generated_files - generated_files:
            logging.error('%s', test_path)
        return -1
    # Explicit success value (was an implicit None, which sys.exit treats
    # the same way but is less clear).
    return 0
if __name__ == '__main__':
sys.exit(main())
|
import sys
import DataStore
import util
import logging
def verify_tx_merkle_hashes(store, logger, chain_id):
    """Recompute the Merkle root of every block in a chain and compare it
    with the stored block_hashMerkleRoot.

    Returns (checked, bad): total blocks examined and mismatch count.
    """
    checked, bad = 0, 0
    for block_id, merkle_root, num_tx in store.selectall("""
        SELECT b.block_id, b.block_hashMerkleRoot, b.block_num_tx
          FROM block b
          JOIN chain_candidate cc ON (b.block_id = cc.block_id)
         WHERE cc.chain_id = ?""", (chain_id,)):
        merkle_root = store.hashout(merkle_root)
        tree = []
        # Collect the block's transaction hashes in position order; the
        # Merkle root depends on this ordering.
        for (tx_hash,) in store.selectall("""
            SELECT tx.tx_hash
              FROM block_tx bt
              JOIN tx ON (bt.tx_id = tx.tx_id)
             WHERE bt.block_id = ?
             ORDER BY bt.tx_pos""", (block_id,)):
            tree.append(store.hashout(tx_hash))
        if len(tree) != num_tx:
            # The stored transaction count disagrees with the rows found.
            logger.warning("block %d: block_num_tx=%d but found %d",
                           block_id, num_tx, len(tree))
        root = util.merkle(tree) or DataStore.NULL_HASH
        if root != merkle_root:
            logger.error("block %d: block_hashMerkleRoot mismatch.",
                         block_id)
            bad += 1
        checked += 1
        # Progress line every 1000 blocks.
        if checked % 1000 == 0:
            logger.info("%d Merkle trees, %d bad", checked, bad)
    # Final tally unless the last progress line already reported it.
    if checked % 1000 > 0:
        logger.info("%d Merkle trees, %d bad", checked, bad)
    return checked, bad
def main(argv):
    """Entry point: verify Merkle roots for every chain in the store.

    Returns a non-zero exit status when any tree failed verification.
    """
    cmdline = util.CmdLine(argv)
    cmdline.usage = lambda: "Usage: verify.py --dbtype=MODULE --connect-args=ARGS"
    store, argv = cmdline.init()
    if store is None:
        return 0
    logger = logging.getLogger("verify")
    total_checked = 0
    total_bad = 0
    for (chain_id,) in store.selectall("""
        SELECT chain_id FROM chain"""):
        logger.info("checking chain %d", chain_id)
        chain_checked, chain_bad = verify_tx_merkle_hashes(store, logger, chain_id)
        total_checked += chain_checked
        total_bad += chain_bad
    logger.info("All chains: %d Merkle trees, %d bad", total_checked, total_bad)
    return total_bad and 1
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
class ModelTemplate:
    """Base class for code-generation templates.

    Subclasses are expected to override Generate().
    """

    def Generate(self):
        # Fixed the misspelled "Genertate" in the reminder message.
        # A single parenthesized argument prints identically under
        # Python 2 and Python 3.
        print("Generate() needs to be implemented in a Template class!")
|
from __future__ import absolute_import, division, print_function
import struct
import six
from cryptography.exceptions import (
UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.primitives import constant_time, hmac
from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512
from cryptography.hazmat.primitives.twofactor import InvalidToken
from cryptography.hazmat.primitives.twofactor.utils import _generate_uri
class HOTP(object):
    """HMAC-based one-time password (HOTP, RFC 4226) generator/verifier."""
    def __init__(self, key, length, algorithm, backend):
        """Validate and store parameters.

        key: shared secret, at least 128 bits (16 bytes).
        length: number of OTP digits, 6-8.
        algorithm: a SHA1, SHA256 or SHA512 hash instance.
        backend: object implementing HMACBackend.
        """
        if not isinstance(backend, HMACBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HMACBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE
            )
        if len(key) < 16:
            raise ValueError("Key length has to be at least 128 bits.")
        if not isinstance(length, six.integer_types):
            raise TypeError("Length parameter must be an integer type.")
        if length < 6 or length > 8:
            raise ValueError("Length of HOTP has to be between 6 to 8.")
        if not isinstance(algorithm, (SHA1, SHA256, SHA512)):
            raise TypeError("Algorithm must be SHA1, SHA256 or SHA512.")
        self._key = key
        self._length = length
        self._algorithm = algorithm
        self._backend = backend
    def generate(self, counter):
        """Return the OTP for *counter* as zero-padded ASCII digit bytes."""
        truncated_value = self._dynamic_truncate(counter)
        # Keep only the low `length` decimal digits of the truncated value.
        hotp = truncated_value % (10 ** self._length)
        return "{0:0{1}}".format(hotp, self._length).encode()
    def verify(self, hotp, counter):
        """Raise InvalidToken unless *hotp* equals generate(counter).

        Uses a constant-time comparison to avoid timing side channels.
        """
        if not constant_time.bytes_eq(self.generate(counter), hotp):
            raise InvalidToken("Supplied HOTP value does not match.")
    def _dynamic_truncate(self, counter):
        # HMAC the 8-byte big-endian counter with the shared key.
        ctx = hmac.HMAC(self._key, self._algorithm, self._backend)
        ctx.update(struct.pack(">Q", counter))
        hmac_value = ctx.finalize()
        # Dynamic truncation (RFC 4226 section 5.3): the low 4 bits of the
        # final byte select an offset; read 4 bytes there, mask to 31 bits.
        offset = six.indexbytes(hmac_value, len(hmac_value) - 1) & 0b1111
        p = hmac_value[offset:offset + 4]
        return struct.unpack(">I", p)[0] & 0x7fffffff
    def get_provisioning_uri(self, account_name, counter, issuer):
        """Return an otpauth:// provisioning URI embedding the counter."""
        return _generate_uri(self, "hotp", account_name, issuer, [
            ("counter", int(counter)),
        ])
|
from js2py.base import *
# Minimal JS "console" object for js2py: console.log(x) prints its first
# argument.  (Python 2 module: note the print statement below.)
@Js
def console():
    pass
@Js
def log():
    # ``arguments`` is injected into the scope by js2py's @Js wrapper.
    print arguments[0]
console.put('log', log)
|
'''
libvirt external inventory script
=================================
Ansible has a feature where instead of reading from /etc/ansible/hosts
as a text file, it can query external programs to obtain the list
of hosts, groups the hosts are in, and even variables to assign to each host.
To use this, copy this file over /etc/ansible/hosts and chmod +x the file.
This, more or less, allows you to keep one central database containing
info about all of your managed instances.
'''
import argparse
import ConfigParser
import os
import sys
import libvirt
import xml.etree.ElementTree as ET
try:
import json
except ImportError:
import simplejson as json
class LibvirtInventory(object):
    ''' libvirt dynamic inventory '''
    def __init__(self):
        ''' Main execution path: parse args, then print the requested
        JSON (single host vars or the full inventory) to stdout. '''
        self.inventory = dict()  # A list of groups and the hosts in that group
        self.cache = dict()  # Details about hosts in the inventory
        # Read settings and parse CLI arguments
        self.read_settings()
        self.parse_cli_args()
        if self.args.host:
            print _json_format_dict(self.get_host_info(), self.args.pretty)
        elif self.args.list:
            print _json_format_dict(self.get_inventory(), self.args.pretty)
        else:  # default action with no options
            print _json_format_dict(self.get_inventory(), self.args.pretty)
    def read_settings(self):
        ''' Reads the settings from the libvirt.ini file '''
        config = ConfigParser.SafeConfigParser()
        # libvirt.ini lives next to this script.
        config.read(
            os.path.dirname(os.path.realpath(__file__)) + '/libvirt.ini'
        )
        self.libvirt_uri = config.get('libvirt', 'uri')
    def parse_cli_args(self):
        ''' Command line argument processing '''
        parser = argparse.ArgumentParser(
            description='Produce an Ansible Inventory file based on libvirt'
        )
        parser.add_argument(
            '--list',
            action='store_true',
            default=True,
            help='List instances (default: True)'
        )
        parser.add_argument(
            '--host',
            action='store',
            help='Get all the variables about a specific instance'
        )
        parser.add_argument(
            '--pretty',
            action='store_true',
            default=False,
            help='Pretty format (default: False)'
        )
        self.args = parser.parse_args()
    def get_host_info(self):
        ''' Get variables about a specific host.
        Returns None when the host is unknown. '''
        inventory = self.get_inventory()
        if self.args.host in inventory['_meta']['hostvars']:
            return inventory['_meta']['hostvars'][self.args.host]
    def get_inventory(self):
        ''' Construct the inventory: groups from ansible metadata tags and
        per-host vars (including the first DHCP lease IP) for every
        running libvirt guest. '''
        inventory = dict(_meta=dict(hostvars=dict()))
        conn = libvirt.openReadOnly(self.libvirt_uri)
        if conn is None:
            print "Failed to open connection to %s" % self.libvirt_uri
            sys.exit(1)
        domains = conn.listAllDomains()
        if domains is None:
            print "Failed to list domains for connection %s" % self.libvirt_uri
            sys.exit(1)
        for domain in domains:
            hostvars = dict(libvirt_name=domain.name(),
                            libvirt_id=domain.ID(),
                            libvirt_uuid=domain.UUIDString())
            domain_name = domain.name()
            # TODO: add support for guests that are not in a running state
            state, _ = domain.state()
            # VIR_DOMAIN_RUNNING == 1; skip guests in any other state.
            # (The previous comment here claimed 2 is the running state,
            # which contradicted the check below.)
            if state != 1:
                continue
            hostvars['libvirt_status'] = 'running'
            root = ET.fromstring(domain.XMLDesc())
            # Tags are read from the guest's <metadata> ansible namespace.
            ansible_ns = {'ansible': 'https://github.com/ansible/ansible'}
            for tag_elem in root.findall('./metadata/ansible:tags/ansible:tag', ansible_ns):
                tag = tag_elem.text
                _push(inventory, "tag_%s" % tag, domain_name)
                _push(hostvars, 'libvirt_tags', tag)
            # TODO: support more than one network interface, also support
            # interface types other than 'network'
            interface = root.find("./devices/interface[@type='network']")
            if interface is not None:
                source_elem = interface.find('source')
                mac_elem = interface.find('mac')
                if source_elem is not None and \
                   mac_elem is not None:
                    # Adding this to disable pylint check specifically
                    # ignoring libvirt-python versions that
                    # do not include DHCPLeases
                    # This is needed until we upgrade the build bot to
                    # RHEL7 (>= 1.2.6 libvirt)
                    # pylint: disable=no-member
                    dhcp_leases = conn.networkLookupByName(source_elem.get('network')) \
                        .DHCPLeases(mac_elem.get('address'))
                    if len(dhcp_leases) > 0:
                        ip_address = dhcp_leases[0]['ipaddr']
                        hostvars['ansible_ssh_host'] = ip_address
                        hostvars['libvirt_ip_address'] = ip_address
            inventory['_meta']['hostvars'][domain_name] = hostvars
        return inventory
def _push(my_dict, key, element):
'''
Push element to the my_dict[key] list.
After having initialized my_dict[key] if it dosn't exist.
'''
if key in my_dict:
my_dict[key].append(element)
else:
my_dict[key] = [element]
def _json_format_dict(data, pretty=False):
''' Serialize data to a JSON formated str '''
if pretty:
return json.dumps(data, sort_keys=True, indent=2)
else:
return json.dumps(data)
LibvirtInventory()
|
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class ToughAnimationCasesPage(page_module.Page):
  """One animation benchmark page, optionally gated on a readiness flag."""
  def __init__(self, url, page_set, need_measurement_ready):
    super(ToughAnimationCasesPage, self).__init__(url=url, page_set=page_set)
    self.archive_data_file = 'data/tough_animation_cases.json'
    # When True, navigation waits until the page sets the global
    # ``measurementReady`` JS flag before measurement begins.
    self._need_measurement_ready = need_measurement_ready
  def RunNavigateSteps(self, action_runner):
    action_runner.NavigateToPage(self)
    if self._need_measurement_ready:
      action_runner.WaitForJavaScriptCondition('measurementReady')
  def RunSmoothness(self, action_runner):
    # Let the animation run for a fixed window while smoothness is sampled.
    action_runner.Wait(10)
class ToughAnimationCasesPageSet(page_set_module.PageSet):
  """
  Description: A collection of animation performance tests
  """
  def __init__(self):
    super(ToughAnimationCasesPageSet, self).__init__(
      archive_data_file='data/tough_animation_cases.json',
      bucket=page_set_module.PARTNER_BUCKET)
    # These pages signal readiness via the ``measurementReady`` flag.
    urls_list_one = [
      # Why: Tests the balls animation implemented with SVG animations.
      'file://tough_animation_cases/balls_svg_animations.html',
      # Why: Tests the balls animation implemented with Javascript and canvas.
      'file://tough_animation_cases/balls_javascript_canvas.html',
      # Why: Tests the balls animation implemented with Javascript and CSS.
      'file://tough_animation_cases/balls_javascript_css.html',
      # Why: Tests the balls animation implemented with CSS keyframe animations.
      'file://tough_animation_cases/balls_css_keyframe_animations.html',
      # Why: Tests the balls animation implemented with transforms and CSS
      # keyframe animations to be run on the compositor thread.
      # pylint: disable=C0301
      'file://tough_animation_cases/balls_css_keyframe_animations_composited_transform.html',
      # Why: Tests the balls animation implemented with CSS transitions on 2
      # properties.
      'file://tough_animation_cases/balls_css_transition_2_properties.html',
      # Why: Tests the balls animation implemented with CSS transitions on 40
      # properties.
      'file://tough_animation_cases/balls_css_transition_40_properties.html',
      # Why: Tests the balls animation implemented with CSS transitions on all
      # animatable properties.
      'file://tough_animation_cases/balls_css_transition_all_properties.html',
      # pylint: disable=C0301
      'file://tough_animation_cases/overlay_background_color_css_transitions.html'
    ]
    for url in urls_list_one:
      self.AddPage(ToughAnimationCasesPage(url, self,
                                           need_measurement_ready=True))
    # These pages have no readiness flag; measure immediately after load.
    urls_list_two = [
      # Why: Tests various keyframed animations.
      'file://tough_animation_cases/keyframed_animations.html',
      # Why: Tests various transitions.
      'file://tough_animation_cases/transform_transitions.html',
      # Why: Login page is slow because of ineffecient transform operations.
      'http://ie.microsoft.com/testdrive/performance/robohornetpro/',
      # Why: JS execution blocks CSS transition unless initial transform is set.
      'file://tough_animation_cases/transform_transition_js_block.html'
    ]
    for url in urls_list_two:
      self.AddPage(ToughAnimationCasesPage(url, self,
                                           need_measurement_ready=False))
|
# MicroPython float/math golden test: prints every function's results so
# they can be diffed against CPython's output.  Values and formatting
# must therefore stay stable.
try:
    from math import *
except ImportError:
    print("SKIP")
    import sys
    sys.exit()
test_values = [-100., -1.23456, -1, -0.5, 0.0, 0.5, 1.23456, 100.]
test_values_small = [-10., -1.23456, -1, -0.5, 0.0, 0.5, 1.23456, 10.] # so we don't overflow 32-bit precision
p_test_values = [0.1, 0.5, 1.23456]
unit_range_test_values = [-1., -0.75, -0.5, -0.25, 0., 0.25, 0.5, 0.75, 1.]
# (name, function, inputs) triples for one-argument functions.
functions = [('sqrt', sqrt, p_test_values),
             ('exp', exp, test_values_small),
             ('expm1', expm1, test_values_small),
             ('log', log, p_test_values),
             ('log2', log2, p_test_values),
             ('log10', log10, p_test_values),
             ('cosh', cosh, test_values_small),
             ('sinh', sinh, test_values_small),
             ('tanh', tanh, test_values_small),
             ('acosh', acosh, [1.0, 5.0, 1.0]),  # NOTE(review): 1.0 appears twice -- intentional?
             ('asinh', asinh, test_values),
             ('atanh', atanh, [-0.99, -0.5, 0.0, 0.5, 0.99]),
             ('cos', cos, test_values),
             ('sin', sin, test_values),
             ('tan', tan, test_values),
             ('acos', acos, unit_range_test_values),
             ('asin', asin, unit_range_test_values),
             ('atan', atan, test_values),
             ('ceil', ceil, test_values),
             ('fabs', fabs, test_values),
             ('floor', floor, test_values),
             ('trunc', trunc, test_values),
             ('radians', radians, test_values),
             ('degrees', degrees, test_values),
             ]
for function_name, function, test_vals in functions:
    print(function_name)
    for value in test_vals:
        print("{:.5g}".format(function(value)))
# Functions that return a 2-tuple.
tuple_functions = [('frexp', frexp, test_values),
                   ('modf', modf, test_values),
                   ]
for function_name, function, test_vals in tuple_functions:
    print(function_name)
    for value in test_vals:
        x, y = function(value)
        print("{:.5g} {:.5g}".format(x, y))
# Two-argument functions, each with its own list of input pairs.
binary_functions = [('copysign', copysign, [(23., 42.), (-23., 42.), (23., -42.),
                                            (-23., -42.), (1., 0.0), (1., -0.0)]),
                    ('pow', pow, ((1., 0.), (0., 1.), (2., 0.5), (-3., 5.), (-3., -4.),)),
                    ('atan2', atan2, ((1., 0.), (0., 1.), (2., 0.5), (-3., 5.), (-3., -4.),)),
                    ('fmod', fmod, ((1., 1.), (0., 1.), (2., 0.5), (-3., 5.), (-3., -4.),)),
                    ('ldexp', ldexp, ((1., 0), (0., 1), (2., 2), (3., -2), (-3., -4),)),
                    ('log', log, ((2., 2.), (3., 2.), (4., 5.))),
                    ]
for function_name, function, test_vals in binary_functions:
    print(function_name)
    for value1, value2 in test_vals:
        print("{:.5g}".format(function(value1, value2)))
|
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.flatpages.models import FlatPage
from django.contrib.sites.models import Site
from django.test import TestCase, modify_settings, override_settings
from django.test.utils import ignore_warnings
from django.utils.deprecation import RemovedInDjango20Warning
from .settings import FLATPAGES_TEMPLATES
class TestDataMixin(object):
    """Create the Site and the four flatpage fixtures shared by the tests."""

    @classmethod
    def setUpTestData(cls):
        # Build the site directly (not via the manager) so it is guaranteed
        # to exist with pk=1 whether or not one is already present.
        cls.site1 = Site(pk=1, domain='example.com', name='example.com')
        cls.site1.save()
        # (class attribute, url, title, content, registration_required)
        fixtures = (
            ('fp1', '/flatpage/', 'A Flatpage', "Isn't it flat!", False),
            ('fp2', '/location/flatpage/', 'A Nested Flatpage', "Isn't it flat and deep!", False),
            ('fp3', '/sekrit/', 'Sekrit Flatpage', "Isn't it sekrit!", True),
            ('fp4', '/location/sekrit/', 'Sekrit Nested Flatpage', "Isn't it sekrit and deep!", True),
        )
        for attr, url, title, content, needs_registration in fixtures:
            page = FlatPage.objects.create(
                url=url, title=title, content=content, enable_comments=False,
                template_name='', registration_required=needs_registration,
            )
            page.sites.add(cls.site1)
            setattr(cls, attr, page)
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.flatpages'})
@override_settings(
    LOGIN_URL='/accounts/login/',
    MIDDLEWARE=[
        'django.middleware.common.CommonMiddleware',
        'django.contrib.sessions.middleware.SessionMiddleware',
        'django.middleware.csrf.CsrfViewMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
        'django.contrib.messages.middleware.MessageMiddleware',
        # The flatpage fallback is deliberately the last middleware entry.
        'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
    ],
    ROOT_URLCONF='flatpages_tests.urls',
    TEMPLATES=FLATPAGES_TEMPLATES,
    SITE_ID=1,
)
class FlatpageMiddlewareTests(TestDataMixin, TestCase):
    """Serving flatpages both through the explicit view (under
    /flatpage_root/) and through the fallback middleware (bare URLs),
    using the public and registration-required fixtures from TestDataMixin.
    """
    def test_view_flatpage(self):
        "A flatpage can be served through a view, even when the middleware is in use"
        response = self.client.get('/flatpage_root/flatpage/')
        self.assertContains(response, "<p>Isn't it flat!</p>")
    def test_view_non_existent_flatpage(self):
        "A non-existent flatpage raises 404 when served through a view, even when the middleware is in use"
        response = self.client.get('/flatpage_root/no_such_flatpage/')
        self.assertEqual(response.status_code, 404)
    def test_view_authenticated_flatpage(self):
        "A flatpage served through a view can require authentication"
        # Anonymous request is redirected to LOGIN_URL with ?next= set.
        response = self.client.get('/flatpage_root/sekrit/')
        self.assertRedirects(response, '/accounts/login/?next=/flatpage_root/sekrit/')
        user = User.objects.create_user('testuser', 'test@example.com', 's3krit')
        self.client.force_login(user)
        response = self.client.get('/flatpage_root/sekrit/')
        self.assertContains(response, "<p>Isn't it sekrit!</p>")
    def test_fallback_flatpage(self):
        "A flatpage can be served by the fallback middleware"
        response = self.client.get('/flatpage/')
        self.assertContains(response, "<p>Isn't it flat!</p>")
    def test_fallback_non_existent_flatpage(self):
        "A non-existent flatpage raises a 404 when served by the fallback middleware"
        response = self.client.get('/no_such_flatpage/')
        self.assertEqual(response.status_code, 404)
    def test_fallback_authenticated_flatpage(self):
        "A flatpage served by the middleware can require authentication"
        response = self.client.get('/sekrit/')
        self.assertRedirects(response, '/accounts/login/?next=/sekrit/')
        user = User.objects.create_user('testuser', 'test@example.com', 's3krit')
        self.client.force_login(user)
        response = self.client.get('/sekrit/')
        self.assertContains(response, "<p>Isn't it sekrit!</p>")
    def test_fallback_flatpage_special_chars(self):
        "A flatpage with special chars in the URL can be served by the fallback middleware"
        fp = FlatPage.objects.create(
            url="/some.very_special~chars-here/",
            title="A very special page",
            content="Isn't it special!",
            enable_comments=False,
            registration_required=False,
        )
        fp.sites.add(settings.SITE_ID)
        response = self.client.get('/some.very_special~chars-here/')
        self.assertContains(response, "<p>Isn't it special!</p>")
@ignore_warnings(category=RemovedInDjango20Warning)
@override_settings(
    # Clearing MIDDLEWARE makes Django fall back to MIDDLEWARE_CLASSES,
    # which is deprecated (RemovedInDjango20Warning is ignored above).
    MIDDLEWARE=None,
    MIDDLEWARE_CLASSES=[
        'django.middleware.common.CommonMiddleware',
        'django.contrib.sessions.middleware.SessionMiddleware',
        'django.middleware.csrf.CsrfViewMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
        'django.contrib.messages.middleware.MessageMiddleware',
        'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
    ],
)
class FlatpageMiddlewareClassesTests(FlatpageMiddlewareTests):
    """Re-run all FlatpageMiddlewareTests under the legacy
    MIDDLEWARE_CLASSES setting instead of MIDDLEWARE."""
    pass
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.flatpages'})
@override_settings(
    APPEND_SLASH=True,
    LOGIN_URL='/accounts/login/',
    MIDDLEWARE=[
        'django.middleware.common.CommonMiddleware',
        'django.contrib.sessions.middleware.SessionMiddleware',
        'django.middleware.csrf.CsrfViewMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
        'django.contrib.messages.middleware.MessageMiddleware',
        'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
    ],
    ROOT_URLCONF='flatpages_tests.urls',
    TEMPLATES=FLATPAGES_TEMPLATES,
    SITE_ID=1,
)
class FlatpageMiddlewareAppendSlashTests(TestDataMixin, TestCase):
    """APPEND_SLASH interaction with the flatpage view and fallback middleware."""

    def test_redirect_view_flatpage(self):
        "A flatpage can be served through a view and should add a slash"
        resp = self.client.get('/flatpage_root/flatpage')
        self.assertRedirects(resp, '/flatpage_root/flatpage/', status_code=301)

    def test_redirect_view_non_existent_flatpage(self):
        "A non-existent flatpage raises 404 when served through a view and should not add a slash"
        resp = self.client.get('/flatpage_root/no_such_flatpage')
        self.assertEqual(resp.status_code, 404)

    def test_redirect_fallback_flatpage(self):
        "A flatpage can be served by the fallback middleware and should add a slash"
        resp = self.client.get('/flatpage')
        self.assertRedirects(resp, '/flatpage/', status_code=301)

    def test_redirect_fallback_non_existent_flatpage(self):
        "A non-existent flatpage raises a 404 when served by the fallback middleware and should not add a slash"
        resp = self.client.get('/no_such_flatpage')
        self.assertEqual(resp.status_code, 404)

    def test_redirect_fallback_flatpage_special_chars(self):
        "A flatpage with special chars in the URL can be served by the fallback middleware and should add a slash"
        page = FlatPage.objects.create(
            url="/some.very_special~chars-here/", title="A very special page",
            content="Isn't it special!", enable_comments=False,
            registration_required=False,
        )
        page.sites.add(settings.SITE_ID)
        resp = self.client.get('/some.very_special~chars-here')
        self.assertRedirects(resp, '/some.very_special~chars-here/', status_code=301)

    def test_redirect_fallback_flatpage_root(self):
        "A flatpage at / should not cause a redirect loop when APPEND_SLASH is set"
        page = FlatPage.objects.create(
            url="/", title="Root", content="Root",
            enable_comments=False, registration_required=False,
        )
        page.sites.add(settings.SITE_ID)
        resp = self.client.get('/')
        self.assertContains(resp, "<p>Root</p>")
@ignore_warnings(category=RemovedInDjango20Warning)
@override_settings(
    # Clearing MIDDLEWARE makes Django fall back to MIDDLEWARE_CLASSES,
    # which is deprecated (RemovedInDjango20Warning is ignored above).
    MIDDLEWARE=None,
    MIDDLEWARE_CLASSES=[
        'django.middleware.common.CommonMiddleware',
        'django.contrib.sessions.middleware.SessionMiddleware',
        'django.middleware.csrf.CsrfViewMiddleware',
        'django.contrib.auth.middleware.AuthenticationMiddleware',
        'django.contrib.messages.middleware.MessageMiddleware',
        'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
    ],
)
class FlatpageAppendSlashMiddlewareClassesTests(FlatpageMiddlewareAppendSlashTests):
    """Re-run the APPEND_SLASH tests under the legacy MIDDLEWARE_CLASSES setting."""
    pass
|
import re
from collections import namedtuple
from datetime import datetime
from enum import Enum, auto
from functools import reduce
from typing import Iterable, Iterator, List, Optional, Tuple
# A whole-run parse result: aggregate status, parsed suites, and the
# LineStream that was consumed while parsing.
TestResult = namedtuple('TestResult', ['status', 'suites', 'log'])

class TestSuite(object):
    """One '# Subtest:' group: a named collection of test cases."""
    def __init__(self) -> None:
        self.status = TestStatus.SUCCESS  # aggregate status, updated while parsing
        self.name = ''
        self.cases: List[TestCase] = []

    def __str__(self) -> str:
        return f'TestSuite({self.status},{self.name},{self.cases})'

    def __repr__(self) -> str:
        return self.__str__()

class TestCase(object):
    """A single test case with its captured log lines."""
    def __init__(self) -> None:
        self.status = TestStatus.SUCCESS
        self.name = ''
        self.log: List[str] = []

    def __str__(self) -> str:
        return f'TestCase({self.status},{self.name},{self.log})'

    def __repr__(self) -> str:
        return self.__str__()

class TestStatus(Enum):
    """Outcome of a test case, a suite, or the whole run."""
    SUCCESS = auto()
    FAILURE = auto()
    SKIPPED = auto()
    TEST_CRASHED = auto()
    NO_TESTS = auto()
    FAILURE_TO_PARSE_TESTS = auto()
class LineStream:
	"""Provides a peek()/pop() interface over an iterator of (line#, text)."""
	_lines: Iterator[Tuple[int, str]]
	_next: Tuple[int, str]
	_done: bool

	def __init__(self, lines: Iterator[Tuple[int, str]]):
		self._lines = lines
		self._done = False
		self._next = (0, '')
		self._get_next()

	def _get_next(self) -> None:
		# Advance the buffered pair; record exhaustion instead of raising.
		try:
			self._next = next(self._lines)
		except StopIteration:
			self._done = True

	def peek(self) -> str:
		"""Return the current line's text without consuming it."""
		return self._next[1]

	def pop(self) -> str:
		"""Consume the current line and return its text."""
		_, text = self._next
		self._get_next()
		return text

	def __bool__(self) -> bool:
		return not self._done

	# Only used by kunit_tool_test.py.
	def __iter__(self) -> Iterator[str]:
		while not self._done:
			yield self.pop()

	def line_number(self) -> int:
		"""Return the current line's number."""
		return self._next[0]
# Start of KUnit output: the TAP version header (possibly behind a console
# prefix). End: kernel shutdown/panic messages.
kunit_start_re = re.compile(r'TAP version [0-9]+$')
kunit_end_re = re.compile('(List of all partitions:|'
			  'Kernel panic - not syncing: VFS:|reboot: System halted)')

def extract_tap_lines(kernel_output: Iterable[str]) -> LineStream:
	"""Return a LineStream of only the TAP lines within kernel_output.

	The console prefix measured on the 'TAP version' line is stripped from
	every subsequent line; extraction stops at the first end marker.
	"""
	def isolate_kunit_output(kernel_output: Iterable[str]) -> Iterator[Tuple[int, str]]:
		started = False
		prefix_len = 0
		for line_num, line in enumerate(kernel_output, start=1):
			line = line.rstrip()  # line always has a trailing \n
			if kunit_start_re.search(line):
				prefix_len = len(line.split('TAP version')[0])
				started = True
				yield line_num, line[prefix_len:]
			elif kunit_end_re.search(line):
				break
			elif started:
				yield line_num, line[prefix_len:]
	return LineStream(lines=isolate_kunit_output(kernel_output))
DIVIDER = '=' * 60  # full-width separator printed around suite headers
RESET = '\033[0;0m'  # ANSI attribute reset

def red(text) -> str:
	"""Wrap text in ANSI bold-red escapes."""
	return ''.join(('\033[1;31m', text, RESET))

def yellow(text) -> str:
	"""Wrap text in ANSI bold-yellow escapes."""
	return ''.join(('\033[1;33m', text, RESET))

def green(text) -> str:
	"""Wrap text in ANSI bold-green escapes."""
	return ''.join(('\033[1;32m', text, RESET))
def print_with_timestamp(message) -> None:
	"""Print message prefixed with the current wall-clock time (HH:MM:SS)."""
	stamp = datetime.now().strftime('%H:%M:%S')
	print('[%s] %s' % (stamp, message))

def format_suite_divider(message) -> str:
	"""Wrap message in the '======== ... ========' suite banner."""
	return ''.join(['======== ', message, ' ========'])

def print_suite_divider(message) -> None:
	"""Print the full-width divider followed by the suite banner line."""
	print_with_timestamp(DIVIDER)
	print_with_timestamp(format_suite_divider(message))

def print_log(log) -> None:
	"""Emit each saved log line with a timestamp prefix."""
	for entry in log:
		print_with_timestamp(entry)
# Lines that belong to the TAP stream proper (header, results, plans,
# '# Subtest:' headers and crash diagnostics), as opposed to kernel noise.
TAP_ENTRIES = re.compile(r'^(TAP|[\s]*ok|[\s]*not ok|[\s]*[0-9]+\.\.[0-9]+|[\s]*# (Subtest:|.*: kunit test case crashed!)).*$')

def consume_non_diagnostic(lines: LineStream) -> None:
	"""Discard leading lines that are not TAP entries."""
	while lines and not TAP_ENTRIES.match(lines.peek()):
		lines.pop()

def save_non_diagnostic(lines: LineStream, test_case: TestCase) -> None:
	"""Move leading non-TAP lines into test_case.log."""
	while lines and not TAP_ENTRIES.match(lines.peek()):
		test_case.log.append(lines.pop())
# Result-line patterns: OK_NOT_OK_SUBTEST matches indented per-case results,
# OK_NOT_OK_MODULE matches top-level suite results, OK_NOT_OK_SKIP matches
# either form when it carries a '# SKIP' directive.
OkNotOkResult = namedtuple('OkNotOkResult', ['is_ok','description', 'text'])
OK_NOT_OK_SKIP = re.compile(r'^[\s]*(ok|not ok) [0-9]+ - (.*) # SKIP(.*)$')
OK_NOT_OK_SUBTEST = re.compile(r'^[\s]+(ok|not ok) [0-9]+ - (.*)$')
OK_NOT_OK_MODULE = re.compile(r'^(ok|not ok) ([0-9]+) - (.*)$')
def parse_ok_not_ok_test_case(lines: LineStream, test_case: TestCase) -> bool:
	"""Consume a case's '(not) ok N - name' line, setting its name and status.

	Returns True when a result was recorded — including the end-of-stream
	case, which is treated as a crash — and False when no result line
	could be matched.
	"""
	save_non_diagnostic(lines, test_case)
	if not lines:
		# Output ended before the result line: assume the kernel crashed.
		test_case.status = TestStatus.TEST_CRASHED
		return True
	line = lines.peek()
	match = OK_NOT_OK_SUBTEST.match(line)
	while not match and lines:
		line = lines.pop()
		match = OK_NOT_OK_SUBTEST.match(line)
	if match:
		# NOTE(review): if the matching line was popped by the loop above,
		# this pop() consumes the line *after* it — confirm that is intended.
		test_case.log.append(lines.pop())
		test_case.name = match.group(2)
		skip_match = OK_NOT_OK_SKIP.match(line)
		if skip_match:
			test_case.status = TestStatus.SKIPPED
			return True
		if test_case.status == TestStatus.TEST_CRASHED:
			# A crash diagnostic seen earlier takes precedence over ok/not ok.
			return True
		if match.group(1) == 'ok':
			test_case.status = TestStatus.SUCCESS
		else:
			test_case.status = TestStatus.FAILURE
		return True
	else:
		return False
SUBTEST_DIAGNOSTIC = re.compile(r'^[\s]+# (.*)$')
DIAGNOSTIC_CRASH_MESSAGE = re.compile(r'^[\s]+# .*?: kunit test case crashed!$')
def parse_diagnostic(lines: LineStream, test_case: TestCase) -> bool:
	"""Consume one indented '#' diagnostic line into test_case.log.

	Marks the case as TEST_CRASHED when the diagnostic is a crash message.
	Returns True if a diagnostic line was consumed.
	"""
	save_non_diagnostic(lines, test_case)
	if not lines:
		return False
	line = lines.peek()
	match = SUBTEST_DIAGNOSTIC.match(line)
	if match:
		test_case.log.append(lines.pop())
		crash_match = DIAGNOSTIC_CRASH_MESSAGE.match(line)
		if crash_match:
			test_case.status = TestStatus.TEST_CRASHED
		return True
	else:
		return False
def parse_test_case(lines: LineStream) -> Optional[TestCase]:
	"""Parse one case: leading diagnostics, then its '(not) ok' result line.

	Returns the populated TestCase, or None when no result line was found.
	"""
	test_case = TestCase()
	save_non_diagnostic(lines, test_case)
	while parse_diagnostic(lines, test_case):
		pass
	return test_case if parse_ok_not_ok_test_case(lines, test_case) else None
SUBTEST_HEADER = re.compile(r'^[\s]+# Subtest: (.*)$')

def parse_subtest_header(lines: LineStream) -> Optional[str]:
	"""Consume a '# Subtest: name' line and return the name, else None."""
	consume_non_diagnostic(lines)
	if not lines:
		return None
	match = SUBTEST_HEADER.match(lines.peek())
	if not match:
		return None
	lines.pop()
	return match.group(1)

SUBTEST_PLAN = re.compile(r'[\s]+[0-9]+\.\.([0-9]+)')

def parse_subtest_plan(lines: LineStream) -> Optional[int]:
	"""Consume an indented '1..N' plan line and return N, else None."""
	consume_non_diagnostic(lines)
	match = SUBTEST_PLAN.match(lines.peek())
	if not match:
		return None
	lines.pop()
	return int(match.group(1))
def max_status(left: TestStatus, right: TestStatus) -> TestStatus:
	"""Combine two statuses, most severe first: crash > failure > other > skipped."""
	if left == right:
		return left
	if TestStatus.TEST_CRASHED in (left, right):
		return TestStatus.TEST_CRASHED
	if TestStatus.FAILURE in (left, right):
		return TestStatus.FAILURE
	if left == TestStatus.SKIPPED:
		return right
	return left
def parse_ok_not_ok_test_suite(lines: LineStream,
			       test_suite: TestSuite,
			       expected_suite_index: int) -> bool:
	"""Consume the top-level '(not) ok N - name' line that closes a suite.

	Sets test_suite.status from the result (a '# SKIP' directive overrides
	ok/not ok) and prints an error when the suite index N is out of
	sequence. Returns True if a suite result line was consumed.
	"""
	consume_non_diagnostic(lines)
	if not lines:
		# Output ended before the suite's result line: treat as a crash.
		test_suite.status = TestStatus.TEST_CRASHED
		return False
	line = lines.peek()
	match = OK_NOT_OK_MODULE.match(line)
	if match:
		lines.pop()
		if match.group(1) == 'ok':
			test_suite.status = TestStatus.SUCCESS
		else:
			test_suite.status = TestStatus.FAILURE
		skip_match = OK_NOT_OK_SKIP.match(line)
		if skip_match:
			test_suite.status = TestStatus.SKIPPED
		suite_index = int(match.group(2))
		if suite_index != expected_suite_index:
			# Suites are numbered 1..N by the top-level plan; a mismatch
			# usually means output was lost or interleaved.
			print_with_timestamp(
				red('[ERROR] ') + 'expected_suite_index ' +
				str(expected_suite_index) + ', but got ' +
				str(suite_index))
		return True
	else:
		return False
def bubble_up_errors(status_list: Iterable[TestStatus]) -> TestStatus:
	"""Fold statuses together; crashes/failures dominate, SKIPPED is neutral."""
	result = TestStatus.SKIPPED
	for status in status_list:
		result = max_status(result, status)
	return result

def bubble_up_test_case_errors(test_suite: TestSuite) -> TestStatus:
	"""Combine a suite's own status with the worst of its cases' statuses."""
	worst_case = bubble_up_errors(case.status for case in test_suite.cases)
	return max_status(worst_case, test_suite.status)
def parse_test_suite(lines: LineStream, expected_suite_index: int) -> Optional[TestSuite]:
	"""Parse one suite: '# Subtest:' header, '1..N' plan, up to N cases, and
	the closing top-level result line.

	Returns the suite, or None when the header/plan is missing or the
	suite's end line cannot be parsed.
	"""
	if not lines:
		return None
	consume_non_diagnostic(lines)
	test_suite = TestSuite()
	test_suite.status = TestStatus.SUCCESS
	name = parse_subtest_header(lines)
	if not name:
		return None
	test_suite.name = name
	expected_test_case_num = parse_subtest_plan(lines)
	if expected_test_case_num is None:
		return None
	while expected_test_case_num > 0:
		test_case = parse_test_case(lines)
		if not test_case:
			# Fewer cases than the plan promised; stop and let the
			# suite result line (or the errors below) report it.
			break
		test_suite.cases.append(test_case)
		expected_test_case_num -= 1
	if parse_ok_not_ok_test_suite(lines, test_suite, expected_suite_index):
		# Fold the worst case status into the suite's own status.
		test_suite.status = bubble_up_test_case_errors(test_suite)
		return test_suite
	elif not lines:
		print_with_timestamp(red('[ERROR] ') + 'ran out of lines before end token')
		return test_suite
	else:
		print(f'failed to parse end of suite "{name}", at line {lines.line_number()}: {lines.peek()}')
		return None
TAP_HEADER = re.compile(r'^TAP version 14$')

def parse_tap_header(lines: LineStream) -> bool:
	"""Consume the 'TAP version 14' header line; True on success."""
	consume_non_diagnostic(lines)
	if not TAP_HEADER.match(lines.peek()):
		return False
	lines.pop()
	return True

TEST_PLAN = re.compile(r'[0-9]+\.\.([0-9]+)')

def parse_test_plan(lines: LineStream) -> Optional[int]:
	"""Consume the top-level '1..N' plan and return N, else None."""
	consume_non_diagnostic(lines)
	match = TEST_PLAN.match(lines.peek())
	if not match:
		return None
	lines.pop()
	return int(match.group(1))

def bubble_up_suite_errors(test_suites: Iterable[TestSuite]) -> TestStatus:
	"""Aggregate status over every suite in the run."""
	return bubble_up_errors(suite.status for suite in test_suites)
def parse_test_result(lines: LineStream) -> TestResult:
	"""Parse an entire TAP document: header, plan, then each test suite.

	Returns a TestResult whose status is FAILURE_TO_PARSE_TESTS (bad
	header/plan), NO_TESTS (empty plan or nothing parsed), or the
	aggregate of the parsed suites' statuses.
	"""
	consume_non_diagnostic(lines)
	if not lines or not parse_tap_header(lines):
		return TestResult(TestStatus.FAILURE_TO_PARSE_TESTS, [], lines)
	expected_test_suite_num = parse_test_plan(lines)
	if expected_test_suite_num == 0:
		return TestResult(TestStatus.NO_TESTS, [], lines)
	elif expected_test_suite_num is None:
		return TestResult(TestStatus.FAILURE_TO_PARSE_TESTS, [], lines)
	test_suites = []
	for i in range(1, expected_test_suite_num + 1):
		test_suite = parse_test_suite(lines, i)
		if test_suite:
			test_suites.append(test_suite)
		else:
			# Bug fix: the original printed str(i - 2); when suite i fails
			# to parse, exactly i - 1 == len(test_suites) suites were
			# successfully parsed, so report that count.
			print_with_timestamp(
				red('[ERROR] ') + ' expected ' +
				str(expected_test_suite_num) +
				' test suites, but got ' + str(len(test_suites)))
			break
	# Anything still parseable as a suite here was not announced in the plan.
	test_suite = parse_test_suite(lines, -1)
	if test_suite:
		print_with_timestamp(red('[ERROR] ') +
			'got unexpected test suite: ' + test_suite.name)
	if test_suites:
		return TestResult(bubble_up_suite_errors(test_suites), test_suites, lines)
	else:
		return TestResult(TestStatus.NO_TESTS, [], lines)
class TestCounts:
	"""Running tally of test case outcomes."""
	passed: int
	failed: int
	crashed: int
	skipped: int

	def __init__(self):
		self.passed = self.failed = self.crashed = self.skipped = 0

	def total(self) -> int:
		"""Total number of cases counted so far."""
		return sum((self.passed, self.failed, self.crashed, self.skipped))
def print_and_count_results(test_result: TestResult) -> TestCounts:
	"""Print a colored line per suite and per case, tallying case outcomes.

	Crashed and failed cases also have their captured log echoed in
	yellow. Returns the per-status TestCounts.
	"""
	counts = TestCounts()
	for test_suite in test_result.suites:
		if test_suite.status == TestStatus.SUCCESS:
			print_suite_divider(green('[PASSED] ') + test_suite.name)
		elif test_suite.status == TestStatus.SKIPPED:
			print_suite_divider(yellow('[SKIPPED] ') + test_suite.name)
		elif test_suite.status == TestStatus.TEST_CRASHED:
			# NOTE(review): the paren placement colors the suite name too,
			# unlike the PASSED/FAILED lines — confirm this is intentional.
			print_suite_divider(red('[CRASHED] ' + test_suite.name))
		else:
			print_suite_divider(red('[FAILED] ') + test_suite.name)
		for test_case in test_suite.cases:
			if test_case.status == TestStatus.SUCCESS:
				counts.passed += 1
				print_with_timestamp(green('[PASSED] ') + test_case.name)
			elif test_case.status == TestStatus.SKIPPED:
				counts.skipped += 1
				print_with_timestamp(yellow('[SKIPPED] ') + test_case.name)
			elif test_case.status == TestStatus.TEST_CRASHED:
				counts.crashed += 1
				print_with_timestamp(red('[CRASHED] ' + test_case.name))
				print_log(map(yellow, test_case.log))
				print_with_timestamp('')
			else:
				counts.failed += 1
				print_with_timestamp(red('[FAILED] ') + test_case.name)
				print_log(map(yellow, test_case.log))
				print_with_timestamp('')
	return counts
def parse_run_tests(kernel_output: Iterable[str]) -> TestResult:
	"""Entry point: extract the TAP lines from raw kernel output, parse
	them, print per-case results, and finish with a colored summary line."""
	counts = TestCounts()
	lines = extract_tap_lines(kernel_output)
	test_result = parse_test_result(lines)
	if test_result.status == TestStatus.NO_TESTS:
		print(red('[ERROR] ') + yellow('no tests run!'))
	elif test_result.status == TestStatus.FAILURE_TO_PARSE_TESTS:
		print(red('[ERROR] ') + yellow('could not parse test results!'))
	else:
		counts = print_and_count_results(test_result)
	print_with_timestamp(DIVIDER)
	# Summary color: green on success, yellow when everything skipped, red otherwise.
	color_for = {TestStatus.SUCCESS: green, TestStatus.SKIPPED: yellow}
	fmt = color_for.get(test_result.status, red)
	print_with_timestamp(
		fmt('Testing complete. %d tests run. %d failed. %d crashed. %d skipped.' %
		    (counts.total(), counts.failed, counts.crashed, counts.skipped)))
	return test_result
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
# Populate the admin registry from each installed app's admin module.
admin.autodiscover()
# NOTE(review): patterns() and string view references are Django <= 1.9
# syntax (removed in 1.10) — presumably this project targets an old Django.
urlpatterns = patterns('',
    url(r'^admin/', include(admin.site.urls)),
    # Serve collected static files from STATIC_ROOT with directory listings;
    # typically only appropriate for development setups.
    url(r'^static/(?P<path>.*)$', 'django.views.static.serve', {
        'document_root': settings.STATIC_ROOT,
        'show_indexes': True,
    }),
)
|
"""CGI test 2 - basic use of cgi module."""
import cgitb; cgitb.enable()
import cgi
def main():
    # Parse the submitted form data (query string or POST body).
    form = cgi.FieldStorage()
    # A CGI response is the header block, a blank line, then the body.
    print "Content-type: text/html"
    print
    if not form:
        print "<h1>No Form Keys</h1>"
    else:
        print "<h1>Form Keys</h1>"
        for key in form.keys():
            value = form[key].value
            # HTML-escape the untrusted field names and values before echoing.
            print "<p>", cgi.escape(key), ":", cgi.escape(value)
if __name__ == "__main__":
    main()
|
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_asm_policy
short_description: Manage BIG-IP ASM policies
description:
- Manage BIG-IP ASM policies.
version_added: 2.5
deprecated:
removed_in: '2.12'
alternative: bigip_asm_policy_manage
why: >
The bigip_asm_policy module has been split into three new modules to handle import, export and general policy
management. This will allow scalability of the asm policy management as well as ease of maintenance.
    Additionally, to further reduce the burden of having multiple smaller modules, F5 has created an asm_policy
    role in Ansible Galaxy for a more declarative way of ASM policy management.
options:
active:
description:
- If C(yes) will apply and activate existing inactive policy. If C(no), it will
deactivate existing active policy. Generally should be C(yes) only in cases where
you want to activate new or existing policy.
default: no
type: bool
name:
description:
- The ASM policy to manage or create.
required: True
state:
description:
- When C(state) is C(present), and C(file) or C(template) parameter is provided,
new ASM policy is imported and created with the given C(name).
- When C(state) is present and no C(file) or C(template) parameter is provided
new blank ASM policy is created with the given C(name).
- When C(state) is C(absent), ensures that the policy is removed, even if it is
currently active.
choices:
- present
- absent
default: present
file:
description:
- Full path to a policy file to be imported into the BIG-IP ASM.
- Policy files exported from newer versions of BIG-IP cannot be imported into older
versions of BIG-IP. The opposite, however, is true; you can import older into
newer.
template:
description:
- An ASM policy built-in template. If the template does not exist we will raise an error.
- Once the policy has been created, this value cannot change.
- The C(Comprehensive), C(Drupal), C(Fundamental), C(Joomla),
C(Vulnerability Assessment Baseline), and C(Wordpress) templates are only available
on BIG-IP versions >= 13.
choices:
- ActiveSync v1.0 v2.0 (http)
- ActiveSync v1.0 v2.0 (https)
- Comprehensive
- Drupal
- Fundamental
- Joomla
- LotusDomino 6.5 (http)
- LotusDomino 6.5 (https)
- OWA Exchange 2003 (http)
- OWA Exchange 2003 (https)
- OWA Exchange 2003 with ActiveSync (http)
- OWA Exchange 2003 with ActiveSync (https)
- OWA Exchange 2007 (http)
- OWA Exchange 2007 (https)
- OWA Exchange 2007 with ActiveSync (http)
- OWA Exchange 2007 with ActiveSync (https)
- OWA Exchange 2010 (http)
- OWA Exchange 2010 (https)
- Oracle 10g Portal (http)
- Oracle 10g Portal (https)
- Oracle Applications 11i (http)
- Oracle Applications 11i (https)
- PeopleSoft Portal 9 (http)
- PeopleSoft Portal 9 (https)
- Rapid Deployment Policy
- SAP NetWeaver 7 (http)
- SAP NetWeaver 7 (https)
- SharePoint 2003 (http)
- SharePoint 2003 (https)
- SharePoint 2007 (http)
- SharePoint 2007 (https)
- SharePoint 2010 (http)
- SharePoint 2010 (https)
- Vulnerability Assessment Baseline
- Wordpress
partition:
description:
- Device partition to manage resources on.
default: Common
extends_documentation_fragment: f5
author:
- Wojciech Wypior (@wojtek0806)
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Import and activate ASM policy
bigip_asm_policy:
name: new_asm_policy
file: /root/asm_policy.xml
active: yes
state: present
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
- name: Import ASM policy from template
bigip_asm_policy:
name: new_sharepoint_policy
template: SharePoint 2007 (http)
state: present
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
- name: Create blank ASM policy
bigip_asm_policy:
name: new_blank_policy
state: present
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
- name: Create blank ASM policy and activate
bigip_asm_policy:
name: new_blank_policy
active: yes
state: present
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
- name: Activate ASM policy
bigip_asm_policy:
name: inactive_policy
active: yes
state: present
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
- name: Deactivate ASM policy
bigip_asm_policy:
name: active_policy
state: present
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
- name: Import and activate ASM policy in Role
bigip_asm_policy:
name: new_asm_policy
file: "{{ role_path }}/files/asm_policy.xml"
active: yes
state: present
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
- name: Import ASM binary policy
bigip_asm_policy:
name: new_asm_policy
file: "/root/asm_policy.plc"
active: yes
state: present
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
'''
RETURN = r'''
active:
description: Set when activating/deactivating ASM policy
returned: changed
type: bool
sample: yes
state:
description: Action performed on the target device.
returned: changed
type: str
sample: absent
file:
description: Local path to ASM policy file.
returned: changed
type: str
sample: /root/some_policy.xml
template:
description: Name of the built-in ASM policy template
returned: changed
type: str
sample: OWA Exchange 2007 (https)
name:
description: Name of the ASM policy to be managed/created
returned: changed
type: str
sample: Asm_APP1_Transparent
'''
import os
import time
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
from distutils.version import LooseVersion
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.icontrol import upload_file
from library.module_utils.network.f5.icontrol import tmos_version
from library.module_utils.network.f5.icontrol import module_provisioned
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.icontrol import upload_file
from ansible.module_utils.network.f5.icontrol import tmos_version
from ansible.module_utils.network.f5.icontrol import module_provisioned
class Parameters(AnsibleF5Parameters):
    """Translate Ansible module parameters to/from the BIG-IP ASM REST API."""

    # Attributes that can be updated on an existing policy.
    updatables = [
        'active',
    ]
    # Attributes reported back to Ansible in the module result.
    returnables = [
        'name',
        'template',
        'file',
        'active',
    ]
    # Attributes sent to the API on create/update.
    api_attributes = [
        'name',
        'file',
        'active',
    ]
    # API attribute name -> module parameter name.
    api_map = {
        'filename': 'file',
    }

    @property
    def template_link(self):
        """Return dict(link=selfLink) for the selected built-in template.

        Uses a cached value when present; otherwise queries the device's
        policy-template collection and matches on the upper-cased template
        name. Returns None when nothing matches.
        """
        if self._values['template_link'] is not None:
            return self._values['template_link']
        collection = self._templates_from_device()
        for resource in collection['items']:
            if resource['name'] == self.template.upper():
                return dict(link=resource['selfLink'])
        return None

    @property
    def full_path(self):
        """Fully qualified policy name (e.g. '/Common/my_policy').

        Bug fix: fq_name() takes (partition, name); the original passed only
        the name, so the result was never partition-qualified.
        """
        return fq_name(self.partition, self.name)

    def _templates_from_device(self):
        """GET the ASM policy-template collection from the device.

        Raises F5ModuleError on a malformed JSON body or a 400 response.
        """
        uri = "https://{0}:{1}/mgmt/tm/asm/policy-templates/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        return response

    def to_return(self):
        """Return the filtered dict of returnable values for module output."""
        result = {}
        for returnable in self.returnables:
            result[returnable] = getattr(self, returnable)
        result = self._filter_params(result)
        return result
class V1Parameters(Parameters):
    """Parameter adapter for BIG-IP versions that lack the v13-only
    built-in ASM policy templates."""

    @property
    def template(self):
        """Map the human-readable template name to its internal identifier."""
        raw = self._values['template']
        if raw is None:
            return None
        template_map = {
            'ActiveSync v1.0 v2.0 (http)': 'POLICY_TEMPLATE_ACTIVESYNC_V1_0_V2_0_HTTP',
            'ActiveSync v1.0 v2.0 (https)': 'POLICY_TEMPLATE_ACTIVESYNC_V1_0_V2_0_HTTPS',
            'LotusDomino 6.5 (http)': 'POLICY_TEMPLATE_LOTUSDOMINO_6_5_HTTP',
            'LotusDomino 6.5 (https)': 'POLICY_TEMPLATE_LOTUSDOMINO_6_5_HTTPS',
            'OWA Exchange 2003 (http)': 'POLICY_TEMPLATE_OWA_EXCHANGE_2003_HTTP',
            'OWA Exchange 2003 (https)': 'POLICY_TEMPLATE_OWA_EXCHANGE_2003_HTTPS',
            'OWA Exchange 2003 with ActiveSync (http)': 'POLICY_TEMPLATE_OWA_EXCHANGE_2003_WITH_ACTIVESYNC_HTTP',
            'OWA Exchange 2003 with ActiveSync (https)': 'POLICY_TEMPLATE_OWA_EXCHANGE_2003_WITH_ACTIVESYNC_HTTPS',
            'OWA Exchange 2007 (http)': 'POLICY_TEMPLATE_OWA_EXCHANGE_2007_HTTP',
            'OWA Exchange 2007 (https)': 'POLICY_TEMPLATE_OWA_EXCHANGE_2007_HTTPS',
            'OWA Exchange 2007 with ActiveSync (http)': 'POLICY_TEMPLATE_OWA_EXCHANGE_2007_WITH_ACTIVESYNC_HTTP',
            'OWA Exchange 2007 with ActiveSync (https)': 'POLICY_TEMPLATE_OWA_EXCHANGE_2007_WITH_ACTIVESYNC_HTTPS',
            'OWA Exchange 2010 (http)': 'POLICY_TEMPLATE_OWA_EXCHANGE_2010_HTTP',
            'OWA Exchange 2010 (https)': 'POLICY_TEMPLATE_OWA_EXCHANGE_2010_HTTPS',
            'Oracle 10g Portal (http)': 'POLICY_TEMPLATE_ORACLE_10G_PORTAL_HTTP',
            'Oracle 10g Portal (https)': 'POLICY_TEMPLATE_ORACLE_10G_PORTAL_HTTPS',
            'Oracle Applications 11i (http)': 'POLICY_TEMPLATE_ORACLE_APPLICATIONS_11I_HTTP',
            'Oracle Applications 11i (https)': 'POLICY_TEMPLATE_ORACLE_APPLICATIONS_11I_HTTPS',
            'PeopleSoft Portal 9 (http)': 'POLICY_TEMPLATE_PEOPLESOFT_PORTAL_9_HTTP',
            'PeopleSoft Portal 9 (https)': 'POLICY_TEMPLATE_PEOPLESOFT_PORTAL_9_HTTPS',
            'Rapid Deployment Policy': 'POLICY_TEMPLATE_RAPID_DEPLOYMENT',
            'SAP NetWeaver 7 (http)': 'POLICY_TEMPLATE_SAP_NETWEAVER_7_HTTP',
            'SAP NetWeaver 7 (https)': 'POLICY_TEMPLATE_SAP_NETWEAVER_7_HTTPS',
            'SharePoint 2003 (http)': 'POLICY_TEMPLATE_SHAREPOINT_2003_HTTP',
            'SharePoint 2003 (https)': 'POLICY_TEMPLATE_SHAREPOINT_2003_HTTPS',
            'SharePoint 2007 (http)': 'POLICY_TEMPLATE_SHAREPOINT_2007_HTTP',
            'SharePoint 2007 (https)': 'POLICY_TEMPLATE_SHAREPOINT_2007_HTTPS',
            'SharePoint 2010 (http)': 'POLICY_TEMPLATE_SHAREPOINT_2010_HTTP',
            'SharePoint 2010 (https)': 'POLICY_TEMPLATE_SHAREPOINT_2010_HTTPS'
        }
        mapped = template_map.get(raw)
        if mapped is None:
            raise F5ModuleError(
                "The specified template is not valid for this version of BIG-IP."
            )
        return mapped
class V2Parameters(Parameters):
    @property
    def template(self):
        """Map the human-readable template name to its v13+ API constant.

        Returns None when no template was supplied.

        :raises F5ModuleError: when the name has no mapping. BUG FIX: this
            previously leaked a bare ``KeyError``; it now raises the module
            error type, consistent with the v1 parameter class.
        """
        if self._values['template'] is None:
            return None
        template_map = {
            'ActiveSync v1.0 v2.0 (http)': 'POLICY_TEMPLATE_ACTIVESYNC_V1_0_V2_0_HTTP',
            'ActiveSync v1.0 v2.0 (https)': 'POLICY_TEMPLATE_ACTIVESYNC_V1_0_V2_0_HTTPS',
            'Comprehensive': 'POLICY_TEMPLATE_COMPREHENSIVE',  # v13
            'Drupal': 'POLICY_TEMPLATE_DRUPAL',  # v13
            'Fundamental': 'POLICY_TEMPLATE_FUNDAMENTAL',  # v13
            'Joomla': 'POLICY_TEMPLATE_JOOMLA',  # v13
            'LotusDomino 6.5 (http)': 'POLICY_TEMPLATE_LOTUSDOMINO_6_5_HTTP',
            'LotusDomino 6.5 (https)': 'POLICY_TEMPLATE_LOTUSDOMINO_6_5_HTTPS',
            'OWA Exchange 2003 (http)': 'POLICY_TEMPLATE_OWA_EXCHANGE_2003_HTTP',
            'OWA Exchange 2003 (https)': 'POLICY_TEMPLATE_OWA_EXCHANGE_2003_HTTPS',
            'OWA Exchange 2003 with ActiveSync (http)': 'POLICY_TEMPLATE_OWA_EXCHANGE_2003_WITH_ACTIVESYNC_HTTP',
            'OWA Exchange 2003 with ActiveSync (https)': 'POLICY_TEMPLATE_OWA_EXCHANGE_2003_WITH_ACTIVESYNC_HTTPS',
            'OWA Exchange 2007 (http)': 'POLICY_TEMPLATE_OWA_EXCHANGE_2007_HTTP',
            'OWA Exchange 2007 (https)': 'POLICY_TEMPLATE_OWA_EXCHANGE_2007_HTTPS',
            'OWA Exchange 2007 with ActiveSync (http)': 'POLICY_TEMPLATE_OWA_EXCHANGE_2007_WITH_ACTIVESYNC_HTTP',
            'OWA Exchange 2007 with ActiveSync (https)': 'POLICY_TEMPLATE_OWA_EXCHANGE_2007_WITH_ACTIVESYNC_HTTPS',
            'OWA Exchange 2010 (http)': 'POLICY_TEMPLATE_OWA_EXCHANGE_2010_HTTP',
            'OWA Exchange 2010 (https)': 'POLICY_TEMPLATE_OWA_EXCHANGE_2010_HTTPS',
            'Oracle 10g Portal (http)': 'POLICY_TEMPLATE_ORACLE_10G_PORTAL_HTTP',
            'Oracle 10g Portal (https)': 'POLICY_TEMPLATE_ORACLE_10G_PORTAL_HTTPS',
            'Oracle Applications 11i (http)': 'POLICY_TEMPLATE_ORACLE_APPLICATIONS_11I_HTTP',
            'Oracle Applications 11i (https)': 'POLICY_TEMPLATE_ORACLE_APPLICATIONS_11I_HTTPS',
            'PeopleSoft Portal 9 (http)': 'POLICY_TEMPLATE_PEOPLESOFT_PORTAL_9_HTTP',
            'PeopleSoft Portal 9 (https)': 'POLICY_TEMPLATE_PEOPLESOFT_PORTAL_9_HTTPS',
            'Rapid Deployment Policy': 'POLICY_TEMPLATE_RAPID_DEPLOYMENT',
            'SAP NetWeaver 7 (http)': 'POLICY_TEMPLATE_SAP_NETWEAVER_7_HTTP',
            'SAP NetWeaver 7 (https)': 'POLICY_TEMPLATE_SAP_NETWEAVER_7_HTTPS',
            'SharePoint 2003 (http)': 'POLICY_TEMPLATE_SHAREPOINT_2003_HTTP',
            'SharePoint 2003 (https)': 'POLICY_TEMPLATE_SHAREPOINT_2003_HTTPS',
            'SharePoint 2007 (http)': 'POLICY_TEMPLATE_SHAREPOINT_2007_HTTP',
            'SharePoint 2007 (https)': 'POLICY_TEMPLATE_SHAREPOINT_2007_HTTPS',
            'SharePoint 2010 (http)': 'POLICY_TEMPLATE_SHAREPOINT_2010_HTTP',
            'SharePoint 2010 (https)': 'POLICY_TEMPLATE_SHAREPOINT_2010_HTTPS',
            'Vulnerability Assessment Baseline': 'POLICY_TEMPLATE_VULNERABILITY_ASSESSMENT',  # v13
            'Wordpress': 'POLICY_TEMPLATE_WORDPRESS'  # v13
        }
        # Mirror V1Parameters: report an unsupported template through the
        # module's error type instead of a raw KeyError.
        if self._values['template'] in template_map:
            return template_map[self._values['template']]
        raise F5ModuleError(
            "The specified template is not valid for this version of BIG-IP."
        )
class Changes(Parameters):
    @property
    def template(self):
        """Translate the stored API template constant back to its display name."""
        raw = self._values['template']
        if raw is None:
            return None
        # (API constant, human-readable name) pairs; inverse of the request map.
        reverse_pairs = (
            ('POLICY_TEMPLATE_ACTIVESYNC_V1_0_V2_0_HTTP', 'ActiveSync v1.0 v2.0 (http)'),
            ('POLICY_TEMPLATE_ACTIVESYNC_V1_0_V2_0_HTTPS', 'ActiveSync v1.0 v2.0 (https)'),
            ('POLICY_TEMPLATE_COMPREHENSIVE', 'Comprehensive'),
            ('POLICY_TEMPLATE_DRUPAL', 'Drupal'),
            ('POLICY_TEMPLATE_FUNDAMENTAL', 'Fundamental'),
            ('POLICY_TEMPLATE_JOOMLA', 'Joomla'),
            ('POLICY_TEMPLATE_LOTUSDOMINO_6_5_HTTP', 'LotusDomino 6.5 (http)'),
            ('POLICY_TEMPLATE_LOTUSDOMINO_6_5_HTTPS', 'LotusDomino 6.5 (https)'),
            ('POLICY_TEMPLATE_OWA_EXCHANGE_2003_HTTP', 'OWA Exchange 2003 (http)'),
            ('POLICY_TEMPLATE_OWA_EXCHANGE_2003_HTTPS', 'OWA Exchange 2003 (https)'),
            ('POLICY_TEMPLATE_OWA_EXCHANGE_2003_WITH_ACTIVESYNC_HTTP', 'OWA Exchange 2003 with ActiveSync (http)'),
            ('POLICY_TEMPLATE_OWA_EXCHANGE_2003_WITH_ACTIVESYNC_HTTPS', 'OWA Exchange 2003 with ActiveSync (https)'),
            ('POLICY_TEMPLATE_OWA_EXCHANGE_2007_HTTP', 'OWA Exchange 2007 (http)'),
            ('POLICY_TEMPLATE_OWA_EXCHANGE_2007_HTTPS', 'OWA Exchange 2007 (https)'),
            ('POLICY_TEMPLATE_OWA_EXCHANGE_2007_WITH_ACTIVESYNC_HTTP', 'OWA Exchange 2007 with ActiveSync (http)'),
            ('POLICY_TEMPLATE_OWA_EXCHANGE_2007_WITH_ACTIVESYNC_HTTPS', 'OWA Exchange 2007 with ActiveSync (https)'),
            ('POLICY_TEMPLATE_OWA_EXCHANGE_2010_HTTP', 'OWA Exchange 2010 (http)'),
            ('POLICY_TEMPLATE_OWA_EXCHANGE_2010_HTTPS', 'OWA Exchange 2010 (https)'),
            ('POLICY_TEMPLATE_ORACLE_10G_PORTAL_HTTP', 'Oracle 10g Portal (http)'),
            ('POLICY_TEMPLATE_ORACLE_10G_PORTAL_HTTPS', 'Oracle 10g Portal (https)'),
            ('POLICY_TEMPLATE_ORACLE_APPLICATIONS_11I_HTTP', 'Oracle Applications 11i (http)'),
            ('POLICY_TEMPLATE_ORACLE_APPLICATIONS_11I_HTTPS', 'Oracle Applications 11i (https)'),
            ('POLICY_TEMPLATE_PEOPLESOFT_PORTAL_9_HTTP', 'PeopleSoft Portal 9 (http)'),
            ('POLICY_TEMPLATE_PEOPLESOFT_PORTAL_9_HTTPS', 'PeopleSoft Portal 9 (https)'),
            ('POLICY_TEMPLATE_RAPID_DEPLOYMENT', 'Rapid Deployment Policy'),
            ('POLICY_TEMPLATE_SAP_NETWEAVER_7_HTTP', 'SAP NetWeaver 7 (http)'),
            ('POLICY_TEMPLATE_SAP_NETWEAVER_7_HTTPS', 'SAP NetWeaver 7 (https)'),
            ('POLICY_TEMPLATE_SHAREPOINT_2003_HTTP', 'SharePoint 2003 (http)'),
            ('POLICY_TEMPLATE_SHAREPOINT_2003_HTTPS', 'SharePoint 2003 (https)'),
            ('POLICY_TEMPLATE_SHAREPOINT_2007_HTTP', 'SharePoint 2007 (http)'),
            ('POLICY_TEMPLATE_SHAREPOINT_2007_HTTPS', 'SharePoint 2007 (https)'),
            ('POLICY_TEMPLATE_SHAREPOINT_2010_HTTP', 'SharePoint 2010 (http)'),
            ('POLICY_TEMPLATE_SHAREPOINT_2010_HTTPS', 'SharePoint 2010 (https)'),
            ('POLICY_TEMPLATE_VULNERABILITY_ASSESSMENT', 'Vulnerability Assessment Baseline'),
            ('POLICY_TEMPLATE_WORDPRESS', 'Wordpress'),
        )
        return dict(reverse_pairs)[raw]
class Difference(object):
    """Computes per-option differences between the wanted and current state."""
    def __init__(self, want, have=None):
        self.want = want
        self.have = have

    def compare(self, param):
        """Return the changed value for *param*, or None when unchanged.

        A property defined on this class takes precedence; otherwise the
        generic attribute comparison is used.
        """
        try:
            return getattr(self, param)
        except AttributeError:
            return self.__default(param)

    def __default(self, param):
        # Generic comparison: report the wanted value when it differs from
        # (or is absent on) the current state.
        wanted = getattr(self.want, param)
        try:
            if wanted != getattr(self.have, param):
                return wanted
        except AttributeError:
            return wanted

    @property
    def active(self):
        # Only an actual activation flip is reported; equal values yield None.
        wanted, current = self.want.active, self.have.active
        if wanted is True and current is False:
            return True
        if wanted is False and current is True:
            return False
class BaseManager(object):
    """Shared create/update/activate/remove logic for ASM policies.

    Version-specific subclasses supply ``self.want`` plus the
    ``create_from_file``/``create_from_template`` strategies.
    """
    def __init__(self, *args, **kwargs):
        # REST client and AnsibleModule handle are injected by subclasses.
        self.client = kwargs.get('client', None)
        self.module = kwargs.get('module', None)
        self.have = None
        self.changes = Changes()
    def exec_module(self):
        """Apply the desired state and return the Ansible result dict."""
        changed = False
        result = dict()
        state = self.want.state
        if state == "present":
            changed = self.present()
        elif state == "absent":
            changed = self.absent()
        changes = self.changes.to_return()
        result.update(**changes)
        result.update(dict(changed=changed))
        self._announce_deprecations(result)
        return result
    def _announce_deprecations(self, result):
        """Forward any queued deprecation warnings to Ansible."""
        warnings = result.pop('__warnings', [])
        for warning in warnings:
            self.client.module.deprecate(
                msg=warning['msg'],
                version=warning['version']
            )
    def _set_changed_options(self):
        """Record every user-supplied returnable option as a change."""
        changed = {}
        for key in Parameters.returnables:
            if getattr(self.want, key) is not None:
                changed[key] = getattr(self.want, key)
        if changed:
            self.changes = Changes(params=changed)
    def should_update(self):
        """Return True when the desired state differs from the device state."""
        result = self._update_changed_options()
        if result:
            return True
        return False
    def _update_changed_options(self):
        """Diff want vs have and stash differences in ``self.changes``."""
        diff = Difference(self.want, self.have)
        updatables = Parameters.updatables
        changed = dict()
        for k in updatables:
            change = diff.compare(k)
            if change is None:
                continue
            else:
                if isinstance(change, dict):
                    changed.update(change)
                else:
                    changed[k] = change
        if changed:
            self.changes = Changes(params=changed)
            return True
        return False
    def present(self):
        """Ensure the policy exists, creating or updating as needed."""
        if self.exists():
            return self.update()
        else:
            return self.create()
    def absent(self):
        """Ensure the policy is removed; False when already absent."""
        if not self.exists():
            return False
        else:
            return self.remove()
    def exists(self):
        """Check whether the named policy exists in the target partition."""
        uri = "https://{0}:{1}/mgmt/tm/asm/policies/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        # NOTE(review): assumes the payload always carries an 'items' key;
        # confirm BIG-IP includes it when no policies exist.
        if any(p['name'] == self.want.name and p['partition'] == self.want.partition for p in response['items']):
            return True
        return False
    def _file_is_missing(self):
        """True only when a policy file was requested but is absent on disk."""
        if self.want.template and self.want.file is None:
            return False
        if self.want.template is None and self.want.file is None:
            return False
        if not os.path.exists(self.want.file):
            return True
        return False
    def create(self):
        """Create the policy (blank, from template, or from file)."""
        if self.want.active is None:
            # Default to an inactive policy when the user did not say.
            self.want.update(dict(active=False))
        if self._file_is_missing():
            raise F5ModuleError(
                "The specified ASM policy file does not exist"
            )
        self._set_changed_options()
        if self.module.check_mode:
            return True
        if self.want.template is None and self.want.file is None:
            self.create_blank()
        else:
            # Template takes precedence; the argument spec makes the two
            # options mutually exclusive anyway.
            if self.want.template is not None:
                self.create_from_template()
            elif self.want.file is not None:
                self.create_from_file()
        if self.want.active:
            self.activate()
            return True
        else:
            return True
    def update(self):
        """Apply changed options to an existing policy."""
        self.have = self.read_current_from_device()
        if not self.should_update():
            return False
        if self.module.check_mode:
            return True
        self.update_on_device()
        if self.changes.active:
            self.activate()
        return True
    def activate(self):
        """Start an apply-policy task and wait for it to finish."""
        self.have = self.read_current_from_device()
        task_id = self.apply_on_device()
        if self.wait_for_task(task_id, 'apply'):
            return True
        else:
            raise F5ModuleError('Apply policy task failed.')
    def wait_for_task(self, task_id, task):
        """Poll an apply/import task until it reports COMPLETED or FAILURE."""
        uri = ''
        if task == 'apply':
            uri = "https://{0}:{1}/mgmt/tm/asm/tasks/apply-policy/{2}".format(
                self.client.provider['server'],
                self.client.provider['server_port'],
                task_id
            )
        elif task == 'import':
            uri = "https://{0}:{1}/mgmt/tm/asm/tasks/import-policy/{2}".format(
                self.client.provider['server'],
                self.client.provider['server_port'],
                task_id
            )
        while True:
            resp = self.client.api.get(uri)
            try:
                response = resp.json()
            except ValueError as ex:
                raise F5ModuleError(str(ex))
            if 'code' in response and response['code'] == 400:
                if 'message' in response:
                    raise F5ModuleError(response['message'])
                else:
                    raise F5ModuleError(resp.content)
            if response['status'] in ['COMPLETED', 'FAILURE']:
                break
            # Poll once per second until the task reaches a terminal state.
            time.sleep(1)
        if response['status'] == 'FAILURE':
            return False
        if response['status'] == 'COMPLETED':
            return True
    def _get_policy_id(self):
        """Resolve the policy's internal id by name and partition."""
        name = self.want.name
        partition = self.want.partition
        uri = "https://{0}:{1}/mgmt/tm/asm/policies/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        policy_id = next(
            (p['id'] for p in response['items'] if p['name'] == name and p['partition'] == partition), None
        )
        if not policy_id:
            raise F5ModuleError("The policy was not found")
        return policy_id
    def update_on_device(self):
        """PATCH the changed, non-activation options to the device."""
        params = self.changes.api_params()
        policy_id = self._get_policy_id()
        uri = "https://{0}:{1}/mgmt/tm/asm/policies/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            policy_id
        )
        # NOTE(review): assumes 'active' is always present in the changed
        # params; a change set without it would raise KeyError here.
        if not params['active']:
            resp = self.client.api.patch(uri, json=params)
            try:
                response = resp.json()
            except ValueError as ex:
                raise F5ModuleError(str(ex))
            if 'code' in response and response['code'] == 400:
                if 'message' in response:
                    raise F5ModuleError(response['message'])
                else:
                    raise F5ModuleError(resp.content)
    def create_blank(self):
        """Create an empty policy and verify it now exists."""
        self.create_on_device()
        if self.exists():
            return True
        else:
            raise F5ModuleError(
                'Failed to create ASM policy: {0}'.format(self.want.name)
            )
    def remove(self):
        """Delete the policy and verify it is gone."""
        if self.module.check_mode:
            return True
        self.remove_from_device()
        if self.exists():
            raise F5ModuleError(
                'Failed to delete ASM policy: {0}'.format(self.want.name)
            )
        return True
    def is_activated(self):
        """True when the user asked for the policy to be active."""
        if self.want.active is True:
            return True
        else:
            return False
    def read_current_from_device(self):
        """Fetch the policy's current settings from the device."""
        policy_id = self._get_policy_id()
        uri = "https://{0}:{1}/mgmt/tm/asm/policies/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            policy_id
        )
        resp = self.client.api.get(uri)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        # Expose selfLink under the snake_case key the Parameters class uses.
        response.update((dict(self_link=response['selfLink'])))
        return Parameters(params=response)
    def upload_file_to_device(self, content, name):
        """Upload a local file to the device's file-transfer endpoint."""
        url = 'https://{0}:{1}/mgmt/shared/file-transfer/uploads'.format(
            self.client.provider['server'],
            self.client.provider['server_port']
        )
        try:
            upload_file(self.client, url, content, name)
        except F5ModuleError:
            raise F5ModuleError(
                "Failed to upload the file."
            )
    def import_to_device(self):
        """Upload the policy file and load it on the device via tmsh."""
        name = os.path.split(self.want.file)[1]
        self.upload_file_to_device(self.want.file, name)
        # Give the upload a moment to land before loading it.
        time.sleep(2)
        full_name = fq_name(self.want.partition, self.want.name)
        cmd = 'tmsh load asm policy {0} file /var/config/rest/downloads/{1}'.format(full_name, name)
        uri = "https://{0}:{1}/mgmt/tm/util/bash/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        args = dict(
            command='run',
            utilCmdArgs='-c "{0}"'.format(cmd)
        )
        resp = self.client.api.post(uri, json=args)
        try:
            response = resp.json()
            if 'commandResult' in response:
                if 'Unexpected Error' in response['commandResult']:
                    raise F5ModuleError(response['commandResult'])
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        return True
    def remove_temp_policy_from_device(self):
        """Delete the uploaded policy file from the device download area."""
        name = os.path.split(self.want.file)[1]
        tpath_name = '/var/config/rest/downloads/{0}'.format(name)
        uri = "https://{0}:{1}/mgmt/tm/util/unix-rm/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        args = dict(
            command='run',
            utilCmdArgs=tpath_name
        )
        resp = self.client.api.post(uri, json=args)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
    def apply_on_device(self):
        """Start an apply-policy task; return its task id."""
        uri = "https://{0}:{1}/mgmt/tm/asm/tasks/apply-policy/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        params = dict(policyReference={'link': self.have.self_link})
        resp = self.client.api.post(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] in [400, 403]:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        return response['id']
    def create_from_template_on_device(self):
        """Create the policy from a built-in template via tmsh."""
        full_name = fq_name(self.want.partition, self.want.name)
        cmd = 'tmsh create asm policy {0} policy-template {1}'.format(full_name, self.want.template)
        uri = "https://{0}:{1}/mgmt/tm/util/bash/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        args = dict(
            command='run',
            utilCmdArgs='-c "{0}"'.format(cmd)
        )
        resp = self.client.api.post(uri, json=args)
        try:
            response = resp.json()
            if 'commandResult' in response:
                if 'Unexpected Error' in response['commandResult']:
                    raise F5ModuleError(response['commandResult'])
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] == 400:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
    def create_on_device(self):
        """POST a new (blank) policy; return its selfLink."""
        params = self.changes.api_params()
        params['name'] = self.want.name
        params['partition'] = self.want.partition
        # we need to remove active from params as API will raise an error if the active is set to True,
        # policies can only be activated via apply-policy task endpoint.
        params.pop('active')
        uri = "https://{0}:{1}/mgmt/tm/asm/policies/".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
        )
        resp = self.client.api.post(uri, json=params)
        try:
            response = resp.json()
        except ValueError as ex:
            raise F5ModuleError(str(ex))
        if 'code' in response and response['code'] in [400, 401, 403]:
            if 'message' in response:
                raise F5ModuleError(response['message'])
            else:
                raise F5ModuleError(resp.content)
        # Give the device a moment to register the new policy.
        time.sleep(2)
        return response['selfLink']
    def remove_from_device(self):
        """DELETE the policy by id; raise when the device refuses."""
        policy_id = self._get_policy_id()
        uri = "https://{0}:{1}/mgmt/tm/asm/policies/{2}".format(
            self.client.provider['server'],
            self.client.provider['server_port'],
            policy_id
        )
        response = self.client.api.delete(uri)
        if response.status in [200, 201]:
            return True
        raise F5ModuleError(response.content)
class ModuleManager(object):
    """Dispatches execution to the version-appropriate manager."""
    def __init__(self, *args, **kwargs):
        self.module = kwargs.get('module', None)
        self.client = F5RestClient(**self.module.params)
        self.kwargs = kwargs

    def exec_module(self):
        """Run the module after verifying ASM is provisioned."""
        if not module_provisioned(self.client, 'asm'):
            raise F5ModuleError(
                "ASM must be provisioned to use this module."
            )
        flavor = 'v1' if self.version_is_less_than_13() else 'v2'
        return self.get_manager(flavor).exec_module()

    def get_manager(self, type):
        """Instantiate the manager matching *type* ('v1' or 'v2')."""
        managers = {'v1': V1Manager, 'v2': V2Manager}
        if type in managers:
            return managers[type](**self.kwargs)

    def version_is_less_than_13(self):
        """True when the target BIG-IP runs a TMOS version below 13.0.0."""
        return LooseVersion(tmos_version(self.client)) < LooseVersion('13.0.0')
class V1Manager(BaseManager):
    """Manager for BIG-IP versions earlier than 13.x."""
    def __init__(self, *args, **kwargs):
        ansible_module = kwargs.get('module', None)
        rest_client = F5RestClient(**ansible_module.params)
        super(V1Manager, self).__init__(client=rest_client, module=ansible_module)
        self.want = V1Parameters(params=ansible_module.params, client=rest_client)

    def create_from_file(self):
        # Import the uploaded policy, then clean up the temporary file.
        self.import_to_device()
        self.remove_temp_policy_from_device()

    def create_from_template(self):
        self.create_from_template_on_device()
class V2Manager(BaseManager):
    """Manager for BIG-IP 13.x and later."""
    def __init__(self, *args, **kwargs):
        ansible_module = kwargs.get('module', None)
        rest_client = F5RestClient(**ansible_module.params)
        super(V2Manager, self).__init__(client=rest_client, module=ansible_module)
        self.want = V2Parameters(params=ansible_module.params, client=rest_client)

    def create_from_template(self):
        created = self.create_from_template_on_device()
        if not created:
            return False

    def create_from_file(self):
        imported = self.import_to_device()
        if not imported:
            return False
        self.remove_temp_policy_from_device()
class ArgumentSpec(object):
    """Defines the module's argument specification."""
    def __init__(self):
        # Human-readable template names accepted by the ``template`` option.
        self.template_map = [
            'ActiveSync v1.0 v2.0 (http)',
            'ActiveSync v1.0 v2.0 (https)',
            'Comprehensive',
            'Drupal',
            'Fundamental',
            'Joomla',
            'LotusDomino 6.5 (http)',
            'LotusDomino 6.5 (https)',
            'OWA Exchange 2003 (http)',
            'OWA Exchange 2003 (https)',
            'OWA Exchange 2003 with ActiveSync (http)',
            'OWA Exchange 2003 with ActiveSync (https)',
            'OWA Exchange 2007 (http)',
            'OWA Exchange 2007 (https)',
            'OWA Exchange 2007 with ActiveSync (http)',
            'OWA Exchange 2007 with ActiveSync (https)',
            'OWA Exchange 2010 (http)',
            'OWA Exchange 2010 (https)',
            'Oracle 10g Portal (http)',
            'Oracle 10g Portal (https)',
            'Oracle Applications 11i (http)',
            'Oracle Applications 11i (https)',
            'PeopleSoft Portal 9 (http)',
            'PeopleSoft Portal 9 (https)',
            'Rapid Deployment Policy',
            'SAP NetWeaver 7 (http)',
            'SAP NetWeaver 7 (https)',
            'SharePoint 2003 (http)',
            'SharePoint 2003 (https)',
            'SharePoint 2007 (http)',
            'SharePoint 2007 (https)',
            'SharePoint 2010 (http)',
            'SharePoint 2010 (https)',
            'Vulnerability Assessment Baseline',
            'Wordpress',
        ]
        self.supports_check_mode = True
        # Module-specific options, merged over the common F5 spec below.
        module_spec = {
            'name': {'required': True},
            'file': {'type': 'path'},
            'template': {'choices': self.template_map},
            'active': {'type': 'bool'},
            'state': {
                'default': 'present',
                'choices': ['present', 'absent']
            },
            'partition': {
                'default': 'Common',
                'fallback': (env_fallback, ['F5_PARTITION'])
            },
        }
        self.argument_spec = {}
        self.argument_spec.update(f5_argument_spec)
        self.argument_spec.update(module_spec)
def main():
    """Module entry point: build the spec, run the manager, report back."""
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode,
        mutually_exclusive=[
            ['file', 'template']
        ]
    )
    # BUG FIX: an F5RestClient was previously instantiated here and never
    # used (ModuleManager builds its own client); the dead code is removed.
    try:
        mm = ModuleManager(module=module)
        results = mm.exec_module()
        module.exit_json(**results)
    except F5ModuleError as ex:
        module.fail_json(msg=str(ex))


if __name__ == '__main__':
    main()
|
"""
A tool to help keep .mailmap and AUTHORS up-to-date.
"""
from __future__ import unicode_literals
from __future__ import print_function
import os
import sys
from fabric.api import local, env
from fabric.colors import yellow, blue, green, red
from fabric.utils import error
mailmap_update_path = os.path.abspath(__file__)
mailmap_update_dir = os.path.dirname(mailmap_update_path)
sympy_top = os.path.split(mailmap_update_dir)[0]
sympy_dir = os.path.join(sympy_top, 'sympy')
if os.path.isdir(sympy_dir):
sys.path.insert(0, sympy_top)
from sympy.utilities.misc import filldedent
try:
# Only works in newer versions of fabric
env.colorize_errors = True
except AttributeError:
pass
git_command = 'git log --format="%aN <%aE>" | sort -u'
git_people = unicode(local(git_command, capture=True), 'utf-8').strip().split("\n")
from distutils.version import LooseVersion
git_ver = local('git --version', capture=True)[12:]
if LooseVersion(git_ver) < LooseVersion('1.8.4.2'):
print(yellow("Please use a newer git version >= 1.8.4.2"))
with open(os.path.realpath(os.path.join(__file__, os.path.pardir,
os.path.pardir, "AUTHORS"))) as fd:
AUTHORS = unicode(fd.read(), 'utf-8')
firstauthor = "Ondřej Čertík"
authors = AUTHORS[AUTHORS.find(firstauthor):].strip().split('\n')
authors_skip = ["Kirill Smelkov <kirr@landau.phys.spbu.ru>", "Sergey B Kirpichev <skirpichev@gmail.com>"]
predate_git = 0
exit1 = False
print(blue(filldedent("""Read the text at the top of AUTHORS and the text at
the top of .mailmap for information on how to fix the below errors. If
someone is missing from AUTHORS, add them where they would have been if they
were added after their first pull request was merged (checkout the merge
commit from the first pull request and see who is at the end of the AUTHORS
file at that commit.)""")))
print()
print(yellow("People who are in AUTHORS but not in git:"))
print()
for name in sorted(set(authors) - set(git_people)):
if name.startswith("*"):
# People who are in AUTHORS but predate git
predate_git += 1
continue
exit1 = True
print(name)
print()
print(yellow("People who are in git but not in AUTHORS:"))
print()
for name in sorted(set(git_people) - set(authors) - set(authors_skip)):
exit1 = True
print(name)
authors_count = AUTHORS[AUTHORS.find(firstauthor):].strip().count("\n") + 1
adjusted_authors_count = (
authors_count
- predate_git
+ len(authors_skip)
)
git_count = len(git_people)
print()
print(yellow("There are {git_count} people in git, and {adjusted_authors_count} "
"(adjusted) people from AUTHORS".format(git_count=git_count,
adjusted_authors_count=adjusted_authors_count)))
if git_count != adjusted_authors_count:
error("These two numbers are not the same!")
else:
print()
print(green(filldedent("""Congratulations. The AUTHORS and .mailmap files
appear to be up to date. You should now verify that doc/src/aboutus has %s
people.""" % authors_count)))
if exit1:
print()
print(red("There were errors. Please fix them."))
sys.exit(1)
|
import controllers
import report
import ir_qweb
|
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import sys
from nose.plugins.skip import SkipTest
if sys.version_info < (2, 7):
raise SkipTest("F5 Ansible modules require Python >= 2.7")
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, DEFAULT, Mock
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
from ansible.module_utils.f5_utils import AnsibleF5Client
try:
from library.bigip_snmp_trap import NetworkedParameters
from library.bigip_snmp_trap import NonNetworkedParameters
from library.bigip_snmp_trap import ModuleManager
from library.bigip_snmp_trap import NetworkedManager
from library.bigip_snmp_trap import NonNetworkedManager
from library.bigip_snmp_trap import ArgumentSpec
except ImportError:
try:
from ansible.modules.network.f5.bigip_snmp_trap import NetworkedParameters
from ansible.modules.network.f5.bigip_snmp_trap import NonNetworkedParameters
from ansible.modules.network.f5.bigip_snmp_trap import ModuleManager
from ansible.modules.network.f5.bigip_snmp_trap import NetworkedManager
from ansible.modules.network.f5.bigip_snmp_trap import NonNetworkedManager
from ansible.modules.network.f5.bigip_snmp_trap import ArgumentSpec
except ImportError:
raise SkipTest("F5 Ansible modules require the f5-sdk Python library")
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def set_module_args(args):
    """Serialize *args* and install them as the Ansible module input."""
    payload = json.dumps({'ANSIBLE_MODULE_ARGS': args})
    basic._ANSIBLE_ARGS = to_bytes(payload)
def load_fixture(name):
    """Load (and memoize) a fixture file, parsing it as JSON when possible."""
    full_path = os.path.join(fixture_path, name)
    try:
        return fixture_data[full_path]
    except KeyError:
        pass
    with open(full_path) as handle:
        contents = handle.read()
    try:
        contents = json.loads(contents)
    except Exception:
        # Not JSON; keep the raw text.
        pass
    fixture_data[full_path] = contents
    return contents
class TestParameters(unittest.TestCase):
    """Unit tests for the snmp-trap parameter adapter classes."""

    def test_module_networked_parameters(self):
        # Module-style (user-facing) argument names.
        params = {
            'name': 'foo',
            'snmp_version': '1',
            'community': 'public',
            'destination': '10.10.10.10',
            'port': 1000,
            'network': 'other',
            'password': 'password',
            'server': 'localhost',
            'user': 'admin',
        }
        result = NetworkedParameters(params)
        assert result.name == 'foo'
        assert result.snmp_version == '1'
        assert result.community == 'public'
        assert result.destination == '10.10.10.10'
        assert result.port == 1000
        assert result.network == 'other'

    def test_module_non_networked_parameters(self):
        # Same input; the non-networked adapter reports no ``network``.
        params = {
            'name': 'foo',
            'snmp_version': '1',
            'community': 'public',
            'destination': '10.10.10.10',
            'port': 1000,
            'network': 'other',
            'password': 'password',
            'server': 'localhost',
            'user': 'admin',
        }
        result = NonNetworkedParameters(params)
        assert result.name == 'foo'
        assert result.snmp_version == '1'
        assert result.community == 'public'
        assert result.destination == '10.10.10.10'
        assert result.port == 1000
        assert result.network is None

    def test_api_parameters(self):
        # API-style (REST resource) attribute names.
        params = {
            'name': 'foo',
            'community': 'public',
            'host': '10.10.10.10',
            'network': 'other',
            'version': 1,
            'port': 1000,
        }
        result = NetworkedParameters(params)
        assert result.name == 'foo'
        assert result.snmp_version == '1'
        assert result.community == 'public'
        assert result.destination == '10.10.10.10'
        assert result.port == 1000
        assert result.network == 'other'
@patch('ansible.module_utils.f5_utils.AnsibleF5Client._get_mgmt_root',
       return_value=True)
class TestManager(unittest.TestCase):
    """Drives ModuleManager end-to-end with the device I/O mocked out."""
    def setUp(self):
        self.spec = ArgumentSpec()
    def test_create_trap(self, *args):
        """Creating a networked trap reports changed plus the new values."""
        set_module_args(dict(
            name='foo',
            snmp_version='1',
            community='public',
            destination='10.10.10.10',
            port=1000,
            network='other',
            password='password',
            server='localhost',
            user='admin'
        ))
        client = AnsibleF5Client(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode,
            f5_product_name=self.spec.f5_product_name
        )
        # Override methods to force specific logic in the module to happen
        mm = ModuleManager(client)
        # Force the networked code path.
        mm.is_version_non_networked = Mock(return_value=False)
        patches = dict(
            create_on_device=DEFAULT,
            exists=DEFAULT
        )
        # exists=False drives the create branch; create_on_device is stubbed.
        with patch.multiple(NetworkedManager, **patches) as mo:
            mo['create_on_device'].side_effect = Mock(return_value=True)
            mo['exists'].side_effect = Mock(return_value=False)
            results = mm.exec_module()
        assert results['changed'] is True
        assert results['port'] == 1000
        assert results['snmp_version'] == '1'
    def test_create_trap_non_network(self, *args):
        """Same flow on a non-networked platform (no ``network`` option)."""
        set_module_args(dict(
            name='foo',
            snmp_version='1',
            community='public',
            destination='10.10.10.10',
            port=1000,
            password='password',
            server='localhost',
            user='admin'
        ))
        client = AnsibleF5Client(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode,
            f5_product_name=self.spec.f5_product_name
        )
        # Override methods to force specific logic in the module to happen
        mm = ModuleManager(client)
        # Force the non-networked code path.
        mm.is_version_non_networked = Mock(return_value=True)
        patches = dict(
            create_on_device=DEFAULT,
            exists=DEFAULT
        )
        with patch.multiple(NonNetworkedManager, **patches) as mo:
            mo['create_on_device'].side_effect = Mock(return_value=True)
            mo['exists'].side_effect = Mock(return_value=False)
            results = mm.exec_module()
        assert results['changed'] is True
        assert results['port'] == 1000
        assert results['snmp_version'] == '1'
|
def f(a, b, c, d):
    """
    Stub whose body is only this docstring.

    :param a: foo
    :param b: bar
        another line of description
    :type b: str
    :param c: baz
    :param d: quux
    """
|
from __future__ import print_function

try:
    from pandasql import sqldf

    def pysqldf(q):
        """Run SQL query *q* over the DataFrames in the global namespace."""
        return sqldf(q, globals())
except ImportError:
    def pysqldf(q):
        """Fallback when pandasql is unavailable: print installation advice."""
        # BUG FIX: the two adjacent literals previously ran together as
        # "...DataFrameMake sure..."; add the missing sentence separator.
        return print("Can not run SQL over Pandas DataFrame. "
                     "Make sure 'pandas' and 'pandasql' libraries are installed")
|
import subprocess
import sys
import setup_util
import os
def start(args, logfile, errfile):
    """Configure and launch the ASP.NET benchmark under IIS (Windows only)."""
    if os.name != 'nt':
        # IIS is Windows-only; report failure on other platforms.
        return 1
    try:
        setup_util.replace_text("aspnet/src/Web.config", "localhost", args.database_host)
        subprocess.check_call("powershell -Command .\\setup_iis.ps1 start", cwd="aspnet", stderr=errfile, stdout=logfile)
    except subprocess.CalledProcessError:
        return 1
    return 0
def stop(logfile, errfile):
    """Stop the IIS-hosted benchmark; a no-op success outside Windows."""
    if os.name == 'nt':
        subprocess.check_call("powershell -Command .\\setup_iis.ps1 stop", cwd="aspnet", stderr=errfile, stdout=logfile)
    return 0
|
try:
    _INTEGER_TYPES = (int, long)
except NameError:  # Python 3: ``long`` was merged into ``int``.
    _INTEGER_TYPES = (int,)


class Rule(object):
    """
    A Lifecycle rule for an S3 bucket.

    :ivar id: Unique identifier for the rule. The value cannot be longer
        than 255 characters.
    :ivar prefix: Prefix identifying one or more objects to which the
        rule applies.
    :ivar status: If Enabled, the rule is currently being applied.
        If Disabled, the rule is not currently being applied.
    :ivar expiration: An instance of `Expiration`. This indicates
        the lifetime of the objects that are subject to the rule.
    :ivar transition: An instance of `Transition`. This indicates
        when to transition to a different storage class.
    """
    def __init__(self, id=None, prefix=None, status=None, expiration=None,
                 transition=None):
        self.id = id
        self.prefix = prefix
        self.status = status
        # A plain integer is accepted for backwards compatibility and is
        # treated as a number of days.
        # BUG FIX: referencing the Python-2-only ``long`` builtin directly
        # raised NameError under Python 3; use the precomputed tuple.
        if isinstance(expiration, _INTEGER_TYPES):
            self.expiration = Expiration(days=expiration)
        else:
            # None or an Expiration instance.
            self.expiration = expiration
        self.transition = transition

    def __repr__(self):
        return '<Rule: %s>' % self.id

    def startElement(self, name, attrs, connection):
        """SAX hook: create and return the handler for a nested element."""
        if name == 'Transition':
            self.transition = Transition()
            return self.transition
        elif name == 'Expiration':
            self.expiration = Expiration()
            return self.expiration
        return None

    def endElement(self, name, value, connection):
        """SAX hook: store the text value of a completed element."""
        if name == 'ID':
            self.id = value
        elif name == 'Prefix':
            self.prefix = value
        elif name == 'Status':
            self.status = value
        else:
            setattr(self, name, value)

    def to_xml(self):
        """Serialize this rule to its S3 lifecycle XML fragment."""
        s = '<Rule>'
        s += '<ID>%s</ID>' % self.id
        s += '<Prefix>%s</Prefix>' % self.prefix
        s += '<Status>%s</Status>' % self.status
        if self.expiration is not None:
            s += self.expiration.to_xml()
        if self.transition is not None:
            s += self.transition.to_xml()
        s += '</Rule>'
        return s
class Expiration(object):
    """
    When an object will expire.

    :ivar days: The number of days until the object expires
    :ivar date: The date when the object will expire. Must be
        in ISO 8601 format.
    """
    def __init__(self, days=None, date=None):
        self.days = days
        self.date = date

    def startElement(self, name, attrs, connection):
        # No nested elements to handle.
        return None

    def endElement(self, name, value, connection):
        if name == 'Days':
            self.days = int(value)
        elif name == 'Date':
            self.date = value

    def __repr__(self):
        when = ('in: %s days' % self.days) if self.days is not None \
            else ('on: %s' % self.date)
        return '<Expiration: %s>' % when

    def to_xml(self):
        parts = ['<Expiration>']
        if self.days is not None:
            parts.append('<Days>%s</Days>' % self.days)
        elif self.date is not None:
            parts.append('<Date>%s</Date>' % self.date)
        parts.append('</Expiration>')
        return ''.join(parts)
class Transition(object):
    """
    A transition to a different storage class.

    :ivar days: The number of days until the object should be moved.
    :ivar date: The date when the object should be moved. Should be
        in ISO 8601 format.
    :ivar storage_class: The storage class to transition to. Valid
        values are GLACIER.
    """
    def __init__(self, days=None, date=None, storage_class=None):
        self.days = days
        self.date = date
        self.storage_class = storage_class

    def startElement(self, name, attrs, connection):
        # No nested elements to handle.
        return None

    def endElement(self, name, value, connection):
        if name == 'Days':
            self.days = int(value)
        elif name == 'Date':
            self.date = value
        elif name == 'StorageClass':
            self.storage_class = value

    def __repr__(self):
        when = ('in: %s days' % self.days) if self.days is not None \
            else ('on: %s' % self.date)
        return '<Transition: %s, %s>' % (when, self.storage_class)

    def to_xml(self):
        parts = ['<Transition>']
        parts.append('<StorageClass>%s</StorageClass>' % self.storage_class)
        if self.days is not None:
            parts.append('<Days>%s</Days>' % self.days)
        elif self.date is not None:
            parts.append('<Date>%s</Date>' % self.date)
        parts.append('</Transition>')
        return ''.join(parts)
class Lifecycle(list):
    """
    A container (list subclass) for the rules of a Lifecycle configuration.
    """
    def startElement(self, name, attrs, connection):
        # Hand nested <Rule> elements off to a fresh Rule instance.
        if name != 'Rule':
            return None
        rule = Rule()
        self.append(rule)
        return rule

    def endElement(self, name, value, connection):
        # Any other leaf element is stored verbatim as an attribute.
        setattr(self, name, value)

    def to_xml(self):
        """
        Return the XML for this Lifecycle configuration as defined by S3.
        """
        fragments = ['<?xml version="1.0" encoding="UTF-8"?>',
                     '<LifecycleConfiguration>']
        fragments.extend(rule.to_xml() for rule in self)
        fragments.append('</LifecycleConfiguration>')
        return ''.join(fragments)

    def add_rule(self, id, prefix, status, expiration, transition=None):
        """
        Add a rule to this Lifecycle configuration.  This only updates
        the local copy; to install the rule(s) on the bucket, pass this
        object to the Bucket's configure_lifecycle method.

        :type id: str
        :param id: Unique identifier for the rule; at most 255 characters.

        :type prefix: str
        :param prefix: Prefix identifying one or more objects to which
            the rule applies.

        :type status: str
        :param status: 'Enabled' if the rule is currently applied,
            'Disabled' otherwise.

        :type expiration: int
        :param expiration: Lifetime, in days, of objects subject to the
            rule; must be a positive integer.  An Expiration instance is
            also accepted.

        :type transition: Transition
        :param transition: When the object transitions to a different
            storage class.
        """
        self.append(Rule(id, prefix, status, expiration, transition))
|
"""Exceptions used by ML2."""
from neutron.common import exceptions
class MechanismDriverError(exceptions.NeutronException):
    """Mechanism driver call failed."""
    # Interpolated by NeutronException with the name of the failing driver
    # entry point, e.g. "create_port_postcommit failed.".
    # NOTE(review): ``_`` is assumed to be neutron's gettext marker
    # installed into builtins at startup -- confirm before importing this
    # module in isolation.
    message = _("%(method)s failed.")
|
# Normalized from a broken fragment: the trailing "else" lacked its colon
# and suite (and carried a leftover editor caret marker), which is a
# SyntaxError.  The else now pairs with the second `if` as a no-op branch.
if 1:
    pass
if 2:
    pass
else:
    pass
|
""" Python Character Mapping Codec iso8859_1 generated from 'MAPPINGS/ISO8859/8859-1.TXT' with gencodec.py.
"""#"
import codecs
class Codec(codecs.Codec):
    """Stateless ISO 8859-1 codec backed by the module's charmap tables."""

    def encode(self, input, errors='strict'):
        return codecs.charmap_encode(input, errors, encoding_table)

    def decode(self, input, errors='strict'):
        return codecs.charmap_decode(input, errors, decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder; the charmap codec keeps no state between calls."""

    def encode(self, input, final=False):
        return codecs.charmap_encode(input, self.errors, encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder; the charmap codec keeps no state between calls."""

    def decode(self, input, final=False):
        return codecs.charmap_decode(input, self.errors, decoding_table)[0]
class StreamWriter(Codec, codecs.StreamWriter):
    """Stream writer; all encoding behaviour is inherited from Codec."""
    pass
class StreamReader(Codec, codecs.StreamReader):
    """Stream reader; all decoding behaviour is inherited from Codec."""
    pass
def getregentry():
    """Return the CodecInfo record used to register the iso8859-1 codec."""
    codec = Codec()
    return codecs.CodecInfo(
        name='iso8859-1',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\xa1' # 0xA1 -> INVERTED EXCLAMATION MARK
u'\xa2' # 0xA2 -> CENT SIGN
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa4' # 0xA4 -> CURRENCY SIGN
u'\xa5' # 0xA5 -> YEN SIGN
u'\xa6' # 0xA6 -> BROKEN BAR
u'\xa7' # 0xA7 -> SECTION SIGN
u'\xa8' # 0xA8 -> DIAERESIS
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\xaa' # 0xAA -> FEMININE ORDINAL INDICATOR
u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xac' # 0xAC -> NOT SIGN
u'\xad' # 0xAD -> SOFT HYPHEN
u'\xae' # 0xAE -> REGISTERED SIGN
u'\xaf' # 0xAF -> MACRON
u'\xb0' # 0xB0 -> DEGREE SIGN
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\xb2' # 0xB2 -> SUPERSCRIPT TWO
u'\xb3' # 0xB3 -> SUPERSCRIPT THREE
u'\xb4' # 0xB4 -> ACUTE ACCENT
u'\xb5' # 0xB5 -> MICRO SIGN
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\xb7' # 0xB7 -> MIDDLE DOT
u'\xb8' # 0xB8 -> CEDILLA
u'\xb9' # 0xB9 -> SUPERSCRIPT ONE
u'\xba' # 0xBA -> MASCULINE ORDINAL INDICATOR
u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER
u'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
u'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS
u'\xbf' # 0xBF -> INVERTED QUESTION MARK
u'\xc0' # 0xC0 -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xc3' # 0xC3 -> LATIN CAPITAL LETTER A WITH TILDE
u'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
u'\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xc8' # 0xC8 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xca' # 0xCA -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xcc' # 0xCC -> LATIN CAPITAL LETTER I WITH GRAVE
u'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\xd0' # 0xD0 -> LATIN CAPITAL LETTER ETH (Icelandic)
u'\xd1' # 0xD1 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xd2' # 0xD2 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
u'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xd7' # 0xD7 -> MULTIPLICATION SIGN
u'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
u'\xd9' # 0xD9 -> LATIN CAPITAL LETTER U WITH GRAVE
u'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xdd' # 0xDD -> LATIN CAPITAL LETTER Y WITH ACUTE
u'\xde' # 0xDE -> LATIN CAPITAL LETTER THORN (Icelandic)
u'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S (German)
u'\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe3' # 0xE3 -> LATIN SMALL LETTER A WITH TILDE
u'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
u'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE
u'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
u'\xea' # 0xEA -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xec' # 0xEC -> LATIN SMALL LETTER I WITH GRAVE
u'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
u'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xf0' # 0xF0 -> LATIN SMALL LETTER ETH (Icelandic)
u'\xf1' # 0xF1 -> LATIN SMALL LETTER N WITH TILDE
u'\xf2' # 0xF2 -> LATIN SMALL LETTER O WITH GRAVE
u'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
u'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
u'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf7' # 0xF7 -> DIVISION SIGN
u'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
u'\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE
u'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
u'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xfd' # 0xFD -> LATIN SMALL LETTER Y WITH ACUTE
u'\xfe' # 0xFE -> LATIN SMALL LETTER THORN (Icelandic)
u'\xff' # 0xFF -> LATIN SMALL LETTER Y WITH DIAERESIS
)
encoding_table=codecs.charmap_build(decoding_table)
|
"""
Check that all of the certs on all service endpoints validate.
"""
import unittest
from tests.integration import ServiceCertVerificationTest
import boto.ec2
class EC2CertVerificationTest(unittest.TestCase, ServiceCertVerificationTest):
    """Check that the TLS certificate of every EC2 regional endpoint validates."""
    # Flag read by the shared ServiceCertVerificationTest mixin.
    ec2 = True
    # NOTE: evaluated at import time; requires boto's endpoint metadata.
    regions = boto.ec2.regions()
    def sample_service_call(self, conn):
        # A simple authenticated API call so the mixin exercises the TLS
        # handshake against each region's endpoint.
        conn.get_all_reservations()
|
"""Tests harness for distutils.versionpredicate.
"""
import distutils.versionpredicate
import doctest
from test.test_support import run_unittest
def test_suite():
    """Collect distutils.versionpredicate's doctests as a unittest suite."""
    suite = doctest.DocTestSuite(distutils.versionpredicate)
    return suite
# Allow running the harness directly; run_unittest comes from
# test.test_support, the Python 2 name of CPython's test helper module.
if __name__ == '__main__':
    run_unittest(test_suite())
|
"Decorator for views that gzips pages if the client supports it."
from django.utils.decorators import decorator_from_middleware
from django.middleware.gzip import GZipMiddleware
gzip_page = decorator_from_middleware(GZipMiddleware)
|
'''
Genesis Add-on
Copyright (C) 2014 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import urllib,urllib2,re,os,xbmc,xbmcgui,xbmcaddon,xbmcvfs
try:
import CommonFunctions as common
except:
import commonfunctionsdummy as common
try:
import json
except:
import simplejson as json
def get(url):
    """Resolve *url* to a directly playable media URL.

    Resolution order matters: the premium debrid services are tried first,
    then the hand-rolled per-host resolvers below, and finally the
    urlresolver add-on as a catch-all.  Python 2 only (print statements).
    """
    print url
    pz = premiumize(url)
    if not pz == None: return pz
    rd = realdebrid(url)
    if not rd == None: return rd
    try:
        # Normalise the hostname (strip leading 'www.') for dispatch below.
        u = None
        import urlparse
        u = urlparse.urlparse(url).netloc
        u = u.replace('www.', '')
        print 'common resolver',u
    except:
        pass
    if u == 'vk.com': url = vk(url)
    elif u == 'docs.google.com': url = googledocs(url)
    elif u == 'youtube.com': url = youtube(url)
    elif u == 'videomega.tv': url = videomega(url)
    elif u == 'movreel.com': url = movreel(url)
    elif u == 'billionuploads.com': url = billionuploads(url)
    elif u == 'v-vids.com': url = v_vids(url)
    elif u == 'vidbull.com': url = vidbull(url)
    elif u == '180upload.com': url = _180upload(url)
    elif u == 'hugefiles.net': url = hugefiles(url)
    elif u == 'filecloud.io': url = filecloud(url)
    elif u == 'uploadrocket.net': url = uploadrocket(url)
    elif u == 'kingfiles.net': url = kingfiles(url)
    elif u == 'streamin.to': url = streamin(url)
    elif u == 'grifthost.com': url = grifthost(url)
    elif u == 'ishared.eu': url = ishared(url)
    elif u == 'cloudyvideos.com': url = cloudyvideos(url)
    elif u == 'mrfile.me': url = mrfile(url)
    elif u == 'datemule.com': url = datemule(url)
    elif u == 'vimeo.com': url = vimeo(url)
    elif u == 'odnoklassniki.ru': url = odnoklassniki(url)
    elif u == 'videoapi.my.mail.ru': url = mailru(url)
    elif u == 'my.mail.ru': url = mailru(url)
    elif u == 'mail.ru': url = mailru(url)
    else:
        try:
            # Fall back to the generic urlresolver add-on.
            import urlresolver
            host = urlresolver.HostedMediaFile(url)
            if host: resolver = urlresolver.resolve(url)
            else: return url
            # NOTE(review): only plain 'http://' results are accepted here,
            # so an https result falls through and returns None -- confirm
            # this is intentional.
            if not resolver.startswith('http://'): return
            if not resolver == url: return resolver
        except:
            pass
    return url
class getUrl(object):
    """Thin urllib2 wrapper used by every resolver in this module.

    Performs the HTTP request in __init__ and exposes the outcome on
    ``self.result``: the response body by default, the final URL when
    ``output='geturl'``, or the Set-Cookie header when ``output='cookie'``.
    Python 2 only (urllib2/cookielib).
    """
    def __init__(self, url, close=True, proxy=None, post=None, mobile=False, referer=None, cookie=None, output='', timeout='10'):
        # NOTE(review): install_opener mutates global urllib2 state and
        # returns None, so rebinding ``opener`` here discards the handle.
        if not proxy == None:
            proxy_handler = urllib2.ProxyHandler({'http':'%s' % (proxy)})
            opener = urllib2.build_opener(proxy_handler, urllib2.HTTPHandler)
            opener = urllib2.install_opener(opener)
        if output == 'cookie' or not close == True:
            import cookielib
            cookie_handler = urllib2.HTTPCookieProcessor(cookielib.LWPCookieJar())
            opener = urllib2.build_opener(cookie_handler, urllib2.HTTPBasicAuthHandler(), urllib2.HTTPHandler())
            opener = urllib2.install_opener(opener)
        # A non-None ``post`` body turns the request into a POST.
        if not post == None:
            request = urllib2.Request(url, post)
        else:
            request = urllib2.Request(url,None)
        # Spoof a mobile or desktop browser user agent.
        if mobile == True:
            request.add_header('User-Agent', 'Mozilla/5.0 (iPhone; CPU; CPU iPhone OS 4_0 like Mac OS X; en-us) AppleWebKit/532.9 (KHTML, like Gecko) Version/4.0.5 Mobile/8A293 Safari/6531.22.7')
        else:
            request.add_header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:6.0) Gecko/20100101 Firefox/6.0')
        if not referer == None:
            request.add_header('Referer', referer)
        if not cookie == None:
            request.add_header('cookie', cookie)
        response = urllib2.urlopen(request, timeout=int(timeout))
        if output == 'cookie':
            result = str(response.headers.get('Set-Cookie'))
        elif output == 'geturl':
            result = response.geturl()
        else:
            result = response.read()
        # close=False keeps the connection (and cookie jar) alive for a
        # follow-up request.
        if close == True:
            response.close()
        self.result = result
def cloudflare(url):
    """Solve Cloudflare's legacy "I'm Under Attack" JavaScript challenge.

    Fetches the challenge page, evaluates the obfuscated arithmetic, posts
    the answer to /cdn-cgi/l/chk_jschl and returns the clearance cookie
    string, or None on any failure.
    """
    try:
        import urlparse,cookielib
        class NoRedirection(urllib2.HTTPErrorProcessor):
            # Return 3xx/5xx responses as-is so the challenge body is kept.
            def http_response(self, request, response):
                return response
        def parseJSString(s):
            # SECURITY NOTE(review): this eval()s an expression derived from
            # remote page content; the replace() chain constrains it to
            # digit arithmetic, but eval on untrusted input is still risky.
            try:
                offset=1 if s[0]=='+' else 0
                val = int(eval(s.replace('!+[]','1').replace('!![]','1').replace('[]','0').replace('(','str(')[offset:]))
                return val
            except:
                pass
        agent = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:6.0) Gecko/20100101 Firefox/6.0'
        cj = cookielib.CookieJar()
        opener = urllib2.build_opener(NoRedirection, urllib2.HTTPCookieProcessor(cj))
        opener.addheaders = [('User-Agent', agent)]
        response = opener.open(url)
        result = response.read()
        # Extract the challenge token and the seed/update expressions.
        jschl = re.compile('name="jschl_vc" value="(.+?)"/>').findall(result)[0]
        init = re.compile('setTimeout\(function\(\){\s*.*?.*:(.*?)};').findall(result)[0]
        builder = re.compile(r"challenge-form\'\);\s*(.*)a.v").findall(result)[0]
        decryptVal = parseJSString(init)
        lines = builder.split(';')
        # Apply each "a.value += / -= / *= ..." statement in page order.
        for line in lines:
            if len(line)>0 and '=' in line:
                sections=line.split('=')
                line_val = parseJSString(sections[1])
                decryptVal = int(eval(str(decryptVal)+sections[0][-1]+str(line_val)))
        # Cloudflare adds the hostname length to the final answer.
        answer = decryptVal + len(urlparse.urlparse(url).netloc)
        query = '%s/cdn-cgi/l/chk_jschl?jschl_vc=%s&jschl_answer=%s' % (url, jschl, answer)
        opener = urllib2.build_opener(NoRedirection, urllib2.HTTPCookieProcessor(cj))
        opener.addheaders = [('User-Agent', agent)]
        response = opener.open(query)
        cookie = str(response.headers.get('Set-Cookie'))
        response.close()
        return cookie
    except:
        return
def jsunpack(script):
    """Unpack Dean Edwards style p.a.c.k.e.r JavaScript.

    Splits the packed payload into (p, a, c, k) and substitutes each
    base-``a`` token back into the source.  Python 2 only: ``num /= radix``
    relies on integer floor division.
    """
    def __itoa(num, radix):
        # Convert ``num`` to a base-``radix`` string (the packer's token
        # alphabet is 0-9a-z).
        result = ""
        while num > 0:
            result = "0123456789abcdefghijklmnopqrstuvwxyz"[num % radix] + result
            num /= radix
        return result
    def __unpack(p, a, c, k, e, d):
        # Replace token c (base-a encoded) with its dictionary word k[c],
        # highest index first, using word boundaries to avoid partial hits.
        while (c > 1):
            c = c -1
            if (k[c]):
                p = re.sub('\\b' + str(__itoa(c, a)) +'\\b', k[c], p)
        return p
    # Carve the packed string into payload and parameters.
    aSplit = script.split(";',")
    p = str(aSplit[0])
    aSplit = aSplit[1].split(",")
    a = int(aSplit[0])
    c = int(aSplit[1])
    k = aSplit[2].split(".")[0].replace("'", '').split('|')
    e = ''
    d = ''
    sUnpacked = str(__unpack(p, a, c, k, e, d))
    return sUnpacked.replace('\\', '')
def captcha(data):
    """Detect and solve a captcha embedded in ``data`` (an HTML page).

    Tries SolveMedia, then reCAPTCHA, then a simple positional numeric
    captcha, showing the image in a Kodi dialog and asking the user to
    type the answer.  Returns a dict of form fields to merge into the
    host's POST data (empty when nothing was detected).
    """
    try:
        captcha = {}
        def get_response(response):
            # Download the captcha image, display it for 3 seconds in a
            # Kodi window, then prompt the user for the text.
            try:
                dataPath = xbmc.translatePath(xbmcaddon.Addon().getAddonInfo("profile"))
                i = os.path.join(dataPath.decode("utf-8"),'img')
                f = xbmcvfs.File(i, 'w')
                f.write(getUrl(response).result)
                f.close()
                f = xbmcgui.ControlImage(450,5,375,115, i)
                d = xbmcgui.WindowDialog()
                d.addControl(f)
                xbmcvfs.delete(i)
                d.show()
                xbmc.sleep(3000)
                t = 'Type the letters in the image'
                c = common.getUserInput(t, '')
                d.close()
                return c
            except:
                return
        # --- SolveMedia ---
        solvemedia = common.parseDOM(data, "iframe", ret="src")
        solvemedia = [i for i in solvemedia if 'api.solvemedia.com' in i]
        if len(solvemedia) > 0:
            url = solvemedia[0]
            result = getUrl(url).result
            challenge = common.parseDOM(result, "input", ret="value", attrs = { "id": "adcopy_challenge" })[0]
            response = common.parseDOM(result, "iframe", ret="src")
            response += common.parseDOM(result, "img", ret="src")
            response = [i for i in response if '/papi/media' in i][0]
            response = 'http://api.solvemedia.com' + response
            response = get_response(response)
            captcha.update({'adcopy_challenge': challenge, 'adcopy_response': response})
            return captcha
        # --- Google reCAPTCHA (v1-style image API) ---
        recaptcha = []
        if data.startswith('http://www.google.com'): recaptcha += [data]
        recaptcha += common.parseDOM(data, "script", ret="src", attrs = { "type": "text/javascript" })
        recaptcha = [i for i in recaptcha if 'http://www.google.com' in i]
        if len(recaptcha) > 0:
            url = recaptcha[0]
            result = getUrl(url).result
            challenge = re.compile("challenge\s+:\s+'(.+?)'").findall(result)[0]
            response = 'http://www.google.com/recaptcha/api/image?c=' + challenge
            response = get_response(response)
            captcha.update({'recaptcha_challenge_field': challenge, 'recaptcha_challenge': challenge, 'recaptcha_response_field': response, 'recaptcha_response': response})
            return captcha
        # --- CSS-positioned numeric captcha: digits are HTML entities laid
        # out by a left: offset; sort by offset, decode, subtract '0'.
        numeric = re.compile("left:(\d+)px;padding-top:\d+px;'>&#(.+?);<").findall(data)
        if len(numeric) > 0:
            result = sorted(numeric, key=lambda ltr: int(ltr[0]))
            response = ''.join(str(int(num[1])-48) for num in result)
            captcha.update({'code': response})
            return captcha
    except:
        return captcha
def vk(url):
    """Resolve a vk.com video page to quality-tagged stream URLs.

    Returns a list of {'quality': ..., 'url': ...} dicts (720 -> HD,
    540/480 -> SD) or None when nothing was found.
    """
    try:
        url = url.replace('http://', 'https://')
        result = getUrl(url).result
        # Matches url720=/url540=/url480= stream parameters in the page.
        u = re.compile('url(720|540|480)=(.+?)&').findall(result)
        url = []
        try: url += [[{'quality': 'HD', 'url': i[1]} for i in u if i[0] == '720'][0]]
        except: pass
        try: url += [[{'quality': 'SD', 'url': i[1]} for i in u if i[0] == '540'][0]]
        except: pass
        try: url += [[{'quality': 'SD', 'url': i[1]} for i in u if i[0] == '480'][0]]
        except: pass
        if url == []: return
        return url
    except:
        return
def google(url):
    """Classify a Google video stream URL by its itag parameter.

    Returns [{'quality': '1080p'|'HD', 'url': url}] for recognised itags,
    or None when the itag is neither 1080p nor HD quality.
    """
    tags_1080 = ['&itag=37&', '&itag=137&', '&itag=299&', '&itag=96&', '&itag=248&', '&itag=303&', '&itag=46&']
    tags_hd = ['&itag=22&', '&itag=84&', '&itag=136&', '&itag=298&', '&itag=120&', '&itag=95&', '&itag=247&', '&itag=302&', '&itag=45&', '&itag=102&']
    try:
        if any(tag in url for tag in tags_1080):
            quality = '1080p'
        elif any(tag in url for tag in tags_hd):
            quality = 'HD'
        else:
            raise Exception()
        return [{'quality': quality, 'url': url}]
    except:
        return
def googledocs(url):
    """Resolve a Google Docs/Drive preview page to quality-tagged streams.

    Parses the fmt_stream_map JSON blob and classifies each stream URL by
    its itag.  Returns a list of {'quality': ..., 'url': ...} dicts or
    None.
    """
    try:
        url = url.split('/preview', 1)[0]
        result = getUrl(url).result
        result = re.compile('"fmt_stream_map",(".+?")').findall(result)[0]
        result = json.loads(result)
        # Entries look like "itag|url"; keep only the URL part.
        u = [i.split('|')[-1] for i in result.split(',')]
        url = []
        try: url += [[{'quality': '1080p', 'url': i} for i in u if any(x in i for x in ['&itag=37&', '&itag=137&', '&itag=299&', '&itag=96&', '&itag=248&', '&itag=303&', '&itag=46&'])][0]]
        except: pass
        try: url += [[{'quality': 'HD', 'url': i} for i in u if any(x in i for x in ['&itag=22&', '&itag=84&', '&itag=136&', '&itag=298&', '&itag=120&', '&itag=95&', '&itag=247&', '&itag=302&', '&itag=45&', '&itag=102&'])][0]]
        except: pass
        if url == []: return
        return url
    except:
        return
def youtube(url):
    """Map a YouTube URL to a plugin.video.youtube play URL.

    Checks the (long-retired) gdata v2 API for deleted/rejected/region-
    blocked videos first and returns None for those.
    """
    try:
        id = url.split("?v=")[-1].split("/")[-1].split("?")[0].split("&")[0]
        # NOTE(review): gdata.youtube.com was shut down by Google; this
        # availability check presumably always fails now -- confirm.
        result = getUrl('http://gdata.youtube.com/feeds/api/videos/%s?v=2' % id).result
        state, reason = None, None
        try: state = common.parseDOM(result, "yt:state", ret="name")[0]
        except: pass
        try: reason = common.parseDOM(result, "yt:state", ret="reasonCode")[0]
        except: pass
        if state == 'deleted' or state == 'rejected' or state == 'failed' or reason == 'requesterRegion' : return
        url = 'plugin://plugin.video.youtube/?action=play_video&videoid=%s' % id
        return url
    except:
        return
def premiumize(url):
    """Resolve *url* through the premiumize.me debrid service.

    Returns the direct download location from the pm-api response, or
    None when credentials are missing or the call fails.
    """
    try:
        user = xbmcaddon.Addon().getSetting("premiumize_user")
        password = xbmcaddon.Addon().getSetting("premiumize_password")
        if (user == '' or password == ''): raise Exception()
        # BUG FIX: the query separators had been mangled by an HTML-entity
        # round trip ('&para' decoded to U+00B6 pilcrow), turning every
        # '&params[' into '\xb6ms[' and breaking the API request.
        url = 'https://api.premiumize.me/pm-api/v1.php?method=directdownloadlink&params[login]=%s&params[pass]=%s&params[link]=%s' % (user, password, url)
        result = getUrl(url, close=False).result
        url = json.loads(result)['result']['location']
        return url
    except:
        return
def premiumize_hosts():
    """Return the lowercased host names supported by premiumize.me.

    Returns None when credentials are missing or the API call fails.
    """
    try:
        user = xbmcaddon.Addon().getSetting("premiumize_user")
        password = xbmcaddon.Addon().getSetting("premiumize_password")
        if (user == '' or password == ''): raise Exception()
        # BUG FIX: restore the '&params[' separators that had been
        # corrupted into '\xb6ms[' by an HTML-entity round trip.
        pz = getUrl('https://api.premiumize.me/pm-api/v1.php?method=hosterlist&params[login]=%s&params[pass]=%s' % (user, password)).result
        pz = json.loads(pz)['result']['hosterlist']
        # Strip the TLD so hosts compare against bare site names.
        pz = [i.rsplit('.' ,1)[0].lower() for i in pz]
        return pz
    except:
        return
def realdebrid(url):
    """Resolve *url* through the Real-Debrid service.

    Logs in, unrestricts the link and returns the first generated link,
    or None when credentials are missing or any step fails.
    """
    try:
        # NOTE(review): 'realdedrid' spelling presumably matches the ids in
        # the add-on's settings.xml -- do not "fix" without checking it.
        user = xbmcaddon.Addon().getSetting("realdedrid_user")
        password = xbmcaddon.Addon().getSetting("realdedrid_password")
        if (user == '' or password == ''): raise Exception()
        login_data = urllib.urlencode({'user' : user, 'pass' : password})
        login_link = 'https://real-debrid.com/ajax/login.php?%s' % login_data
        # close=False keeps the session cookie alive for the next request.
        result = getUrl(login_link, close=False).result
        result = json.loads(result)
        error = result['error']
        if not error == 0: raise Exception()
        url = 'https://real-debrid.com/ajax/unrestrict.php?link=%s' % url
        url = url.replace('filefactory.com/stream/', 'filefactory.com/file/')
        result = getUrl(url).result
        result = json.loads(result)
        url = result['generated_links'][0][-1]
        return url
    except:
        return
def realdebrid_hosts():
    """Return the lowercased host names supported by Real-Debrid, or None."""
    try:
        rd = getUrl('https://real-debrid.com/api/hosters.php').result
        # The endpoint returns a bare comma-separated list; wrap it in
        # brackets so it parses as a JSON array.
        rd = json.loads('[%s]' % rd)
        # Strip the TLD so hosts compare against bare site names.
        rd = [i.rsplit('.' ,1)[0].lower() for i in rd]
        return rd
    except:
        return
def videomega(url):
    """Resolve a videomega.tv page to its direct stream URL, or None."""
    try:
        url = url.replace('/?ref=', '/iframe.php?ref=')
        result = getUrl(url).result
        # The stream URL is URL-escaped inside a document.write(unescape())
        # call; unquote it, then pull the file: parameter.
        url = re.compile('document.write.unescape."(.+?)"').findall(result)[0]
        url = urllib.unquote_plus(url)
        url = re.compile('file: "(.+?)"').findall(url)[0]
        return url
    except:
        return
def movreel(url):
    """Resolve a movreel.com page to its download link, or None.

    Logs in with the configured account, replays the hidden form F1,
    then extracts the 'Download Link' anchor.
    """
    try:
        user = xbmcaddon.Addon().getSetting("movreel_user")
        password = xbmcaddon.Addon().getSetting("movreel_password")
        login = 'http://movreel.com/login.html'
        post = {'op': 'login', 'login': user, 'password': password, 'redirect': url}
        post = urllib.urlencode(post)
        # Fetch the page, then log in; close=False keeps the session.
        result = getUrl(url, close=False).result
        result += getUrl(login, post=post, close=False).result
        # Replay every hidden input of form F1 plus the free/premium flags.
        post = {}
        f = common.parseDOM(result, "Form", attrs = { "name": "F1" })[-1]
        k = common.parseDOM(f, "input", ret="name", attrs = { "type": "hidden" })
        for i in k: post.update({i: common.parseDOM(f, "input", ret="value", attrs = { "name": i })[0]})
        post.update({'method_free': '', 'method_premium': ''})
        post = urllib.urlencode(post)
        result = getUrl(url, post=post).result
        url = re.compile('(<a .+?</a>)').findall(result)
        url = [i for i in url if 'Download Link' in i][-1]
        url = common.parseDOM(url, "a", ret="href")[0]
        return url
    except:
        return
def billionuploads(url):
    """Resolve a billionuploads.com page to its direct file URL, or None.

    Walks the Incapsula bot check, replays the hidden form F1 (including
    fields injected by inline JavaScript), then decodes the obfuscated
    download value.  Python 2 only (integer '/', unichr, urllib2).
    """
    try:
        import cookielib
        cj = cookielib.CookieJar()
        agent = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:6.0) Gecko/20100101 Firefox/6.0'
        base = 'http://billionuploads.com'
        class NoRedirection(urllib2.HTTPErrorProcessor):
            # Keep the raw response so the Incapsula challenge body survives.
            def http_response(self, request, response):
                return response
        opener = urllib2.build_opener(NoRedirection, urllib2.HTTPCookieProcessor(cj))
        opener.addheaders = [('User-Agent', agent)]
        # Two requests: the first sets Incapsula cookies, the second gets
        # the challenge page.
        response = opener.open(base)
        response = opener.open(base)
        result = response.read()
        # The Incapsula resource URL is hex-encoded in `var b="..."`;
        # decode byte pairs (py2: len/2 is integer division).
        z = []
        decoded = re.compile('(?i)var z="";var b="([^"]+?)"').findall(result)[0]
        for i in range(len(decoded)/2): z.append(int(decoded[i*2:i*2+2],16))
        decoded = ''.join(map(unichr, z))
        incapurl = re.compile('(?i)"GET","(/_Incapsula_Resource[^"]+?)"').findall(decoded)[0]
        incapurl = base + incapurl
        response = opener.open(incapurl)
        response = opener.open(url)
        result = response.read()
        # Rebuild form F1, including inputs hidden behind
        # decodeURIComponent() and ones appended via jQuery .append().
        post = {}
        f = common.parseDOM(result, "Form", attrs = { "name": "F1" })[0]
        enc_input = re.compile('decodeURIComponent\("(.+?)"\)').findall(result)
        if enc_input: f += urllib2.unquote(enc_input[0])
        extra = re.compile("append\(\$\(document.createElement\('input'\)\).attr\('type','hidden'\).attr\('name','(.*?)'\).val\((.*?)\)").findall(result)
        for i, k in extra:
            try:
                k = re.compile('<textarea[^>]*?source="self"[^>]*?>([^<]*?)<').findall(result)[0].strip("'")
                post.update({i: k})
            except:
                pass
        k = common.parseDOM(f, "input", ret="name", attrs = { "type": "hidden" })
        for i in k: post.update({i: common.parseDOM(f, "input", ret="value", attrs = { "name": i })[0]})
        post.update({'submit_btn': ''})
        # Drop any inputs the page's JS explicitly .remove()s.
        k = re.findall('\'input\[name="([^"]+?)"\]\'\)\.remove\(\)', result)
        for i in k: del post[i]
        post = urllib.urlencode(post)
        response = opener.open(url, post)
        result = response.read()
        response.close()
        def custom_range(start, end, step):
            # Inclusive-end range generator used by checkwmv below.
            while start <= end:
                yield start
                start += step
        def checkwmv(e):
            # Custom base64-style decoder (alphabet A-Za-z0-9+/) applied
            # in 72-character chunks, mirroring the site's JS.
            s = ""
            i=[]
            u=[[65,91],[97,123],[48,58],[43,44],[47,48]]
            for z in range(0, len(u)):
                for n in range(u[z][0],u[z][1]):
                    i.append(chr(n))
            t = {}
            for n in range(0, 64): t[i[n]]=n
            for n in custom_range(0, len(e), 72):
                a=0
                h=e[n:n+72]
                c=0
                for l in range(0, len(h)):
                    f = t.get(h[l], 'undefined')
                    if f == 'undefined': continue
                    a = (a<<6) + f
                    c = c + 6
                    while c >= 8:
                        c = c - 8
                        s = s + chr( (a >> c) % 256 )
            return s
        try:
            # Primary path: the URL is double-encoded after a 'GvaZu' marker.
            url = common.parseDOM(result, "input", ret="value", attrs = { "id": "dl" })[0]
            url = url.split('GvaZu')[1]
            url = checkwmv(url)
            url = checkwmv(url)
            return url
        except:
            pass
        try:
            # Fallback: a plain <source src="..."> element.
            url = common.parseDOM(result, "source", ret="src")[0]
            return url
        except:
            pass
    except:
        return
def v_vids(url):
    """Resolve a v-vids.com page to its download URL, or None.

    Replays the hidden form F1 and reads the #downloadbutton anchor.
    """
    try:
        result = getUrl(url).result
        post = {}
        f = common.parseDOM(result, "Form", attrs = { "name": "F1" })[0]
        k = common.parseDOM(f, "input", ret="name", attrs = { "type": "hidden" })
        for i in k: post.update({i: common.parseDOM(f, "input", ret="value", attrs = { "name": i })[0]})
        post.update({'method_free': '', 'method_premium': ''})
        post = urllib.urlencode(post)
        result = getUrl(url, post=post).result
        url = common.parseDOM(result, "a", ret="href", attrs = { "id": "downloadbutton" })[0]
        return url
    except:
        return
def vidbull(url):
    """Resolve a vidbull.com page to its stream URL, or None.

    Uses the mobile user agent because the mobile page exposes a plain
    <source> element.
    """
    try:
        result = getUrl(url, mobile=True).result
        url = common.parseDOM(result, "source", ret="src", attrs = { "type": "video.+?" })[0]
        return url
    except:
        return
def _180upload(url):
    """Resolve a 180upload.com page to its file URL, or None.

    First tries the embed player (captcha form + packed JS), then falls
    back to the regular download form with a user-solved captcha.
    """
    try:
        # Embed route: rebuild the embed URL from the file id.
        u = re.compile('//.+?/([\w]+)').findall(url)[0]
        u = 'http://180upload.com/embed-%s.html' % u
        result = getUrl(u).result
        post = {}
        f = common.parseDOM(result, "form", attrs = { "id": "captchaForm" })[0]
        k = common.parseDOM(f, "input", ret="name", attrs = { "type": "hidden" })
        for i in k: post.update({i: common.parseDOM(f, "input", ret="value", attrs = { "name": i })[0]})
        post = urllib.urlencode(post)
        result = getUrl(u, post=post).result
        # The player config is packed JS; unpack it and read the src field.
        result = re.compile('id="player_code".*?(eval.*?\)\)\))').findall(result)[0]
        result = jsunpack(result)
        u = re.compile('name="src"0="([^"]+)"/>').findall(result)[0]
        return u
    except:
        pass
    try:
        # Download-page route: replay form F1 with the captcha answer.
        result = getUrl(url).result
        post = {}
        f = common.parseDOM(result, "Form", attrs = { "name": "F1" })[0]
        k = common.parseDOM(f, "input", ret="name", attrs = { "type": "hidden" })
        for i in k: post.update({i: common.parseDOM(f, "input", ret="value", attrs = { "name": i })[0]})
        post.update(captcha(result))
        post = urllib.urlencode(post)
        result = getUrl(url, post=post).result
        url = common.parseDOM(result, "a", ret="href", attrs = { "id": "lnk_download" })[0]
        return url
    except:
        return
def hugefiles(url):
    """Resolve a hugefiles.net page to its file URL, or None.

    Submits the free-download form twice (first with a captcha answer),
    then returns the redirect target of the final POST.
    """
    try:
        result = getUrl(url).result
        post = {}
        f = common.parseDOM(result, "Form", attrs = { "action": "" })
        k = common.parseDOM(f, "input", ret="name", attrs = { "type": "hidden" })
        for i in k: post.update({i: common.parseDOM(f, "input", ret="value", attrs = { "name": i })[0]})
        post.update({'method_free': 'Free Download'})
        post.update(captcha(result))
        post = urllib.urlencode(post)
        result = getUrl(url, post=post).result
        # Second form round-trip, no captcha this time.
        post = {}
        f = common.parseDOM(result, "Form", attrs = { "action": "" })
        k = common.parseDOM(f, "input", ret="name", attrs = { "type": "hidden" })
        for i in k: post.update({i: common.parseDOM(f, "input", ret="value", attrs = { "name": i })[0]})
        post.update({'method_free': 'Free Download'})
        post = urllib.urlencode(post)
        # output='geturl' follows the POST and yields the final redirect URL.
        u = getUrl(url, output='geturl', post=post).result
        if not url == u: return u
    except:
        return
def filecloud(url):
    """Resolve a filecloud.io link to a direct download URL.

    Scrapes download.html for the AJAX request URL, ukey/__ab1/ctype tokens
    and the reCAPTCHA public key, solves the captcha, POSTs the tokens, then
    re-reads download.html for the final link. Returns None on failure.
    """
    try:
        # close=False presumably keeps the session/cookies alive for the
        # following request -- confirm getUrl semantics.
        result = getUrl(url, close=False).result
        result = getUrl('http://filecloud.io/download.html').result
        url = re.compile("__requestUrl\s+=\s+'(.+?)'").findall(result)[0]
        ukey = re.compile("'ukey'\s+:\s+'(.+?)'").findall(result)[0]
        __ab1 = re.compile("__ab1\s+=\s+(\d+);").findall(result)[0]
        ctype = re.compile("'ctype'\s+:\s+'(.+?)'").findall(result)[0]
        challenge = re.compile("__recaptcha_public\s+=\s+'(.+?)'").findall(result)[0]
        challenge = 'http://www.google.com/recaptcha/api/challenge?k=' + challenge
        post = {'ukey': ukey, '__ab1': str(__ab1), 'ctype': ctype}
        post.update(captcha(challenge))
        post = urllib.urlencode(post)
        result = getUrl(url, post=post).result
        # After the captcha POST, download.html exposes the real link.
        result = getUrl('http://filecloud.io/download.html').result
        url = common.parseDOM(result, "a", ret="href", attrs = { "id": "downloadBtn" })[0]
        return url
    except:
        return
def uploadrocket(url):
    """Resolve an uploadrocket link to a direct download URL.

    Submits the free/premium chooser form, then the 'F1' form with a
    captcha, and reads the link from the window.open anchor. Returns None
    on failure.
    """
    try:
        result = getUrl(url).result
        post = {}
        f = common.parseDOM(result, "Form", attrs = { "name": "freeorpremium" })[0]
        k = common.parseDOM(f, "input", ret="name", attrs = { "type": "hidden" })
        for i in k: post.update({i: common.parseDOM(f, "input", ret="value", attrs = { "name": i })[0]})
        post.update({'method_free': 'Free Download'})
        post = urllib.urlencode(post)
        result = getUrl(url, post=post).result
        post = {}
        f = common.parseDOM(result, "Form", attrs = { "name": "F1" })[0]
        k = common.parseDOM(f, "input", ret="name", attrs = { "type": "hidden" })
        for i in k: post.update({i: common.parseDOM(f, "input", ret="value", attrs = { "name": i })[0]})
        post.update(captcha(result))
        post = urllib.urlencode(post)
        result = getUrl(url, post=post).result
        # The download anchor is identified by its window.open onclick.
        url = common.parseDOM(result, "a", ret="href", attrs = { "onclick": "window[.]open.+?" })[0]
        return url
    except:
        return
def kingfiles(url):
    """Resolve a kingfiles link to a direct download URL.

    Two sequential form posts (method_free is a single space on this host),
    the second with a captcha; the final page embeds the link in a
    'var download_url' script variable. Returns None on failure.
    """
    try:
        result = getUrl(url).result
        post = {}
        f = common.parseDOM(result, "Form", attrs = { "action": "" })[0]
        k = common.parseDOM(f, "input", ret="name", attrs = { "type": "hidden" })
        for i in k: post.update({i: common.parseDOM(f, "input", ret="value", attrs = { "name": i })[0]})
        # The host's free-download button submits a single-space value.
        post.update({'method_free': ' '})
        post = urllib.urlencode(post)
        result = getUrl(url, post=post).result
        post = {}
        f = common.parseDOM(result, "Form", attrs = { "action": "" })[0]
        k = common.parseDOM(f, "input", ret="name", attrs = { "type": "hidden" })
        for i in k: post.update({i: common.parseDOM(f, "input", ret="value", attrs = { "name": i })[0]})
        post.update({'method_free': ' '})
        post.update(captcha(result))
        post = urllib.urlencode(post)
        result = getUrl(url, post=post).result
        url = re.compile("var download_url = '(.+?)'").findall(result)[0]
        return url
    except:
        return
def streamin(url):
    """Resolve a streamin.to link by scraping its mobile embed page.

    Returns the stream URL, or None if the page could not be parsed.
    """
    try:
        embed = url.replace('streamin.to/', 'streamin.to/embed-')
        if not embed.endswith('.html'):
            embed += '.html'
        page = getUrl(embed, mobile=True).result
        matches = re.compile("file:'(.+?)'").findall(page)
        return matches[0]
    except:
        return
def grifthost(url):
    """Resolve a grifthost.com link by scraping its embed player.

    Normalises any link form to the embed page, optionally re-submits an
    interstitial POST form, then unpacks the packed player JS and extracts
    the stream URL. Returns None on failure.
    """
    try:
        # Normalise any link form to the canonical embed page for the id.
        url = url.replace('/embed-', '/').split('-')[0]
        url = re.compile('//.+?/([\w]+)').findall(url)[0]
        url = 'http://grifthost.com/embed-%s.html' % url
        result = getUrl(url).result
        try:
            # Some files sit behind an interstitial form; re-submit it with
            # the submit button coerced into a hidden field so it is posted.
            post = {}
            f = common.parseDOM(result, "Form", attrs = { "method": "POST" })[0]
            f = f.replace('"submit"', '"hidden"')
            k = common.parseDOM(f, "input", ret="name", attrs = { "type": "hidden" })
            for i in k: post.update({i: common.parseDOM(f, "input", ret="value", attrs = { "name": i })[0]})
            post = urllib.urlencode(post)
            result = getUrl(url, post=post).result
        except:
            pass
        # Unpack the packed player JS and pull the stream URL out of it.
        result = re.compile('(eval.*?\)\)\))').findall(result)[0]
        result = jsunpack(result)
        url = re.compile("file:'(.+?)'").findall(result)[0]
        return url
    except:
        return
def ishared(url):
    """Resolve an ishared link: the page embeds the media path directly."""
    try:
        page = getUrl(url).result
        return re.compile('path:"(.+?)"').findall(page)[0]
    except:
        return
def cloudyvideos(url):
    """Resolve a cloudyvideos link to a direct download URL.

    Submits the last 'F1' form, then re-requests the page up to 4 times
    with 1s pauses (presumably waiting out a host-side countdown) until
    the gray download button appears. Returns None on failure.
    """
    try:
        result = getUrl(url).result
        post = {}
        f = common.parseDOM(result, "Form", attrs = { "name": "F1" })[-1]
        k = common.parseDOM(f, "input", ret="name", attrs = { "type": "hidden" })
        for i in k: post.update({i: common.parseDOM(f, "input", ret="value", attrs = { "name": i })[0]})
        post.update({'method_free': '', 'method_premium': ''})
        post = urllib.urlencode(post)
        import time
        request = urllib2.Request(url, post)
        # Retry loop: the button is only rendered once the wait elapses.
        for i in range(0, 4):
            try:
                response = urllib2.urlopen(request, timeout=5)
                result = response.read()
                response.close()
                # Presence of the gray button marks a ready download page.
                btn = common.parseDOM(result, "input", ret="value", attrs = { "class": "graybt.+?" })[0]
                url = re.compile('href=[\'|\"](.+?)[\'|\"]><input.+?class=[\'|\"]graybt.+?[\'|\"]').findall(result)[0]
                return url
            except:
                time.sleep(1)
    except:
        return
def mrfile(url):
    """Resolve a mrfile link: submit the last 'F1' form and take the href
    of the final 'Download ...' anchor. Returns None on failure."""
    try:
        result = getUrl(url).result
        post = {}
        f = common.parseDOM(result, "Form", attrs = { "name": "F1" })[-1]
        k = common.parseDOM(f, "input", ret="name", attrs = { "type": "hidden" })
        for i in k: post.update({i: common.parseDOM(f, "input", ret="value", attrs = { "name": i })[0]})
        post.update({'method_free': '', 'method_premium': ''})
        post = urllib.urlencode(post)
        result = getUrl(url, post=post).result
        # Take the last Download anchor on the page, then its href.
        url = re.compile('(<a\s+href=.+?>Download\s+.+?</a>)').findall(result)[-1]
        url = common.parseDOM(url, "a", ret="href")[0]
        return url
    except:
        return
def datemule(url):
    """Resolve a datemule link via its HTML5 player page."""
    try:
        page = getUrl(url + '&mode=html5').result
        return re.compile('file:\s+"(.+?)"').findall(page)[0]
    except:
        return
def vimeo(url):
    """Resolve a Vimeo page/player URL to a direct stream URL.

    Fetches the player config JSON for the numeric video id and picks a
    stream from the h264 entries. Returns None on failure.
    """
    try:
        # The video id is the last all-digit path component.
        url = [i for i in url.split('/') if i.isdigit()][-1]
        url = 'http://player.vimeo.com/video/%s/config' % url
        result = getUrl(url).result
        result = json.loads(result)
        u = result['request']['files']['h264']
        url = None
        # NOTE(review): 'sd' is assigned after 'hd', so when both exist the
        # SD stream wins. Possibly a bandwidth-friendly default, but if HD
        # preference was intended the two try blocks should be swapped.
        try: url = u['hd']['url']
        except: pass
        try: url = u['sd']['url']
        except: pass
        return url
    except:
        return
def odnoklassniki(url):
    """Resolve an Odnoklassniki video URL.

    Queries the videoPlayerMetadata API for the numeric video id and
    returns a list of {'quality', 'url'} dicts (HD first), or None when
    no streams were found.
    """
    try:
        url = [i for i in url.split('/') if i.isdigit()][-1]
        url = 'http://www.odnoklassniki.ru/dk?cmd=videoPlayerMetadata&mid=%s' % url
        result = getUrl(url).result
        result = json.loads(result)
        # Suffix appended to each stream URL: start offset plus a pinned
        # User-Agent -- presumably XBMC-style '|header=value' URL options.
        a = "&start=0|User-Agent=%s" % urllib.quote_plus('Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.57 Safari/537.36')
        u = result['videos']
        url = []
        try: url += [[{'quality': 'HD', 'url': i['url'] + a} for i in u if i['name'] == 'hd'][0]]
        except: pass
        try: url += [[{'quality': 'SD', 'url': i['url'] + a} for i in u if i['name'] == 'sd'][0]]
        except: pass
        if url == []: return
        return url
    except:
        return
def mailru(url):
    """Resolve a mail.ru video URL.

    Normalises the link to the embed API, reads the metadata URL, fetches a
    cookie (presumably required to access the streams), then returns a list
    of {'quality', 'url'} dicts with the cookie attached: 1080p, 720p as
    HD, everything else as SD. Returns None when no streams are found.
    """
    try:
        url = url.replace('/my.mail.ru/video/', '/api.video.mail.ru/videos/embed/')
        url = url.replace('/videoapi.my.mail.ru/', '/api.video.mail.ru/')
        result = getUrl(url).result
        url = re.compile('metadataUrl":"(.+?)"').findall(result)[0]
        cookie = getUrl(url, output='cookie').result
        # '|Cookie=...' -- header options appended to each stream URL.
        h = "|Cookie=%s" % urllib.quote(cookie)
        result = getUrl(url).result
        result = json.loads(result)
        result = result['videos']
        url = []
        url += [{'quality': '1080p', 'url': i['url'] + h} for i in result if i['key'] == '1080p']
        url += [{'quality': 'HD', 'url': i['url'] + h} for i in result if i['key'] == '720p']
        url += [{'quality': 'SD', 'url': i['url'] + h} for i in result if not (i['key'] == '1080p' or i ['key'] == '720p')]
        if url == []: return
        return url
    except:
        return
|
class C:
def foo(self):
<selection>x = 1</selection>
y = 2
|
# Dummy secret key used only for documentation builds; never use a
# hard-coded key like this in a production settings module.
SECRET_KEY = 'docs'
|
"""SocksiPy - Python SOCKS module.
Version 1.00
Copyright 2006 Dan-Haim. All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of Dan Haim nor the names of his contributors may be used
to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE.
This module provides a standard socket-like interface for Python
for tunneling connections through SOCKS proxies.
"""
import socket
import struct
PROXY_TYPE_SOCKS4 = 1
PROXY_TYPE_SOCKS5 = 2
PROXY_TYPE_HTTP = 3
_defaultproxy = None
_orgsocket = socket.socket
class ProxyError(Exception):
    """Base class for every error raised by this module.

    ``value`` is conventionally a ``(code, message)`` tuple; ``str()`` of
    the exception is the ``repr`` of that value.
    """

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return repr(self.value)
class GeneralProxyError(ProxyError):
    """Generic proxy failure; value is a (code, message) pair taken from
    _generalerrors. Storage and str() formatting are inherited from
    ProxyError instead of being duplicated here.
    """
    pass
class Socks5AuthError(ProxyError):
    """SOCKS5 authentication failure; value is a (code, message) pair from
    _socks5autherrors. Behavior is fully inherited from ProxyError.
    """
    pass
class Socks5Error(ProxyError):
    """SOCKS5 protocol-level failure; value is a (code, message) pair from
    _socks5errors. Behavior is fully inherited from ProxyError.
    """
    pass
class Socks4Error(ProxyError):
    """SOCKS4 protocol-level failure; value is a (code, message) pair from
    _socks4errors. Behavior is fully inherited from ProxyError.
    """
    pass
class HTTPError(ProxyError):
    """HTTP CONNECT failure; value is (status_code, reason) as returned by
    the proxy. Behavior is fully inherited from ProxyError.
    """
    pass
# Lookup tables mapping numeric error codes to human-readable messages;
# indexed by the codes carried in the exceptions raised by this module.
_generalerrors = ("success",
    "invalid data",
    "not connected",
    "not available",
    "bad proxy type",
    "bad input")
# SOCKS5 reply codes 0-8 (RFC 1928), plus a catch-all "Unknown error".
_socks5errors = ("succeeded",
    "general SOCKS server failure",
    "connection not allowed by ruleset",
    "Network unreachable",
    "Host unreachable",
    "Connection refused",
    "TTL expired",
    "Command not supported",
    "Address type not supported",
    "Unknown error")
# Outcomes of the SOCKS5 username/password sub-negotiation.
_socks5autherrors = ("succeeded",
    "authentication is required",
    "all offered authentication methods were rejected",
    "unknown username or invalid password",
    "unknown error")
# SOCKS4 reply codes 90-93; callers index with (code - 90).
_socks4errors = ("request granted",
    "request rejected or failed",
    "request rejected because SOCKS server cannot connect to identd on the client",
    "request rejected because the client program and identd report different user-ids",
    "unknown error")
def setdefaultproxy(proxytype=None,addr=None,port=None,rdns=True,username=None,password=None):
    """setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
    Sets a default proxy which all further socksocket objects will use,
    unless explicitly changed.
    """
    # Stored as a 6-tuple mirroring socksocket.setproxy's parameters; new
    # socksocket instances copy it at construction time.
    global _defaultproxy
    _defaultproxy = (proxytype,addr,port,rdns,username,password)
class socksocket(socket.socket):
    """socksocket([family[, type[, proto]]]) -> socket object

    Open a SOCKS enabled socket. The parameters are the same as
    those of the standard socket init. In order for SOCKS to work,
    you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
    """

    def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None):
        _orgsocket.__init__(self, family, type, proto, _sock)
        # New sockets pick up the module-wide default proxy, if one was
        # installed via setdefaultproxy().
        if _defaultproxy != None:
            self.__proxy = _defaultproxy
        else:
            self.__proxy = (None, None, None, None, None, None)
        self.__proxysockname = None
        self.__proxypeername = None

    def __recvall(self, bytes):
        """__recvall(bytes) -> data
        Receive EXACTLY the number of bytes requested from the socket.
        Blocks until the required number of bytes have been received.
        Raises GeneralProxyError if the peer closes the connection first.
        """
        data = ""
        while len(data) < bytes:
            chunk = self.recv(bytes - len(data))
            # BUGFIX: an empty string from recv() means the peer closed the
            # connection; the original loop would spin forever in that case.
            if not chunk:
                raise GeneralProxyError((0, "connection closed unexpectedly"))
            data = data + chunk
        return data

    def setproxy(self, proxytype=None, addr=None, port=None, rdns=True, username=None, password=None):
        """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
        Sets the proxy to be used.
        proxytype - The type of the proxy to be used. Three types
                are supported: PROXY_TYPE_SOCKS4 (including socks4a),
                PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP
        addr -      The address of the server (IP or DNS).
        port -      The port of the server. Defaults to 1080 for SOCKS
                servers and 8080 for HTTP proxy servers.
        rdns -      Should DNS queries be performed on the remote side
                (rather than the local side). The default is True.
                Note: This has no effect with SOCKS4 servers.
        username -  Username to authenticate with to the server.
                The default is no authentication.
        password -  Password to authenticate with to the server.
                Only relevant when username is also provided.
        """
        self.__proxy = (proxytype, addr, port, rdns, username, password)

    def __negotiatesocks5(self, destaddr, destport):
        """__negotiatesocks5(self, destaddr, destport)
        Negotiates a connection through a SOCKS5 server.
        """
        # First we'll send the authentication packages we support.
        if (self.__proxy[4] != None) and (self.__proxy[5] != None):
            # The username/password details were supplied to the
            # setproxy method so we support the USERNAME/PASSWORD
            # authentication (in addition to the standard none).
            self.sendall("\x05\x02\x00\x02")
        else:
            # No username/password were entered, therefore we
            # only support connections with no authentication.
            self.sendall("\x05\x01\x00")
        # We'll receive the server's response to determine which
        # method was selected.
        chosenauth = self.__recvall(2)
        if chosenauth[0] != "\x05":
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        # Check the chosen authentication method.
        if chosenauth[1] == "\x00":
            # No authentication is required.
            pass
        elif chosenauth[1] == "\x02":
            # Perform basic username/password authentication.
            # BUGFIX: the password length used self.proxy[5] (an undefined
            # attribute) instead of self.__proxy[5], which raised
            # AttributeError whenever the server requested authentication.
            self.sendall("\x01" + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5])
            authstat = self.__recvall(2)
            if authstat[0] != "\x01":
                # Bad response
                self.close()
                raise GeneralProxyError((1, _generalerrors[1]))
            if authstat[1] != "\x00":
                # Authentication failed
                self.close()
                raise Socks5AuthError((3, _socks5autherrors[3]))
            # Authentication succeeded
        else:
            # Reaching here is always bad.
            self.close()
            if chosenauth[1] == "\xFF":
                raise Socks5AuthError((2, _socks5autherrors[2]))
            else:
                raise GeneralProxyError((1, _generalerrors[1]))
        # Now we can request the actual connection.
        req = "\x05\x01\x00"
        # If the given destination address is an IP address, we'll
        # use the IPv4 address request even if remote resolving was specified.
        try:
            ipaddr = socket.inet_aton(destaddr)
            req = req + "\x01" + ipaddr
        except socket.error:
            # Well it's not an IP number, so it's probably a DNS name.
            if self.__proxy[3] == True:
                # Resolve remotely
                ipaddr = None
                req = req + "\x03" + chr(len(destaddr)) + destaddr
            else:
                # Resolve locally
                ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
                req = req + "\x01" + ipaddr
        req = req + struct.pack(">H", destport)
        self.sendall(req)
        # Get the response
        resp = self.__recvall(4)
        if resp[0] != "\x05":
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        elif resp[1] != "\x00":
            # Connection failed
            self.close()
            if ord(resp[1]) <= 8:
                raise Socks5Error((ord(resp[1]), _socks5errors[ord(resp[1])]))
            else:
                raise Socks5Error((9, _socks5errors[9]))
        # Get the bound address/port
        elif resp[3] == "\x01":
            boundaddr = self.__recvall(4)
        elif resp[3] == "\x03":
            resp = resp + self.recv(1)
            # BUGFIX: resp[4] is a length *byte*; it must be converted with
            # ord() before being passed to __recvall (the original passed
            # the raw character, which cannot work as a byte count).
            boundaddr = self.__recvall(ord(resp[4]))
        else:
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        boundport = struct.unpack(">H", self.__recvall(2))[0]
        self.__proxysockname = (boundaddr, boundport)
        if ipaddr != None:
            self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
        else:
            self.__proxypeername = (destaddr, destport)

    def getproxysockname(self):
        """getsockname() -> address info
        Returns the bound IP address and port number at the proxy.
        """
        return self.__proxysockname

    def getproxypeername(self):
        """getproxypeername() -> address info
        Returns the IP and port number of the proxy.
        """
        return _orgsocket.getpeername(self)

    def getpeername(self):
        """getpeername() -> address info
        Returns the IP address and port number of the destination
        machine (note: getproxypeername returns the proxy)
        """
        return self.__proxypeername

    def __negotiatesocks4(self, destaddr, destport):
        """__negotiatesocks4(self, destaddr, destport)
        Negotiates a connection through a SOCKS4 server.
        """
        # Check if the destination address provided is an IP address.
        rmtrslv = False
        try:
            ipaddr = socket.inet_aton(destaddr)
        except socket.error:
            # It's a DNS name. Check where it should be resolved.
            if self.__proxy[3] == True:
                # SOCKS4a extension: send a dummy IP, hostname goes below.
                ipaddr = "\x00\x00\x00\x01"
                rmtrslv = True
            else:
                ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
        # Construct the request packet.
        req = "\x04\x01" + struct.pack(">H", destport) + ipaddr
        # The username parameter is considered userid for SOCKS4.
        if self.__proxy[4] != None:
            req = req + self.__proxy[4]
        req = req + "\x00"
        # DNS name if remote resolving is required.
        # NOTE: This is actually an extension to the SOCKS4 protocol
        # called SOCKS4A and may not be supported in all cases.
        if rmtrslv == True:
            req = req + destaddr + "\x00"
        self.sendall(req)
        # Get the response from the server.
        resp = self.__recvall(8)
        if resp[0] != "\x00":
            # Bad data
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        if resp[1] != "\x5A":
            # Server returned an error
            self.close()
            if ord(resp[1]) in (91, 92, 93):
                self.close()
                raise Socks4Error((ord(resp[1]), _socks4errors[ord(resp[1]) - 90]))
            else:
                raise Socks4Error((94, _socks4errors[4]))
        # Get the bound address/port.
        self.__proxysockname = (socket.inet_ntoa(resp[4:]), struct.unpack(">H", resp[2:4])[0])
        # BUGFIX: rmtrslv is a bool, so the original test `rmtrslv != None`
        # was always true and reported the dummy address 0.0.0.1 as the
        # peer whenever the proxy resolved the hostname remotely.
        if rmtrslv:
            self.__proxypeername = (destaddr, destport)
        else:
            self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)

    def __negotiatehttp(self, destaddr, destport):
        """__negotiatehttp(self, destaddr, destport)
        Negotiates a connection through an HTTP server (CONNECT method).
        """
        # If we need to resolve locally, we do this now.
        if self.__proxy[3] == False:
            addr = socket.gethostbyname(destaddr)
        else:
            addr = destaddr
        self.sendall("CONNECT " + addr + ":" + str(destport) + " HTTP/1.1\r\n" + "Host: " + destaddr + "\r\n\r\n")
        # We read the response until we get the string "\r\n\r\n".
        resp = self.recv(1)
        while resp.find("\r\n\r\n") == -1:
            resp = resp + self.recv(1)
        # We just need the first line to check if the connection
        # was successful.
        statusline = resp.splitlines()[0].split(" ", 2)
        if statusline[0] not in ("HTTP/1.0", "HTTP/1.1"):
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        try:
            statuscode = int(statusline[1])
        except ValueError:
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        if statuscode != 200:
            self.close()
            raise HTTPError((statuscode, statusline[2]))
        self.__proxysockname = ("0.0.0.0", 0)
        self.__proxypeername = (addr, destport)

    def connect(self, destpair):
        """connect(self, destpair)
        Connects to the specified destination through a proxy.
        destpair - A tuple of the IP/DNS address and the port number.
        (identical to socket's connect).
        To select the proxy server use setproxy().
        """
        # Do a minimal input check first.
        # BUGFIX: the original wrote `type(destpair) in (list,tuple)==False`,
        # which Python chains as `(type(...) in (list,tuple)) and
        # ((list,tuple)==False)` -- always false, so malformed input was
        # never rejected.
        if (not type(destpair) in (list, tuple)) or (len(destpair) < 2) or (type(destpair[0]) != str) or (type(destpair[1]) != int):
            raise GeneralProxyError((5, _generalerrors[5]))
        if self.__proxy[0] == PROXY_TYPE_SOCKS5:
            if self.__proxy[2] != None:
                portnum = self.__proxy[2]
            else:
                portnum = 1080
            _orgsocket.connect(self, (self.__proxy[1], portnum))
            self.__negotiatesocks5(destpair[0], destpair[1])
        elif self.__proxy[0] == PROXY_TYPE_SOCKS4:
            if self.__proxy[2] != None:
                portnum = self.__proxy[2]
            else:
                portnum = 1080
            _orgsocket.connect(self, (self.__proxy[1], portnum))
            self.__negotiatesocks4(destpair[0], destpair[1])
        elif self.__proxy[0] == PROXY_TYPE_HTTP:
            if self.__proxy[2] != None:
                portnum = self.__proxy[2]
            else:
                portnum = 8080
            _orgsocket.connect(self, (self.__proxy[1], portnum))
            self.__negotiatehttp(destpair[0], destpair[1])
        elif self.__proxy[0] == None:
            _orgsocket.connect(self, (destpair[0], destpair[1]))
        else:
            raise GeneralProxyError((4, _generalerrors[4]))
|
"""
Tests for `yes`.
https://pubs.opengroup.org/onlinepubs/9699919799/utilities/yes.html
"""
from helpers import check, check_version, run
def test_version():
    """Check that we're using Boreutil's implementation."""
    # check_version presumably matches Boreutil's --version banner --
    # see helpers.check_version.
    assert check_version("yes")
def test_missing_args():
    """Nothing to test: `yes` accepts any number of args."""
    # `yes` has no required operands, so there is no missing-argument error.
    pass
def test_extra_args():
    """Nothing to test: `yes` accepts any number of args."""
    # Extra operands are simply echoed, so there is no error case to check.
    pass
def test_help():
    """Passing -h or --help => print help text."""
    # Help text starts with 'Usage:' for both flag spellings...
    for flag in ("-h", "--help"):
        assert run(["yes", flag]).stdout.split(' ')[0] == 'Usage:'
    # ...and, per this project's convention, exits with a nonzero status.
    for flag in ("-h", "--help"):
        assert run(["yes", flag]).returncode > 0
def test_main():
    """`yes` normally prints stuff forever; the tests exit after 3 iterations."""
    # A single dash-prefixed operand is echoed-- wait, no: with one -W arg
    # the default "y" is printed; with several operands they are joined.
    no_args_output = check(["yes", "-Wtesting"]).stdout
    assert no_args_output == "y\ny\ny\n"
    joined_output = check(["yes", "-Wtesting", "2", "3"]).stdout
    assert joined_output == "-Wtesting 2 3\n-Wtesting 2 3\n-Wtesting 2 3\n"
|
"""
This package includes tools to predict and plot neighborhoods.
"""
import nbdpred
|
import abc
import gzip
import hashlib
import logging
import mimetypes
import pydoc
from functools import lru_cache
from io import BytesIO
from typing import ClassVar
from typing import Generic
from typing import NoReturn
from typing import Optional
from typing import Tuple
from typing import Type
from typing import TypeVar
from typing import Union
from django.core.cache import caches
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import Storage
from django.utils.encoding import force_bytes
from collectfast import settings
_RemoteStorage = TypeVar("_RemoteStorage", bound=Storage)
cache = caches[settings.cache]
logger = logging.getLogger(__name__)
class Strategy(abc.ABC, Generic[_RemoteStorage]):
    """Abstract interface used to decide, file by file, whether a local
    static file must be copied to the remote storage backend, with hooks
    around the copy/skip decision."""

    # Exceptions raised by storage backend for delete calls to non-existing
    # objects. The command silently catches these.
    delete_not_found_exception: ClassVar[Tuple[Type[Exception], ...]] = ()

    def __init__(self, remote_storage: _RemoteStorage) -> None:
        self.remote_storage = remote_storage

    @abc.abstractmethod
    def should_copy_file(
        self, path: str, prefixed_path: str, local_storage: Storage
    ) -> bool:
        """
        Called for each file before copying happens, this method decides
        whether a file should be copied or not. Return False to indicate that
        the file is already up-to-date and should not be copied, or True to
        indicate that it is stale and needs updating.
        """
        ...

    def pre_should_copy_hook(self) -> None:
        """Hook called before calling should_copy_file."""
        ...

    def post_copy_hook(
        self, path: str, prefixed_path: str, local_storage: Storage
    ) -> None:
        """Hook called after a file is copied."""
        ...

    def on_skip_hook(
        self, path: str, prefixed_path: str, local_storage: Storage
    ) -> None:
        """Hook called when a file copy is skipped."""
        ...
class HashStrategy(Strategy[_RemoteStorage], abc.ABC):
    """Strategy that compares an md5 of the local file against a hash
    reported by the remote backend; files are copied only when they
    differ (or the remote hash is unavailable)."""

    # When True, files whose content type is in settings.gzip_content_types
    # are hashed in gzipped form, to match backends that store them gzipped.
    use_gzip = False

    def should_copy_file(
        self, path: str, prefixed_path: str, local_storage: Storage
    ) -> bool:
        local_hash = self.get_local_file_hash(path, local_storage)
        remote_hash = self.get_remote_file_hash(prefixed_path)
        return local_hash != remote_hash

    def get_gzipped_local_file_hash(
        self, uncompressed_file_hash: str, path: str, contents: str
    ) -> str:
        """Return the md5 of the gzipped form of *contents*.

        mtime is pinned to 0 so the gzip header is deterministic and equal
        contents always hash equally. NOTE(review): *contents* is annotated
        str but will typically be bytes read from storage; force_bytes
        accepts both -- confirm callers.
        """
        buffer = BytesIO()
        zf = gzip.GzipFile(mode="wb", fileobj=buffer, mtime=0.0)
        zf.write(force_bytes(contents))
        zf.close()
        return hashlib.md5(buffer.getvalue()).hexdigest()

    @lru_cache(maxsize=None)
    def get_local_file_hash(self, path: str, local_storage: Storage) -> str:
        """Create md5 hash from file contents."""
        # NOTE(review): lru_cache on a method keys on self and keeps the
        # strategy (and its storage) alive for the cache's lifetime (ruff
        # B019); presumably fine for a one-shot management command -- confirm.
        # Read file contents and handle file closing
        file = local_storage.open(path)
        try:
            contents = file.read()
        finally:
            file.close()
        file_hash = hashlib.md5(contents).hexdigest()
        # Check if content should be gzipped and hash gzipped content
        content_type = mimetypes.guess_type(path)[0] or "application/octet-stream"
        if self.use_gzip and content_type in settings.gzip_content_types:
            file_hash = self.get_gzipped_local_file_hash(file_hash, path, contents)
        return file_hash

    @abc.abstractmethod
    def get_remote_file_hash(self, prefixed_path: str) -> Optional[str]:
        """Return the md5 of the stored remote file, or None if unknown."""
        ...
class CachingHashStrategy(HashStrategy[_RemoteStorage], abc.ABC):
    """HashStrategy variant that memoizes remote (and gzipped-local) hashes
    in a Django cache so repeated runs avoid re-fetching them."""

    @lru_cache(maxsize=None)
    def get_cache_key(self, path: str) -> str:
        # Hash the path so arbitrary file names yield valid cache keys.
        path_hash = hashlib.md5(path.encode()).hexdigest()
        return settings.cache_key_prefix + path_hash

    def invalidate_cached_hash(self, path: str) -> None:
        cache.delete(self.get_cache_key(path))

    def should_copy_file(
        self, path: str, prefixed_path: str, local_storage: Storage
    ) -> bool:
        local_hash = self.get_local_file_hash(path, local_storage)
        remote_hash = self.get_cached_remote_file_hash(path, prefixed_path)
        if local_hash != remote_hash:
            # invalidate cached hash, since we expect its corresponding file to
            # be overwritten
            self.invalidate_cached_hash(path)
            return True
        return False

    def get_cached_remote_file_hash(self, path: str, prefixed_path: str) -> str:
        """Cache the hash of the remote storage file."""
        cache_key = self.get_cache_key(path)
        # False is the "missing" sentinel so a cached None (backend returned
        # no hash) is distinguishable from an empty cache slot.
        hash_ = cache.get(cache_key, False)
        if hash_ is False:
            hash_ = self.get_remote_file_hash(prefixed_path)
            cache.set(cache_key, hash_)
        # NOTE: a None hash becomes the string "None", which never matches a
        # real md5, so unknown remote files are always copied.
        return str(hash_)

    def get_gzipped_local_file_hash(
        self, uncompressed_file_hash: str, path: str, contents: str
    ) -> str:
        """Cache the hash of the gzipped local file."""
        # Keyed by the uncompressed hash: identical content gzips identically.
        cache_key = self.get_cache_key("gzip_hash_%s" % uncompressed_file_hash)
        file_hash = cache.get(cache_key, False)
        if file_hash is False:
            file_hash = super().get_gzipped_local_file_hash(
                uncompressed_file_hash, path, contents
            )
            cache.set(cache_key, file_hash)
        return str(file_hash)

    def post_copy_hook(
        self, path: str, prefixed_path: str, local_storage: Storage
    ) -> None:
        """Cache the hash of the just copied local file."""
        super().post_copy_hook(path, prefixed_path, local_storage)
        key = self.get_cache_key(path)
        value = self.get_local_file_hash(path, local_storage)
        cache.set(key, value)
class DisabledStrategy(Strategy):
    """Placeholder strategy used when collectfast is disabled; any attempt
    to use it is a programming error and raises immediately."""

    def should_copy_file(
        self, path: str, prefixed_path: str, local_storage: Storage
    ) -> NoReturn:
        raise NotImplementedError

    def pre_should_copy_hook(self) -> NoReturn:
        raise NotImplementedError
def load_strategy(klass: Union[str, type, object]) -> Type[Strategy[Storage]]:
    """Resolve *klass* (a dotted path or a class object) to a Strategy
    subclass, raising ImproperlyConfigured for anything else."""
    resolved = pydoc.locate(klass) if isinstance(klass, str) else klass
    if isinstance(resolved, type) and issubclass(resolved, Strategy):
        return resolved
    raise ImproperlyConfigured(
        "Configured strategies must be subclasses of %s.%s"
        % (Strategy.__module__, Strategy.__qualname__)
    )
|
from .mx_in_class import ToDict
from .mx_in_class import JsonMixin
from .mx_in_class import BinaryTree
from .mx_in_class import BinaryTreeWithParent
from .class_property import VoltageResistance
from .class_property import BoundedResistance
|
import os
import sys, getopt
import socket
import string
import shutil
import getopt
import syslog
import errno
import logging
import tempfile
import datetime
import subprocess
import json
import ConfigParser
from operator import itemgetter
from functools import wraps
from getpass import getpass, getuser
from glob import glob
from contextlib import contextmanager
from fabric.api import env, cd, prefix, sudo, run, hide, local, put, get, settings
from fabric.contrib.files import exists, upload_template
from fabric.colors import yellow, green, blue, red
# Prefer the stdlib json module; fall back to simplejson on very old
# Pythons that lack it.
try:
    import json
except ImportError:  # BUGFIX: was the undefined name `importError`
    import simplejson as json

# Fabfile driven by this same script via `fab -f <script_name>`.
script_name = 'auto_deploy_app_v_final.py'
# Directory where per-task logs are written.
log_path = '~/logs'
"""
-----------------------------------------------------------------------------
Auto generate testing reports.
Use the -h or the --help flag to get a listing of options.
Program: Auto generate testing reports.
Author: Robin Wen
Date: December 22, 2014
Revision: 1.0
"""
def main(argv):
    """Dispatch on the first command-line option and run the matching task.

    NOTE(review): argv (sys.argv[1:]) is only used for the emptiness check;
    dispatch reads sys.argv[1] directly, which is equivalent here.
    """
    try:
        # If no arguments print usage
        if len(argv) == 0:
            usage()
            sys.exit()
        # Receive the command line arguments. Then execute the corresponding function.
        if sys.argv[1] == "-h" or sys.argv[1] == "--help":
            usage()
            sys.exit()
        elif sys.argv[1] == "-c" or sys.argv[1] == "--git-clone":
            git_clone()
        elif sys.argv[1] == "-u" or sys.argv[1] == "--git-pull":
            git_pull()
        elif sys.argv[1] == "-p" or sys.argv[1] == "--pre-conf":
            pre_conf()
        elif sys.argv[1] == "-a" or sys.argv[1] == "--auto-gen":
            auto_gen()
        elif sys.argv[1] == "-s" or sys.argv[1] == "--scp-report":
            scp_report()
        else:
            print red('Unsupported option! Please refer the help.')
            print ''
            usage()
    except getopt.GetoptError, msg:
        # If an error happens print the usage and exit with an error
        # NOTE(review): getopt is never actually called inside the try
        # block, so this handler is effectively dead code -- confirm
        # before removing it.
        usage()
        sys.exit(errno.EIO)
"""
Prints out the usage for the command line.
"""
def usage():
    """Print the command-line usage message for this script."""
    # Lines are space-joined, reproducing the original string.join() output.
    lines = [
        " Auto generate testing reports. Write in Python.\n",
        "Version 1.0. By Robin Wen. Email:blockxyz@gmail.com\n",
        "\n",
        "Usage auto_deploy_app.py [-hpas]\n",
        " [-h | --help] Prints this help and usage message\n",
        " [-c | --git-clone] Clone the repo via git\n",
        " [-u | --git-pull] Update the repo via git\n",
        " [-p | --pre-conf] Pre config before generate testing reports\n",
        " [-a | --auto-gen] Auto generate testing reports\n",
        " [-s | --scp-report] SCP generated testing reports\n",
    ]
    print(" ".join(lines))
def git_clone():
    """Run the fabric git_clone task, logging to <log_path>/git_clone.log."""
    logfile = log_path + '/git_clone.log'
    print(green('Clone the repo via git.'))
    print('Logs output to the ' + logfile)
    os.system('mkdir -p ' + log_path + ' 2>/dev/null >/dev/null')
    os.system("echo '' > " + logfile)
    os.system("fab -f " + script_name + " git_clone > " + logfile)
    print(green('Clone the repo via git.'))
def git_pull():
    """Run the fabric git_pull task, logging to <log_path>/git_pull.log."""
    logfile = log_path + '/git_pull.log'
    print(green('Update the repo via git.'))
    print('Logs output to the ' + logfile)
    os.system('mkdir -p ' + log_path + ' 2>/dev/null >/dev/null')
    os.system("echo '' > " + logfile)
    # Note: the original command has no space before '>'; preserved here.
    os.system("fab -f " + script_name + " git_pull> " + logfile)
    print(green('Update the repo via git.'))
def pre_conf():
    """Run the fabric pre_conf task, logging to <log_path>/pre_conf.log."""
    logfile = log_path + '/pre_conf.log'
    print(green('Pre config before generate testing reports.'))
    print('Logs output to the ' + logfile)
    os.system('mkdir -p ' + log_path + ' 2>/dev/null >/dev/null')
    os.system("echo '' > " + logfile)
    os.system("fab -f " + script_name + " pre_conf > " + logfile)
    print(green('Pre config before generate testing reports finished!'))
def auto_gen():
    """Run the fabric auto_gen task, logging to <log_path>/auto_gen.log."""
    logfile = log_path + '/auto_gen.log'
    print(green('Auto generate testing reports.'))
    print('Logs output to the ' + logfile)
    os.system('mkdir -p ' + log_path + ' 2>/dev/null >/dev/null')
    os.system("echo '' > " + logfile)
    os.system("fab -f " + script_name + " auto_gen > " + logfile)
    print(green('Auto generate testing reports finished!'))
def scp_report():
    """Run the fabric scp_report task, logging to <log_path>/scp_report.log."""
    logfile = log_path + '/scp_report.log'
    print(green('SCP generated testing reports.'))
    print('Logs output to the ' + logfile)
    os.system('mkdir -p ' + log_path + ' 2>/dev/null >/dev/null')
    os.system("echo '' > " + logfile)
    os.system("fab -f " + script_name + " scp_report > " + logfile)
    print(green('SCP generated testing reports finished!'))
# Script entry point: hand the CLI arguments (minus the program name)
# to the dispatcher.
if __name__=='__main__':
    main(sys.argv[1:])
|
from pythonwarrior.abilities.base import AbilityBase
class DistanceOf(AbilityBase):
    """Warrior ability: report how far away a given space is."""

    def description(self):
        return ("Pass a Space as an argument, and it will return an integer "
                "representing the distance to that space.")

    def perform(self, space):
        # Delegate to the unit's position, which knows the board geometry.
        return self._unit.position.distance_of(space)
|
# Draw a sine wave as a filled polygon on a Tk canvas.
from tkinter import *
import math
# Canvas dimensions in pixels.
canvas_width = 1000
canvas_height =1000
python_green = "#476042"
master = Tk()
w = Canvas(master,
           width=canvas_width,
           height=canvas_height)
w.pack()
# Sine-wave vertices: even x from 0..1998, y oscillating around 500.
# NOTE(review): the two trailing `+[10000,1000]+[0,1000]` additions append
# the bare numbers as flat items, not [x, y] pairs (Tk flattens the mixed
# list), and x=10000 lies far outside the 1000px canvas -- presumably meant
# to close the polygon at the bottom corners, e.g. [[1000,1000],[0,1000]];
# verify before changing.
points = [[i, 500+ math.sin(i/30)*20]for i in range(0,2000) if i%2 == 0]+[10000,1000]+[0,1000]
w.create_polygon(points, outline=python_green,
           fill='yellow', width=3)
mainloop()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.