prefix stringlengths 0 918k | middle stringlengths 0 812k | suffix stringlengths 0 962k |
|---|---|---|
from spectrocrunch.materials.compoundfromformula import compoundfromformula
from spectrocrunch.materials.compoundfromname import compoundfromname
from spectrocrunch.materials.mixture import mixture
from spectrocrunch.materials.types import fraction
from spectrocrunch.simulation import calcnoise
from spectrocrunch.simulation import materials
from spectrocrunch.math import noisepropagation
import numpy as np
import scipy.optimize
import matplotlib.pyplot as plt
class sample(object):
@staticmethod
def getnframes(totaltime, frametime, fracflat):
n = int(round(totaltime / frametime))
nflat = max(int(round(fracflat * n / 2.0)), 1)
nflat *= 2 # before and after
ndata = max(n - nflat, 1)
return ndata, nflat
@staticmethod
def getrealtime(totaltime, frametime, fracflat):
ndata, nflat = self.getnframes(totaltime, frametime, fracflat)
n = ndata + nflat
overhead = 6.50305 + 0.0131498 * n
return frametime * n + overhead
def xanes(
self, I0, energy, totaltime=None, frametime=None, fracflat=None, ndark=None
):
ndata, nflat = self.getnframes(totaltime, frametime, fracflat)
energy = np.asarray(energy)
N, N0, D, D0 = calcnoise.id21_ffnoise(
I0,
energy,
self.composition,
tframe_data=frametime,
nframe_data=ndata,
tframe_flat=frametime,
nframe_flat=nflat,
nframe_dark=ndark,
)
T = calcnoise.transmission(
N,
N0,
D=D,
D0=D0,
tframe_data=frametime,
nframe_data=ndata,
tframe_flat=frametime,
nframe_flat=nflat,
nframe_dark=ndark,
)
XAS = calcnoise.absorbance(T)
signal = noisepropagation.E(XAS)
noise = noisepropagation.S(XAS)
return signal, noise
def costfunc(self, I0, energy, **kwargs):
signal, noise = self.xanes(I0, energy, **kwargs)
# return np.max(noise/signal*100)
return np.mean(noise) / (signal[-1] - signal[0])
def __str__(self):
return str(self.composition)
def plotxanesnoise(self, I0, energy, **kwargs):
signal, noise = self.xanes(I0, energy, **kwargs)
plt.plot(energy, noise / signal * 100)
plt.xlabel("Energy (keV)")
plt.ylabe | l("N/S (%)")
def plotxanes(self, I0, energy, **kwargs):
signal, _ = self.xanes(I0, energy, **kwargs)
plt.plot(energy, signal)
plt.xlabel("Energy (keV)")
| plt.ylabel("Absorbance")
class sample_hg115(sample):
def __init__(self, wpigment=10, paintthickness=10):
binder = compoundfromname("linseed oil")
pigment = compoundfromname("verdigris")
paint = mixture(
[binder, pigment], [1 - wpigment / 100.0, wpigment / 100.0], fraction.mass
)
ultralene = compoundfromname("ultralene")
sfreetape = compoundfromname("sulfur-free tape")
# ultralene = compoundfromname("vacuum")
# sfreetape = compoundfromname("vacuum")
m = [ultralene, paint, sfreetape]
thickness = [4, paintthickness, 10]
# m = [compoundfromname("vacuum"),compoundfromname("vacuum"),compoundfromname("vacuum")]
self.composition = materials.factory(
"Multilayer",
material=m,
thickness=thickness,
anglein=0,
angleout=0,
azimuth=0,
)
self.paintindex = 1
def set_wpigment(self, wpigment):
w = self.composition.material[self.paintindex].massfractions()
w["verdigris"] = wpigment / 100.0
w["linseed oil"] = 1 - wpigment / 100.0
self.composition.material[self.paintindex].change_fractions(w, fraction.mass)
def get_wpigment(self):
return (
self.composition.material[self.paintindex].massfractions()["verdigris"]
* 100
)
def set_paintthickness(self, paintthickness):
self.composition.thickness[self.paintindex] = paintthickness
def get_paintthickness(self):
return self.composition.thickness[self.paintindex]
def optimize_thickness(self, I0, energy, **kwargs):
def costfunc(paintthickness):
self.set_paintthickness(paintthickness[0])
c = self.costfunc(I0, energy, **kwargs)
return c
guess = self.get_paintthickness()
result = scipy.optimize.least_squares(costfunc, guess, gtol=1e-015, ftol=1e-015)
print result.message
return result.x[0], result.success
def optimize_wpigment(self, I0, energy, **kwargs):
def costfunc(wpigment):
self.set_wpigment(wpigment[0])
c = self.costfunc(I0, energy, **kwargs)
return c
guess = self.get_wpigment()
result = scipy.optimize.least_squares(
costfunc, guess, bounds=([0, 100]), gtol=1e-015, ftol=1e-015
)
print result.message
return result.x[0], result.success
def optimize_thickness_plot(self, I0, energy, **kwargs):
thickness = self.get_paintthickness()
t = np.linspace(max(thickness - 100, 0), thickness + 100, 50)
r = np.zeros(len(t))
for i, paintthickness in enumerate(t):
self.set_paintthickness(paintthickness)
r[i] = self.costfunc(I0, energy, **kwargs)
self.set_paintthickness(thickness)
plt.plot(t, 1 / r, "-o", label="{} %".format(self.get_wpigment()))
plt.xlabel("thickness ($\mu$m)")
plt.ylabel("Jump-to-noise")
def optimize_wpigment_plot(self, I0, energy, **kwargs):
w = self.get_wpigment()
t = np.linspace(0, 20, 50)
r = np.zeros(len(t))
for i, wpigment in enumerate(t):
self.set_wpigment(wpigment)
r[i] = self.costfunc(I0, energy, **kwargs)
self.set_wpigment(w)
plt.plot(t, 1 / r, "-o", label="{} $\mu$m".format(self.get_paintthickness()))
plt.xlabel("Verdigris (%)")
plt.ylabel("Jump-to-noise")
def optimize(self, I0, energy, **kwargs):
def costfunc(p):
self.set_wpigment(p[0])
self.set_paintthickness(p[1])
return self.costfunc(I0, energy, **kwargs)
guess = (self.get_wpigment(), self.get_paintthickness())
result = scipy.optimize.least_squares(
costfunc, guess, bounds=([0, 0], [100, 1e6]), gtol=1e-015
)
print result.message
return result.x, result.success
def hg115_ff():
sample = sample_hg115()
I0 = 1e6
energy = np.linspace(8.9, 9.3, 100)
totaltime = 70
frametime = 0.07
fracflat = 1 / 3.0
ndark = 30
kwargs = {
"totaltime": totaltime,
"frametime": frametime,
"fracflat": fracflat,
"ndark": ndark,
}
opt = 1
energyopt = [8.97, 9]
if opt == 0:
sample.set_wpigment(10)
t, s = sample.optimize_thickness(I0, energyopt, **kwargs)
sample.set_paintthickness(t)
elif opt == 1:
sample.set_paintthickness(20)
w, s = sample.optimize_wpigment(I0, energyopt, **kwargs)
sample.set_wpigment(w)
else:
wt, s = sample.optimize(I0, energy, **kwargs)
sample.set_wpigment(wt[0])
sample.set_paintthickness(wt[1])
print "Thickness = {} μm".format(sample.get_paintthickness())
print "Verdigris = {} wt%".format(sample.get_wpigment())
print "Jump to noise = {}".format(1 / sample.costfunc(I0, energyopt, **kwargs))
print ""
plt.figure()
for thickness in [10, 15, 20]:
sample.set_paintthickness(thickness)
sample.optimize_wpigment_plot(I0, energy, **kwargs)
plt.legend(loc="best")
plt.show()
exit()
sample.optimize_thickness_plot(I0, energy, **kwargs)
sample.optimize_wpigment_plot(I0, energy, **kwargs)
plt.figure()
sample.plotxanes(I0, energy, **kwargs)
plt.figure()
sample.plotxanesnoise(I0, energy, **kwargs)
plt.show()
def hg115_xrd():
sample = sample_hg115()
energy = 8.5
|
# -*- coding:utf-8 -*-
from werkzeug.routing import Map, Submount
import libcloud
from libcloud_rest.api.handlers import app_handler
from libcloud_rest.api.handlers.compute import compute_handler
from libcloud_rest.api.handlers.dns import dns_handler
from libcloud_rest.api.handlers.loadbalancer import lb_handler
from libcloud_rest.api.handlers.storage import storage_hand | ler
from libcloud_rest.api.versions import versions
api_version = '/%s' % (versions[libcloud.__version__])
urls = Map([
app_handler.get_rules(),
Submount(api_version, [
compute_handler.get_ru | les(),
dns_handler.get_rules(),
lb_handler.get_rules(),
storage_handler.get_rules(),
])
])
|
from models.pusher import push
|
print repr(push("Bazin | ga..."))
|
footer.
- `Messages`: Placement of system messages stored in
`nodes.document.transform_messages`.
- `TestMessages`: Like `Messages`, used on test runs.
- `FinalReferences`: Resolve remaining references.
"""
__docformat__ = 'reStructuredText'
import re
import sys
import time
from docutils import nodes, utils
from docutils.transforms import TransformError, Transform
from docutils.utils import smartquotes
class Decorations(Transform):
"""
Populate a document's decoration element (header, footer).
"""
default_priority = 820
def apply(self):
header_nodes = self.generate_header()
if header_nodes:
decoration = self.document.get_decoration()
header = decoration.get_header()
header.extend(header_nodes)
footer_nodes = self.generate_footer()
if footer_nodes:
decoration = self.document.get_decoration()
footer = decoration.get_footer()
footer.extend(footer_nodes)
def generate_header(self):
return None
def generate_footer(self):
# @@@ Text is hard-coded for now.
# Should be made dynamic (language-dependent).
settings = self.document.settings
if settings.generator or settings.datestamp or settings.source_link \
or settings.source_url:
text = []
if settings.source_link and settings._source \
or settings.source_url:
if settings.source_url:
source = settings.source_url
else:
source = utils.relative_path(settings._destination,
settings._source)
text.extend([
nodes.reference('', 'View document source',
refuri=source),
nodes.Text('.\n')])
if settings.datestamp:
datestamp = time.strftime(settings.datestamp, time.gmtime())
text.append(nodes.Text('Generated on: ' + datestamp + '.\n'))
if settings.generator:
text.extend([
nodes.Text('Generated by '),
nodes.reference('', 'Docutils', refuri=
'http://docutils.sourceforge.net/'),
nodes.Text(' from '),
nodes.reference('', 'reStructuredText', refuri='http://'
'docutils.sourceforge.net/rst.html'),
nodes.Text(' source.\n')])
return [nodes.paragraph('', '', *text)]
else:
return None
class ExposeInternals(Transform):
"""
Expose internal attributes if ``expose_internals`` setting is set.
"""
default_priority = 840
def not_Text(self, node):
return not isinstance(node, nodes.Text)
def apply(self):
if self.document.settings.expose_internals:
for node in self.document.traverse(self.not_Text):
for att in self.document.settings.expose_internals:
value = getattr(node, att, None)
if value is not None:
node['internal:' + att] = value
class Messages(Transform):
"""
Place any system messages generated after parsing into a dedicated section
of the document.
"""
default_priority = 860
def apply(self):
unfiltered = self.document.transform_messages
threshold = self.document.reporter.report_level
messages = []
for msg in unfiltered:
if msg['level'] >= threshold and not msg.parent:
messages.append(msg)
if messages:
section = nodes.section(classes=['system-messages'])
# @@@ get this from the language module?
section += nodes.title('', 'Docutils System Messages')
section += messages
self.document.transform_messages[:] = []
self.document += section
class FilterMessages(Transform):
"""
Remove system messages below verbosity threshold.
"""
default_priority = 870
def apply(self):
for node in self.document.traverse(nodes.system_message):
if node['level'] < self.document.reporter.report_level:
node.parent.remove(node)
class TestMessages(Transform):
"""
Append all post-parse system messages to the end of the document.
Used for testing purposes.
"""
default_priority = 880
def apply(self):
for msg in self.document.transform_messages:
if not msg.parent:
self.document += msg
class StripComments(Transform):
"""
Remove comment elements from the document tree (only if the
``strip_comments`` setting is enabled).
"""
default_priority = 740
def apply(self):
if self.document.settings.strip_comments:
for node in self.document.traverse(nodes.comment):
node.parent.remove(node)
class StripClassesAndElements(Transform):
"""
Remove from the document tree all elements with classes in
`self.document.settings.strip_elements_with_classes` and all "classes"
attribute values in `self.document.settings.strip_classes`.
"""
default | _priority = 420
def apply(self):
if not (self.document.settings.strip_elements_with_classes
or self.document.settings.strip_classes):
return
# prepare dicts for lookup (not sets, for Python 2.2 compatibility):
self.strip_elements = dict(
[(key, None)
for key in (self.document.settings.strip_elements_with_classes
or [])])
self.strip_classes = dict(
[(key, None) for | key in (self.document.settings.strip_classes
or [])])
for node in self.document.traverse(self.check_classes):
node.parent.remove(node)
def check_classes(self, node):
if isinstance(node, nodes.Element):
for class_value in node['classes'][:]:
if class_value in self.strip_classes:
node['classes'].remove(class_value)
if class_value in self.strip_elements:
return 1
class SmartQuotes(Transform):
"""
Replace ASCII quotation marks with typographic form.
Also replace multiple dashes with em-dash/en-dash characters.
"""
default_priority = 850
def __init__(self, document, startnode):
Transform.__init__(self, document, startnode=startnode)
self.unsupported_languages = set()
def get_tokens(self, txtnodes):
# A generator that yields ``(texttype, nodetext)`` tuples for a list
# of "Text" nodes (interface to ``smartquotes.educate_tokens()``).
texttype = {True: 'literal', # "literal" text is not changed:
False: 'plain'}
for txtnode in txtnodes:
nodetype = texttype[isinstance(txtnode.parent,
(nodes.literal,
nodes.math,
nodes.image,
nodes.raw,
nodes.problematic))]
yield (nodetype, txtnode.astext())
def apply(self):
smart_quotes = self.document.settings.smart_quotes
if not smart_quotes:
return
try:
alternative = smart_quotes.startswith('alt')
except AttributeError:
alternative = False
# print repr(alternative)
document_language = self.document.settings.language_code
# "Educate" quotes in normal text. Handle each block of text
# (TextElement node) as a unit to keep context around inline nodes:
for node in self.document.traverse(nodes.TextElement):
# skip preformatted text blocks and special elements:
if isinstance(node, (nodes.FixedTextElement, nodes.Special)):
continue
# nested Text |
# Authors: Tomas Babej <tbabej@redhat.com>
#
# Copyright (C) 2013 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistr | ibute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, eit | her version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from ipaserver.install import service
class OtpdInstance(service.SimpleServiceInstance):
def __init__(self):
service.SimpleServiceInstance.__init__(self, "ipa-otpd")
|
#make sure there is one utxo
assert_equal(len(json_obj['utxos']), 1)
assert_equal(json_obj['utxos'][0]['value'], 0.1)
#################################################
# GETUTXOS: now query an already spent outpoint #
#################################################
json_request = '/checkmempool/'+vintx+'-0'
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
#check chainTip response
assert_equal(json_obj['chaintipHash'], bb_hash)
#make sure there is no utox in the response because this oupoint has been spent
assert_equal(len(json_obj['utxos']), 0)
#check bitmap
assert_equal(json_obj['bitmap'], "0")
##################################################
# GETUTXOS: now check both with the same request #
##################################################
json_request = '/checkmempool/'+txid+'-'+str(n)+'/'+vintx+'-0'
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
assert_equal(len(json_obj['utxos']), 1)
assert_equal(json_obj['bitmap'], "10")
#test binary response
bb_hash = self.nodes[0].getbestblockhash()
binaryRequest = b'\x01\x02'
binaryRequest += hex_str_to_bytes(txid)
binaryRequest += pack("i", n)
binaryRequest += hex_str_to_bytes(vintx)
binaryRequest += pack("i", 0)
bin_response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'bin', binaryRequest)
output = BytesIO()
output.write(bin_response)
output.seek(0)
chainHeight = unpack("i", output.read(4))[0]
hashFromBinResponse = hex(deser_uint256(output))[2:].zfill(64)
assert_equal(bb_hash, hashFromBinResponse) #check if getutxo's chaintip during calculation was fine
assert_equal(chainHeight, 102) #chain height must be 102
############################
# GETUTXOS: mempool checks #
############################
# do a tx and don't sync
txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
json_string = http_get_call(url.hostname, url.port, '/rest/tx/'+txid+self.FORMAT_SEPARATOR+"json")
json_obj = json.loads(json_string)
vintx = json_obj['vin'][0]['txid'] # get the vin to later check for utxo (should be spent by then)
# get n of 0.1 outpoint
n = 0
for vout in json_obj['vout']:
if vout['value'] == 0.1:
n = vout['n']
json_request = '/'+txid+'-'+str(n)
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
assert_equal(len(json_obj['utxos']), 0) #there should be an outpoint because it has just added to the mempool
json_request = '/checkmempool/'+txid+'-'+str(n)
json_string = http_get_call(url.hostn | ame, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
assert_equal(len(json_obj['utxos']), 1) #there should be an outpoint because it has just added to the mempool
#do some invalid requests
j | son_request = '{"checkmempool'
response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'json', json_request, True)
assert_equal(response.status, 400) #must be a 400 because we send an invalid json request
json_request = '{"checkmempool'
response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'bin', json_request, True)
assert_equal(response.status, 400) #must be a 400 because we send an invalid bin request
response = http_post_call(url.hostname, url.port, '/rest/getutxos/checkmempool'+self.FORMAT_SEPARATOR+'bin', '', True)
assert_equal(response.status, 400) #must be a 400 because we send an invalid bin request
#test limits
json_request = '/checkmempool/'
for x in range(0, 20):
json_request += txid+'-'+str(n)+'/'
json_request = json_request.rstrip("/")
response = http_post_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json', '', True)
assert_equal(response.status, 400) #must be a 400 because we exceeding the limits
json_request = '/checkmempool/'
for x in range(0, 15):
json_request += txid+'-'+str(n)+'/'
json_request = json_request.rstrip("/")
response = http_post_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json', '', True)
assert_equal(response.status, 200) #must be a 200 because we are within the limits
self.nodes[0].generate(1) #generate block to not affect upcoming tests
self.sync_all()
################
# /rest/block/ #
################
# check binary format
response = http_get_call(url.hostname, url.port, '/rest/block/'+bb_hash+self.FORMAT_SEPARATOR+"bin", True)
assert_equal(response.status, 200)
assert_greater_than(int(response.getheader('content-length')), 80)
response_str = response.read()
# compare with block header
response_header = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"bin", True)
assert_equal(response_header.status, 200)
response_header_length = int(response_header.getheader('content-length'))
assert_greater_than(response_header_length, 80)
response_header_str = response_header.read()
assert_equal(response_str[0:response_header_length], response_header_str)
# check block hex format
response_hex = http_get_call(url.hostname, url.port, '/rest/block/'+bb_hash+self.FORMAT_SEPARATOR+"hex", True)
assert_equal(response_hex.status, 200)
assert_greater_than(int(response_hex.getheader('content-length')), 160)
response_hex_str = response_hex.read()
assert_equal(encode(response_str, "hex_codec")[0:160], response_hex_str[0:160])
# compare with hex block header
response_header_hex = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"hex", True)
assert_equal(response_header_hex.status, 200)
assert_greater_than(int(response_header_hex.getheader('content-length')), 160)
response_header_hex_str = response_header_hex.read()
assert_equal(response_hex_str[0:160], response_header_hex_str[0:160])
assert_equal(encode(response_header_str, "hex_codec")[0:160], response_header_hex_str[0:160])
# check json format
block_json_string = http_get_call(url.hostname, url.port, '/rest/block/'+bb_hash+self.FORMAT_SEPARATOR+'json')
block_json_obj = json.loads(block_json_string)
assert_equal(block_json_obj['hash'], bb_hash)
# compare with json block header
response_header_json = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"json", True)
assert_equal(response_header_json.status, 200)
response_header_json_str = response_header_json.read().decode('utf-8')
json_obj = json.loads(response_header_json_str, parse_float=Decimal)
assert_equal(len(json_obj), 1) #ensure that there is one header in the json response
assert_equal(json_obj[0]['hash'], bb_hash) #request/response hash should be the same
#compare with normal RPC block response
rpc_block_json = self.nodes[0].getblock(bb_hash)
assert_equal(json_obj[0]['hash'], rpc_block_json['hash'])
assert_equal(json_obj[0]['confirmations'], rpc_block_json['confirmations'])
assert_equal(json_obj[0]['height'], rpc_block_json['height'])
assert_equal(json_obj[0]['version'], |
# Original topological sort code written by Ofer Faigon (www.bitformation.com) and used with permission
# Permission is hereby granted to copy, modify and use this source code for any purpose as long as the above comment line is included with it.
"""
Loop detection and depth calculation added by Zhenlei Cai (c) 2010. All the modifications made to Ofer Faigon's original code are hereby donated to the public domain. They may be copied, modified an | d used for any purpise.
"""
def topological_sort(items, partial_order):
"""Perform topological sort.
items is a list of items to be sorted.
partial_order is a list of pairs. If pair (a,b) is in it, it means
that item a should appear before item b.
Returns a list of the items in one of the possible orders, or
a | tuple (True, [loop]) where loop is a list of items found to
form a loop.
"""
def add_node(graph, node):
"""Add a node to the graph if not already exists."""
if not graph.has_key(node):
graph[node] = [0] # 0 = number of arcs coming into this node.
def add_arc(graph, fromnode, tonode):
"""Add an arc to a graph. Can create multiple arcs.
The end nodes must already exist."""
graph[fromnode].append(tonode)
# Update the count of incoming arcs in tonode.
graph[tonode][0] = graph[tonode][0] + 1
# step 1 - create a directed graph with an arc a->b for each input
# pair (a,b).
# The graph is represented by a dictionary. The dictionary contains
# a pair item:list for each node in the graph. /item/ is the value
# of the node. /list/'s 1st item is the count of incoming arcs, and
# the rest are the destinations of the outgoing arcs. For example:
# {'a':[0,'b','c'], 'b':[1], 'c':[1]}
# represents the graph: c <-- a --> b
# The graph may contain loops and multiple arcs.
# Note that our representation does not contain reference loops to
# cause GC problems even when the represented graph contains loops,
# because we keep the node names rather than references to the nodes.
graph = {}
for v in items:
add_node(graph, v)
for a,b in partial_order:
add_arc(graph, a, b)
# Step 2 - find all roots (nodes with zero incoming arcs).
roots = [node for (node,nodeinfo) in graph.items() if nodeinfo[0] == 0]
# step 3 - repeatedly emit a root and remove it from the graph. Removing
# a node may convert some of the node's direct children into roots.
# Whenever that happens, we append the new roots to the list of
# current roots.
sorted = []
while len(roots) != 0:
# If len(roots) is always 1 when we get here, it means that
# the input describes a complete ordering and there is only
# one possible output.
# When len(roots) > 1, we can choose any root to send to the
# output; this freedom represents the multiple complete orderings
# that satisfy the input restrictions. We arbitrarily take one of
# the roots using pop(). Note that for the algorithm to be efficient,
# this operation must be done in O(1) time.
root = roots.pop()
sorted.append(root)
for child in graph[root][1:]:
graph[child][0] = graph[child][0] - 1
if graph[child][0] == 0:
roots.append(child)
del graph[root]
if len(graph.items()) != 0:
# There is a loop in the input.
start = graph.popitem()
loop = [start[0]]
next = start[1][1]
while next != loop[0]:
loop.append(next)
next = graph[next][1]
loop.append(loop[0])
return (True, loop)
return sorted
|
"""holds locking functionality that works across processes"""
from __future__ import absolute_import, unicode_literals
from contextlib import contextmanager
import py
from filelock import FileLock, Timeout
from tox.reporter import verbosity1
@contextmanager
def hold_lock(lock_file, reporter=verbosity1):
py.path.local(lock_file.dirname).ensure(dir=1)
lock = FileLock(str(lock_file))
try:
try:
lock.acquire(0.0001)
except Timeout:
reporter("lock file {} | present, will block until released".format(lock_file))
lock | .acquire()
yield
finally:
lock.release(force=True)
def get_unique_file(path, prefix, suffix):
"""get a unique file in a folder having a given prefix and suffix,
with unique number in between"""
lock_file = path.join(".lock")
prefix = "{}-".format(prefix)
with hold_lock(lock_file):
max_value = -1
for candidate in path.listdir("{}*{}".format(prefix, suffix)):
try:
max_value = max(max_value, int(candidate.basename[len(prefix) : -len(suffix)]))
except ValueError:
continue
winner = path.join("{}{}{}".format(prefix, max_value + 1, suffix))
winner.ensure(dir=0)
return winner
|
__all | __ = ['stac | k']
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from invenio.legacy.dbquery import run_sql, IntegrityError
# Number of retries to insert a value in the DB storage
MAX_DB_RETRY = 10
class SequenceGenerator(object):
seq_name = None
def __init__(self):
assert self.seq_name
def _value_exists(self, value):
"""
Checks if the value exists in the storage
@param value: value to be checked in storage
@type value: string
@return: result of select SQL query
@rtype: tuple
"""
return run_sql("""SELECT seq_value FROM seqSTORE
| WHERE seq_value=%s AND seq_name=%s""",
(value, self.seq_name))
def _insert_value(self, value):
"""
Inserts value into storage
@param value: value to be stored
@type value: string
@return: result of insert SQL query
@rtype: tuple
"""
run_sql("""INSERT INTO seqSTORE (seq_name, seq_value)
VALUES (%s, %s)"" | ",
(self.seq_name, value))
def _next_value(self, *args, **kwargs):
"""
Internal implementation to calculate next value in sequence
"""
raise NotImplementedError
def next_value(self, *args, **kwargs):
"""
Get the next value in the sequence
@return: next value in sequence
@rtype: string
"""
db_retries = 0
value = None
while MAX_DB_RETRY > db_retries:
value = self._next_value(*args, **kwargs)
try:
self._insert_value(value)
break
except IntegrityError:
# The value is already in the storage, get next one
db_retries += 1
return value
|
def exgcd(a, b):
"""Uses the e | xtended Euclidean algorithm to return
the gcd as well as the solutions to Bézout's identity:
coefficients x and y such that ax + by = gcd(a, b)."""
x, y = 0, 1
u, v = 1, 0
while a != 0:
quo = b // a
rem = b % a
m = x - (quo * u)
n = y - (quo * v)
b, a = a, rem
x, y = u, v
u, v = m, n
gcd = b
| return gcd, x, y
|
#!/usr/bin/env python
"""
@package mi.dataset.driver.wc_wm.cspp
@file mi/dataset/driver/wc_wm/cspp/wc_wm_cspp_telemetered_driver.py
@author Jeff Roy
@brief Driver for the wc_wm_cspp instrument
Release notes:
Initial Release
"""
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.dataset.dataset_driver import SimpleDatasetDriver
from mi.dataset.parser.wc_wm_cspp import \
WcWmCsppParser, \
WcWmEngTelemeteredDataParticle, \
WcWmMetadataTelemeteredDataParticle
from mi.dataset.parser.cspp_base import \
METADATA_PARTICLE_CLAS | S_KEY, \
DATA_PARTICLE_CLASS_KEY
from mi.core.versioning import version
@version("15.6.2")
def parse(unused, source_file_path, particle_data_handler):
"""
This is the method called by Uframe
:param unuse | d
:param source_file_path This is the full path and filename of the file to be parsed
:param particle_data_handler Java Object to consume the output of the parser
:return particle_data_handler
"""
with open(source_file_path, 'rU') as stream_handle:
# create and instance of the concrete driver class defined below
driver = WcWmCsppRecoveredDriver(unused, stream_handle, particle_data_handler)
driver.processFileStream()
return particle_data_handler
class WcWmCsppRecoveredDriver(SimpleDatasetDriver):
"""
Derived wc_wm_cspp driver class
All this needs to do is create a concrete _build_parser method
"""
def _build_parser(self, stream_handle):
parser_config = {
DataSetDriverConfigKeys.PARTICLE_CLASS: None,
DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: {
METADATA_PARTICLE_CLASS_KEY: WcWmMetadataTelemeteredDataParticle,
DATA_PARTICLE_CLASS_KEY: WcWmEngTelemeteredDataParticle
}
}
parser = WcWmCsppParser(parser_config, stream_handle,
self._exception_callback)
return parser
|
import os
from angular_flask import app
from flask.ext.restless import APIManager
| from flask.ext.mongoengine import MongoEngine
app.config["MONGODB_SETTINGS"] = {'DB':os.environ.get('MONGODB_DB'),"host":os.environ.get('MONGODB_URI')} |
mongo_db = MongoEngine(app)
api_manager = APIManager(app)
|
# -*- coding: utf-8 -*-
# ####################################################################
# Copyright (C) 2005-2009 by the FIFE team
# http://www.fifengine.de
# This file is part of FIFE.
#
# FIFE is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# Yo | u should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# ############### | #####################################################
from fife.extensions import pychan
import fife.extensions.pychan.widgets as widgets
class InputDialog(object):
    """Modal text-entry dialog built from ``gui/input.xml``.

    The entered string is handed to ``onEntry`` when the user confirms;
    ``onCancel`` is invoked (with no arguments) on cancellation.
    """
    def __init__(self, prompt, onEntry, onCancel):
        self._callback = onEntry
        self._cancelCallback = onCancel
        # Load the dialog layout and wire the two buttons to handlers.
        self._widget = pychan.loadXML('gui/input.xml')
        self._widget.mapEvents({
            'okButton': self._complete,
            'cancelButton': self._cancel,
        })
        self._widget.distributeInitialData({
            'prompt': prompt,
        })
        self._widget.show()

    def _complete(self):
        # Deliver the entered text, then close the dialog.
        self._callback(self._widget.collectData('inputBox'))
        self._widget.hide()

    def _cancel(self):
        self._cancelCallback()
        self._widget.hide()
|
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def Action(vim, *args, **kwargs):
    '''This data object type defines the action initiated by a scheduled task or
    alarm. This is an abstract type. A client creates a scheduled task or an
    alarm, each of which triggers an action, defined by a subclass of this
    type.'''
    obj = vim.client.factory.create('ns0:Action')
    # Argument-count guard emitted by the generator. With no required
    # arguments the condition can never be true, so this never raises.
    if (len(args) + len(kwargs)) < 0:
        raise IndexError('Expected at least 1 arguments got: %d' % len(args))
    required = []
    optional = ['dynamicProperty', 'dynamicType']
    # Positional arguments map onto property names in declaration order.
    for name, value in zip(required + optional, args):
        setattr(obj, name, value)
    # Keyword arguments must name a known property.
    for name, value in kwargs.items():
        if name in required + optional:
            setattr(obj, name, value)
        else:
            raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
    return obj
|
, connection_info, instance,
disk_bus=constants.CTRL_TYPE_SCSI,
update_device_metadata=False):
LOG.debug(
"Attaching volume: %(connection_info)s to %(instance_name)s",
{'connection_info': strutils.mask_dict_password(connection_info),
'instance_name': instance.name})
volume_driver = self._get_volume_driver(connection_info)
volume_driver.attach_volume(connection_info,
instance.name,
disk_bus)
if update_device_metadata:
# When attaching volumes to already existing instances,
# the connection info passed to the driver is not saved
# yet within the BDM table.
self._block_dev_man.set_volume_bdm_connection_info(
context, instance, connection_info)
self._vmops.update_device_metadata(
context, instance)
qos_specs = connection_info['data'].get('qos_specs') or {}
if qos_specs:
volume_driver.set_disk_qos_specs(connection_info,
qos_specs)
def disconnect_volume(self, connection_info):
    """Tear down the backend connection via the matching volume driver."""
    vol_driver = self._get_volume_driver(connection_info)
    vol_driver.disconnect_volume(connection_info)
def detach_volume(self, context, connection_info, instance,
                  update_device_metadata=False):
    """Detach a volume from *instance* and disconnect its backend.

    :param update_device_metadata: when True, refresh the instance's
        device metadata after the volume is removed.
    """
    LOG.debug("Detaching volume: %(connection_info)s "
              "from %(instance_name)s",
              {'connection_info': strutils.mask_dict_password(
                  connection_info),
               'instance_name': instance.name})
    vol_driver = self._get_volume_driver(connection_info)
    vol_driver.detach_volume(connection_info, instance.name)
    vol_driver.disconnect_volume(connection_info)
    if update_device_metadata:
        self._vmops.update_device_metadata(context, instance)
def fix_instance_volume_disk_paths(self, instance_name, block_device_info):
    """Repoint stale VM disk resources at the correct physical paths.

    Physical disk paths can get swapped after host reboots, so the path
    recorded on a VM disk resource may no longer match the volume it is
    supposed to expose.
    """
    # Current physical disk path for each volume serial number.
    expected_mapping = self.get_disk_path_mapping(block_device_info)
    if not expected_mapping:
        return
    # Virtual disk resource path and (possibly stale) physical disk
    # path per volume serial number, as currently recorded on the VM.
    recorded_mapping = self._vmutils.get_vm_physical_disk_mapping(
        instance_name)
    for serial, vm_disk in recorded_mapping.items():
        expected_path = expected_mapping[serial]
        if vm_disk['mounted_disk_path'] != expected_path:
            self._vmutils.set_disk_host_res(vm_disk['resource_path'],
                                            expected_path)
def get_volume_connector(self):
    """Return the os-brick connector properties for this host."""
    # NOTE(lpetrut): the Windows os-brick connectors
    # do not use a root helper.
    return connector.get_connector_properties(
        root_helper=None,
        my_ip=CONF.my_block_storage_ip,
        multipath=CONF.hyperv.use_multipath_io,
        enforce_multipath=True,
        host=CONF.host)
def connect_volumes(self, block_device_info):
    """Connect every volume referenced by *block_device_info*."""
    for vol in driver.block_device_info_get_mapping(block_device_info):
        conn_info = vol['connection_info']
        self._get_volume_driver(conn_info).connect_volume(conn_info)
def get_disk_path_mapping(self, block_device_info, block_dev_only=False):
    """Map volume serial numbers to their disk resource paths.

    :param block_dev_only: when True, skip volumes whose driver does not
        expose them as block devices.
    """
    mapping = {}
    for vol in driver.block_device_info_get_mapping(block_device_info):
        conn_info = vol['connection_info']
        vol_driver = self._get_volume_driver(conn_info)
        if block_dev_only and not vol_driver._is_block_dev:
            continue
        mapping[conn_info['serial']] = (
            vol_driver.get_disk_resource_path(conn_info))
    return mapping
def get_disk_resource_path(self, connection_info):
    """Return the disk resource path reported by the volume driver."""
    vol_driver = self._get_volume_driver(connection_info)
    return vol_driver.get_disk_resource_path(connection_info)
@staticmethod
def bytes_per_sec_to_iops(no_bytes):
    """Convert a bytes/sec rate into normalized IOPS allocation units.

    Hyper-V uses normalized IOPS (8 KB increments) as IOPS allocation
    units, so the rate is rounded up to the next whole increment.
    """
    increment = constants.IOPS_BASE_SIZE
    # Ceiling division: (x + d - 1) // d.
    return (no_bytes + increment - 1) // increment
@staticmethod
def validate_qos_specs(qos_specs, supported_qos_specs):
    """Log a warning for any requested QoS specs that are unsupported."""
    unsupported = set(qos_specs.keys()).difference(supported_qos_specs)
    if unsupported:
        LOG.warning('Got unsupported QoS specs: '
                    '%(unsupported_specs)s. '
                    'Supported qos specs: %(supported_qos_specs)s',
                    {'unsupported_specs': unsupported,
                     'supported_qos_specs': supported_qos_specs})
@volume_snapshot_lock
def volume_snapshot_create(self, context, instance, volume_id,
                           create_info):
    """Create a volume snapshot, reporting the outcome back to Cinder."""
    LOG.debug("Creating snapshot for volume %(volume_id)s on instance "
              "%(instance_name)s with create info %(create_info)s",
              {"volume_id": volume_id,
               "instance_name": instance.name,
               "create_info": create_info})
    snapshot_id = create_info['snapshot_id']
    failed = False
    try:
        # Mark the instance busy so concurrent snapshot requests wait.
        instance.task_state = task_states.IMAGE_SNAPSHOT_PENDING
        instance.save(expected_task_state=[None])
        bdm = objects.BlockDeviceMapping.get_by_volume_and_instance(
            context, volume_id, instance.uuid)
        driver_bdm = driver_block_device.convert_volume(bdm)
        conn_info = driver_bdm['connection_info']
        vol_driver = self._get_volume_driver(conn_info)
        vol_driver.create_snapshot(conn_info, instance, create_info)
        # The volume driver is expected to update the connection info,
        # so persist the BDM afterwards.
        driver_bdm.save()
    except Exception:
        with excutils.save_and_reraise_exception():
            failed = True
            LOG.exception('Error occurred while snapshotting volume. '
                          'sending error status to Cinder.',
                          instance=instance)
    finally:
        # Always release the task state and tell Cinder how it went.
        instance.task_state = None
        instance.save(
            expected_task_state=[task_states.IMAGE_SNAPSHOT_PENDING])
        self._volume_api.update_snapshot_status(
            context, snapshot_id, 'error' if failed else 'creating')
@volume_snapshot_lock
def volume_snapshot_delete(self, context, instance, volume_id,
snapshot_id, delete_info):
LOG.debug("Deleting snapshot for volume %(volume_id)s on instance "
"%(instance_name)s with delete info %(delete_info)s",
{"volume_id": volume_id,
"instance_name": instance.name,
"delete_info": delete_info})
snapshot_delete_failed = False
try:
instance.task_state = task_states.IMAGE_SNAPSHOT_PENDING
instance.save(expected_task_state=[None])
bdm = objects.BlockDeviceMapping.get_by_volume_and_instance(
context, volume_id, instance.uuid)
driver_bdm = driver_block_device.convert_volume(bdm)
connection_info = driver_bdm['connection_info']
volume_driver = self._get_volume_driver |
"""
ABP analyzer and graphics tests
"""
cases = [
('Run Pymodel Graphics to generate dot file from FSM model, no need use pma',
'pmg.py ABP'),
('Generate SVG file from dot',
'dotsvg ABP'),
# Now display ABP.dot in browser
('Run PyModel Analyzer to generate FSM from original FSM, should be the same',
'pma.py ABP'),
('Run PyModel Graphics to generate a file of graphics commands from new FSM',
'pmg. | py ABPFSM'),
('Generate an svg file from the graphics commands',
'dotsvg ABPFSM'),
# Now display ABPFSM.svg in browser, should look the same as ABP.s | vg
]
|
from string import digits, ascii_letters

# Digit alphabet for base-62 conversion: '0'-'9', 'a'-'z', 'A'-'Z'.
valid_values = list(digits + ascii_letters)
radix = len(valid_values)  # base of the numeral system (62)


def convert(number):
    """Convert a non-negative integer to its base-``radix`` string.

    :param int number: value to convert
    :return str: base-62 representation
    :raises ValueError: if *number* is negative (previously this looped
        forever because ``number //= radix`` never reaches 0)
    """
    if number < 0:
        raise ValueError("number must be non-negative")
    if number == 0:
        # Fixed: the original returned '' for 0.
        return valid_values[0]
    result = []  # remainders, most significant digit first
    while number:
        result.insert(0, valid_values[number % radix])
        number //= radix
    return ''.join(result)


def inverse(number):
    """Convert a base-``radix`` string back to an integer.

    :param str number: base-62 representation
    :return int: decoded value
    """
    result = 0
    for position, digit in enumerate(reversed(number)):
        # Index of the digit in the alphabet is its numeric value.
        result += valid_values.index(digit) * radix ** position
    return result
|
###############################################################################
#cyn.in is an open source Collaborative Knowledge Management Appliance that
#enables teams to seamlessly work together on files, documents and content in
#a secure central environment.
#
#cyn.in v2 an open source appliance is distributed under the GPL v3 license
#along with commercial support options.
#
#cyn.in is a Cynapse Invention.
#
#Copyright (C) 2008 Cynapse India Pvt. Ltd.
#
#This program is free software: you can redistribute it and/or modify it under
#the terms of the GNU General Public License as published by the Free Software
#Foundation, either version 3 of the License, or any later version and observe
#the Additional Terms applicable to this program and must display appropriate
#legal notices. In accordance with Section 7(b) of the GNU General Public
#License version 3, these Appropriate Legal Notices must retain the display of
#the "Powered by cyn.in" AND "A Cynapse Invention" logos. You should have
#received a copy of the detailed Additional Terms License with this program.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
#Public License for more details.
#
#You should have received a copy of the GNU General Public License along with
#this program. If not, see <http://www.gnu.org/licenses/>.
#
#You can contact Cynapse at support@cynapse.com with any problems with cyn.in.
#For any queries regarding the licensing, please send your mails to
# legal@cynapse.com
#
#You can also contact Cynapse at:
#802, Building No. 1,
#Dheeraj Sagar, Malad(W)
#Mumbai-400064, India
###############################################################################
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from plone.app.layout.viewlets.common import ViewletBase
from zope.component import getMultiAdapter
class SiteTitleViewlet(ViewletBase):
    """Viewlet exposing the portal title/URL, rendered by ``sitetitle.pt``."""
    render = ViewPageTemplateFile('sitetitle.pt')

    def update(self):
        # Pull the title and root URL from the plone_portal_state view.
        state = getMultiAdapter((self.context, self.request),
                                name=u'plone_portal_state')
        self.site_title = state.portal_title()
        self.has_site_title = True
        self.site_url = state.portal_url()
|
'''
PIL: PIL image loader
'''
__all__ = ('ImageLoaderPIL', )
try:
from PIL import Image as PILImage
except:
import Image as PILImage
from kivy.logger import Logger
from kivy.core.image import ImageLoaderBase, ImageData, ImageLoader
class ImageLoaderPIL(ImageLoaderBase):
    '''Image loader based on the PIL library.

    .. versionadded:: 1.0.8

    Support for GIF animation added.

    Gif animation has a lot of issues(transparency/color depths... etc).
    In order to keep it simple, what is implimented here is what is
    natively supported by the PIL library.

    As a general rule, try to use gifs that have no transparency.
    Gif's with transparency will work but be prepared for some
    artifacts until transparency support is improved.
    '''

    @staticmethod
    def can_save():
        return True

    @staticmethod
    def extensions():
        '''Return accepted extensions for this loader'''
        # See http://www.pythonware.com/library/pil/handbook/index.htm
        return ('bmp', 'bufr', 'cur', 'dcx', 'fits', 'fl', 'fpx', 'gbr',
                'gd', 'gif', 'grib', 'hdf5', 'ico', 'im', 'imt', 'iptc',
                'jpeg', 'jpg', 'jpe', 'mcidas', 'mic', 'mpeg', 'msp',
                'pcd', 'pcx', 'pixar', 'png', 'ppm', 'psd', 'sgi',
                'spider', 'tga', 'tiff', 'wal', 'wmf', 'xbm', 'xpm',
                'xv')

    def _img_correct(self, _img_tmp):
        '''Convert image to the correct format and orientation.
        '''
        # the image loader works only with rgb/rgba images
        if _img_tmp.mode.lower() not in ('rgb', 'rgba'):
            try:
                imc = _img_tmp.convert('RGBA')
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt /
                # SystemExit are not intercepted; the failure is logged
                # and re-raised for the caller.
                Logger.warning(
                    'Image: Unable to convert image to rgba (was %s)' %
                    (_img_tmp.mode.lower()))
                raise
            _img_tmp = imc
        return _img_tmp

    def _img_read(self, im):
        '''Read images from an animated file.

        Yields one :class:`ImageData` per frame.
        '''
        im.seek(0)
        # Read all images inside
        try:
            img_ol = None
            while True:
                img_tmp = im
                img_tmp = self._img_correct(img_tmp)
                if img_ol:
                    # paste new frame over old so as to handle
                    # transparency properly
                    img_ol.paste(img_tmp, (0, 0), img_tmp)
                    img_tmp = img_ol
                img_ol = img_tmp
                yield ImageData(img_tmp.size[0], img_tmp.size[1],
                                img_tmp.mode.lower(), img_tmp.tostring())
                im.seek(im.tell() + 1)
        except EOFError:
            # PIL signals "no more frames" with EOFError.
            pass

    def load(self, filename):
        try:
            im = PILImage.open(filename)
        except Exception:
            # Narrowed from a bare `except:`; log and re-raise.
            Logger.warning('Image: Unable to load image <%s>' % filename)
            raise
        # update internals
        self.filename = filename
        # returns an array of type ImageData len 1 if not a sequence image
        return list(self._img_read(im))

    @staticmethod
    def save(filename, width, height, fmt, pixels):
        image = PILImage.fromstring(fmt.upper(), (width, height), pixels)
        image.save(filename)
        return True
# Register this loader with Kivy's core image-loader registry.
ImageLoader.register(ImageLoaderPIL)
|
from guardian.shortcuts import get_perms
from rest_framework import serializers as ser
from rest_framework.exceptions import ValidationError
from reviews.workflow import Workflows
from api.actions.serializers import ReviewableCountsRelationshipField
from api.base.utils import absolute_reverse, get_user_auth
from api.base.serializers import JSONAPISerializer, LinksField, RelationshipField, ShowIfVersion
class PreprintProviderSerializer(JSONAPISerializer):
    """JSONAPI serializer for preprint providers.

    All attributes are read-only except the reviews settings, which are
    writable and must be supplied together (see :meth:`validate`).
    """

    filterable_fields = frozenset([
        'allow_submissions',
        'description',
        'domain',
        'domain_redirect_enabled',
        'id',
        'name',
        'share_publish_type',
        'reviews_workflow',
        'permissions',
    ])

    # Read-only provider attributes.
    name = ser.CharField(read_only=True)
    description = ser.CharField(read_only=True)
    id = ser.CharField(read_only=True, max_length=200, source='_id')
    advisory_board = ser.CharField(read_only=True)
    example = ser.CharField(read_only=True, allow_null=True)
    domain = ser.CharField(read_only=True, allow_null=False)
    domain_redirect_enabled = ser.BooleanField(read_only=True)
    footer_links = ser.CharField(read_only=True)
    share_source = ser.CharField(read_only=True)
    share_publish_type = ser.CharField(read_only=True)
    email_support = ser.CharField(read_only=True, allow_null=True)
    preprint_word = ser.CharField(read_only=True, allow_null=True)
    allow_submissions = ser.BooleanField(read_only=True)
    additional_providers = ser.ListField(read_only=True, child=ser.CharField())

    # Reviews settings are the only writable fields.
    reviews_workflow = ser.ChoiceField(choices=Workflows.choices())
    reviews_comments_private = ser.BooleanField()
    reviews_comments_anonymous = ser.BooleanField()

    permissions = ser.SerializerMethodField()

    # Relationships.
    preprints = ReviewableCountsRelationshipField(
        related_view='preprint_providers:preprints-list',
        related_view_kwargs={'provider_id': '<_id>'}
    )
    taxonomies = RelationshipField(
        related_view='preprint_providers:taxonomy-list',
        related_view_kwargs={'provider_id': '<_id>'}
    )
    highlighted_taxonomies = RelationshipField(
        related_view='preprint_providers:highlighted-taxonomy-list',
        related_view_kwargs={'provider_id': '<_id>'}
    )
    licenses_acceptable = RelationshipField(
        related_view='preprint_providers:license-list',
        related_view_kwargs={'provider_id': '<_id>'}
    )

    links = LinksField({
        'self': 'get_absolute_url',
        'preprints': 'get_preprints_url',
        'external_url': 'get_external_url'
    })

    # Deprecated fields, only shown to clients on API versions 2.0-2.3/2.4.
    header_text = ShowIfVersion(
        ser.CharField(read_only=True, default=''),
        min_version='2.0', max_version='2.3'
    )
    banner_path = ShowIfVersion(
        ser.CharField(read_only=True, default=''),
        min_version='2.0', max_version='2.3'
    )
    logo_path = ShowIfVersion(
        ser.CharField(read_only=True, default=''),
        min_version='2.0', max_version='2.3'
    )
    email_contact = ShowIfVersion(
        ser.CharField(read_only=True, allow_null=True),
        min_version='2.0', max_version='2.3'
    )
    social_twitter = ShowIfVersion(
        ser.CharField(read_only=True, allow_null=True),
        min_version='2.0', max_version='2.3'
    )
    social_facebook = ShowIfVersion(
        ser.CharField(read_only=True, allow_null=True),
        min_version='2.0', max_version='2.3'
    )
    social_instagram = ShowIfVersion(
        ser.CharField(read_only=True, allow_null=True),
        min_version='2.0', max_version='2.3'
    )
    subjects_acceptable = ShowIfVersion(
        ser.ListField(read_only=True, default=[]),
        min_version='2.0', max_version='2.4'
    )

    class Meta:
        type_ = 'preprint_providers'

    def get_absolute_url(self, obj):
        return obj.absolute_api_v2_url

    def get_preprints_url(self, obj):
        request_kwargs = self.context['request'].parser_context['kwargs']
        return absolute_reverse('preprint_providers:preprints-list', kwargs={
            'provider_id': obj._id,
            'version': request_kwargs['version']
        })

    def get_external_url(self, obj):
        return obj.external_url

    def get_permissions(self, obj):
        # Anonymous users have no permissions on the provider.
        auth = get_user_auth(self.context['request'])
        if not auth.user:
            return []
        return get_perms(auth.user, obj)

    def validate(self, data):
        # The reviews settings only make sense as a unit: reject partial
        # updates that leave any of them unset.
        required_fields = ('reviews_workflow', 'reviews_comments_private', 'reviews_comments_anonymous')
        if any(data.get(field) is None for field in required_fields):
            raise ValidationError('All reviews fields must be set at once: `{}`'.format('`, `'.join(required_fields)))
        return data

    def update(self, instance, validated_data):
        # validate() guarantees all three keys are present.
        for field in ('reviews_workflow', 'reviews_comments_private',
                      'reviews_comments_anonymous'):
            setattr(instance, field, validated_data[field])
        instance.save()
        return instance
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration creating the Problem/Stage/Attempt tables."""

    def forwards(self, orm):
        # Adding model 'Problem'
        db.create_table(u'game_problem', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('order', self.gf('django.db.models.fields.IntegerField')()),
            ('html_template', self.gf('django.db.models.fields.CharField')(max_length=100)),
            ('multiplier', self.gf('django.db.models.fields.FloatField')(default=1.0)),
            ('base_points', self.gf('django.db.models.fields.IntegerField')(default=300)),
            ('created', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
        ))
        db.send_create_signal(u'game', ['Problem'])

        # Adding model 'Stage'
        db.create_table(u'game_stage', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('unlocked_on', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('points_earned', self.gf('django.db.models.fields.FloatField')()),
            ('problem', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['game.Problem'])),
            ('team', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['teams.Team'])),
            ('created', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
        ))
        db.send_create_signal(u'game', ['Stage'])

        # Adding model 'Attempt'
        db.create_table(u'game_attempt', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('correct', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('stage', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['game.Stage'])),
            ('created', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
        ))
        db.send_create_signal(u'game', ['Attempt'])

    def backwards(self, orm):
        # Drop the three tables in the reverse of forwards().
        db.delete_table(u'game_problem')
        db.delete_table(u'game_stage')
        db.delete_table(u'game_attempt')

    # Frozen ORM state used by South to reconstruct the models.
    models = {
        u'game.attempt': {
            'Meta': {'object_name': 'Attempt'},
            'correct': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'stage': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['game.Stage']"})
        },
        u'game.problem': {
            'Meta': {'object_name': 'Problem'},
            'base_points': ('django.db.models.fields.IntegerField', [], {'default': '300'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'html_template': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'multiplier': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'order': ('django.db.models.fields.IntegerField', [], {})
        },
        u'game.stage': {
            'Meta': {'object_name': 'Stage'},
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'points_earned': ('django.db.models.fields.FloatField', [], {}),
            'problem': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['game.Problem']"}),
            'team': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['teams.Team']"}),
            'unlocked_on': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
        },
        u'teams.team': {
            'Meta': {'object_name': 'Team'},
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'token': ('django.db.models.fields.CharField', [], {'default': "'6e61d7a5cfc2462d8f1637f9464dd1b5'", 'max_length': '32'})
        }
    }

    complete_apps = ['game']
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# kOS-C documentation build configuration file, created by
# sphinx-quickstart on Tue Apr 4 18:06:04 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import sphinx_rtd_theme
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.todo',
              'sphinx.ext.mathjax',
              'sphinx.ext.githubpages']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'kOS-C'
copyright = '2017, Fabian Thorand'
author = 'Fabian Thorand'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.3'
# The full version, including alpha/beta/rc tags.
release = '0.3.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'kOS-Cdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    'papersize': 'a4paper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'kOS-C.tex', 'kOS-C Documentation',
     'Fabian Thorand', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'kos-c', 'kOS-C Documentation',
     [author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'kOS-C', 'kOS-C Documentation',
     author, 'kOS-C', 'One line description of project.',
     'Miscellaneous'),
]
|
# (c) 2019 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat.mock import patch
from ansible.modules.network.cloudengine import ce_is_is_instance
from units.modules.network.cloudengine.ce_module import TestCloudEngineModule, load_fixture
from units.modules.utils import set_module_args
class TestCloudEngineLacpModule(TestCloudEngineModule):
    """Unit tests for the ce_is_is_instance module."""
    module = ce_is_is_instance

    def setUp(self):
        super(TestCloudEngineLacpModule, self).setUp()
        # Stub out the NETCONF get/set helpers so no device is needed.
        self.mock_get_config = patch('ansible.modules.network.cloudengine.ce_is_is_instance.get_nc_config')
        self.get_nc_config = self.mock_get_config.start()
        self.mock_set_config = patch('ansible.modules.network.cloudengine.ce_is_is_instance.set_nc_config')
        self.set_nc_config = self.mock_set_config.start()
        self.set_nc_config.return_value = None

    def tearDown(self):
        super(TestCloudEngineLacpModule, self).tearDown()
        self.mock_set_config.stop()
        self.mock_get_config.stop()

    def test_isis_instance_present(self):
        xml_existing = load_fixture('ce_is_is_instance', 'before.txt')
        xml_end_state = load_fixture('ce_is_is_instance', 'after.txt')
        update = ['isis 100', 'vpn-instance __public__']
        self.get_nc_config.side_effect = (xml_existing, xml_end_state)
        config = dict(
            instance_id=100,
            vpn_name='__public__',
            state='present')
        set_module_args(config)
        result = self.execute_module(changed=True)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(sorted(result['updates']), sorted(update))

    def test_isis_instance_absent(self):
        # BUG FIX: this method was previously also named
        # test_isis_instance_present, shadowing the test above so only
        # one of the two ever ran.
        xml_existing = load_fixture('ce_is_is_instance', 'after.txt')
        xml_end_state = load_fixture('ce_is_is_instance', 'before.txt')
        update = ['undo isis 100']
        self.get_nc_config.side_effect = (xml_existing, xml_end_state)
        config = dict(
            instance_id=100,
            vpn_name='__public__',
            state='absent')
        set_module_args(config)
        result = self.execute_module(changed=True)
        self.assertEqual(sorted(result['updates']), sorted(update))
|
#!env/bin/python
from app import app
import sys

# Optional CLI overrides: <script> <debug|anything-else> <port>.
port = 5000
debug = True
if len(sys.argv) == 3:
    debug = sys.argv[1] == 'debug'
    port = int(sys.argv[2])

app.run(debug=debug, port=port)
|
from .todict import *
from .toxml import *
class Dyslexml:
    """Facade exposing the package's dict and xml converters."""
    def __init__(self):
        # NOTE(review): relies on the star-imports above binding the
        # names `todict` and `toxml` (e.g. objects of those names
        # exported by the submodules) — confirm against the submodules.
        self.toDict = todict.parse
        self.toXml = toxml.translate
|
# Copyright 2016 Huawei Technologies India Pvt. Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock

from neutron.api.rpc.agentnotifiers import bgp_dr_rpc_agent_api
from neutron import context
from neutron.tests import base
class TestBgpDrAgentNotifyApi(base.BaseTestCase):
    """Verify each notification results in one host cast and no calls."""

    def setUp(self):
        super(TestBgpDrAgentNotifyApi, self).setUp()
        self.notifier = bgp_dr_rpc_agent_api.BgpDrAgentNotifyApi()
        self.mock_cast = mock.patch.object(
            self.notifier, '_notification_host_cast').start()
        self.mock_call = mock.patch.object(
            self.notifier, '_notification_host_call').start()
        self.context = context.get_admin_context()
        self.host = 'host-1'

    def _assert_single_cast(self):
        # Shared expectation: exactly one cast was issued, never a call.
        self.assertEqual(1, self.mock_cast.call_count)
        self.assertEqual(0, self.mock_call.call_count)

    def test_notify_dragent_bgp_routes_advertisement(self):
        routes = [{'destination': '1.1.1.1', 'next_hop': '2.2.2.2'}]
        self.notifier.bgp_routes_advertisement(
            self.context, 'bgp-speaker-1', routes, self.host)
        self._assert_single_cast()

    def test_notify_dragent_bgp_routes_withdrawal(self):
        routes = [{'destination': '1.1.1.1'}]
        self.notifier.bgp_routes_withdrawal(
            self.context, 'bgp-speaker-1', routes, self.host)
        self._assert_single_cast()

    def test_notify_bgp_peer_disassociated(self):
        self.notifier.bgp_peer_disassociated(
            self.context, 'bgp-speaker-1', '1.1.1.1', self.host)
        self._assert_single_cast()

    def test_notify_bgp_peer_associated(self):
        self.notifier.bgp_peer_associated(
            self.context, 'bgp-speaker-1', 'bgp-peer-1', self.host)
        self._assert_single_cast()

    def test_notify_bgp_speaker_created(self):
        self.notifier.bgp_speaker_created(
            self.context, 'bgp-speaker-1', self.host)
        self._assert_single_cast()

    def test_notify_bgp_speaker_removed(self):
        self.notifier.bgp_speaker_removed(
            self.context, 'bgp-speaker-1', self.host)
        self._assert_single_cast()
|
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2017 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import re
from lxml import etree # noqa
from superdesk import etree as sd_etree
from lxml import html as lxml_html
from lxml.html import clean
from flask import current_app as app
import chardet
# This pattern matches http(s) links, numbers (1.000.000 or 1,000,000 or 1 000 000), regulars words,
# compound words (e.g. "two-done") or abbreviation (e.g. D.C.)
# If you modify please keep in sync with superdesk-client/core/scripts/apps/authoring/authoring/directives/WordCount.js
WORD_PATTERN = re.compile(r'https?:[^ ]*|([0-9]+[,. ]?)+|([\w]\.)+|[\w][\w-]*')


def get_text_word_count(text):
    """Count the words in a plain-text string.

    :param str text: text string
    :return int: word count
    """
    # findall returns one entry per match (group tuples), so its length
    # equals the number of matches.
    return len(WORD_PATTERN.findall(text))
def get_text(markup, content='xml', lf_on_block=False, space_on_elements=False):
    """Return a plain-text rendering of (X)HTML or other XML markup.

    If the markup cannot be parsed it is returned unchanged.

    :param str markup: string to convert to plain text
    :param str content: 'xml' or 'html', as in parse_html
    :param bool lf_on_block: if True, add a line feed on block elements' tail
    :param bool space_on_elements: if True, add a space on each element's tail
        (mainly used to count words with non HTML markup)
    :return str: plain text version of markup
    """
    try:
        parsed = sd_etree.parse_html(
            markup,
            content=content,
            lf_on_block=lf_on_block,
            space_on_elements=space_on_elements)
    except etree.ParseError:
        # Unparseable input: hand it back untouched.
        return markup
    return etree.tostring(parsed, encoding='unicode', method='text')
def get_word_count(markup, no_html=False):
    """Count words in (X)HTML or other XML markup.

    :param str markup: xhtml (or other xml) markup
    :param bool no_html: set to True if the markup is not (X)HTML; a space is
        then added after each element so constructs like
        <hl2>word</hl2><p>another</p> (as in NITF) are not counted as one word
    :return int: count of words inside the text
    """
    if no_html:
        text = get_text(markup, content='xml', space_on_elements=True)
    else:
        text = get_text(markup, content='html', lf_on_block=True)
    return get_text_word_count(text)
def update_word_count(update, original=None):
    """Store a word count on the document when its content changed.

    :param update: created/updated document
    :param original: original document if updated
    """
    body = update.get('body_html')
    if body:
        # Only fill in the count when the caller has not set one already.
        update.setdefault('word_count', get_word_count(body))
        return
    # The body was removed: reset the stored count to zero.
    if original and 'word_count' in original and 'body_html' in update:
        update['word_count'] = 0
def get_char_count(html):
    """Return the number of characters in the plain-text rendering of *html*.

    :param html: html string to count
    :return int: count of chars inside the text
    """
    text = get_text(html)
    return len(text)
def get_reading_time(html, word_count=None, language=None):
    """Estimate the number of minutes needed to read a text.

    Check https://dev.sourcefabric.org/browse/SDFID-118 for details.

    :param str html: html content
    :param int word_count: number of words in the text (computed if falsy)
    :param str language: language of the text
    :return int: estimated number of minutes to read the text
    """
    # Japanese is estimated per character rather than per word.
    if language and language.startswith('ja'):
        char_count = len(re.sub(r'[\s]', '', get_text(html)))
        return round(char_count / app.config['JAPANESE_CHARACTERS_PER_MINUTE'])
    words = word_count or get_word_count(html)
    # Assume 250 words per minute; round up when 30+ seconds remain.
    minutes_float = words / 250
    minutes = int(minutes_float)
    leftover_seconds = int((minutes_float - minutes) * 60)
    if leftover_seconds >= 30:
        minutes += 1
    return minutes
def sanitize_html(html):
    """Sanitize HTML by stripping dangerous/irrelevant tags.

    :param str html: unsafe HTML markup
    :return str: sanitized HTML
    """
    if not html:
        return ""
    cleaner = clean.Cleaner(
        add_nofollow=False,
        kill_tags=["script", "style", "head"],
    )
    cleaned_root = cleaner.clean_html(lxml_html.fromstring(html))
    safe_html = etree.tostring(cleaned_root, encoding="unicode")
    # the following code is legacy (pre-lxml)
    return "" if safe_html == ", -" else safe_html
def decode(bytes_str):
    """Decode a bytes value to text.

    Tries UTF-8 first, then the encoding chardet detects; as a last resort
    decodes as UTF-8 while ignoring bad characters.

    @return (str): decoded string
    """
    try:
        return bytes_str.decode('utf-8')
    except UnicodeDecodeError:
        pass
    try:
        detected = chardet.detect(bytes_str)['encoding']
        return bytes_str.decode(detected)
    except Exception:
        # Detection failed or produced an unusable encoding.
        return bytes_str.decode('utf-8', 'ignore')
|
# -*- coding: utf-8 -*-
from shutil import rmtree
from tempfile import mkdtemp
from omdbapi import OMDbAPI
from scrusubtitles import ScruSubtitles
from scrusubtitles import ScruSubtitlesListener
from scrusubtitles import ScruSubtitlesLogger
class TestService(ScruSubtitlesListener, ScruSubtitlesLogger):
    """Manual test driver wiring OMDbAPI and ScruSubtitles together.

    Acts as both the listener (subtitle found/downloaded callbacks) and the
    logger (debug/info/warn/error) for the services it drives.
    NOTE: Python 2 module (print statements).
    """

    def __init__(self):
        super(TestService, self).__init__()
        self._omdbapi = OMDbAPI()
        self._omdbapi.logger = self
        self._scrusubtitles = ScruSubtitles()
        self._scrusubtitles.listener = self
        self._scrusubtitles.logger = self
        # Downloads go to a throwaway directory; removed by cleanup().
        self._scrusubtitles.workdir = mkdtemp()
        self._num_subtitles_downloaded = 0
        self._num_subtitles_found = 0

    def cleanup(self):
        # Remove the temporary download directory created in __init__.
        rmtree(self._scrusubtitles.workdir)

    def lookup(self, title, year):
        # Resolve a title/year to OMDb metadata (delegates to OMDbAPI).
        return self._omdbapi.search(title, year)

    def download(self, url, filename):
        # Counter is reset here and incremented by on_subtitle_downloaded.
        self._num_subtitles_downloaded = 0
        self._scrusubtitles.download(url, filename)
        self.info(u'{0} subtitles downloaded'.format(self._num_subtitles_downloaded))

    def search(self, imdb_id, languages):
        # Counter is reset here and incremented by on_subtitle_found.
        self._num_subtitles_found = 0
        self._scrusubtitles.search(imdb_id, languages)
        self.info(u'{0} subtitles found'.format(self._num_subtitles_found))

    def on_subtitle_found(self, subtitle):
        # Listener callback: log the match and dump all subtitle fields.
        self._num_subtitles_found += 1
        self.info(u'Found {0} subtitle {1}'.format(subtitle['language'], subtitle['filename']))
        for key in subtitle:
            self.debug(u'  {0}: {1}'.format(key, subtitle[key]))

    def on_subtitle_downloaded(self, path):
        # Listener callback: log where the subtitle landed.
        self._num_subtitles_downloaded += 1
        self.info(u'Subtitle {0} downloaded'.format(path))

    def debug(self, message):
        print u'DEBUG: {0}'.format(message)

    def info(self, message):
        print u'INFO: {0}'.format(message)

    def warn(self, message):
        print u'WARN: {0}'.format(message)

    def error(self, message):
        print u'ERROR: {0}'.format(message)
|
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Band(models.Model):
    """A band, identified by its name; ordered alphabetically by default."""

    # Display name; also the default ordering key (see Meta.ordering).
    name = models.CharField(max_length=100)

    class Meta:
        ordering = ('name',)

    def __str__(self):
        return self.name
|
# -*- coding: utf-8 -*-
import sys
import tokenizer
import os
import re
import helpers.io as io
# just tokenizes the file
def simple(filepath):
    """Tokenize *filepath* and return the compressed token stream."""
    token_stream = tokenizer.Tokenizer(filepath).full_tokenize()
    return tokenizer.compress_tokens(token_stream)
# smartly tokenizes a function for modified token edit distance (MTED)
# tokenPath is the entry point (potentially a main file, if C++ templates are used)
# sources is an array of all of the paths we are actually interested in.
# If compress is true, each token will be reduced to a single character. Good for edit distance!
# NOTE: Python 2 module -- list.sort(comp) with a cmp function is py2-only.
def mted(tokenPath, sources, compress):
    tok = tokenizer.Tokenizer(tokenPath)
    functions = tok.split_functions(False)
    # sort them appropriately
    # Comparator: primary key = token-list length (ascending); tie-break
    # alphabetically on the function name.
    def comp(a,b):
        lena = len(a[1])
        lenb = len(b[1])
        if lena == lenb:
            # if lengths are tied, sort alphabetically based on function name
            if a[0] < b[0]:
                return -1
            else:
                return 1
        else:
            return lena - lenb
    functions.sort(comp)
    # compress and output
    results = ""
    for funct in functions:
        # funct is (name, tokens, source_path); only keep functions whose
        # source file was requested by the caller.
        if funct[2] in sources:
            if compress:
                results += tokenizer.compress_tokens(funct[1])
            else:
                # Uncompressed mode keeps tokens space-separated.
                results += " ".join(funct[1])
                if compress == False:
                    results += " "
    # return results
    return results.strip()
font.render(str(getattr(v, self.text)), 1, self.colour)
if self.centred:
pos = list(self.pos)
pos[0] -= font.size(self.text)[0] / 2
pos[1] -= font.size(self.text)[1] / 2
pos = tuple(pos)
v.screen.blit(label, pos)
class Button(py.sprite.Sprite):
    """Clickable text button drawn onto the shared screen.

    :param text: label rendered on the button
    :param pos: topleft position, or the centre when ``centred`` is True
    :param size: font size in points
    :param hovercolour: fill colour while the mouse is over the button
    :param normalcolour: fill colour otherwise
    :param font: font file path (None -> pygame default font)
    :param ID: identifier the caller uses to recognise this button
    :param centred: if True, ``pos`` is treated as the centre
    :param bsize: explicit (width, height); a 0 component keeps the
        size of the rendered label
    """
    def __init__(self, text, pos, size, hovercolour, normalcolour, font, ID, centred = False, bsize=(0,0)):
        super().__init__()
        self.ID = ID
        self.hovered = False
        self.text = text
        self.pos = pos
        self.hcolour = hovercolour
        self.ncolour = normalcolour
        # Fix: the original stored the raw font path in self.font and then
        # immediately overwrote it with the Font object; keep only the latter.
        self.font = py.font.Font(font, int(size))
        self.centred = centred
        self.size = bsize
        self.set_rect()

    def update(self):
        """Redraw the button and refresh its hover state."""
        self.set_rend()
        py.draw.rect(v.screen, self.get_color(), self.rect)
        v.screen.blit(self.rend, self.rect)
        self.hovered = self.rect.collidepoint(py.mouse.get_pos())

    def set_rend(self):
        # Render the label in black onto a fresh surface.
        self.rend = self.font.render(self.text, True, (0,0,0))

    def get_color(self):
        """Return the fill colour for the current hover state."""
        return self.hcolour if self.hovered else self.ncolour

    def set_rect(self):
        """Compute the button rectangle from the rendered label and pos."""
        self.set_rend()
        self.rect = self.rend.get_rect()
        if self.centred:
            self.rect.center = self.pos
        else:
            self.rect.topleft = self.pos
        # Explicit size components override the rendered label size.
        if not self.size[0] == 0:
            self.rect.width = self.size[0]
        if not self.size[1] == 0:
            self.rect.height = self.size[1]

    def pressed(self):
        """Return True when the mouse is strictly inside the button rect."""
        mouse = py.mouse.get_pos()
        # Strict inequalities preserved from the original nested checks.
        return (self.rect.topleft[0] < mouse[0] < self.rect.bottomright[0]
                and self.rect.topleft[1] < mouse[1] < self.rect.bottomright[1])
import os, shutil
# Pygame bootstrap: open the window and show a status label while the
# updater contacts GitHub.
theFont = None  # None -> pygame's built-in default font
py.init()
v.screen = py.display.set_mode((640, 480))
v.screen.fill((20, 20, 20))
textLabel("Checking For Updates...", (320, 240), (255, 255, 255), theFont, 50, False, True).update()
py.display.flip()
# Download retry counter shared with download() via `global tries`.
tries = 0
def reporthook(count, blockSize, totalSize):
    """urlretrieve progress callback: draws a download progress bar."""
    if totalSize == -1:
        print("FAILED TOTALSIZE")
        raise Exception()
    #Shows percentage of download
    # Keep the event queue alive so the window stays responsive.
    py.event.pump()
    for event in py.event.get():
        if event.type == py.QUIT:
            sys.exit()
    percent = int(count*blockSize*100/totalSize)
    bar = py.Rect(100, 240, percent*4.4, 30)
    v.screen.fill((20, 20, 20))
    py.draw.rect(v.screen, (255, 0, 0), bar)
    py.draw.rect(v.screen, (0, 0, 0), bar, 2)
    py.draw.rect(v.screen, (0, 0, 0), (100, 240, 440, 30), 2)
    textLabel("Downloading...", (320, 150), (255, 255, 255), theFont, 50, False, True).update()
    textLabel(str(percent) + "%", (320, 255), (255, 255, 255), theFont, 20, False, True).update()
    py.display.flip()
def recursive_overwrite(src, dest, ignore=None):
    """Recursively copy *src* over *dest*, overwriting existing files.

    :param src: source file or directory
    :param dest: destination path (directories are created as needed)
    :param ignore: optional callable (dir, names) -> names to skip,
        compatible with shutil.copytree's ignore argument
    """
    if not os.path.isdir(src):
        # Plain file: overwrite in place.
        shutil.copyfile(src, dest)
        return
    if not os.path.isdir(dest):
        os.makedirs(dest)
    entries = os.listdir(src)
    skipped = ignore(src, entries) if ignore is not None else set()
    for entry in entries:
        if entry in skipped:
            continue
        recursive_overwrite(os.path.join(src, entry),
                            os.path.join(dest, entry),
                            ignore)
def updateCheck():
    """Compare the stored version stamp with the latest master commit on
    GitHub and offer Update / Ignore / Skip choices when they differ."""
    global latest
    page = urllib.request.urlopen('https://api.github.com/repos/lightopa/aiopa-battles/git/refs/heads/master')
    #print(page.read().decode("utf-8"))
    #data = json.loads(page.read().decode("utf-8"))
    # NOTE(review): literal_eval on a JSON payload only works while it
    # contains no true/false/null literals -- json.loads would be safer.
    data = ast.literal_eval(page.read().decode("utf-8"))
    latest = data["object"]["sha"]
    #ind = page.find('class="sha btn btn-outline"')
    #latest = page[ind + 38:ind + 45]
    #print(latest)
    #CHECK IF LATEST IS PROPER
    try:
        # Stored stamp is a pickled value written by a previous run.
        f = open("Update/current.version", "rb")
        current = pickle.load(f)
        f.close()
    except:
        print("create new file")
        try:
            os.mkdir("Update")
        except:
            pass
        f = open("Update/current.version", "wb")
        # NOTE(review): 0000 is the int 0, while `latest` is a sha string,
        # so a fresh install always compares unequal below -- presumably
        # intended, but worth confirming.
        current = 0000
        pickle.dump(current, f)
        f.close()
    print(current, "vs", latest)
    if current != latest:
        from os import remove
        # Drop any half-finished archive from a previous attempt.
        try:
            remove("Update/download.zip")
        except:
            pass
        print("downloading latest")
        buttons = py.sprite.Group()
        buttons.add(Button("Update", (220, 240), 60, (100, 100, 100), (255, 255, 255), theFont, "Y", centred=True))
        buttons.add(Button("Ignore", (420, 240), 60, (100, 100, 100), (255, 255, 255), theFont, "N", centred=True))
        buttons.add(Button("Skip Update", (320, 300), 40, (100, 100, 100), (255, 255, 255), theFont, "S", centred=True))
        labels = py.sprite.Group()
        labels.add(textLabel("An Update Is Available:", (320, 150), (255, 255, 255), theFont, 50, False, True))
        labels.add(textLabel(str(str(current) + " ==> " + str(latest)), (320, 180), (255, 255, 255), theFont, 20, False, True))
        # Modal prompt loop: runs until a button handler returns.
        while True:
            py.event.pump()
            v.screen.fill((20, 20, 20))
            buttons.update()
            labels.update()
            for event in py.event.get():
                if event.type == py.QUIT:
                    sys.exit()
                elif event.type == py.MOUSEBUTTONDOWN:
                    for button in buttons:
                        if button.pressed():
                            id = button.ID
                            if id == "Y":
                                # Update: reset retry counter and download.
                                global tries
                                tries = 0
                                download()
                                return
                            if id == "N":
                                # Ignore: ask again next launch.
                                return
                            if id == "S":
                                # Skip: record `latest` as current so this
                                # version stops prompting.
                                # NOTE(review): writes Saves/current.version
                                # while the rest of this module reads
                                # Update/current.version -- looks like a bug;
                                # confirm before changing.
                                f = open("Saves/current.version", "wb")
                                current = latest
                                pickle.dump(current, f)
                                f.close()
                                return
            py.display.flip()
    else:
        v.screen.fill((20, 20, 20))
        t = textLabel("No Update!", (320, 250), (255, 0, 0), theFont, 70, False, True)
        v.current = current
        t.update()
        py.display.update()
        if __name__ == "__main__":
            py.time.wait(2000)
def download():
    """Fetch the latest release zip from GitHub, stamp the new version and
    unpack it; retries recursively up to 8 times on failure."""
    global tries
    try:
        try:
            os.mkdir("Update")
        except:
            pass
        # reporthook drives the on-screen progress bar during the fetch.
        urllib.request.urlretrieve("https://github.com/lightopa/Aiopa-Battles/archive/master.zip", "Update/download.zip", reporthook)
        f = open("Update/current.version", "wb")
        current = latest
        pickle.dump(current, f)
        f.close()
        unzip()
    except Exception as e:
        tries += 1
        print("Error: " + str(e))
        v.screen.fill((20, 20, 20))
        # NOTE(review): both labels are drawn at y=240 and will overlap --
        # the second was presumably meant for a different y position.
        textLabel("Download Error. Retry " + str(tries) + "/8", (320, 240), (255, 255, 255), theFont, 50, False, True).update()
        textLabel("Error: " + str(e), (320, 240), (255, 255, 255), theFont, 50, False, True).update()
        py.display.flip()
        if tries > 8:
            return
        # Recursive retry; depth is bounded by the check above.
        download()
def unzip():
v.screen.fill((20, 20, 20))
textLabel("Extracting Data...", (320, 240), (255, 255, 255), theFont, 50, False, True).update()
py.display.flip()
import zipfile
with zipfile.ZipFile('Update/download.zip', "r") as z:
z.extractall("Update/")
v.screen.fill((20, 20, 20))
textLabel("Updating Files...", (320, 240), (255, 255 |
from flask import Blueprint
from flask import flash
from flask import make_response, render_template
from flask_login import current_user
from markupsafe import Markup
from app.helpers.data_getter import DataGetter
from app.helpers.auth import AuthManager
from app.helpers.exporters.ical import ICalExporter
from app.helpers.exporters.pentabarfxml import PentabarfExporter
from app.helpers.exporters.xcal import XCalExporter
from app.hel | pers. | permission_decorators import can_access
event_export = Blueprint('event_export', __name__, url_prefix='/events/<int:event_id>/export')
@event_export.route('/')
@can_access
def display_export_view(event_id):
    """Render the export page for an event, listing past export jobs.

    Unverified users get a flashed warning with a resend-confirmation link.
    """
    event = DataGetter.get_event(event_id)
    export_jobs = DataGetter.get_export_jobs(event_id)
    user = current_user
    if not AuthManager.is_verified_user():
        # Markup() so the embedded resend link renders as HTML, not text.
        flash(Markup("Your account is unverified. "
                     "Please verify by clicking on the confirmation link that has been emailed to you."
                     '<br>Did not get the email? Please <a href="/resend_email/" class="alert-link"> '
                     'click here to resend the confirmation.</a>'))
    return render_template(
        'gentelella/admin/event/export/export.html', event=event, export_jobs=export_jobs,
        current_user=user
    )
@event_export.route('/pentabarf.xml')
@can_access
def pentabarf_export_view(event_id):
    """Serve the event schedule as a Pentabarf XML attachment."""
    exported = PentabarfExporter.export(event_id)
    response = make_response(exported)
    headers = response.headers
    headers["Content-Type"] = "application/xml"
    headers["Content-Disposition"] = "attachment; filename=pentabarf.xml"
    return response
@event_export.route('/calendar.ical')
@can_access
def ical_export_view(event_id):
    """Serve the event schedule as an iCalendar attachment."""
    exported = ICalExporter.export(event_id)
    response = make_response(exported)
    headers = response.headers
    headers["Content-Type"] = "text/calendar"
    headers["Content-Disposition"] = "attachment; filename=calendar.ics"
    return response
@event_export.route('/calendar.xcs')
@can_access
def xcal_export_view(event_id):
    """Serve the event schedule as an xCal attachment."""
    exported = XCalExporter.export(event_id)
    response = make_response(exported)
    headers = response.headers
    headers["Content-Type"] = "text/calendar"
    headers["Content-Disposition"] = "attachment; filename=calendar.xcs"
    return response
|
from .working_gif import working_encoded
from .splash import SplashScreen, Spinner, CheckProcessor
fro | m .multilistbox import MultiListbox
from .utils import | set_widget_state, set_binding, set_button_action, set_tab_order
from .tooltip import ToolTip
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ####################################################################
# Copyright (C) 2005-2013 by the FIFE team
# http://www.fifengine.net
# This file is part of FIFE.
#
# FIFE is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# ####################################################################
from extension_test_utils import *
from loaders import *
class TestLoaders(unittest.TestCase):
    """Load a known map file and verify its model/metamodel contents."""

    def setUp(self):
        # Fresh engine + model handles for every test.
        self.engine = getEngine()
        self.model = self.engine.getModel()
        self.metamodel = self.model.getMetaModel()

    def tearDown(self):
        # Drop the engine so each test starts from a clean state.
        del self.engine

    def testLoading(self):
        loadMapFile("content/maps/new_official_map.xml", self.engine)
        # Two specific object ids must exist exactly once in the metamodel.
        query = self.metamodel.getObjects("id", "15001")
        self.assertEqual(len(query), 1)
        query = self.metamodel.getObjects("id", "15201")
        self.assertEqual(len(query), 1)
        query = self.model.getMaps("id", "OfficialMap")
        self.assertEqual(len(query), 1)
        self.map = query[0]
        # self.assertEqual(self.map.get("Name"), "official_map.xml")
        # Map metadata written by the map file itself.
        self.assertEqual(self.map.get("Version"), '1')
        self.assertEqual(self.map.get("Author"), "barra")
        query = self.map.getElevations("id", "OfficialMapElevation")
        self.assertEqual(len(query), 1)
        self.elevation = query[0]
        query = self.elevation.getLayers("id", "OfficialMapTileLayer")
        self.assertEqual(len(query), 1)
        self.layer = query[0]
        # The tile layer must actually contain placed instances.
        self.assertEqual(self.layer.hasInstances(), True)
        instances = self.layer.getInstances()
# removed from test set now due to switch to new directory structure -> content moved to clients
# to be considered if this should be taken into use again
# Empty on purpose: TestLoaders is excluded from the suite (see note above).
TEST_CLASSES = []

if __name__ == '__main__':
    unittest.main()
|
"""
Created on Thu May 05 20:02:00 2011
@author: Tillsten
"""
import numpy as np
from scipy.linalg import qr
eps = np.finfo(float).eps
def mls(B, v, umin, umax, Wv=None, Wu=None, ud=None, u=None, W=None, imax=100):
"""
mls - Control allocation using minimal least squares.
[u,W,iter] = mls_alloc(B,v,umin,umax,[Wv,Wu,ud,u0,W0,imax])
Solves the bounded sequential least-squares problem
min ||Wu(u-ud)|| subj. to u in M
where M is the set of control signals solving
min ||Wv(Bu-v)|| subj. to umin <= u <= umax
using a two stage active set method. Wu must be diagonal since the
problem is reformulated as a minimal least squares problem. The
implementation does not handle the case of coplanar controls.
Inputs:
-------
B control effectiveness matrix (k x m)
v commanded virtual control (k x 1)
umin lower position limits (m x 1)
umax upper position limits (m x 1)
Wv virtual control weighting matrix (k x k) [I]
Wu control weighting matrix (m x m), diagonal [I]
ud desired control (m x 1) [0]
u0 initial point (m x 1)
W0 initial working set (m x 1) [empty]
imax max no. of iterations [100]
Outputs:
-------
u optimal control
W optimal active set
iter no. of iterations (= no. of changes in the working set + 1)
0 if u_i not saturated
Active set syntax: W_i = -1 if u_i = umin_i
+1 if u_i = umax_i
Directly Based on the code from:
Ola Harkegard, www.control.isy.liu.se/~ola
see licsence.
"""
#k = number of virtual controls
#m = number of variables (actuators)
k, m = B.shape
if u == None:
u = np.mean(umin + umax, 0)[:, None]
if W == None:
W = np.zeros((m, 1))
if ud == None:
ud = np.zeros((m, 1))
if Wu == None:
Wu = np.eye(m)
if Wv | == None:
Wv = np.eye(k)
phase = 1
#Reformulate as a minimal least squares problem. See 2002-03-08 (1).
| A = Wv.dot(B).dot(np.linalg.pinv(Wu))
b = Wv.dot(v - B.dot(ud))
xmin = (umin - ud).flatten()
xmax = (umax - ud).flatten()
# Compute initial point and residual.
x = Wu.dot(u - ud)
r = np.atleast_2d(A.dot(x) - b)
#Determine indeces of free variables
i_free = (W == 0).flatten()
m_free = np.sum(i_free)
for i in range(imax):
#print 'Iter: ', i
if phase == 1:
A_free = A[:, i_free]
if m_free <= k:
if m_free > 0:
p_free = np.linalg.lstsq(-A_free, r)[0]
else:
q1, r1 = qr(A_free.T)
p_free = -q1.dot(np.solve(r1.T, r))
p = np.zeros((m, 1))
if A.shape[1] > 1:
p[i_free] = p_free
else:
p[i_free] = p_free.flatten()
else:
i_fixed = np.logical_not(i_free)
m_fixed = m - m_free
if m_fixed > 0:
HT = U[i_fixed.squeeze(), :].T
V, Rtot = qr(np.atleast_2d(HT))
V1 = V[:, :m_fixed]
V2 = V[:, m_fixed + 1:]
R = Rtot[:, m_fixed]
else:
V, Rtot = np.array([[]]), np.array([[]])
V1 = V2 = R = V.T
s = -V2.T.dot(z)
pz = V2.dot(s)
p = U.dot(pz)
x_opt = x + p
infeasible = np.logical_or(x_opt < xmin, x_opt > xmax)
if not np.any(infeasible[i_free]):
x = x_opt
if phase == 1:
r = r + A.dot(p)
else:
z = z + pz
if phase == 1 and m_free >= k:
phase = 2
Utot, Stot = qr(A.T)
U = Utot[:, k:]
z = U.T.dot(x)
else:
lam = np.zeros((m, 1))
if m_free < m:
if phase == 1:
g = A.T.dot(r)
lam = -W * g
else:
lam[i_fixed] = -W[i_fixed] * np.linalg.solve(R, V1.T.dot(z))
if np.all(lam >= -eps):
u = np.linalg.solve(Wu, x) + ud
return u
lambda_neg, i_neg = np.min(lam), np.argmin(lam)
W[i_neg] = 0
i_free[i_neg] = True
m_free += 1
else:
dist = np.ones(m)
i_min = np.logical_and(i_free, p.flat < 0).flatten()
i_max = np.logical_and(i_free, p.flat > 0).flatten()
dist[i_min] = (xmin[i_min] - x[i_min]) / p[i_min]
dist[i_max] = (xmax[i_max] - x[i_max]) / p[i_max]
alpha, i_alpha = np.min(dist), np.argmin(dist)
x = x + alpha * p
if phase == 1:
r = r + A.dot(alpha * p) #!!
else:
z = z + alpha * pz
W[i_alpha] = np.sign(p[i_alpha])
if i_free[i_alpha]:
i_free[i_alpha] = False
m_free -= 1
u = np.linalg.solve(Wu, x) + ud
return u
def bounded_lsq(A, b, lower_lim, upper_lim):
    """
    Minimize ||Ax - b||_2 subject to lower_lim <= x <= upper_lim.

    Thin convenience wrapper around mls() with default weights.
    """
    solution = mls(A, b, lower_lim, upper_lim)
    return solution
def test_bounded_lsq():
    """Smoke-test bounded_lsq against two known solutions.

    Fix: dropped the unused `from numpy.core.umath_tests import
    matrix_multiply` -- that private module is gone in modern NumPy, so the
    import alone made this test fail before any assertion ran.
    """
    # 1-D problem: the unconstrained optimum (16) is clipped to the bound 15.
    s = np.linspace(0, 10, 100)
    A = np.exp(-((s - 5) ** 2) / 20)
    A = A[:, None]
    b = 16 * A
    x = bounded_lsq(A, b, np.atleast_2d(0), np.atleast_2d(15))
    np.testing.assert_almost_equal(x, 15)
    # 2-D problem with one active bound at the optimum.
    A = np.array([[1, -3], [5, 7]])
    b = np.array([[-50], [50]])
    ll = np.array(([[-10], [-10]]))
    ul = np.array(([[10], [10]]))
    x0 = bounded_lsq(A, b, ll, ul)
    np.testing.assert_array_almost_equal(x0, np.array([[-4.61538462], [10.]]))
if __name__ == '__main__':
    # Demo script: run the self-test, then visualise a clipped 1-D fit and
    # the 2-D cost landscape with both the bounded and unconstrained minima.
    # NOTE(review): numpy.core.umath_tests is removed in modern NumPy; this
    # demo (unlike the library code above) will not run there.
    from numpy.core.umath_tests import matrix_multiply
    import matplotlib.pyplot as plt
    test_bounded_lsq()
    # 1-D example: optimum 16 clipped to the upper bound 4.
    s = np.linspace(0, 10, 100)
    A = np.exp(-((s - 5) ** 2) / 20)
    A = A[:, None]
    b = 16 * A
    x = bounded_lsq(A, b, np.atleast_2d(0), np.atleast_2d(4))
    plt.plot(A.dot(x))
    plt.plot(b)
    plt.figure()
    plt.rcParams['font.family'] = 'serif'
    # 2-D example: contour plot of ||Ax - b|| over a grid of candidate x.
    A = np.array([[1, -3], [5, 7]])
    b = np.array([[-50], [50]])
    ll = np.array(([[-10], [-10]]))
    ul = np.array(([[10], [10]]))
    Ud = np.array(([0, 0]))
    gamma = 1000
    x0 = bounded_lsq(A, b, ll, ul)
    x = np.linspace(-30, 30, 500)
    y = np.linspace(-30, 30, 500)
    X, Y = np.meshgrid(x, y)
    S = np.dstack((X, Y))
    SN = matrix_multiply(S, A.T)
    plt.clf()
    plt.contourf(x, y, np.sqrt(((SN - b.T) ** 2).sum(-1)), 30,
                 cmap=plt.cm.PuBu_r)
    plt.colorbar()
    #plt.axhline(ll[0])
    #plt.axhline(ul[0])
    #plt.axvline(ll[1])
    #plt.axvline(ul[1])
    # Draw the feasible box and annotate both minima.
    rect = np.vstack((ll, ul - ll))
    patch = plt.Rectangle(ll, *(ul - ll), facecolor=(0.0, 0., 0., 0))
    plt.gca().add_patch(patch)
    plt.annotate("Bounded Min",
                 xy=x0, xycoords='data',
                 xytext=(-5, 5), textcoords='data',
                 arrowprops=dict(arrowstyle="->",
                                 connectionstyle="arc3"),
                 )
    plt.annotate("Lsq Min",
                 xy=np.linalg.lstsq(A, b)[0], xycoords='data',
                 xytext=(20, 10), textcoords='offset points',
                 arrowprops=dict(arrowstyle="->",
                                 connectionstyle="arc3"),
                 )
    plt.scatter(*x0)
    plt.scatter(*np.linalg.lstsq(A, b)[0])
    plt.show()
|
er that used to be defined in the parser is no
# longer defined, stored copies of that parameter will be
# deserialized as strings by this logic even if they were
# originally meant to be some other type.
if renewalparams["authenticator"] == "webroot":
_restore_webroot_config(config, renewalparams)
plugin_prefixes = []
else:
plugin_prefixes = [renewalparams["authenticator"]]
if renewalparams.get("installer", None) is not None:
plugin_prefixes.append(renewalparams["ins | taller"])
for plugin_prefix in set(plugin_prefixes):
for config_item, config_value in six.iteritems(renewalparams):
if config_item.startswith(plugin_prefix + "_") and not cli.set_by_cli(config_item):
# Values None, True, and False need to be treated specially,
# As their types aren't handled correctly by conf | igobj
if config_value in ("None", "True", "False"):
# bool("False") == True
# pylint: disable=eval-used
setattr(config.namespace, config_item, eval(config_value))
else:
cast = cli.argparse_type(config_item)
setattr(config.namespace, config_item, cast(config_value))
def _restore_required_config_elements(config, renewalparams):
    """Sets non-plugin specific values in config from renewalparams

    :param configuration.NamespaceConfig config: configuration for the
        current lineage
    :param configobj.Section renewalparams: parameters from the renewal
        configuration file that defines this lineage
    """
    # String-valued items: copy over unless overridden on the CLI.
    for item in STR_CONFIG_ITEMS:
        if item not in renewalparams or cli.set_by_cli(item):
            continue
        stored = renewalparams[item]
        # Unfortunately, we've lost type information from ConfigObj,
        # so we don't know if the original was NoneType or str!
        setattr(config.namespace, item, None if stored == "None" else stored)
    # Int-valued items: parse, with a legacy special case for http01_port.
    for item in INT_CONFIG_ITEMS:
        if item not in renewalparams or cli.set_by_cli(item):
            continue
        stored = renewalparams[item]
        # the default value for http01_port was None during private beta
        if item == "http01_port" and stored == "None":
            logger.info("updating legacy http01_port value")
            numeric = cli.flag_default("http01_port")
        else:
            try:
                numeric = int(stored)
            except ValueError:
                raise errors.Error(
                    "Expected a numeric value for {0}".format(item))
        setattr(config.namespace, item, numeric)
def should_renew(config, lineage):
    "Return true if any of the circumstances for automatic renewal apply."
    # Renewal is currently unconditional for every lineage.
    return True
def _avoid_invalidating_lineage(config, lineage, original_server):
    """Do not renew a valid cert with one from a staging server!

    :raises errors.Error: when renewing against staging would replace a
        seemingly valid certificate and --break-my-certs was not given
    """
    def _is_staging(srv):
        return srv == constants.STAGING_URI or "staging" in srv

    # Some lineages may have begun with --staging, but then had production certs
    # added to them
    # Fix: close the certificate file (the original leaked the handle).
    with open(lineage.cert) as cert_file:
        latest_cert = OpenSSL.crypto.load_certificate(
            OpenSSL.crypto.FILETYPE_PEM, cert_file.read())
    # all our test certs are from happy hacker fake CA, though maybe one day
    # we should test more methodically
    now_valid = "fake" not in repr(latest_cert.get_issuer()).lower()

    if _is_staging(config.server):
        if not _is_staging(original_server) or now_valid:
            if not config.break_my_certs:
                names = ", ".join(lineage.names())
                raise errors.Error(
                    "You've asked to renew/replace a seemingly valid certificate with "
                    "a test certificate (domains: {0}). We will not do that "
                    "unless you use the --break-my-certs flag!".format(names))
def renew_cert(config, domains, le_client, lineage):
    "Renew a certificate lineage."
    renewal_params = lineage.configuration["renewalparams"]
    original_server = renewal_params.get("server", cli.flag_default("server"))
    # Guard against replacing a valid production cert with a staging one.
    _avoid_invalidating_lineage(config, lineage, original_server)
    new_certr, new_chain, new_key, _ = le_client.obtain_certificate(domains)
    if config.dry_run:
        # Dry run: the new cert is obtained but deliberately not stored.
        logger.info("Dry run: skipping updating lineage at %s",
                    os.path.dirname(lineage.cert))
    else:
        prior_version = lineage.latest_common_version()
        new_cert = OpenSSL.crypto.dump_certificate(
            OpenSSL.crypto.FILETYPE_PEM, new_certr.body.wrapped)
        new_chain = crypto_util.dump_pyopenssl_chain(new_chain)
        renewal_conf = configuration.RenewerConfiguration(config.namespace)
        # TODO: Check return value of save_successor
        lineage.save_successor(prior_version, new_cert, new_key.pem, new_chain, renewal_conf)
        lineage.update_all_links_to(lineage.latest_common_version())
    # Run user-configured renew hooks even on dry runs.
    hooks.renew_hook(config, domains, lineage.live_dir)
def report(msgs, category):
"Format a results report for a category of renewal outcomes"
lines = ("%s (%s)" % (m, category) for m in msgs)
return " " + "\n ".join(lines)
def _renew_describe_results(config, renew_successes, renew_failures,
renew_skipped, parse_failures):
out = []
notify = out.append
if config.dry_run:
notify("** DRY RUN: simulating 'letsencrypt renew' close to cert expiry")
notify("** (The test certificates below have not been saved.)")
notify("")
if renew_skipped:
notify("The following certs are not due for renewal yet:")
notify(report(renew_skipped, "skipped"))
if not renew_successes and not renew_failures:
notify("No renewals were attempted.")
elif renew_successes and not renew_failures:
notify("Congratulations, all renewals succeeded. The following certs "
"have been renewed:")
notify(report(renew_successes, "success"))
elif renew_failures and not renew_successes:
notify("All renewal attempts failed. The following certs could not be "
"renewed:")
notify(report(renew_failures, "failure"))
elif renew_failures and renew_successes:
notify("The following certs were successfully renewed:")
notify(report(renew_successes, "success"))
notify("\nThe following certs could not be renewed:")
notify(report(renew_failures, "failure"))
if parse_failures:
notify("\nAdditionally, the following renewal configuration files "
"were invalid: ")
notify(parse_failures, "parsefail")
if config.dry_run:
notify("** DRY RUN: simulating 'letsencrypt renew' close to cert expiry")
notify("** (The test certificates above have not been saved.)")
if config.quiet and not (renew_failures or parse_failures):
return
print("\n".join(out))
def renew_all_lineages(config):
"""Examine each lineage; renew if due and report results"""
if config.domains != []:
raise errors.Error("Currently, the renew verb is only capable of "
"renewing all installed certificates that are due "
"to be renewed; individual domains cannot be "
"specified with this action. If you would like to "
"renew specific certificates, use the certonly "
"command. The renew verb may provide other options "
"for selecting certificates to renew in the future.")
renewer_config = configuration.RenewerConfiguration(config)
renew_successes = []
renew_failures = []
renew_skipped = []
parse_failures = []
for renewal_file in renewal_conf_files(renewer_config):
disp = zope.component.getUtility(interfaces.IDisp |
# -*- coding: utf-8 -*-
# Copyright (c) 2011 - 2015 Detlev Offenbach <detlev@die-offenbachs.de>
#
"""
Module implementing the rebase extension interface.
"""
from __future__ import unicode_literals
import os
from PyQt5.QtWidgets import QDialog
from ..HgExtension import HgExtension
from ..HgDialog import HgDialog
class Rebase(HgExtension):
"""
Class implementing the rebase extension interface.
"""
    def __init__(self, vcs):
        """
        Constructor
        
        @param vcs reference to the Mercurial vcs object
        """
        # All state lives on the HgExtension base; nothing extra to set up.
        super(Rebase, self).__init__(vcs)
    def hgRebase(self, path):
        """
        Public method to rebase changesets to a different branch.
        
        @param path directory name of the project (string)
        @return flag indicating that the project should be reread (boolean)
        """
        # find the root of the repo
        repodir = self.vcs.splitPath(path)[0]
        while not os.path.isdir(os.path.join(repodir, self.vcs.adminDir)):
            repodir = os.path.dirname(repodir)
            if os.path.splitdrive(repodir)[1] == os.sep:
                # Reached the drive/filesystem root without finding a repo.
                return False
        
        res = False
        from .HgRebaseDialog import HgRebaseDialog
        dlg = HgRebaseDialog(self.vcs.hgGetTagsList(repodir),
                             self.vcs.hgGetBranchesList(repodir),
                             self.vcs.hgGetBookmarksList(repodir))
        if dlg.exec_() == QDialog.Accepted:
            (indicator, sourceRev, destRev, collapse, keep, keepBranches,
             detach) = dlg.getData()
            # Translate the dialog selections into `hg rebase` arguments.
            args = self.vcs.initCommand("rebase")
            if indicator == "S":
                args.append("--source")
                args.append(sourceRev)
            elif indicator == "B":
                args.append("--base")
                args.append(sourceRev)
            if destRev:
                args.append("--dest")
                args.append(destRev)
            if collapse:
                args.append("--collapse")
            if keep:
                args.append("--keep")
            if keepBranches:
                args.append("--keepbranches")
            if detach:
                args.append("--detach")
            args.append("--verbose")
            dia = HgDialog(self.tr('Rebase Changesets'), self.vcs)
            res = dia.startProcess(args, repodir)
            if res:
                dia.exec_()
                # A reread is needed only when files were added or deleted.
                res = dia.hasAddOrDelete()
                self.vcs.checkVCSStatus()
        return res
def hgRebaseContinue(self, path):
"""
Public method to continue rebasing changesets from another branch.
@param path directory name of the project (string)
| @return flag indicating that the project should be reread (boolean)
"""
# find the root of the repo
repodir = self.vcs.splitPath(path)[0]
while not os.path.isdir(os.path.join(repodir, self.vcs.adminDir)):
repodir = os.path.dirname(repodir)
if os.path.splitdrive(repodir)[1] == os.sep:
return False
args = self.vcs.initCommand("rebase")
args.append("--continue")
args.append("--verbose")
dia = HgDialog(self.tr('Rebase Changesets (Continue)'), self.vcs)
res = dia.startProcess(args, repodir)
if res:
dia.exec_()
res = dia.hasAddOrDelete()
self.vcs.checkVCSStatus()
return res
def hgRebaseAbort(self, path):
"""
Public method to abort rebasing changesets from another branch.
@param path directory name of the project (string)
@return flag indicating that the project should be reread (boolean)
"""
# find the root of the repo
repodir = self.vcs.splitPath(path)[0]
while not os.path.isdir(os.path.join(repodir, self.vcs.adminDir)):
repodir = os.path.dirname(repodir)
if os.path.splitdrive(repodir)[1] == os.sep:
return False
args = self.vcs.initCommand("rebase")
args.append("--abort")
args.append("--verbose")
dia = HgDialog(self.tr('Rebase Changesets (Abort)'), self.vcs)
res = dia.startProcess(args, repodir)
if res:
dia.exec_()
res = dia.hasAddOrDelete()
self.vcs.checkVCSStatus()
return res
|
()
state = hass.states.get("sensor.tasmota_status")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("sensor.tasmota_status")
assert state.state == STATE_UNKNOWN
assert not state.attributes.get(ATTR_ASSUMED_STATE)
# Test pushed state update
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/STATE", '{"Wifi":{"Signal":20.5}}'
)
await hass.async_block_till_done()
state = hass.states.get("sensor.tasmota_status")
assert state.state == "20.5"
# Test polled state | update
async_fire_mqtt_message(
hass,
"tasmota_49A3BC/stat/STATUS11",
'{"StatusSTS":{"Wifi":{"Signal":20.0}}}',
)
await hass.async_block_till_done()
state = hass.states.get("sensor.tasmota_status")
assert state.state == "20.0"
# Test force update flag
entity = hass.data["entity_components"]["sensor"].get_entity(
"sensor.tasmota_status"
)
assert entity.force_update
@pytest.mark.parametrize("status_sensor_disabled", [False])
async def test_single_shot_status_sensor_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
    """Test state update via MQTT.

    The restart-reason status sensor is single-shot: it should latch the
    first polled STATUS1 value after the device comes online and ignore
    later polls until the device reports online again.
    """
    entity_reg = er.async_get(hass)

    # Pre-enable the status sensor
    entity_reg.async_get_or_create(
        sensor.DOMAIN,
        "tasmota",
        "00000049A3BC_status_sensor_status_sensor_status_restart_reason",
        suggested_object_id="tasmota_status",
        disabled_by=None,
    )
    config = copy.deepcopy(DEFAULT_CONFIG)
    mac = config["mac"]

    # Publish the discovery config and let HA finish entity setup.
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    await hass.async_block_till_done()

    # Entity stays unavailable until the device's LWT says it is online.
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == "unavailable"
    assert not state.attributes.get(ATTR_ASSUMED_STATE)

    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == STATE_UNKNOWN
    assert not state.attributes.get(ATTR_ASSUMED_STATE)

    # Test polled state update
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/stat/STATUS1",
        '{"StatusPRM":{"RestartReason":"Some reason"}}',
    )
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == "Some reason"

    # Test polled state update is ignored (single-shot: value already latched)
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/stat/STATUS1",
        '{"StatusPRM":{"RestartReason":"Another reason"}}',
    )
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == "Some reason"

    # Device signals online again; latched value is kept until the next poll.
    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == "Some reason"

    # Test polled state update (accepted again after the online transition)
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/stat/STATUS1",
        '{"StatusPRM":{"RestartReason":"Another reason"}}',
    )
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == "Another reason"

    # Test polled state update is ignored
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/stat/STATUS1",
        '{"StatusPRM":{"RestartReason":"Third reason"}}',
    )
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == "Another reason"
@pytest.mark.parametrize("status_sensor_disabled", [False])
@patch.object(hatasmota.status_sensor, "datetime", Mock(wraps=datetime.datetime))
async def test_restart_time_status_sensor_state_via_mqtt(
    hass, mqtt_mock, setup_tasmota
):
    """Test state update via MQTT.

    The last-restart-time sensor derives its state by subtracting the
    device-reported UptimeSec from a mocked "now".
    """
    entity_reg = er.async_get(hass)

    # Pre-enable the status sensor
    entity_reg.async_get_or_create(
        sensor.DOMAIN,
        "tasmota",
        "00000049A3BC_status_sensor_status_sensor_last_restart_time",
        suggested_object_id="tasmota_status",
        disabled_by=None,
    )
    config = copy.deepcopy(DEFAULT_CONFIG)
    mac = config["mac"]

    # Publish the discovery config and let HA finish entity setup.
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    await hass.async_block_till_done()

    # Unavailable until the device's LWT reports it online.
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == "unavailable"
    assert not state.attributes.get(ATTR_ASSUMED_STATE)

    async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == STATE_UNKNOWN
    assert not state.attributes.get(ATTR_ASSUMED_STATE)

    # Test polled state update
    # Freeze "now" at 08:00 UTC; an uptime of 3600 s puts the restart at 07:00.
    utc_now = datetime.datetime(2020, 11, 11, 8, 0, 0, tzinfo=dt.UTC)
    hatasmota.status_sensor.datetime.now.return_value = utc_now
    async_fire_mqtt_message(
        hass,
        "tasmota_49A3BC/stat/STATUS11",
        '{"StatusSTS":{"UptimeSec":"3600"}}',
    )
    await hass.async_block_till_done()
    state = hass.states.get("sensor.tasmota_status")
    assert state.state == "2020-11-11T07:00:00+00:00"
async def test_attributes(hass, mqtt_mock, setup_tasmota):
    """Test correct attributes for sensors."""
    config = copy.deepcopy(DEFAULT_CONFIG)
    # Sensor discovery payload: a DHT11 temperature and a CO2 ("Beer") sensor.
    sensor_config = {
        "sn": {
            "DHT11": {"Temperature": None},
            "Beer": {"CarbonDioxide": None},
            "TempUnit": "C",
        }
    }
    mac = config["mac"]

    # Publish device discovery first, then the sensor discovery message.
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/sensors",
        json.dumps(sensor_config),
    )
    await hass.async_block_till_done()

    # Temperature sensor: device class, name and unit come from discovery.
    state = hass.states.get("sensor.tasmota_dht11_temperature")
    assert state.attributes.get("device_class") == "temperature"
    assert state.attributes.get("friendly_name") == "Tasmota DHT11 Temperature"
    assert state.attributes.get("icon") is None
    assert state.attributes.get("unit_of_measurement") == "°C"

    # CO2 sensor.
    state = hass.states.get("sensor.tasmota_beer_CarbonDioxide")
    assert state.attributes.get("device_class") == "carbon_dioxide"
    assert state.attributes.get("friendly_name") == "Tasmota Beer CarbonDioxide"
    assert state.attributes.get("icon") is None
    assert state.attributes.get("unit_of_measurement") == "ppm"
async def test_nested_sensor_attributes(hass, mqtt_mock, setup_tasmota):
    """Test correct attributes for sensors discovered from a nested payload."""
    config = copy.deepcopy(DEFAULT_CONFIG)
    # NESTED_SENSOR_CONFIG is a shared fixture payload with nested sensor keys.
    sensor_config = copy.deepcopy(NESTED_SENSOR_CONFIG)
    mac = config["mac"]

    # Publish device discovery first, then the sensor discovery message.
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{mac}/sensors",
        json.dumps(sensor_config),
    )
    await hass.async_block_till_done()

    state = hass.states.get("sensor.tasmota_tx23_speed_act")
    assert state.attributes.get("device_class") is None
    assert state.attributes.get("friendly_name") == "Tasmota TX23 Speed Act"
    assert state.attributes.get("icon") is None
    assert state.attributes.get("unit_of_measurement") == "km/h"

    state = hass.states.get("sensor.tasmota_tx23_dir_avg")
    assert state.attributes.get("device_class") is None
    assert state.attributes.get("friendly_name") == "Tasmota TX23 Dir Avg"
    assert state.attributes.get("icon") is None
    assert state.attributes.get("unit_of_measurement") == " "
async def test_indexed_sensor_attributes(hass, mqtt_mock, setup_tasmota):
"""Test correct attributes for sensors."""
config = copy.deepcopy(DEFAULT_CONFIG)
sensor_config = {
"sn": {
"Dummy1": {"Temperature": [None, None]},
"Dummy2": {"Carbon |
ion.flush()
with self.app.test_client() as c:
rv = c.get(self.queries_url, query_string={'api_key': user.api_key})
self.assertEqual(user.id, api_key_load_user_from_request(request).id)
def test_disabled_user_api_key(self):
    """A disabled user's API key must no longer authenticate requests."""
    user = self.factory.create_user(api_key="user_key")
    user.disable()
    models.db.session.flush()
    with self.app.test_client() as c:
        rv = c.get(self.queries_url, query_string={'api_key': user.api_key})
        self.assertEqual(None, api_key_load_user_from_request(request))
def test_api_key_header(self):
    """A valid API key in the Authorization header authenticates."""
    with self.app.test_client() as c:
        rv = c.get(self.query_url, headers={'Authorization': "Key {}".format(self.api_key)})
        self.assertIsNotNone(api_key_load_user_from_request(request))
def test_api_key_header_with_wrong_key(self):
    """An unknown key in the Authorization header yields no user."""
    with self.app.test_client() as c:
        rv = c.get(self.query_url, headers={'Authorization': "Key oops"})
        self.assertIsNone(api_key_load_user_from_request(request))
def test_api_key_for_wrong_org(self):
    """A key belonging to another organization must not grant access (404)."""
    other_user = self.factory.create_admin(org=self.factory.create_org())
    with self.app.test_client() as c:
        rv = c.get(self.query_url, headers={'Authorization': "Key {}".format(other_user.api_key)})
        self.assertEqual(404, rv.status_code)
class TestHMACAuthentication(BaseTestCase):
    """Tests for HMAC-signature based authentication of API requests."""
    #
    # This is a bad way to write these tests, but the way Flask works doesn't make it easy to write them properly...
    #
    def setUp(self):
        super(TestHMACAuthentication, self).setUp()
        # A query with a known API key; request paths are signed with it below.
        self.api_key = '10'
        self.query = self.factory.create_query(api_key=self.api_key)
        models.db.session.flush()
        self.path = '/{}/api/queries/{}'.format(self.query.org.slug, self.query.id)
        # Signatures carry an expiry timestamp; keep ours valid for 30 minutes.
        self.expires = time.time() + 1800

    def signature(self, expires):
        # Helper: sign the test path with the query's API key.
        return sign(self.query.api_key, self.path, expires)

    def test_no_signature(self):
        """A request without any signature does not authenticate."""
        with self.app.test_client() as c:
            rv = c.get(self.path)
            self.assertIsNone(hmac_load_user_from_request(request))

    def test_wrong_signature(self):
        """A bogus signature does not authenticate."""
        with self.app.test_client() as c:
            rv = c.get(self.path, query_string={'signature': 'whatever', 'expires': self.expires})
            self.assertIsNone(hmac_load_user_from_request(request))

    def test_correct_signature(self):
        """A valid signature over the path authenticates the request."""
        with self.app.test_client() as c:
            rv = c.get(self.path, query_string={'signature': self.signature(self.expires), 'expires': self.expires})
            self.assertIsNotNone(hmac_load_user_from_request(request))

    def test_no_query_id(self):
        """Signature auth needs a query id in the path; without one it fails."""
        with self.app.test_client() as c:
            rv = c.get('/{}/api/queries'.format(self.query.org.slug), query_string={'api_key': self.api_key})
            self.assertIsNone(hmac_load_user_from_request(request))

    def test_user_api_key(self):
        """Signing with a user API key plus user_id authenticates that user."""
        user = self.factory.create_user(api_key="user_key")
        path = '/api/queries/'
        models.db.session.flush()
        signature = sign(user.api_key, path, self.expires)
        with self.app.test_client() as c:
            rv = c.get(path, query_string={'signature': signature, 'expires': self.expires, 'user_id': user.id})
            self.assertEqual(user.id, hmac_load_user_from_request(request).id)
class TestCreateAndLoginUser(BaseTestCase):
    """Tests for create_and_login_user (used by external auth providers)."""

    def test_logins_valid_user(self):
        """An existing user is logged in rather than recreated."""
        user = self.factory.create_user(email=u'test@example.com')

        with patch('redash.authentication.login_user') as login_user_mock:
            create_and_login_user(self.factory.org, user.name, user.email)
            login_user_mock.assert_called_once_with(user, remember=True)

    def test_creates_vaild_new_user(self):
        """An unknown e-mail creates a new user record and logs it in."""
        email = u'test@example.com'
        name = 'Test User'

        with patch('redash.authentication.login_user') as login_user_mock:
            create_and_login_user(self.factory.org, name, email)

            self.assertTrue(login_user_mock.called)
            # The user must now exist in the database with the given e-mail.
            user = models.User.query.filter(models.User.email == email).one()
            self.assertEqual(user.email, email)

    def test_updates_user_name(self):
        """A changed display name still matches the existing user by e-mail."""
        user = self.factory.create_user(email=u'test@example.com')

        with patch('redash.authentication.login_user') as login_user_mock:
            create_and_login_user(self.factory.org, "New Name", user.email)
            login_user_mock.assert_called_once_with(user, remember=True)
class TestVerifyProfile(BaseTestCase):
    """Tests for verify_profile's allowed-domain gating of OAuth profiles."""

    def test_no_domain_allowed_for_org(self):
        """With no allowed domains configured, every profile is rejected."""
        profile = dict(email=u'arik@example.com')
        self.assertFalse(verify_profile(self.factory.org, profile))

    def test_domain_not_in_org_domains_list(self):
        """A profile whose e-mail domain is not whitelisted is rejected."""
        profile = dict(email=u'arik@example.com')
        self.factory.org.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = ['example.org']
        self.assertFalse(verify_profile(self.factory.org, profile))

    def test_domain_in_org_domains_list(self):
        """A whitelisted e-mail domain is accepted."""
        profile = dict(email=u'arik@example.com')
        self.factory.org.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = ['example.com']
        self.assertTrue(verify_profile(self.factory.org, profile))

        # Also accepted when the domain is one of several allowed domains.
        self.factory.org.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = ['example.org', 'example.com']
        self.assertTrue(verify_profile(self.factory.org, profile))

    def test_org_in_public_mode_accepts_any_domain(self):
        """Public orgs accept any e-mail domain."""
        profile = dict(email=u'arik@example.com')
        self.factory.org.settings[models.Organization.SETTING_IS_PUBLIC] = True
        self.factory.org.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = []
        self.assertTrue(verify_profile(self.factory.org, profile))

    def test_user_not_in_domain_but_account_exists(self):
        """An already-existing account overrides the domain whitelist."""
        profile = dict(email=u'arik@example.com')
        self.factory.create_user(email=u'arik@example.com')
        self.factory.org.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = ['example.org']
        self.assertTrue(verify_profile(self.factory.org, profile))
class TestGetLoginUrl(BaseTestCase):
    """Tests for get_login_url in multi-org mode."""

    def test_when_multi_org_enabled_and_org_exists(self):
        """The login URL is scoped to the org slug taken from the request path."""
        with self.app.test_request_context('/{}/'.format(self.factory.org.slug)):
            self.assertEqual(get_login_url(next=None), '/{}/login'.format(self.factory.org.slug))

    def test_when_multi_org_enabled_and_org_doesnt_exist(self):
        """An unknown org slug falls back to the root URL."""
        with self.app.test_request_context('/{}_notexists/'.format(self.factory.org.slug)):
            self.assertEqual(get_login_url(next=None), '/')
class TestRedirectToUrlAfterLoggingIn(BaseTestCase):
    """Tests that the post-login redirect never leaves the local host."""

    def setUp(self):
        super(TestRedirectToUrlAfterLoggingIn, self).setUp()
        self.user = self.factory.user
        self.password = 'test1234'

    def test_no_next_param(self):
        """Without ?next the user lands on the org's home page."""
        response = self.post_request('/login', data={'email': self.user.email, 'password': self.password}, org=self.factory.org)
        self.assertEqual(response.location, 'http://localhost/{}/'.format(self.user.org.slug))

    def test_simple_path_in_next_param(self):
        """A plain relative path in ?next is honored."""
        response = self.post_request('/login?next=queries', data={'email': self.user.email, 'password': self.password}, org=self.factory.org)
        self.assertEqual(response.location, 'http://localhost/queries')

    def test_starts_scheme_url_in_next_param(self):
        """An absolute external URL in ?next is ignored (open-redirect guard)."""
        response = self.post_request('/login?next=https://redash.io', data={'email': self.user.email, 'password': self.password}, org=self.factory.org)
        self.assertEqual(response.location, 'http://localhost/')

    def test_without_scheme_url_in_next_param(self):
        """A protocol-relative URL (//host) to another host is also rejected."""
        response = self.post_request('/login?next=//redash.io', data={'email': self.user.email, 'password': self.password}, org=self.factory.org)
        self.assertEqual(response.location, 'http://localhost/')

    def test_without_scheme_with_path_url_in_next_param(self):
        """A //localhost path stays on this host and is therefore allowed."""
        response = self.post_request('/login?next=//localhost/queries', data={'email': self.user.email, 'password': self.password}, org=self.factory.org)
        self.assertEqual(response.location, 'http://localhost/queries')
class |
im | port unittest
from katas.kyu_7.complementary_dna import DNA_strand
class DNAStrandTestCase(unittest.TestCase):
    """Tests for the complementary-DNA kata (A<->T, C<->G pairing)."""

    def test_equals(self):
        """A run of a single base maps to a run of its complement."""
        self.assertEqual('TTTT', DNA_strand('AAAA'))

    def test_equals_2(self):
        """Mixed bases are complemented position by position."""
        self.assertEqual('TAACG', DNA_strand('ATTGC'))

    def test_equals_3(self):
        """Both pairings hold when combined in one strand."""
        self.assertEqual('CATA', DNA_strand('GTAT'))
# Tic Tac Toe
# Tic Tac Toe
import random
def drawBoard(board):
    """Print the 3x3 board.

    ``board`` is a list of ten strings; index 0 is unused so squares 1-9
    match the numeric-keypad layout (7-8-9 on the top row).
    """
    rows = ((7, 8, 9), (4, 5, 6), (1, 2, 3))
    for row_number, (left, middle, right) in enumerate(rows):
        if row_number:
            # Horizontal rule between rows (but not above the first one).
            print('-----------')
        print('   |   |')
        print(' {} | {} | {}'.format(board[left], board[middle], board[right]))
        print('   |   |')
def inputPlayerLetter():
    """Ask which letter the player wants to be.

    Returns a two-item list: the player's letter first, the computer's
    letter second.
    """
    choice = ''
    # Keep prompting until the (upper-cased) answer is X or O.
    while choice not in ('X', 'O'):
        print('Do you want to be X or O?')
        choice = input().upper()
    return ['X', 'O'] if choice == 'X' else ['O', 'X']
def whoGoesFirst():
# Randomly choose the player who goes first.
if random.randint(0, 1) == 0:
return 'computer'
else:
return 'player'
def playAgain():
    """Return True when the player answers yes to another game."""
    print('Do you want to play again? (yes or no)')
    answer = input().lower()
    # Any answer starting with 'y' counts as yes.
    return answer.startswith('y')
def makeMove(board, letter, move):
    # Place ``letter`` on square ``move``; mutates ``board`` in place.
    board[move] = letter
def isWinner(bo, le):
    """Return True if player ``le`` owns any complete line on board ``bo``.

    ``bo`` is the 10-item board list (index 0 unused), ``le`` the letter.
    """
    winning_lines = (
        (7, 8, 9), (4, 5, 6), (1, 2, 3),   # rows: top, middle, bottom
        (7, 4, 1), (8, 5, 2), (9, 6, 3),   # columns: left, middle, right
        (7, 5, 3), (9, 5, 1),              # the two diagonals
    )
    return any(
        bo[a] == le and bo[b] == le and bo[c] == le
        for a, b, c in winning_lines
    )
def getBoardCopy(board):
    """Return a shallow copy of ``board``.

    Used for trial moves so the real board is never mutated. The manual
    element-by-element loop is replaced with the idiomatic list copy.
    """
    return list(board)
def isSpaceFree(board, move):
    # Return true if the passed move is free on the passed board.
    # A free square still holds the single-space placeholder.
    return board[move] == ' '
def getPlayerMove(board):
    # Prompt until the player enters a digit 1-9 naming a free square.
    # NOTE: the membership test runs first, so the short-circuit guarantees
    # int(move) below only runs on a valid digit string.
    move = ' '
    while move not in '1 2 3 4 5 6 7 8 9'.split() or not isSpaceFree(board, int(move)):
        print('What is your next move? (1-9)')
        move = input()
    return int(move)
def chooseRandomMoveFromList(board, movesList):
    """Return a random valid move from ``movesList`` on ``board``.

    Returns None if none of the candidate squares is free. The manual
    filter-and-append loop is replaced with a list comprehension, and the
    ``len(...) != 0`` check with plain truthiness.
    """
    possibleMoves = [i for i in movesList if isSpaceFree(board, i)]
    if possibleMoves:
        return random.choice(possibleMoves)
    return None
def getComputerMove(board, computerLetter):
    """Choose the computer's move using a fixed priority strategy.

    Priority order: win immediately, block the player's immediate win,
    take a random free corner, take the center, otherwise take a side.
    Returns the chosen square index (1-9).
    """
    if computerLetter == 'X':
        playerLetter = 'O'
    else:
        playerLetter = 'X'

    # First, check if we can win in the next move.
    for i in range(1, 10):
        copy = getBoardCopy(board)
        if isSpaceFree(copy, i):
            makeMove(copy, computerLetter, i)
            if isWinner(copy, computerLetter):
                return i

    # Check if the player could win on his next move, and block them.
    for i in range(1, 10):
        copy = getBoardCopy(board)
        if isSpaceFree(copy, i):
            makeMove(copy, playerLetter, i)
            if isWinner(copy, playerLetter):
                return i

    # Try to take one of the corners, if they are free.
    move = chooseRandomMoveFromList(board, [1, 3, 7, 9])
    if move is not None:  # identity test: PEP 8 idiom instead of "!= None"
        return move

    # Try to take the center, if it is free.
    if isSpaceFree(board, 5):
        return 5

    # Move on one of the sides.
    return chooseRandomMoveFromList(board, [2, 4, 6, 8])
def isBoardFull(board):
    """Return True if every square 1-9 has been taken, otherwise False.

    Uses all() with a generator instead of the manual early-return loop;
    the short-circuit behavior is identical.
    """
    return all(not isSpaceFree(board, i) for i in range(1, 10))
print('Welcome to Tic Tac Toe!')

# Outer loop: one full game per iteration; exits when the player declines
# a rematch at the bottom.
while True:
    # Reset the board
    theBoard = [' '] * 10
    playerLetter, computerLetter = inputPlayerLetter()
    turn = whoGoesFirst()
    print('The ' + turn + ' will go first.')
    gameIsPlaying = True

    # Inner loop: alternate turns until a win or a full board.
    while gameIsPlaying:
        if turn == 'player':
            # Player's turn.
            drawBoard(theBoard)
            move = getPlayerMove(theBoard)
            makeMove(theBoard, playerLetter, move)

            if isWinner(theBoard, playerLetter):
                drawBoard(theBoard)
                print('Hooray! You have won the game!')
                gameIsPlaying = False
            else:
                if isBoardFull(theBoard):
                    drawBoard(theBoard)
                    print('The game is a tie!')
                    break
                else:
                    turn = 'computer'
        else:
            # Computer's turn.
            move = getComputerMove(theBoard, computerLetter)
            makeMove(theBoard, computerLetter, move)

            if isWinner(theBoard, computerLetter):
                drawBoard(theBoard)
                print('The computer has beaten you! You lose.')
                gameIsPlaying = False
            else:
                if isBoardFull(theBoard):
                    drawBoard(theBoard)
                    print('The game is a tie!')
                    break
                else:
                    turn = 'player'

    # Offer a rematch; leaving the outer loop ends the program.
    if not playAgain():
        break
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
"""Monkey-patching to add multiprocessing support for coverage.py"""
import multiprocessing
import multiprocessing.process
| import os
from coverage import env
from coverage.misc import contract
# An attribute that will be set on the module to indicate that it has been
# monkey-patched; patch_multiprocessing checks it to stay idempotent.
PATCHED_MARKER = "_coverage$patched"

# Python 3.4 moved the bootstrap machinery to BaseProcess; earlier versions
# keep it on multiprocessing.Process, so wrap whichever class owns it.
if env.PYVERSION >= (3, 4):
    OriginalProcess = multiprocessing.process.BaseProcess
else:
    OriginalProcess = multiprocessing.Process

# Keep the unpatched bootstrap so ProcessWithCoverage can delegate to it.
original_bootstrap = OriginalProcess._bootstrap
class ProcessWithCoverage(OriginalProcess):
    """A replacement for multiprocess.Process that starts coverage."""

    def _bootstrap(self):
        """Wrapper around _bootstrap to start coverage.

        Starts a Coverage object before running the real process body and
        guarantees the data is stopped and saved afterwards, even on error.
        """
        from coverage import Coverage       # avoid circular import
        # data_suffix=True gives each child process its own data file so
        # parallel children don't clobber each other's results.
        cov = Coverage(data_suffix=True)
        cov._warn_preimported_source = False
        cov.start()
        debug = cov._debug
        try:
            if debug.should("multiproc"):
                debug.write("Calling multiprocessing bootstrap")
            # Run the original (unpatched) bootstrap under measurement.
            return original_bootstrap(self)
        finally:
            if debug.should("multiproc"):
                debug.write("Finished multiprocessing bootstrap")
            # Always stop and persist coverage data, even if the child raised.
            cov.stop()
            cov.save()
            if debug.should("multiproc"):
                debug.write("Saved multiprocessing data")
class Stowaway(object):
    """Smuggled into a spawned child's pickled startup data.

    Unpickling runs in the child process, where __setstate__ re-applies
    the coverage monkey-patch before any user code executes.
    """

    def __init__(self, rcfile):
        self.rcfile = rcfile

    def __getstate__(self):
        # Only the rcfile path travels across the process boundary.
        return dict(rcfile=self.rcfile)

    def __setstate__(self, state):
        # We are now in the child: patch multiprocessing there too.
        patch_multiprocessing(state['rcfile'])
@contract(rcfile=str)
def patch_multiprocessing(rcfile):
    """Monkey-patch the multiprocessing module.

    This enables coverage measurement of processes started by multiprocessing.
    This involves aggressive monkey-patching.

    `rcfile` is the path to the rcfile being used.
    """
    # Idempotence guard: patch at most once per interpreter.
    if hasattr(multiprocessing, PATCHED_MARKER):
        return

    if env.PYVERSION >= (3, 4):
        # On 3.4+ swapping in the wrapped bootstrap method is sufficient.
        OriginalProcess._bootstrap = ProcessWithCoverage._bootstrap
    else:
        # Older versions: replace the Process class wholesale.
        multiprocessing.Process = ProcessWithCoverage

    # Set the value in ProcessWithCoverage that will be pickled into the child
    # process.
    os.environ["COVERAGE_RCFILE"] = rcfile

    # When spawning processes rather than forking them, we have no state in the
    # new process. We sneak in there with a Stowaway: we stuff one of our own
    # objects into the data that gets pickled and sent to the sub-process. When
    # the Stowaway is unpickled, it's __setstate__ method is called, which
    # re-applies the monkey-patch.
    # Windows only spawns, so this is needed to keep Windows working.
    try:
        from multiprocessing import spawn
        original_get_preparation_data = spawn.get_preparation_data
    except (ImportError, AttributeError):
        # No spawn machinery on this platform/version; nothing more to do.
        pass
    else:
        def get_preparation_data_with_stowaway(name):
            """Get the original preparation data, and also insert our stowaway."""
            d = original_get_preparation_data(name)
            d['stowaway'] = Stowaway(rcfile)
            return d

        spawn.get_preparation_data = get_preparation_data_with_stowaway

    # Mark the module as patched (see the guard at the top).
    setattr(multiprocessing, PATCHED_MARKER, True)
|
code, but you are not
# obligated to do so. If you do not wish to do so, delete this exception
# statement from your version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
import re, os
import cgi
import urllib
import xml.dom.minidom as dom
from mako.template import Template
import rb
import LastFM
from gi.repository import WebKit
from gi.repository import GObject, Gtk
from gi.repository import RB
import gettext
gettext.install('rhythmbox', RB.locale_dir())
class ArtistTab (GObject.GObject):
    # Toggle-button tab showing last.fm artist information.  Emits
    # 'switch-tab' when its button is clicked so the owning pane can bring
    # this tab to the front.  (NOTE: Python 2 module - no annotations.)
    __gsignals__ = {
        'switch-tab' : (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE,
                        (GObject.TYPE_STRING,))
    }

    def __init__ (self, shell, buttons, ds, view):
        # ds is the ArtistDataSource, view the ArtistView rendering target.
        GObject.GObject.__init__ (self)
        self.shell = shell
        self.sp = shell.props.shell_player
        self.db = shell.props.db
        self.buttons = buttons

        self.button = Gtk.ToggleButton (label=_("Artist"))
        self.datasource = ds
        self.view = view
        self.artist = None
        self.active = False

        self.button.show()
        self.button.set_relief (Gtk.ReliefStyle.NONE)
        self.button.set_focus_on_click(False)
        self.button.connect ('clicked',
                             lambda button : self.emit('switch-tab', 'artist'))
        buttons.pack_start (self.button, True, True, 0)

    def activate (self):
        # Called when this tab becomes the visible one; refreshes content.
        print "activating Artist Tab"
        self.button.set_active(True)
        self.active = True
        self.reload ()

    def deactivate (self):
        print "deactivating Artist Tab"
        self.button.set_active(False)
        self.active = False

    def reload (self):
        # Refresh the view for the currently playing artist.  New data is
        # fetched only when active and the artist changed since last time;
        # otherwise the cached page is redisplayed.
        entry = self.sp.get_playing_entry ()
        if entry is None:
            print "Nothing playing"
            return None

        artist = entry.get_string (RB.RhythmDBPropType.ARTIST)
        if self.active and self.artist != artist:
            self.datasource.fetch_artist_data (artist)
            self.view.loading (artist)
        else:
            self.view.load_view()
        self.artist = artist
class ArtistView (GObject.GObject):
    # Renders artist biography HTML into a WebKit view via Mako templates.
    # (NOTE: Python 2 module - no annotations.)

    def __init__ (self, shell, plugin, webview, ds):
        GObject.GObject.__init__ (self)
        self.webview = webview
        self.ds = ds
        self.shell = shell
        self.plugin = plugin
        self.file = ""

        plugindir = plugin.plugin_info.get_data_dir()
        # Base URL for relative template resources (stylesheet, images).
        self.basepath = "file://" + urllib.pathname2url (plugindir)

        self.load_tmpl ()
        self.connect_signals ()

    def load_view (self):
        # Push the most recently rendered page into the webview.
        self.webview.load_string (self.file, 'text/html', 'utf-8', self.basepath)

    def loading (self, current_artist):
        # Show a placeholder page while the artist data is being fetched.
        self.loading_file = self.loading_template.render (
            artist = current_artist,
            info = _("Loading biography for %s") % current_artist,
            song = "",
            basepath = self.basepath)
        self.webview.load_string (self.loading_file, 'text/html', 'utf-8', self.basepath)

    def load_tmpl (self):
        # Compile the Mako templates once at construction time.
        self.path = rb.find_plugin_file(self.plugin, 'tmpl/artist-tmpl.html')
        self.loading_path = rb.find_plugin_file (self.plugin, 'tmpl/loading.html')
        self.template = Template (filename = self.path, module_directory = self.plugin.tempdir)
        self.loading_template = Template (filename = self.loading_path, module_directory = self.plugin.tempdir)
        self.styles = self.basepath + '/tmpl/main.css'

    def connect_signals (self):
        # Re-render whenever the data source announces fresh artist info.
        self.air_id = self.ds.connect ('artist-info-ready', self.artist_info_ready)

    def artist_info_ready (self, ds):
        # Can only be called after the artist-info-ready signal has fired.
        # If called any other time, the behavior is undefined
        try:
            info = ds.get_artist_info ()
            small, med, big = info['images'] or (None, None, None)
            summary, full_bio = info['bio'] or (None, None)

            self.file = self.template.render (artist = ds.get_current_artist (),
                                              error = ds.get_error (),
                                              image = med,
                                              fullbio = full_bio,
                                              shortbio = summary,
                                              datasource = LastFM.datasource_link (self.basepath),
                                              stylesheet = self.styles )
            self.load_view ()
        except Exception, e:
            print "Problem in info ready: %s" % e
class ArtistDataSource (GObject.GObject):
__gsignals__ = {
'artist-info-ready' : (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, ()),
'artist-similar-ready' : (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, ()),
'artist-top-tracks-ready' : (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, ()),
'artist-top-albums-ready' : (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, ()),
}
def __init__ (self, info_cache, ranking_cache):
GObject.GObject.__init__ (self)
self.current_artist = None
self.error = None
self.artist = {
'info' : {
'data' : None,
'signal' : 'artist-info-ready',
'function' : 'getinfo',
'cache' : info_cache,
'parsed' : False,
},
'similar' : {
'data' : None,
'signal' : 'artist-similar-ready',
'function' : 'getsimilar',
'cache' : info_cache,
'parsed' : False,
},
'top_albums' : {
'data' : None,
'signal' : 'artist-top-albums-ready',
'function' : 'gettopalbums',
'cache' : ranking_cache,
'parsed' : False,
},
'top_tracks' : {
'data' : None,
'signal' : 'artist-top-tracks-ready',
'function' : 'gettoptracks',
'cache' : ranking_cache,
'parsed' : False,
},
}
def extract (self, data, position):
"""
Safely extract the data from an xml node. Returns data
at position or None if position does not exist
"""
try:
return data[position].firstChild.data
except Exception, e:
return None
def fetch_top_tracks (self, artist):
if LastFM.user_has_account() is False:
return
artist = urllib.quote_plus (artist)
function = self.artist['top_tracks']['function']
cache = self.artist['top_tracks']['cache']
cachekey = "lastfm:artist:%s:%s" % (function, artist)
url = '%sartist.%s&artist=%s&api_key=%s' % (LastFM.URL_PREFIX,
function, artist, LastFM.API_KEY)
cache.fetch(cachekey, url, self.fetch_artist_data_cb, self.artist['top_tracks'])
def fetch_artist_data (self, artist):
"""
Initiate the fetching of all artist data. Fetches artist info, similar
artists, artist top albums and top tracks. Downloads XML files from last.fm
and saves as parsed DOM documents in self.artist dictionary. Must be called
before any of the get_* methods.
"""
self.current_artist = artist
if LastFM.user_has_account() is False:
self.error = LastFM.NO_ACCOUNT_ERROR
self.emit ('artist-info-ready')
return
self.error = None
artist = urllib.quote_plus (artist)
for key, value in self.artist |
t)
self.errorSending = QCheckBox(
self.tr("Send error info to author"), self)
self.errorSending.setChecked(settings['error'])
layout.addRow(self.errorSending)
self.checkUpdates = QCheckBox(
self.tr("Automatically check for updates"), self)
self.checkUpdates.setChecked(settings['updates'])
layout.addRow(self.checkUpdates)
self.imageSideLen = NumberEdit(self)
self.imageSideLen.setMaximumWidth(60)
layout.addRow(self.tr("Max image side len"), self.imageSideLen)
self.imageSideLen.setText(str(settings['ImageSideLen']))
self.freeNumeric = QCheckBox(
self.tr("Free format numeric fields"), self)
self.freeNumeric.setChecked(settings['free_numeric'])
layout.addRow(self.freeNumeric)
self.storeSorting = QCheckBox(
self.tr("Store column sorting"), self)
self.storeSorting.setChecked(settings['store_sorting'])
layout.addRow(self.storeSorting)
self.sortFilter = QCheckBox(
self.tr("Sort items in filters (slow)"), self)
self.sortFilter.setChecked(settings['sort_filter'])
layout.addRow(self.sortFilter)
self.sortTree = QCheckBox(
self.tr("Sort items in tree (slow)"), self)
self.sortTree.setChecked(settings['sort_tree'])
layout.addRow(self.sortTree)
self.imageName = QCheckBox(
self.tr("Point to image file rather than storing image"), self)
self.imageName.setChecked(settings['image_name'])
layout.addRow(self.imageName)
self.imagePath = QLineEdit(self)
self.imagePath.setMinimumWidth(120)
self.imagePath.setText(settings['image_path'])
icon = style.standardIcon(QStyle.SP_DialogOpenButton)
self.imagePathButton = QPushButton(icon, '', self)
self.imagePathButton.clicked.connect(self.imageP | athButtonClicked)
hLayout = QHBoxLayout()
hLayout.addWidget(self.imagePath)
hLayout.addWidget(self.imagePathButt | on)
hLayout.setContentsMargins(QMargins())
layout.addRow(self.tr("Image Path"), hLayout)
self.idDates = QCheckBox(
self.tr("Tellico import: keep ID and created/modified dates"), self)
self.idDates.setChecked(settings['id_dates'])
layout.addRow(self.idDates)
current = 0
self.templateSelector = QComboBox(self)
for i, template in enumerate(Report.scanTemplates()):
self.templateSelector.addItem(template)
if settings['template'] == template:
current = i
self.templateSelector.setCurrentIndex(current)
self.templateSelector.setSizePolicy(QSizePolicy.Fixed,
QSizePolicy.Fixed)
layout.addRow(self.tr("Default template"), self.templateSelector)
current = 0
self.titleFormatSelector = QComboBox(self)
for i, titleFormat in enumerate(['default', 'US']):
self.titleFormatSelector.addItem(titleFormat)
if settings['title_format'] == titleFormat:
current = i
self.titleFormatSelector.setCurrentIndex(current)
self.titleFormatSelector.setSizePolicy(QSizePolicy.Fixed,
QSizePolicy.Fixed)
layout.addRow(self.tr("Title Format"), self.titleFormatSelector)
self.setLayout(layout)
def backupButtonClicked(self):
folder = QFileDialog.getExistingDirectory(self,
self.tr("Backup folder"),
self.backupFolder.text())
if folder:
self.backupFolder.setText(folder)
def referenceButtonClicked(self):
file, _selectedFilter = QFileDialog.getOpenFileName(self,
self.tr("Select reference"),
self.reference.text(),
"*.ref")
if file:
self.reference.setText(file)
def imagePathButtonClicked(self):
file, _selectedFilter = QFileDialog.getOpenFileName(self,
self.tr("Select imagePath"),
self.imagePath.text(),
"*.ref")
if file:
self.imagePath.setText(file)
def save(self):
settings = Settings()
current = self.languageSelector.currentIndex()
settings['locale'] = self.languageSelector.itemData(current)
settings['backup'] = self.backupFolder.text()
settings['reference'] = self.reference.text()
settings['error'] = self.errorSending.isChecked()
settings['updates'] = self.checkUpdates.isChecked()
settings['free_numeric'] = self.freeNumeric.isChecked()
settings['store_sorting'] = self.storeSorting.isChecked()
settings['sort_filter'] = self.sortFilter.isChecked()
settings['sort_tree'] = self.sortTree.isChecked()
settings['image_name'] = self.imageName.isChecked()
settings['image_path'] = self.imagePath.text()
settings['id_dates'] = self.idDates.isChecked()
settings['title_format'] = self.titleFormatSelector.currentText()
settings['template'] = self.templateSelector.currentText()
settings['ImageSideLen'] = int(self.imageSideLen.text())
settings.save()
class FieldsSettingsPage(QWidget):
    """Settings page listing the collection's fields.

    Each field can be renamed (in-place item editing) and globally enabled
    or disabled via its check box.
    """

    # Role under which the field object itself is stored on each list item.
    DataRole = Qt.UserRole

    def __init__(self, collection, parent=None):
        super(FieldsSettingsPage, self).__init__(parent)

        self.listWidget = QListWidget(self)
        self.listWidget.setWrapping(True)
        self.listWidget.setMinimumWidth(330)
        self.listWidget.setMinimumHeight(180)

        self.fields = collection.fields
        for field in self.fields:
            entry = QListWidgetItem(field.title, self.listWidget)
            entry.setData(self.DataRole, field)
            entry.setFlags(Qt.ItemIsEditable | Qt.ItemIsUserCheckable |
                           Qt.ItemIsEnabled)
            entry.setCheckState(Qt.Checked if field.enabled else Qt.Unchecked)
            self.listWidget.addItem(entry)

        self.defaultFieldsButton = QPushButton(
            self.tr("Revert to default"), self)
        self.defaultFieldsButton.clicked.connect(
            self.defaultFieldsButtonClicked)
        self.defaultFieldsButton.setSizePolicy(QSizePolicy.Fixed,
                                               QSizePolicy.Fixed)

        layout = QVBoxLayout(self)
        layout.addWidget(QLabel(self.tr("Global enabled fields:"), self))
        layout.addWidget(self.listWidget)
        layout.addWidget(self.defaultFieldsButton)
        self.setLayout(layout)

    def resizeEvent(self, event):
        # Re-apply wrapping so the list re-flows to the new widget width.
        self.listWidget.setWrapping(True)

    def defaultFieldsButtonClicked(self):
        """Reset every item's title to the default field title."""
        defaults = CollectionFieldsBase()
        for row in range(self.listWidget.count()):
            entry = self.listWidget.item(row)
            field = entry.data(self.DataRole)
            entry.setText(defaults.field(field.id).title)

    def save(self):
        """Write the edited titles and check states back onto the fields."""
        for row in range(self.listWidget.count()):
            entry = self.listWidget.item(row)
            field = entry.data(self.DataRole)
            field.enabled = (entry.checkState() == Qt.Checked)
            field.title = entry.text()
        self.fields.save()
@storeDlgSizeDecorator
class SettingsDialog(QDialog):
def __init__(self, collection, parent=None):
super().__init__(parent,
Qt.WindowCloseButtonHint | Qt.WindowSystemMenuHint)
mainPage = MainSettingsPage(collection, self)
fieldsPage = FieldsSettingsPage(collection, self)
self.setWi |
# Copyright (C) 2014 Adam Schubert <adam.schubert@sg1-game.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied w | arranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If no | t, see <http://www.gnu.org/licenses/>.
# Module metadata.
__author__="Adam Schubert <adam.schubert@sg1-game.net>"
__date__ ="$12.10.2014 1:49:02$"

# Aggregate every test case into one module so a test runner can discover
# them all from here (the star imports are deliberate for that purpose).
from tests.UserTest import *
from tests.ApiTest import *
from tests.ClanTest import *
from tests.CharacterTest import *
from tests.ServerTest import *
from tests.FactionTest import *
from yoyo.connections import parse_uri | , unparse_uri
def _test_parse_uri(connection_string, expected_uri_tuple):
    """Parse connection_string and check the result equals expected_uri_tuple."""
    parsed = parse_uri(connection_string)
    assert isinstance(parsed, tuple)
    assert parsed == expected_uri_tuple
def _test_unparse_uri(uri_tuple, expected_connection_string):
    """Serialize uri_tuple and check it equals expected_connection_string."""
    rendered = unparse_uri(uri_tuple)
    assert isinstance(rendered, str)
    assert rendered == expected_connection_string
def test_uri_without_db_params():
    """Round-trip a URI carrying no extra DB parameters."""
    uri = 'postgres://user:password@server:7777/database'
    parts = ('postgres', 'user', 'password', 'server', 7777, 'database', None)
    _test_parse_uri(uri, parts)
    _test_unparse_uri(parts, uri)
def test_parse_uri_with_db_params():
    """Round-trip an ODBC URI carrying extra DB parameters (?DSN=...)."""
    uri = 'odbc://user:password@server:7777/database?DSN=dsn'
    parts = ('odbc', 'user', 'password', 'server', 7777, 'database', {'DSN': 'dsn'})
    _test_parse_uri(uri, parts)
    _test_unparse_uri(parts, uri)
|
import argparse
import json
import datetime
import os
import sys
from codecs import open
def run(cohort_id, threshold, period, course_range):
    """Write an HTML quiz-progress report for every student in a cohort.

    For each student it counts, over the selected courses, the number of
    quizzes available, attempted, and passed (best score >= threshold %),
    and writes one table row per student to
    _output/hew-quiz-progress-<period>-<range>-<threshold>.html.
    """
    # Django/Oppia models are imported inside the function because
    # django.setup() must have run first (see the __main__ block).
    from django.contrib.auth.models import User
    from django.db.models import Sum, Max, Min, Avg
    from django.utils.html import strip_tags
    from oppia.models import Activity, Course, Cohort, CourseCohort, Participant, Tracker
    from oppia.quiz.models import Quiz, QuizQuestion, QuizAttempt, QuizAttemptResponse
    print "Cohort:" + str(cohort_id)
    print "Threshold: " + str (threshold)
    print "Period: " + period
    print "Course Range: " + course_range
    # Map the named reporting period onto a fixed date window (only used for
    # the report header below).
    if period == 'project':
        START_DATE = datetime.datetime(2015,4,01,0,0,0)
        END_DATE = datetime.datetime(2016,10,31,23,59,59)
    elif period == 'training':
        START_DATE = datetime.datetime(2015,4,01,0,0,0)
        END_DATE = datetime.datetime(2015,7,31,23,59,59)
    elif period == 'cpd':
        START_DATE = datetime.datetime(2015,8,01,0,0,0)
        END_DATE = datetime.datetime(2016,10,31,23,59,59)
    elif period == 'op3.4-mar16':
        START_DATE = datetime.datetime(2015,8,01,0,0,0)
        END_DATE = datetime.datetime(2016,03,31,23,59,59)
    else:
        print "Invalid period supplied"
        sys.exit()
    # Students of the cohort and the course subset selected on the CLI.
    students = User.objects.filter(participant__cohort_id=cohort_id, participant__role=Participant.STUDENT).order_by('username')
    if course_range == 'ancpnc':
        courses = Course.objects.filter(coursecohort__cohort_id = cohort_id, shortname__in=['anc1-et','anc2-et','pnc-et'])
    elif course_range == 'anc':
        courses = Course.objects.filter(coursecohort__cohort_id = cohort_id, shortname__in=['anc1-et','anc2-et'])
    elif course_range == 'pnc':
        courses = Course.objects.filter(coursecohort__cohort_id = cohort_id, shortname__in=['pnc-et'])
    elif course_range =='all':
        courses = Course.objects.filter(coursecohort__cohort_id = cohort_id)
    else:
        print "Invalid course range supplied"
        sys.exit()
    filename = 'hew-quiz-progress-' + period + '-' + course_range + '-' + str(threshold) + '.html'
    output_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '_output', filename)
    # codecs.open: write the report as UTF-8.
    out_file = open(output_file, 'w', 'utf-8')
    out_file.write("<html>")
    out_file.write("<head>")
    out_file.write('<meta http-equiv="Content-Type" content="text/html;charset=utf-8" />')
    out_file.write("<style> td {text-align:center;} #footer { font-size:small; font-style:italic; } </style>")
    out_file.write("</head>")
    out_file.write("<body>")
    out_file.write("<h3>Courses: %s</h3>" % ','.join(courses.values_list('shortname', flat=True)))
    out_file.write("<h3>Quiz pass threshold set at: %d%%</h3>" % threshold)
    out_file.write("<h3>Date range: %s to %s</h3>" % (START_DATE.strftime('%d-%b-%Y'), END_DATE.strftime('%d-%b-%Y')))
    out_file.write("<table>")
    out_file.write("<tr>")
    out_file.write("<th>Student</th>")
    out_file.write("<th>No Quizzes</th>")
    out_file.write("<th>No Attempted</th>")
    out_file.write("<th>No Passed</th>")
    out_file.write("</tr>")
    for s in students:
        print s.first_name + " " + s.last_name
        out_file.write("<tr>")
        out_file.write("<td>%s %s</td>" % (s.first_name, s.last_name))
        # Per-student counters accumulated over all selected courses.
        no_quizzes = 0
        no_attempted = 0
        no_passed = 0
        for c in courses:
            # other quizzes - no times taken, max score, min score, first score, most recent score, average score
            act_quizzes = Activity.objects.filter(section__course=c, baseline=False, type="quiz")
            no_quizzes += act_quizzes.count()
            quiz_digests = act_quizzes.values_list('digest', flat=True).distinct()
            quizzes = Quiz.objects.filter(quizprops__name='digest', quizprops__value__in=quiz_digests)
            for q in quizzes:
                # Best score this student achieved on this quiz, plus the
                # quiz's maximum achievable score.
                qas = QuizAttempt.objects.filter(quiz=q,user=s).aggregate(user_max_score=Max('score'), max_score=Max('maxscore'))
                print qas
                if qas['user_max_score'] is not None:
                    no_attempted += 1
                    if qas['user_max_score'] * 100/ qas['max_score'] >= threshold:
                        no_passed += 1
        out_file.write("<td>%d</td>" % no_quizzes)
        out_file.write("<td>%d</td>" % no_attempted)
        out_file.write("<td>%d</td>" % no_passed)
        out_file.write("</tr>\n")
    out_file.write("</table>")
    out_file.write("<div id='footer'>Report generated at %s by script %s</div>" % (datetime.datetime.now().strftime('%Y-%m-%d %H:%M'),os.path.realpath(__file__)))
    out_file.write("</body></html>")
    out_file.close()
def title_lang(title, lang):
    """Return the translation of a JSON-encoded multi-language title.

    title is expected to be a JSON object mapping language codes to strings.
    Returns the entry for lang when present, otherwise an arbitrary available
    translation, otherwise the raw title unchanged (covers plain, non-JSON
    titles).
    """
    try:
        titles = json.loads(title)
        if lang in titles:
            return titles[lang]
        for fallback in titles:
            return titles[fallback]
    except Exception:
        # Fix: was a bare "except:", which also swallowed SystemExit and
        # KeyboardInterrupt.  Any parse/lookup failure means "not a
        # multi-language title" -- fall through to the raw value.
        pass
    return title
if __name__ == "__main__":
    # Stand-alone entry point: Django must be configured before run() imports
    # any models.
    import django
    django.setup()
    parser = argparse.ArgumentParser()
    parser.add_argument("--cohort_id", help="", type=int)
    parser.add_argument("--threshold", help="", type=int)
    parser.add_argument("--period", help="", choices=['project','training','cpd','op3.4-mar16'])
    parser.add_argument("--course_range", help="", choices=['all','ancpnc','anc', 'pnc'])
    args = parser.parse_args()
    run(args.cohort_id, args.threshold, args.period, args.course_range)
('paginate_by', 10)]):
result = self.resource.collection_get()
self.assertEqual(len(result['data']), 10)
    def test_forced_limit_has_precedence_over_provided_limit(self):
        """A server-side paginate_by setting overrides the client's _limit."""
        with mock.patch.dict(self.resource.request.registry.settings, [
                ('paginate_by', 5)]):
            self.resource.request.GET = {'_limit': '10'}
            result = self.resource.collection_get()
            self.assertEqual(len(result['data']), 5)
    def test_return_next_page_url_is_given_in_headers(self):
        """A Next-Page header is set when more records remain."""
        self.resource.request.GET = {'_limit': '10'}
        self.resource.collection_get()
        self.assertIn('Next-Page', self.last_response.headers)
    def test_next_page_url_has_got_querystring(self):
        """The Next-Page URL carries both _limit and _token query params."""
        self.resource.request.GET = {'_limit': '10'}
        self.resource.collection_get()
        queryparams = self._setup_next_page()
        self.assertIn('_limit', queryparams)
        self.assertIn('_token', queryparams)
def test_next_page_url_gives_distinct_records(self):
self.resource.request.GET = {'_limit': '10'}
results1 = self.resource.collection_get()
self._setup_next_page()
results2 = self.resource.collection_get()
results_id1 = set([x['id'] for x in results1['data']])
results_id2 = set([x['id'] for x in results2['data']])
self.assertFalse(results_id1.intersection(results_id2))
def test_next_page_url_gives_distinct_records_with_forced_limit(self):
with mock.patch.dict(self.resource.request.registry.settings, [
('paginate_by', 5)]):
results1 = self.resource.collection_get()
self._setup_next_page()
results2 = self.resource.collection_get()
results_id1 = set([x['id'] for x in results1['data']])
results_id2 = set([x['id'] for x in results2['data']])
self.assertFalse(results_id1.intersection(results_id2))
    def test_twice_the_same_next_page(self):
        """Requesting the same page twice yields an identical Next-Page URL."""
        self.resource.request.GET = {'_limit': '10'}
        self.resource.collection_get()
        first_next = self.last_response.headers['Next-Page']
        self.resource.collection_get()
        second_next = self.last_response.headers['Next-Page']
        self.assertEqual(first_next, second_next)
    def test_stops_giving_next_page_at_the_end_of_first_page(self):
        """No Next-Page header when everything fits on the first page."""
        self.resource.collection_get()
        self.assertNotIn('Next-Page', self.last_response.headers)
    def test_stops_giving_next_page_at_the_end_sets(self):
        """No Next-Page header once the last (partial) page is served."""
        self.resource.request.GET = {'_limit': '11'}
        self.resource.collection_get()
        self._setup_next_page()
        self.resource.collection_get()
        self.assertNotIn('Next-Page', self.last_response.headers)
    def test_stops_giving_next_page_at_the_end_sets_on_exact_limit(self):
        """No Next-Page header when the limit exactly exhausts the records."""
        self.resource.request.GET = {'_limit': '10'}
        self.resource.collection_get()
        self._setup_next_page()
        self.resource.collection_get()
        self.assertNotIn('Next-Page', self.last_response.headers)
def test_handle_simple_sorting(self):
self.resource.request.GET = {'_sort': '-status', '_limit': '20'}
expected_results = self.resource.collection_get()
self.resource.request.GET['_limit'] = '10'
results1 = self.resource.collection_get()
self._setup_next_page()
results2 = self.resource.collection_get()
self.assertEqual(expected_results['data'],
results1['data'] + results2['data'])
def test_handle_multiple_sorting(self):
self.resource.request.GET = {'_sort': '-status,title', '_limit': '20'}
expected_results = self.resource.collection_get()
self.resource.request.GET['_limit'] = '10'
results1 = self.resource.collection_get()
self._setup_next_page()
results2 = self.resource.collection_get()
self.assertEqual(expected_results['data'],
results1['data'] + results2['data'])
def test_handle_filtering_sorting(self):
self.resource.request.GET = {'_sort': '-status,title', 'status': '2',
'_limit': '20'}
expected_results = self.resource.collection_get()
self.resource.request.GET['_limit'] = '3'
results1 = self.resource.collection_get()
self._setup_next_page()
results2 = self.resource.collection_get()
self.assertEqual(expected_results['data'],
results1['data'] + results2['data'])
def test_handle_sorting_desc(self):
self.resource.request.GET = {'_sort': 'status,-title', '_limit': '20'}
expected_results = self.resource.collection_get()
self.resource.request.GET['_limit'] = '10'
results1 = self.resource.collection_get()
self._setup_next_page()
results2 = self.resource.collection_get()
self.assertEqual(expected_results['data'],
results1['data'] + results2['data'])
def test_handle_since(self):
self.resource.request.GET = {'_since': '123', '_limit': '20'}
expected_results = self.resource.collection_get()
self.resource.request.GET['_limit'] = '10'
results1 = self.resource.collection_get()
self._setup_next_page()
results2 = self.resource.collection_get()
self.assertEqual(expected_results['data'],
results1['data'] + results2['data'])
    def test_wrong_limit_raise_400(self):
        """A non-numeric _limit is rejected with HTTP 400."""
        self.resource.request.GET = {'_since': '123', '_limit': 'toto'}
        self.assertRaises(HTTPBadRequest, self.resource.collection_get)
    def test_token_wrong_base64(self):
        """A _token that is not valid base64 is rejected with HTTP 400."""
        self.resource.request.GET = {'_since': '123', '_limit': '20',
                                     '_token': '123'}
        self.assertRaises(HTTPBadRequest, self.resource.collection_get)
    def test_token_wrong_json(self):
        """A base64 _token holding truncated JSON is rejected with HTTP 400."""
        self.resource.request.GET = {
            '_since': '123', '_limit': '20',
            '_token': b64encode('{"toto":'.encode('ascii')).decode('ascii')}
        self.assertRaises(HTTPBadRequest, self.resource.collection_get)
    def test_token_wrong_json_fields(self):
        """Valid JSON with unexpected token fields is rejected with HTTP 400."""
        badtoken = '{"toto": {"tutu": 1}}'
        self.resource.request.GET = {
            '_since': '123', '_limit': '20',
            '_token': b64encode(badtoken.encode('ascii')).decode('ascii')}
        self.assertRaises(HTTPBadRequest, self.resource.collection_get)
    def test_raises_bad_request_if_token_has_bad_data_structure(self):
        """A token whose JSON has the wrong shape is rejected with HTTP 400."""
        invalid_token = json.dumps([[('last_modified', 0, '>')]])
        self.resource.request.GET = {
            '_since': '123', '_limit': '20',
            '_token': b64encode(invalid_token.encode('ascii')).decode('ascii')}
        self.assertRaises(HTTPBadRequest, self.resource.collection_get)
class BuildPaginationTokenTest(BaseTest):
    def setUp(self):
        """Patch the known-field check and provide a sample record."""
        super(BuildPaginationTokenTest, self).setUp()
        self.patch_known_field.start()
        self.record = {
            'id': 1, 'status': 2, 'unread': True,
            'last_modified': 1234, 'title': 'Title'
        }
    def test_token_contains_current_offset(self):
        """The base64/JSON token stores the pagination offset."""
        token = self.resource._build_pagination_token([('last_modified', -1)],
                                                      self.record,
                                                      42)
        tokeninfo = json.loads(b64decode(token).decode('ascii'))
        self.assertEqual(tokeninfo['offset'], 42)
    def test_no_sorting_default_to_modified_field(self):
        """With only the default sort, the token keeps last_modified alone."""
        token = self.resource._build_pagination_token([('last_modified', -1)],
                                                      self.record,
                                                      42)
        tokeninfo = json.loads(b64decode(token).decode('ascii'))
        self.assertDictEqual(tokeninfo['last_record'],
                             {"last_modified": 1234})
def test_sorting_handle_both_rules(self):
token = self.resource._build_pagination_token([
('status', -1),
('last_modified', -1)
], self.record, 34)
tokeninfo = json.loads(b64 |
#!/usr/bin/env python
"""Main Django renderer."""
import os
import pdb
import time
from django import http
from django import shortcuts
from django import template
from django.views.decorators import csrf
import logging
from grr import gui
from grr.gui import renderers
from grr.gui import webauth
from grr.lib import access_control
from grr.lib import aff4
from grr.lib import config_lib
from grr.lib import flags
from grr.lib import rdfvalue
from grr.lib import registry
from grr.lib import stats
config_lib.DEFINE_string("AdminUI.page_title",
"GRR Admin Console",
"Page title of the Admin UI.")
config_lib.DEFINE_string("A | dminUI.heading",
"GRR Rapid Re | sponse",
"Dashboard heading displayed in the Admin UI.")
config_lib.DEFINE_string("AdminUI.report_url",
"http://code.google.com/p/grr/issues/list",
"URL of the 'Report a problem' link.")
config_lib.DEFINE_string("AdminUI.help_url",
"https://code.google.com/p/grr/",
"URL of the 'Help' link.")
DOCUMENT_ROOT = os.path.join(os.path.dirname(gui.__file__), "static")
class ViewsInit(registry.InitHook):
    """Init hook that registers the UI's statistics metrics at startup."""

    pre = ["StatsInit"]  # The stats subsystem must be initialized first.

    def RunOnce(self):
        """Run this once on init."""
        # Renderer-aware metrics
        stats.STATS.RegisterEventMetric(
            "ui_renderer_latency", fields=[("renderer", str)])
        stats.STATS.RegisterEventMetric(
            "ui_renderer_response_size", fields=[("renderer", str)],
            units=stats.MetricUnits.BYTES)
        stats.STATS.RegisterCounterMetric(
            "ui_renderer_failure", fields=[("renderer", str)])
        # General metrics
        stats.STATS.RegisterCounterMetric("ui_unknown_renderer")
        stats.STATS.RegisterCounterMetric("http_access_denied")
        stats.STATS.RegisterCounterMetric("http_server_error")
@webauth.SecurityCheck
@csrf.ensure_csrf_cookie  # Set the csrf cookie on the homepage.
def Homepage(request):
    """Basic handler to render the index page."""
    # JS files are named after their renderers module: a renderers package
    # grr.gui.plugins.acl_manager is expected to ship acl_manager.js.
    renderers_js_files = set(
        cls.__module__.split(".")[-1] + ".js"
        for cls in renderers.Renderer.classes.values()
        if aff4.issubclass(cls, renderers.Renderer) and cls.__module__)

    context = {"page_title": config_lib.CONFIG["AdminUI.page_title"],
               "heading": config_lib.CONFIG["AdminUI.heading"],
               "report_url": config_lib.CONFIG["AdminUI.report_url"],
               "help_url": config_lib.CONFIG["AdminUI.help_url"],
               "renderers_js": renderers_js_files}
    return shortcuts.render_to_response(
        "base.html", context, context_instance=template.RequestContext(request))
@webauth.SecurityCheck
def RenderBinaryDownload(request):
    """Basic handler to allow downloads of aff4:/config/executables files."""
    path, filename = request.path.split("/", 2)[-1].rsplit("/", 1)
    if not path or not filename:
        return AccessDenied("Error: Invalid path.")
    request.REQ = request.REQUEST

    def Generator():
        # Streams the signed blob in ~1MB chunks.  aff4_path is assigned
        # below the def; that is fine because this generator only executes
        # once the response body is iterated.
        with aff4.FACTORY.Open(aff4_path, aff4_type="GRRSignedBlob",
                               token=BuildToken(request, 60)) as fd:
            while True:
                data = fd.Read(1000000)
                if not data: break
                yield data

    base_path = rdfvalue.RDFURN("aff4:/config/executables")
    aff4_path = base_path.Add(path).Add(filename)
    if not aff4_path.RelativeName(base_path):
        # Check for path traversals.
        return AccessDenied("Error: Invalid path.")
    filename = aff4_path.Basename()
    response = http.HttpResponse(content=Generator(),
                                 content_type="binary/octet-stream")
    response["Content-Disposition"] = ("attachment; filename=%s" % filename)
    return response
@webauth.SecurityCheck
@renderers.ErrorHandler()
def RenderGenericRenderer(request):
    """Django handler for rendering registered GUI Elements."""
    try:
        # URLs look like .../<action>/<renderer_name>.
        action, renderer_name = request.path.split("/")[-2:]
        renderer_cls = renderers.Renderer.GetPlugin(name=renderer_name)
    except KeyError:
        stats.STATS.IncrementCounter("ui_unknown_renderer")
        return AccessDenied("Error: Renderer %s not found" % renderer_name)
    # Check that the action is valid
    # (list.index raises ValueError for anything else).
    ["Layout", "RenderAjax", "Download", "Validate"].index(action)
    renderer = renderer_cls()
    result = http.HttpResponse(content_type="text/html")
    # Pass the request only from POST parameters. It is much more convenient to
    # deal with normal dicts than Django's Query objects so we convert here.
    if flags.FLAGS.debug:
        # Allow both POST and GET for debugging
        request.REQ = request.POST.dict()
        request.REQ.update(request.GET.dict())
    else:
        # Only POST in production for CSRF protections.
        request.REQ = request.POST.dict()
    # Build the security token for this request
    request.token = BuildToken(request, renderer.max_execution_time)
    # Allow the renderer to check its own ACLs.
    renderer.CheckAccess(request)
    try:
        # Does this renderer support this action?
        method = getattr(renderer, action)
        start_time = time.time()
        try:
            result = method(request, result) or result
        finally:
            # Record latency even when the renderer raises.
            total_time = time.time() - start_time
            stats.STATS.RecordEvent("ui_renderer_latency",
                                    total_time, fields=[renderer_name])
    except access_control.UnauthorizedAccess, e:
        # ACL failures render a dedicated "unauthorized" view instead.
        result = http.HttpResponse(content_type="text/html")
        result = renderers.Renderer.GetPlugin("UnauthorizedRenderer")().Layout(
            request, result, exception=e)
    except Exception:
        stats.STATS.IncrementCounter("ui_renderer_failure",
                                     fields=[renderer_name])
        if flags.FLAGS.debug:
            # Drop into the debugger on unexpected failures in debug mode.
            pdb.post_mortem()
        raise
    if not isinstance(result, http.HttpResponse):
        raise RuntimeError("Renderer returned invalid response %r" % result)
    # Prepend bad json to break json script inclusion attacks.
    content_type = result.get("Content-Type", 0)
    if content_type and "json" in content_type.lower():
        result.content = ")]}\n" + result.content
    return result
def BuildToken(request, execution_time):
    """Build an ACLToken from the request.

    The token carries the requesting user, an optional "reason" from the
    request parameters, and expires execution_time from now.
    """
    expiry = rdfvalue.RDFDatetime().Now() + execution_time
    token = access_control.ACLToken(
        username=request.user,
        reason=request.REQ.get("reason", ""),
        process="GRRAdminUI",
        expiry=expiry)
    # Record both the direct peer address and any proxy-forwarded address.
    for header in ("REMOTE_ADDR", "HTTP_X_FORWARDED_FOR"):
        address = request.META.get(header, "")
        if address:
            token.source_ips.append(address)
    return token
def AccessDenied(message):
    """Return an access denied Response object."""
    logging.warn(message)
    stats.STATS.IncrementCounter("http_access_denied")
    resp = shortcuts.render_to_response("404.html", {"message": message})
    resp.status_code = 403
    return resp
def ServerError(unused_request, template_name="500.html"):
    """500 Error handler."""
    stats.STATS.IncrementCounter("http_server_error")
    resp = shortcuts.render_to_response(template_name)
    resp.status_code = 500
    return resp
|
# yselect - An RPM/Yum package handling frontend.
# Copyrig | ht (C) 2006 James Bowes <jbowes@redhat.com>
# Copyright (C) 2006 Devan Goodwin <dg@fnordia.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License | as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""
Fake Repository object.
Copyright (C) 2006 James Bowes <jbowes@redhat.com>
"""
__revision__ = "$Rev$"
class Repository:
    """
    Fake Repository object.

    Stand-in used where a real package repository would be consulted; every
    operation is a deliberate no-op returning None.
    """
    def __init__(self):
        pass

    def getPackage(self):
        """
        Pretend to download a package.
        Return the location of the downloaded package.
        """
        pass

    def getHeader(self):
        """
        Pretend to download a package header.
        Return the location of the downloaded header.
        """
        pass

    def getPackageSack(self):
        """ Return the PackageSack for this Repository. """
        pass
# (c) 2015, Alice Ferrazzi <alice.ferrazzi@gmail.com>
#
# This file is part of Eisen
#
# Eisen is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eisen is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Eisen. If not, see <http://www.gnu.org/licenses/>.
from sqlalchemy import *
from bin.config_module import Config
import time
def start_engine():
    """Create the SQLAlchemy engine from the configured database URI.

    Returns the engine together with a MetaData object bound to it.
    """
    engine = create_engine(Config.SQLALCHEMY_DATABASE_URI, echo=True)
    return engine, MetaData(bind=engine)
def sendTaskToDb(engine, metadata, connection, task, target_host):
    """Block until an async task finishes, then store its result.

    Inserts one row into the task_result table (task id, stringified result,
    target host) and closes the connection when done.
    """
    # Busy-wait with a 1-second poll until the task reports completion.
    # NOTE(review): task looks like a Celery-style AsyncResult (ready()/get())
    # -- confirm against the caller.
    while task.ready() is False:
        time.sleep(1)
    tasks_result = str(task.get())
    # Reflect the existing task_result table definition from the database.
    repository_package = Table('task_result', metadata, autoload=True,
                               autoload_with=engine)
    stmt = repository_package.insert()
    # NOTE(review): task_id is given the task object itself rather than its
    # id, and execution_options() is called on the *result* of execute(),
    # i.e. after the statement already ran -- both look suspicious; verify.
    connection.execute(
        stmt,
        task_id=task,
        task_result=tasks_result,
        target_host=target_host,
    ).execution_options(autocommit=True)
    connection.close()
import PyOrgMode
import time
import unittest
class TestClockElement(unittest.TestCase):
    def test_duration_format(self):
        """Durations are formatted identically to org-mode"""
        hours = ('0', '1', '5', '10', '12', '13', '19', '23')
        minutes = ('00', '01', '29', '40', '59')
        for hour in hours:
            for minute in minutes:
                original = '%s:%s' % (hour, minute)
                element = PyOrgMode.OrgDate(original)
                # Parsing and re-rendering must be loss-free.
                self.assertEqual(element.get_value(), original)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility functions for Windows builds.
These functions are executed via gyp-win-tool when using the ninja generator.
"""
from ctypes import windll, wintypes
import os
import shutil
import subprocess
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
def main(args):
    """Dispatch args to a WinTool command, exiting with its status code."""
    tool = WinTool()
    status = tool.Dispatch(args)
    if status is not None:
        sys.exit(status)
class LinkLock(object):
    """A flock-style lock to limit the number of concurrent links to one.
    Uses a session-local mutex based on the file's directory.
    """
    def __enter__(self):
        # The mutex name is derived from BASE_DIR so builds running out of
        # different directories do not serialize against each other.
        name = 'Local\\%s' % BASE_DIR.replace('\\', '_').replace(':', '_')
        self.mutex = windll.kernel32.CreateMutexW(
            wintypes.c_int(0),
            wintypes.c_int(0),
            wintypes.create_unicode_buffer(name))
        assert self.mutex
        # Block indefinitely (0xFFFFFFFF == INFINITE) until we own the mutex.
        result = windll.kernel32.WaitForSingleObject(
            self.mutex, wintypes.c_int(0xFFFFFFFF))
        # 0x80 means another process was killed without releasing the mutex, but
        # that this process has been given ownership. This is fine for our
        # purposes.
        assert result in (0, 0x80), (
            "%s, %s" % (result, windll.kernel32.GetLastError()))

    def __exit__(self, type, value, traceback):
        # Release ownership, then close the handle itself.
        windll.kernel32.ReleaseMutex(self.mutex)
        windll.kernel32.CloseHandle(self.mutex)
class WinTool(object):
    """This class performs all the Windows tooling steps. The methods can either
    be executed directly, or dispatched from an argument list."""

    def Dispatch(self, args):
        """Dispatches a string command to a method."""
        if len(args) < 1:
            raise Exception("Not enough arguments")
        # Map e.g. 'recursive-mirror' -> self.ExecRecursiveMirror(*rest).
        method = "Exec%s" % self._CommandifyName(args[0])
        return getattr(self, method)(*args[1:])

    def _CommandifyName(self, name_string):
        """Transforms a tool name like recursive-mirror to RecursiveMirror."""
        return name_string.title().replace('-', '')

    def _GetEnv(self, arch):
        """Gets the saved environment from a file for a given architecture."""
        # The environment is saved as an "environment block" (see CreateProcess
        # and msvs_emulation for details). We convert to a dict here.
        # Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
        pairs = open(arch).read()[:-2].split('\0')
        kvs = [item.split('=', 1) for item in pairs]
        return dict(kvs)

    def ExecStamp(self, path):
        """Simple stamp command."""
        # Touch the file: creates it empty, or truncates an existing one.
        open(path, 'w').close()

    def ExecRecursiveMirror(self, source, dest):
        """Emulation of rm -rf out && cp -af in out."""
        # Remove any pre-existing destination so stale files do not survive.
        if os.path.exists(dest):
            if os.path.isdir(dest):
                shutil.rmtree(dest)
            else:
                os.unlink(dest)
        if os.path.isdir(source):
            shutil.copytree(source, dest)
        else:
            shutil.copy2(source, dest)

    def ExecLinkWrapper(self, arch, *args):
        """Filter diagnostic output from link that looks like:
        ' Creating library ui.dll.lib and object ui.dll.exp'
        This happens when there are exports from the dll or exe.
        """
        # Serialize links through LinkLock (one concurrent link per tree).
        with LinkLock():
            env = self._GetEnv(arch)
            popen = subprocess.Popen(args, shell=True, env=env,
                                     stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
            out, _ = popen.communicate()
            for line in out.splitlines():
                if not line.startswith(' Creating library '):
                    print line
            return popen.returncode

    def ExecManifestWrapper(self, arch, *args):
        """Run manifest tool with environment set. Strip out undesirable warning
        (some XML blocks are recognized by the OS loader, but not the manifest
        tool)."""
        env = self._GetEnv(arch)
        popen = subprocess.Popen(args, shell=True, env=env,
                                 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        out, _ = popen.communicate()
        for line in out.splitlines():
            if line and 'manifest authoring warning 81010002' not in line:
                print line
        return popen.returncode

    def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl,
                        *flags):
        """Filter noisy filenames output from MIDL compile step that isn't
        quietable via command line flags.
        """
        args = ['midl', '/nologo'] + list(flags) + [
            '/out', outdir,
            '/tlb', tlb,
            '/h', h,
            '/dlldata', dlldata,
            '/iid', iid,
            '/proxy', proxy,
            idl]
        env = self._GetEnv(arch)
        popen = subprocess.Popen(args, shell=True, env=env,
                                 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        out, _ = popen.communicate()
        # Filter junk out of stdout, and write filtered versions. Output we want
        # to filter is pairs of lines that look like this:
        # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
        # objidl.idl
        lines = out.splitlines()
        prefix = 'Processing '
        # basename() strips the Windows path, leaving just the .idl filename.
        processing = set(os.path.basename(x) for x in lines if x.startswith(prefix))
        for line in lines:
            if not line.startswith(prefix) and line not in processing:
                print line
        return popen.returncode

    def ExecAsmWrapper(self, arch, *args):
        """Filter logo banner from invocations of asm.exe."""
        env = self._GetEnv(arch)
        # MSVS doesn't assemble x64 asm files.
        if arch == 'environment.x64':
            return 0
        popen = subprocess.Popen(args, shell=True, env=env,
                                 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        out, _ = popen.communicate()
        for line in out.splitlines():
            if (not line.startswith('Copyright (C) Microsoft Corporation') and
                not line.startswith('Microsoft (R) Macro Assembler') and
                not line.startswith(' Assembling: ') and
                line):
                print line
        return popen.returncode

    def ExecRcWrapper(self, arch, *args):
        """Filter logo banner from invocations of rc.exe. Older versions of RC
        don't support the /nologo flag."""
        env = self._GetEnv(arch)
        popen = subprocess.Popen(args, shell=True, env=env,
                                 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        out, _ = popen.communicate()
        for line in out.splitlines():
            if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and
                not line.startswith('Copyright (C) Microsoft Corporation') and
                line):
                print line
        return popen.returncode

    def ExecActionWrapper(self, arch, rspfile, *dir):
        """Runs an action command line from a response file using the environment
        for |arch|. If |dir| is supplied, use that as the working directory."""
        env = self._GetEnv(arch)
        # The whole command line lives in the response file.
        args = open(rspfile).read()
        # NOTE(review): 'dir' shadows the builtin; kept for interface
        # compatibility with existing dispatch call sites.
        dir = dir[0] if dir else None
        popen = subprocess.Popen(args, shell=True, env=env, cwd=dir)
        popen.wait()
        return popen.returncode
if __name__ == '__main__':
    # Entry point: forward CLI arguments (minus program name) to main().
    sys.exit(main(sys.argv[1:]))
|
"""
Created on 5 Sep 2016
@author: Bruno Beloff (bruno.beloff@southcoastscience.com)
in /boot/config.txt
# RPi...
# Uncomment for i2c-0 & i2c-3 access (EEPROM programming)
# dtparam=i2c_vc=on
dtoverlay i2c-gpio i2c_gpio_sda=0 i2c_gpio_scl=1
"""
import time
from scs_core.sys.eeprom_image import EEPROMImage
from scs_host.bus.i2c import I2C
from scs_host.sys.host import Host
# --------------------------------------------------------------------------------------------------------------------
class CAT24C32(object):
    """
    Semiconductor Components Industries CAT24C32 32-Kb Serial EEPROM

    Reads and writes the whole device through the I2C bus; an in-memory
    EEPROMImage mirrors the device contents.
    """

    SIZE = 0x1000                  # device capacity: 4096 bytes

    __BUFFER_SIZE = 32             # max bytes per page write
    __TWR = 0.005                  # write-cycle time, seconds

    # ----------------------------------------------------------------------------------------------------------------

    @classmethod
    def __read_image(cls, addr, count):
        # Read *count* bytes starting at *addr* and wrap them in an
        # EEPROMImage. The bus is always released, even on error.
        try:
            I2C.EEPROM.start_tx(Host.DFE_EEPROM_ADDR)
            content = I2C.EEPROM.read_cmd16(addr, count)
            return EEPROMImage(content)
        finally:
            I2C.EEPROM.end_tx()

    @classmethod
    def __write_image(cls, addr, values):       # max 32 values
        # Write one page of *values* at *addr*, then pause for the
        # device's write-cycle time before the next transaction.
        try:
            I2C.EEPROM.start_tx(Host.DFE_EEPROM_ADDR)
            I2C.EEPROM.write_addr16(addr, *values)
            time.sleep(cls.__TWR)
        finally:
            I2C.EEPROM.end_tx()

    # ----------------------------------------------------------------------------------------------------------------

    def __init__(self):
        """
        initialise with current EEPROM contents
        """
        self.__image = self.__read_image(0, CAT24C32.SIZE)

    # ----------------------------------------------------------------------------------------------------------------

    def write(self, image):
        # verify...
        if len(image) != CAT24C32.SIZE:
            raise ValueError("CAT24C32.write: image has incorrect length.")
        addr = 0
        # write...
        # Page-by-page: the device accepts at most __BUFFER_SIZE bytes
        # per write transaction.
        while addr < len(image.content):
            values = image.content[addr: addr + CAT24C32.__BUFFER_SIZE]
            self.__write_image(addr, values)
            addr += CAT24C32.__BUFFER_SIZE
        # reload...
        # Re-read the device so __image reflects what was actually stored.
        self.__image = self.__read_image(0, CAT24C32.SIZE)

    # ----------------------------------------------------------------------------------------------------------------

    @property
    def image(self):
        # The cached device contents, refreshed on construction and after
        # each write().
        return self.__image

    # ----------------------------------------------------------------------------------------------------------------

    def __str__(self, *args, **kwargs):
        return "CAT24C32:{image:%s}" % self.image
|
"""
A mechanize browser that redirects specified HTTP connections to a WSGI
object.
"""
from httplib import HTTP
from mechanize import Browser as MechanizeBrowser
from wsgi_intercept.urllib2_intercept import install_opener, uninstall_opener
try:
from mechanize import HTTPHandler
except ImportError:
# pre mechanize 0.1.0 it was a separate package
# (this will break if it is combined with a newer mechanize)
from ClientCookie import HTTPHandl | er
import sys, os.path
from wsgi_intercept.urllib2_intercept import WSGI_HTTPHandler, WSGI_HTTPSHandler
class Browser(MechanizeBrowser):
    """
    A version of the mechanize browser class that
    installs the WSGI intercept handler
    """
    # Replace the stock HTTP/HTTPS handlers with WSGI-intercepting ones so
    # requests to registered hosts are routed to an in-process WSGI app
    # instead of the network.
    handler_classes = MechanizeBrowser.handler_classes.copy()
    handler_classes['http'] = WSGI_HTTPHandler
    handler_classes['https'] = WSGI_HTTPSHandler

    def __init__(self, *args, **kwargs):
        # install WSGI intercept handler.
        install(self)
        MechanizeBrowser.__init__(self, *args, **kwargs)
def install(browser):
    """Install the WSGI-intercept opener into urllib2's opener chain.

    The *browser* argument is accepted for call-site symmetry but is not
    needed by the installation itself.
    """
    install_opener()
"""
WSGI config for mothra project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import sys
import site
import djcelery
djcelery.setup_loader()  # let Celery discover task modules via Django settings

# Make sure the project checkout is importable before settings load.
project_path = '/var/www/textflows'
if project_path not in sys.path:
    sys.path.append(project_path)

# setdefault: an externally provided DJANGO_SETTINGS_MODULE wins.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mothra.settings")

# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()

# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
rom Arraiz et al. Note: max_iter provides
an additional stop condition.
A1 : string
If A1='het', then the matrix A1 is defined as in Arraiz et
al. If A1='hom', then as in Anselin (2011). If
A1='hom_sc' (default), then as in Drukker, Egger and Prucha (2010)
and Drukker, Prucha and Raciborski (2010).
vm : boolean
If True, include variance-covariance matrix in summary
results
name_y : string
Name of dependent variable for use in output
name_x : list of strings
Names of independent variables for use in output
name_yend : list of strings
Names of endogenous variables for use in output
name_q : list of strings
Names of instruments for use in output
name_w : string
Name of weights matrix for use in output
name_ds : string
Name of dataset for use in output
Attributes
----------
summary : string
Summary of regression results and diagnostics (note: use in
conjunction with the print command)
betas : array
kx1 array of estimated coefficients
u : array
nx1 array of residuals
e_filtered : array
nx1 array of spatially filtered residuals
predy : array
nx1 array of predicted y values
n : integer
Number of observations
k : integer
Number of variables for which coefficients are estimated
(including the constant)
y : array
nx1 array for dependent variable
x : array
Two dimensional array with n rows and one column for each
independent (exogenous) variable, including the constant
yend : array
Two dimensional array with n rows and one column for each
endogenous variable
q : array
Two dimensional array with n rows and one column for each
external exogenous variable used as instruments
z : array
nxk array of variables (combination of x and yend)
h : array
nxl array of instruments (combination of x and q)
iter_stop | : string
Stop criterion reached during iteration of steps 2a and 2b
from Arraiz et al.
iteration : integer
Number of iterations of steps 2a and 2b from Arraiz et al.
mean_y : float
Mean of dependent variable
std_y : float
Standard deviation of dependent variable
vm : array
Variance covariance matrix (kxk)
pr2 | : float
Pseudo R squared (squared correlation between y and ypred)
sig2 : float
Sigma squared used in computations
std_err : array
1xk array of standard errors of the betas
z_stat : list of tuples
z statistic; each tuple contains the pair (statistic,
p-value), where each is a float
name_y : string
Name of dependent variable for use in output
name_x : list of strings
Names of independent variables for use in output
name_yend : list of strings
Names of endogenous variables for use in output
name_z : list of strings
Names of exogenous and endogenous variables for use in
output
name_q : list of strings
Names of external instruments
name_h : list of strings
Names of all instruments used in output
name_w : string
Name of weights matrix for use in output
name_ds : string
Name of dataset for use in output
title : string
Name of the regression method used
hth : float
H'H
Examples
--------
We first need to import the needed modules, namely numpy to convert the
data we read into arrays that ``spreg`` understands and ``pysal`` to
perform all the analysis.
>>> import numpy as np
>>> import pysal
Open data on Columbus neighborhood crime (49 areas) using pysal.open().
This is the DBF associated with the Columbus shapefile. Note that
pysal.open() also reads data in CSV format; since the actual class
requires data to be passed in as numpy arrays, the user can read their
data in using any method.
>>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
Extract the HOVAL column (home values) from the DBF file and make it the
dependent variable for the regression. Note that PySAL requires this to be
a numpy array of shape (n, 1) as opposed to the also common shape of (n, )
that other packages accept.
>>> y = np.array(db.by_col("HOVAL"))
>>> y = np.reshape(y, (49,1))
Extract INC (income) vector from the DBF to be used as
independent variables in the regression. Note that PySAL requires this to
be an nxj numpy array, where j is the number of independent variables (not
including a constant). By default this class adds a vector of ones to the
independent variables passed in.
>>> X = []
>>> X.append(db.by_col("INC"))
>>> X = np.array(X).T
In this case we consider CRIME (crime rates) is an endogenous regressor.
We tell the model that this is so by passing it in a different parameter
from the exogenous variables (x).
>>> yd = []
>>> yd.append(db.by_col("CRIME"))
>>> yd = np.array(yd).T
Because we have endogenous variables, to obtain a correct estimate of the
model, we need to instrument for CRIME. We use DISCBD (distance to the
CBD) for this and hence put it in the instruments parameter, 'q'.
>>> q = []
>>> q.append(db.by_col("DISCBD"))
>>> q = np.array(q).T
Since we want to run a spatial error model, we need to specify the spatial
weights matrix that includes the spatial configuration of the observations
into the error component of the model. To do that, we can open an already
existing gal file or create a new one. In this case, we will create one
from ``columbus.shp``.
>>> w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
Unless there is a good reason not to do it, the weights have to be
row-standardized so every row of the matrix sums to one. Among other
things, this allows us to interpret the spatial lag of a variable as the
average value of the neighboring observations. In PySAL, this can be
easily performed in the following way:
>>> w.transform = 'r'
We are all set with the preliminaries, we are good to run the model. In this
case, we will need the variables (exogenous and endogenous), the
instruments and the weights matrix. If we want to
have the names of the variables printed in the output summary, we will
have to pass them in as well, although this is optional.
>>> reg = GM_Endog_Error_Hom(y, X, yd, q, w=w, A1='hom_sc', name_x=['inc'], name_y='hoval', name_yend=['crime'], name_q=['discbd'], name_ds='columbus')
Once we have run the model, we can explore a little bit the output. The
regression object we have created has many attributes so take your time to
discover them. This class offers an error model that assumes
homoskedasticity but that unlike the models from
``pysal.spreg.error_sp``, it allows for inference on the spatial
parameter. Hence, we find the same number of betas as of standard errors,
which we calculate taking the square root of the diagonal of the
variance-covariance matrix:
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from __future__ import print_function
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
# REF [site] >> https://machinelearningmastery.com/how-to-visualize-filters-and-feature-maps-in-convolutional-neural-networks/
def visualize_filters_in_CNN():
    """Report VGG16 conv-filter shapes and plot the first few filters.

    Loads the pretrained model, prints the filter-bank shape of every
    convolutional layer, then displays the first six filters of the
    first conv layer, one row per filter and one column per RGB channel.
    """
    # Load the pretrained model and print its architecture.
    model = tf.keras.applications.vgg16.VGG16()
    model.summary()

    # Report the weight shape of each convolutional layer.
    for layer in model.layers:
        if 'conv' not in layer.name:
            continue
        weights, _ = layer.get_weights()
        print(layer.name, weights.shape)

    #--------------------
    # Take the filters of the first conv layer (layers[1]) and rescale
    # them into [0, 1] so they can be displayed as images.
    weights, _ = model.layers[1].get_weights()
    w_min, w_max = weights.min(), weights.max()
    weights = (weights - w_min) / (w_max - w_min)

    # Plot the first six filters in a 6x3 grid (3 input channels each),
    # in grayscale with axis ticks suppressed.
    n_filters = 6
    plot_idx = 1
    for filter_idx in range(n_filters):
        kernel = weights[:, :, :, filter_idx]
        for channel in range(3):
            axis = plt.subplot(n_filters, 3, plot_idx)
            axis.set_xticks([])
            axis.set_yticks([])
            plt.imshow(kernel[:, :, channel], cmap='gray')
            plot_idx += 1
    plt.show()
# REF [site] >> https://machinelearningmastery.com/how-to-visualize-filters-and-feature-maps-in-convolutional-neural-networks/
def visualize_feature_maps_in_CNN():
    """Plot VGG16 feature maps for a sample image ('./bird.jpg').

    First shows the 64 maps of the first hidden layer, then one figure
    per selected block output (layer indices 2, 5, 9, 13, 17).
    """
    # Load the image with the required shape.
    img = tf.keras.preprocessing.image.load_img('./bird.jpg', target_size=(224, 224))
    # Convert the image to an array.
    img = tf.keras.preprocessing.image.img_to_array(img)
    # Expand dimensions so that it represents a single 'sample'.
    img = np.expand_dims(img, axis=0)
    # Prepare the image (e.g. scale pixel values for the vgg).
    img = tf.keras.applications.vgg16.preprocess_input(img)

    #--------------------
    # Load the model.
    model = tf.keras.applications.vgg16.VGG16()
    # Redefine model to output right after the first hidden layer.
    model = tf.keras.models.Model(inputs=model.inputs, outputs=model.layers[1].output)
    model.summary()
    # Get feature map for first hidden layer.
    feature_maps = model.predict(img)
    # Plot all 64 maps in an 8x8 grid of subplots.
    square = 8
    ix = 1
    for _ in range(square):
        for _ in range(square):
            # Specify subplot and turn off axis ticks.
            ax = plt.subplot(square, square, ix)
            ax.set_xticks([])
            ax.set_yticks([])
            # Plot filter channel in grayscale.
            plt.imshow(feature_maps[0, :, :, ix-1], cmap='gray')
            ix += 1
    # Show the figure.
    plt.show()

    #--------------------
    # Load the model.
    model = tf.keras.applications.vgg16.VGG16()
    # Redefine model to output right after several hidden layers at once.
    ixs = [2, 5, 9, 13, 17]
    outputs = [model.layers[i].output for i in ixs]
    model = tf.keras.models.Model(inputs=model.inputs, outputs=outputs)
    # Get feature maps for the selected hidden layers (one array each).
    feature_maps = model.predict(img)
    # Plot the output from each block in its own figure.
    square = 8
    for fmap in feature_maps:
        # Plot the first 64 maps in an 8x8 grid.
        ix = 1
        for _ in range(square):
            for _ in range(square):
                # Specify subplot and turn off axis ticks.
                ax = plt.subplot(square, square, ix)
                ax.set_xticks([])
                ax.set_yticks([])
                # Plot filter channel in grayscale.
                plt.imshow(fmap[0, :, :, ix-1], cmap='gray')
                ix += 1
        # Show the figure (blocks until closed per figure).
        plt.show()
def main():
    """Run the feature-map demo; the filter demo is left disabled."""
    #visualize_filters_in_CNN()
    visualize_feature_maps_in_CNN()
#--------------------------------------------------------------------
if '__main__' == __name__:
    # Allow running this module directly as a script.
    main()
|
import os
import libtaxii
# Sphinx build configuration for the libtaxii documentation.
project = u'libtaxii'
copyright = u'2014, The MITRE Corporation'

# Pull the version straight from the package so the docs cannot drift.
version = libtaxii.__version__
release = version

# Sphinx extensions used to build the API documentation.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.ifconfig',
    'sphinx.ext.intersphinx',
    'sphinx.ext.viewcode',
    'sphinxcontrib.napoleon',
]

# Cross-reference targets in the standard Python documentation.
intersphinx_mapping = {
    'python': ('http://docs.python.org/', None),
}

templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'

# Prepended to every .rst page so each shows the build version.
rst_prolog = """
**Version**: {0}
""".format(release)

exclude_patterns = [
    '_build',
]

# Read the Docs injects its own theme; only use sphinx_rtd_theme locally.
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:
    import sphinx_rtd_theme
    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
    html_theme = 'default'

latex_elements = {}
latex_documents = [
    ('index', 'libtaxii.tex', u'libtaxii Documentation',
     u'The MITRE Corporation', 'manual'),
]
|
__author__ = 'jatwood'
import sys
import numpy as np
from sklearn.metrics import f1_score, accuracy_score
from sklearn.linear_model import LogisticRegression
import data
import util
import kernel
import structured
from baseline_graph_experiment import GraphDecompositionModel
def graph_proportion_baseline_experiment(model_fn, data_fn, data_name, model_name, prop_valid, prop_test):
    """Train a fresh baseline model on growing fractions of the training set.

    model_fn -- zero-argument factory returning a new model per proportion.
    data_fn -- returns (A, X, Y): adjacency, features, labels per graph.
    prop_valid / prop_test -- fractions of the data held out.
    Prints micro/macro F1 and accuracy per proportion; returns nothing.
    """
    # NOTE(review): the message says 'node experiment' although this
    # operates on graphs -- looks like copy/paste; confirm before
    # relying on the log output.
    print 'Running node experiment (%s)...' % (data_name,)
    A, X, Y = data_fn()
    n_graphs = len(A)
    A = np.asarray(A)
    X = np.asarray(X)
    Y = np.asarray(Y)
    indices = np.arange(n_graphs)
    # Fixed seed so every proportion reuses the same shuffle/split.
    np.random.seed(4)
    np.random.shuffle(indices)
    print indices
    valid_start = int(n_graphs * (1 - (prop_valid + prop_test)))
    test_start = int(n_graphs * (1 - prop_test))
    valid_indices = indices[valid_start:test_start]
    test_indices = indices[test_start:]
    # Sweep training-set size from 10% to 100% of the available pool.
    for train_prop in [x / 10.0 for x in range(1, 11)]:
        train_end = int(valid_start * train_prop)
        train_indices = indices[:train_end]
        m = model_fn()
        m.fit_with_validation(A, X, Y, train_indices, valid_indices)
        preds = m.predict(A, X, test_indices)
        actuals = Y[test_indices,:]
        accuracy = accuracy_score(actuals, preds)
        f1_micro = f1_score(actuals, preds, average='micro')
        f1_macro = f1_score(actuals, preds, average='macro')
        print 'form: name,micro_f,macro_f,accuracy'
        print '###RESULTS###: %s,%s,%.6f,%.8f,%.8f,%.8f' % (data_name, model_name, train_prop, f1_micro, f1_macro, accuracy)
def graph_proportion_kernel_experiment(model, data_fn, data_name, model_name, prop_valid, prop_test):
    """Evaluate a single pre-built kernel *model* on shrinking train sets.

    Unlike the baseline variant, the same model instance is re-fit per
    proportion, and proportions are swept from 100% down to 10%.
    Prints micro/macro F1 and accuracy per proportion; returns nothing.
    """
    print 'Running graph experiment (%s)...' % (data_name,)
    print 'parsing data...'
    A, X, Y = data_fn()
    print 'done'
    n_graphs = len(A)
    A = np.asarray(A)
    X = np.asarray(X)
    Y = np.asarray(Y)
    indices = np.arange(n_graphs)
    # Fixed seed so every proportion reuses the same shuffle/split.
    np.random.seed(4)
    np.random.shuffle(indices)
    print indices
    valid_start = int(n_graphs * (1 - (prop_valid + prop_test)))
    test_start = int(n_graphs * (1 - prop_test))
    valid_indices = indices[valid_start:test_start]
    test_indices = indices[test_start:]
    # Reverse sweep: 100% of the training pool first, then smaller cuts.
    for train_prop in [x / 10.0 for x in range(1, 11)[::-1]]:
        print 'train prop %s' % (train_prop,)
        train_end = int(valid_start * train_prop)
        train_indices = indices[:train_end]
        # Kernel models take label matrix plus index sets (no A/X here).
        model.fit_with_validation(Y, train_indices, valid_indices, test_indices)
        preds = model.predict(Y, valid_indices, test_indices)
        actuals = Y[test_indices,:]
        accuracy = accuracy_score(actuals, preds)
        f1_micro = f1_score(actuals, preds, average='micro')
        f1_macro = f1_score(actuals, preds, average='macro')
        print 'form: name,micro_f,macro_f,accuracy'
        print '###RESULTS###: %s,%s,%.6f,%.8f,%.8f,%.8f' % (data_name, model_name, train_prop, f1_micro, f1_macro, accuracy)
if __name__ == '__main__':
np.random.seed()
args = sys.argv[1:]
name_to_data = {
'nci1': lambda: data.parse_nci(graph_name='nci1.graph'),
'nci109': lambda: data.parse_nci(graph_name='nci109.graph'),
'mutag': lambda : data.parse_nci(graph_name='mutag.graph'),
'ptc': lambda : data.parse_nci(graph_name='ptc.graph'),
'enzymes': lambda : data.parse_nci(graph_name='enzymes.graph'),
'nci1struct': lambda: data.parse_nci(graph_name='nci1.graph', with_structural_features=True),
'nci109struct': lambda: data.parse_nci(graph_name='nci109.graph', with_structural_features=True),
}
transform_lookup = {
'id': None,
'rwl': util.rw_laplacian,
'l': util.laplacian,
}
name_to_parameters = {
'nci1': {'num_dimensions':2,
'kernel_type':1,
'feature_type':3,
'ds_name':'nci1',
'window_size':2,
'ngram_type':0,
'sampling_type':1,
'graphlet_size':0,
'sample_size':2
},
'nci109': {'num_dimensions':5,
'kernel_type':1,
'feature_type':3,
'ds_name':'nci109',
| 'window_size':10,
'ngram_type':0,
'sampling_type':0,
'graphlet_size':0,
'sample_size':2
},
'mutag': {'num_dimensions':2,
'kernel_type':1,
'feature_type':3,
'ds_name':'mutag',
'win | dow_size':2,
'ngram_type':0,
'sampling_type':1,
'graphlet_size':0,
'sample_size':2
},
'enzymes': {'num_dimensions':2,
'kernel_type':1,
'feature_type':3,
'ds_name':'enzymes',
'window_size':2,
'ngram_type':0,
'sampling_type':1,
'graphlet_size':0,
'sample_size':2
},
'ptc': {'num_dimensions':2,
'kernel_type':1,
'feature_type':3,
'ds_name':'ptc',
'window_size':2,
'ngram_type':0,
'sampling_type':1,
'graphlet_size':0,
'sample_size':2
},
'nci1struct': {'num_dimensions':2,
'kernel_type':1,
'feature_type':3,
'ds_name':'nci1',
'window_size':2,
'ngram_type':0,
'sampling_type':1,
'graphlet_size':0,
'sample_size':2
},
'nci109struct': {'num_dimensions':5,
'kernel_type':1,
'feature_type':3,
'ds_name':'nci109',
'window_size':10,
'ngram_type':0,
'sampling_type':0,
'graphlet_size':0,
'sample_size':2
},
}
data_name = args[0]
data_fn = name_to_data[data_name]
model_name = args[1]
baseline_models = {
'logisticl1': lambda: GraphDecompositionModel(reg='l1'),
'logisticl2': lambda: GraphDecompositionModel(reg='l2')
}
kernel_models = {
'deepwl': kernel.DeepWL(**name_to_parameters[data_name]),
}
data_name = args[0]
data_fn = name_to_data[data_name]
model_name = args[1]
if model_name in baseline_models:
graph_proportion_baseline_experiment(baseline_models[model_name], data_fn, data_name, model_name, 0.1, 0.1)
elif model_name in kernel_models:
graph_proportion_kernel_experiment(kernel_models[model_name], data_fn, data_name, model_name, 0.1, 0.1)
else:
print '%s not recognized' % (model_name,)
|
"""CS 234 Assignment 2 Question 1a - Testing assumptions: Stock.py
"""
class Stock:
    """
    A data type representing a single stock
    Fields: name - str: the company name as a string
            symbol - str: a string uniquely identifying the stock
            price - non-negative float: last/current price
            low - non-negative float: lowest price of the day
            high - non-negative float: highest price of the day
            volume - non-negative int: number of shares traded
    Floats are represented to two decimal places.
    The constraint low <= price <= high is always satisfied.
    """

    def __init__(self, aName, aSymbol, price = 0.0,\
                 low = 0.0, high = 0.0, volume = 0):
        """
        Preconditions: aName and aSymbol are strs
        Assumptions: if supplied price, low and high are non-negative floats
                     if supplied volume is a non-negative int
        Strings aName and aSymbol cannot be empty
        Variables aName and aSymbol have to be Strings
        It is assumed that: low < price < high
        Postconditions: construct a stock data type with aName and aSymbol
        """
        # Validation chain: symbol type/emptiness, name type/emptiness,
        # then sign and ordering constraints, finally float-type checks.
        # NOTE(review): the float-type test is the LAST elif, so integer
        # prices pass all earlier checks before being rejected -- confirm
        # this ordering is intended.
        if type(aSymbol) != type("s"):
            raise TypeError, "The input symbol must be a string!"
        if len(aSymbol) == 0:
            raise TypeError, "The input symbol cannot be empty!"
        elif type(aName) != type("s"):
            raise TypeError, "The input name must be a string!"
        if len(aName) == 0:
            raise TypeError, "The input name cannot be empty!"
        elif (price < 0) or (low < 0) or (high < 0) or (volume < 0):
            raise ValueError, "Price, low, high and volume inputs must be non-negative!"
        elif (low <= price) == False:
            raise ValueError, "Low must be lower than price!"
        elif (price <= high) == False:
            raise ValueError, "Price must be lower than high!"
        elif (low <= high) == False:
            raise ValueError, "Low must be lower than high!"
        elif (type(price) != type(0.0)) or (type(low) != type(0.0)) or (type(high) != type(0.0)):
            raise ValueError, "Price, low and high inputs must be float inputs!"
        self.name = aName
        self.symbol = aSymbol
        self.price = price
        self.low = low
        self.high = high
        self.volume = volume

    def __repr__(self):
        """
        Postcondition: return a string representation of Stock
        """
        return "Stock(%s, %s, %.2f, %.2f, %.2f, %d)" % \
               (self.name, self.symbol, self.price, \
                self.low, self.high, self.volume)

    def __eq__(self, rhs):
        """
        Precondition: rhs is another Stock
        Postcondition: returns True iff both stock symbols are identical
        """
        # Equality is by symbol only; price/volume fields are ignored.
        return self.symbol == rhs.symbol

    def __ne__(self, rhs):
        """
        Precondition: rhs is another Stock
        Postcondition: returns True iff both stock symbols are different
        """
        return not(self==rhs)
|
#!/usr/bin/env python
"""
DragonPy - base memory info
~~~~~~~~~~~~~~~~~~~~~~~~~~~
:created: 2013 by Jens Diemer - www.jensdiemer.de
:copyleft: 2013 by the MC6809 team, see AUTHORS for more details.
:license: GNU GPL v3 or above, see LICENSE for more details.
"""
import sys
class BaseMemoryInfo:
    """Resolve memory addresses to human-readable range descriptions.

    Subclasses must provide ``MEM_INFO``: an iterable of
    ``(start, end, text)`` tuples describing (possibly overlapping)
    address ranges.
    """

    def __init__(self, out_func):
        # Callable used to emit formatted info lines (e.g. ``print``).
        self.out_func = out_func

    def get_shortest(self, addr):
        """Return the most specific (smallest) matching range for *addr*."""
        best = None
        best_span = sys.maxsize
        for start, end, txt in self.MEM_INFO:
            if start <= addr <= end:
                span = abs(end - start)
                if span < best_span:
                    best_span = span
                    best = (start, end, txt)
        if best is None:
            return f"${addr:x}: UNKNOWN"
        start, end, txt = best
        if start == end:
            return f"${addr:x}: {txt}"
        return f"${addr:x}: ${start:x}-${end:x} - {txt}"

    def __call__(self, addr, info="", shortest=True):
        """Emit info about *addr* via ``out_func``.

        With ``shortest`` only the most specific match is printed;
        otherwise every matching range is listed on its own line.
        """
        if shortest:
            line = self.get_shortest(addr)
            self.out_func(f"{info}: {line}" if info else line)
            return
        matches = [entry for entry in self.MEM_INFO
                   if entry[0] <= addr <= entry[1]]
        if not matches:
            self.out_func(f"{info} ${addr:x}: UNKNOWN")
            return
        self.out_func(f"{info} ${addr:x}:")
        for start, end, txt in matches:
            if start == end:
                self.out_func(f" * ${start:x} - {txt}")
            else:
                self.out_func(f" * ${start:x}-${end:x} - {txt}")
|
on(DataStorePutException)
def _put(self, key, value):
    """Store *value* under *key* in the result store.

    Wrapped by the ``_wrapped_operation`` decorator on the preceding line,
    which converts backend errors into DataStorePutException.
    """
    return self.result_store.put(key, value)
@_wrapped_operation(ScheduleAddException)
def _add_schedule(self, data, ts):
    """Place serialized task *data* on the schedule at timestamp *ts*.

    Raises AttributeError when no schedule backend is configured.
    """
    if self.schedule is None:
        raise AttributeError('Schedule not specified.')
    self.schedule.add(data, ts)
@_wrapped_operation(ScheduleReadException)
def _read_schedule(self, ts):
    """Return serialized tasks due at timestamp *ts*.

    Raises AttributeError when no schedule backend is configured.
    """
    if self.schedule is None:
        raise AttributeError('Schedule not specified.')
    return self.schedule.read(ts)
def emit(self, message):
    """Publish *message* on the event emitter.

    Events are advisory, so delivery failures are deliberately swallowed.
    Only ``Exception`` is caught (rather than a bare ``except:``) so that
    KeyboardInterrupt/SystemExit still propagate.
    """
    try:
        self.events.emit(message)
    except Exception:
        pass
def enqueue(self, task):
    """Serialize *task* onto the queue.

    With always_eager the task runs synchronously instead (test mode).
    Returns an AsyncData handle when a result store is configured,
    otherwise None.
    """
    if self.always_eager:
        return task.execute()
    self._write(registry.get_message_for_task(task))
    if self.result_store:
        return AsyncData(self, task)
def dequeue(self):
    """Pop one message off the queue and revive it as a task object.

    Returns None when the queue is empty.
    """
    message = self._read()
    if message:
        return registry.get_task_for_message(message)
def _format_time(self, dt):
if dt is None:
return None
return time.mktime(dt.timetuple())
def emit_task(self, status, task, error=False):
    """Broadcast a status-change event for *task* as a JSON payload.

    No-op when no event emitter is configured.
    """
    if self.events:
        message_data = {'status': status}
        message_data.update({
            'id': task.task_id,
            'task': type(task).__name__,
            'retries': task.retries,
            'retry_delay': task.retry_delay,
            'execute_time': self._format_time(task.execute_time),
            'error': error})
        if error:
            # Include the active traceback so consumers can log it.
            message_data['traceback'] = traceback.format_exc()
        self.emit(json.dumps(message_data))
def execute(self, task):
    """Run *task* and persist its pickled result.

    None results are dropped unless store_none is set; periodic tasks
    never store results. Raises TypeError for non-QueueTask objects.
    """
    if not isinstance(task, QueueTask):
        raise TypeError('Unknown object: %s' % task)
    result = task.execute()
    if result is None and not self.store_none:
        return
    if self.result_store and not isinstance(task, PeriodicQueueTask):
        self._put(task.task_id, pickle.dumps(result))
    return result
def revoke(self, task, revoke_until=None, revoke_once=False):
    """Mark *task* as revoked, optionally until a datetime or for one run.

    Raises QueueException when no result store is configured (the flag
    is persisted there under task.revoke_id).
    """
    if not self.result_store:
        raise QueueException('A DataStore is required to revoke task')
    serialized = pickle.dumps((revoke_until, revoke_once))
    self._put(task.revoke_id, serialized)
def restore(self, task):
    """Clear any revocation flag for *task* (get also deletes the entry)."""
    self._get(task.revoke_id)  # simply get and delete if there
def is_revoked(self, task, dt=None, peek=True):
    """Return True if *task* is revoked at datetime *dt*.

    With peek=False, a one-shot revocation is consumed (restored) by
    this check so the next run proceeds normally.
    """
    if not self.result_store:
        return False
    # Peek here so the flag survives this read; restoration is explicit
    # below for the revoke_once case.
    res = self._get(task.revoke_id, peek=True)
    if res is EmptyData:
        return False
    revoke_until, revoke_once = pickle.loads(res)
    if revoke_once:
        # This task *was* revoked for one run, but now it should be
        # restored to normal execution.
        if not peek:
            self.restore(task)
        return True
    # Open-ended revocations (revoke_until is None) last forever.
    return revoke_until is None or revoke_until > dt
def add_schedule(self, task):
    """Serialize *task* and place it on the schedule at its execute_time."""
    msg = registry.get_message_for_task(task)
    # Tasks without an explicit time sort to the epoch (run ASAP).
    ex_time = task.execute_time or datetime.datetime.fromtimestamp(0)
    self._add_schedule(msg, ex_time)
def read_schedule(self, ts):
    """Return the task objects scheduled for timestamp *ts*."""
    return [
        registry.get_task_for_message(m) for m in self._read_schedule(ts)]
def flush(self):
    """Discard all pending messages in the queue."""
    self.queue.flush()
def ready_to_run(self, cmd, dt=None):
    """Return True if *cmd* is due at *dt* (default: current UTC time)."""
    if dt is None:
        dt = datetime.datetime.utcnow()
    when = cmd.execute_time
    return when is None or when <= dt
class AsyncData(object):
    """Handle for the (eventual) result of an enqueued task."""
    def __init__(self, huey, task):
        self.huey = huey
        self.task = task
        self._result = EmptyData    # cached result once fetched

    def _get(self):
        # Fetch the result from the result store at most once, caching it
        # locally; returns EmptyData while not yet available.
        task_id = self.task.task_id
        if self._result is EmptyData:
            res = self.huey._get(task_id)
            if res is not EmptyData:
                self._result = pickle.loads(res)
                return self._result
            else:
                return res
        else:
            return self._result

    def get(self, blocking=False, timeout=None, backoff=1.15, max_delay=1.0,
            revoke_on_timeout=False):
        """Return the task result.

        Non-blocking: the result if ready, otherwise None (implicitly).
        Blocking: poll with exponential backoff (capped at max_delay);
        raise DataStoreTimeout after *timeout* seconds, optionally
        revoking the task first.
        """
        if not blocking:
            res = self._get()
            if res is not EmptyData:
                return res
        else:
            start = time.time()
            delay = .1
            while self._result is EmptyData:
                if timeout and time.time() - start >= timeout:
                    if revoke_on_timeout:
                        self.revoke()
                    raise DataStoreTimeout
                if delay > max_delay:
                    delay = max_delay
                if self._get() is EmptyData:
                    time.sleep(delay)
                    delay *= backoff
            return self._result

    def revoke(self):
        """Cancel the task's future execution."""
        self.huey.revoke(self.task)

    def restore(self):
        """Undo a revoke()."""
        self.huey.restore(self.task)
def with_metaclass(meta, base=object):
    """Return a throwaway base class constructed with metaclass *meta*.

    Instantiating the metaclass directly lets subclasses pick up *meta*
    under both Python 2 and 3, avoiding the incompatible
    ``__metaclass__`` / ``metaclass=`` syntaxes.
    """
    temp_name = "NewBase"
    return meta(temp_name, (base,), {})
class QueueTaskMetaClass(type):
    def __init__(cls, name, bases, attrs):
        """
        Metaclass to ensure that all task classes are registered
        """
        # Side effect at class-definition time: every subclass created
        # with this metaclass is added to the global task registry.
        registry.register(cls)
class QueueTask(with_metaclass(QueueTaskMetaClass)):
    """
    A unit of work plus its scheduling metadata.

    Instances are serialized onto the queue by :class:`Huey` and executed
    later, out-of-band, by the consumer.  Subclass and implement
    :meth:`execute` — or use the :meth:`task` decorator, which generates
    such subclasses automatically.

    Example::

        class SendEmailTask(QueueTask):
            def execute(self):
                data = self.get_data()
                send_email(data['recipient'], data['subject'], data['body'])

        huey.enqueue(
            SendEmailTask({
                'recipient': 'somebody@spam.com',
                'subject': 'look at this awesome website',
                'body': 'http://youtube.com'
            })
        )
    """
    def __init__(self, data=None, task_id=None, execute_time=None, retries=0,
                 retry_delay=0):
        self.set_data(data)
        # Either the caller supplies an id, or a fresh one is generated.
        self.task_id = task_id or self.create_id()
        self.revoke_id = 'r:%s' % self.task_id
        self.execute_time = execute_time
        self.retries = retries
        self.retry_delay = retry_delay

    def create_id(self):
        # Random identifier; periodic tasks override this with a stable name.
        return str(uuid.uuid4())

    def get_data(self):
        return self.data

    def set_data(self, data):
        self.data = data

    def execute(self):
        """Execute any arbitrary code here"""
        raise NotImplementedError

    def __eq__(self, rhs):
        # Same task identity, same schedule, same concrete class.
        return (self.task_id == rhs.task_id
                and self.execute_time == rhs.execute_time
                and type(self) == type(rhs))
class PeriodicQueueTask(QueueTask):
    # Tasks that run on a schedule rather than on demand; the id is the
    # registry name, so there is exactly one logical instance per class.
    def create_id(self):
        return registry.task_to_string(type(self))

    def validate_datetime(self, dt):
        """Validate that the task should execute at the given datetime"""
        # Subclasses (e.g. crontab-generated tasks) override this; the base
        # implementation never matches any datetime.
        return False
def create_task(task_class, func, retries_as_argument=False, task_name=None,
                include_task=False, **kwargs):
    """Build a ``task_class`` subclass whose ``execute`` calls ``func`` with
    the stored ``(args, kwargs)`` payload.

    ``retries_as_argument`` injects the remaining retry count, and
    ``include_task`` injects the task instance itself, as keyword arguments
    to ``func``.  Extra ``kwargs`` become class attributes (and may override
    the generated ones)."""
    def execute(self):
        call_args, call_kwargs = self.data or ((), {})
        if retries_as_argument:
            call_kwargs['retries'] = self.retries
        if include_task:
            call_kwargs['task'] = self
        return func(*call_args, **call_kwargs)

    class_attrs = {
        'execute': execute,
        '__module__': func.__module__,
        '__doc__': func.__doc__,
    }
    # Caller-provided attributes win over the generated defaults.
    class_attrs.update(kwargs)
    class_name = task_name or 'queuecmd_%s' % (func.__name__)
    return type(class_name, (task_class,), class_attrs)
# Crontab parsing helpers: "a-b" numeric ranges and "*/n" step specs.
# FIX: use raw strings — '\d' and '\*' are invalid escape sequences in
# ordinary string literals (DeprecationWarning, and an error in newer
# Python); the compiled patterns are unchanged.
dash_re = re.compile(r'(\d+)-(\d+)')
every_re = re.compile(r'\*\/(\d+)')
def crontab(month='*', day='*', day_of_week='*', hour='*', minute='*'):
"""
Convert a "crontab"-style set of parameters into a test function that will
return True when the given datetime matches the parameters set forth in
the crontab.
Acceptable inputs:
* = every distinct value
*/n = run every "n" times, i.e. hours='*/4' == 0 |
accordingly.
"""
gziptext_cache = GzipTextFileCache(
cachedir=os.path.join(tempfile.gettempdir(), 'pydov_tests_error'),
max_age=datetime.timedelta(seconds=0.1))
gziptext_cache.remove()
orig_cache = pydov.cache
pydov.cache = gziptext_cache
yield
gziptext_cache.remove()
pydov.cache = orig_cache
@pytest.fixture
def test_hook_count():
    """PyTest fixture temporarily disabling default hooks and installing
    HookCounter."""
    # Swap pydov's global hook collection for a single counting hook; pytest
    # resumes after ``yield`` during teardown and the originals are restored
    # regardless of the test outcome.
    orig_hooks = pydov.hooks
    pydov.hooks = Hooks(
        (HookCounter(),)
    )
    yield
    pydov.hooks = orig_hooks
class TestNoXDOV(object):
"""Class grouping tests related failing DOV services."""
@pytest.mark.online
@pytest.mark.skipif(not ServiceCheck.service_ok(),
reason="DOV service is unreachable")
def test_do_not_cache_error(self):
"""Test whether the 404 error page does not end up being cached."""
bs = BoringSearch(objecttype=pydov.types.boring.Boring)
bs.search(query=PropertyIsEqualTo(
'pkey_boring', build_dov_url('data/boring/2004-103984')))
assert not os.path.exists(os.path.join(
pydov.cache.cachedir, 'boring', '2004-103984.xml.gz'
))
    @pytest.mark.online
    @pytest.mark.skipif(not ServiceCheck.service_ok(),
                        reason="DOV service is unreachable")
    def test_do_not_overwrite_stale_cache(self):
        """Test whether a stale copy of the data which exists in the cache is
        not overwritten by the 404 error page."""
        bs = BoringSearch(objecttype=pydov.types.boring.Boring)
        testdata_path = os.path.join(
            'tests', 'data', 'types', 'boring', 'boring.xml')
        cache_path = os.path.join(
            pydov.cache.cachedir, 'boring', '2004-103984.xml.gz'
        )
        # Seed the cache with a known-good copy; the fixture's max_age of
        # 0.1s plus this sleep guarantees it is stale by search time.
        os.makedirs(os.path.dirname(cache_path))
        with open(testdata_path, 'r') as testdata:
            with gzip.open(cache_path, 'wb') as cached_data:
                cached_data.write(testdata.read().encode('utf8'))
        time.sleep(0.5)
        bs.search(query=PropertyIsEqualTo(
            'pkey_boring', build_dov_url('data/boring/2004-103984')))
        # The stale cached XML must survive the failed refresh attempt.
        with gzip.open(cache_path, 'rb') as cached_data:
            assert 'GEO-04/169-BNo-B1' in cached_data.read().decode('utf8')
    @pytest.mark.online
    @pytest.mark.skipif(not ServiceCheck.service_ok(),
                        reason="DOV service is unreachable")
    def test_stale_warning(self):
        """Test whether a stale version of the data from the cache is used in
        case of a service error, and if a warning is issued to the user."""
        bs = BoringSearch(objecttype=pydov.types.boring.Boring)
        testdata_path = os.path.join(
            'tests', 'data', 'types', 'boring', 'boring.xml')
        cache_path = os.path.join(
            pydov.cache.cachedir, 'boring', '2004-103984.xml.gz'
        )
        # Seed the cache and wait past the fixture's 0.1s max_age so the
        # copy is stale when the (failing) service is queried.
        os.makedirs(os.path.dirname(cache_path))
        with open(testdata_path, 'r') as testdata:
            with gzip.open(cache_path, 'wb') as cached_data:
                cached_data.write(testdata.read().encode('utf8'))
        time.sleep(0.5)
        with pytest.warns(XmlStaleWarning):
            df = bs.search(query=PropertyIsEqualTo(
                'pkey_boring', build_dov_url('data/boring/2004-103984')))
        # Values from the stale XML copy must appear in the dataframe.
        assert not df.iloc[0].boorgatmeting
        assert df.iloc[0].boormethode == 'spade'
@pytest.mark.online
@pytest.mark.skipif(not ServiceCheck.service_ok(),
reason="DOV service is unreachable")
def test_stale_disabled(self):
"""Test whether no stale version of the data from the cache is used
when disabled, and if a warning is issued to the user."""
pydov.cache.stale_on_error = False
bs = BoringSearch(objecttype=pydov.types.boring.Boring)
testdata_path = os.path.join(
'tests', 'data', 'types', 'boring', 'boring.xml')
cache_path = os.path.join(
pydov.cache.cachedir, 'boring', '2004-103984.xml.gz'
)
os.makedirs(os.path.dirname(cache_path))
with open(testdata_path, 'r') as testdata:
with gzip.open(cache_path, 'wb') as cached_data:
cached_data.write(testdata.read().encode('utf8'))
time.sleep(0.5)
with pytest.warns(XmlFetchWarning):
df = bs.search(query=PropertyIsEqualTo(
'pkey_boring', build_dov_url('data/boring/2004-103984')))
assert np.isnan(df.iloc[0].boorgatmeting)
assert np.isnan(df.iloc[0].boormethode)
@pytest.mark.online
@pytest.mark.skipif(not ServiceCheck.service_ok(),
reason="DOV service is unreachable")
def test_wfs_data_present(self):
"""Test whether data available in the WFS is present in the dataframe
in case of a service error in XDOV."""
bs = BoringSearch(objecttype=pydov.types.boring.Boring)
df = bs.search(query=PropertyIsEqualTo(
'pkey_boring', build_dov_url('data/boring/2016-122561')))
assert df.iloc[0].gemeente == 'Wortegem-Petegem'
    @pytest.mark.online
    @pytest.mark.skipif(not ServiceCheck.service_ok(),
                        reason="DOV service is unreachable")
    def test_nan_and_fetch_warning(self):
        """Test whether the XML data is set tot NaN in case of an error and
        no stale cache is available. Also test if a warning is given to the
        user."""
        bs = BoringSearch(objecttype=pydov.types.boring.Boring)
        # No cache was seeded here, so the failed fetch has no fallback.
        with pytest.warns(XmlFetchWarning):
            df = bs.search(query=PropertyIsEqualTo(
                'pkey_boring', build_dov_url('data/boring/2016-122561')))
        assert np.isnan(df.iloc[0].boorgatmeting)
    @pytest.mark.online
    @pytest.mark.skipif(not ServiceCheck.service_ok(),
                        reason="DOV service is unreachable")
    def test_no_xsd_warning(self):
        """Test whether the metadata can still be retrieved, and that the
        XSD values are unavailable. Also test if a warning is given to the
        user."""
        # XSD schemas cannot be resolved, so the value lists derived from
        # them must be absent while the rest of the field metadata survives.
        with pytest.warns(XsdFetchWarning):
            gwf = GrondwaterFilterSearch(
                objecttype=pydov.types.grondwaterfilter.GrondwaterFilter)
            fields = gwf.get_fields()
            assert 'values' not in fields['aquifer_code']
@pytest.mark.online
@pytest.mark.skipif(not ServiceCheck.service_ok(),
reason="DOV service is unreachable")
def test_no_xsd_wfs_only(self):
"""Test whether the WFS data is available, even if XSD schemas cannot
be resolved."""
gwf = GrondwaterFilterSearch(
objecttype=pydov.types.grondwaterfilter.GrondwaterFilter)
df = gwf.search(max_features=1)
assert df.iloc[0].pkey_filter is not None
    @pytest.mark.online
    @pytest.mark.skipif(not ServiceCheck.service_ok(),
                        reason="DOV service is unreachable")
    def test_hooks_fetch_error(self, test_hook_count):
        """Test if the correct hooks are fired when the XML fails to be
        fetched from DOV.

        Parameters
        ----------
        test_hook_count : pytest.fixture
            Fixture removing default hooks and installing HookCounter.
        """
        bs = BoringSearch(objecttype=pydov.types.boring.Boring)
        bs.search(query=PropertyIsEqualTo(
            'pkey_boring', build_dov_url('data/boring/2004-103984')))
        # WFS side proceeds normally ...
        assert pydov.hooks[0].count_wfs_search_init == 1
        assert pydov.hooks[0].count_wfs_search_result == 1
        assert pydov.hooks[0].count_wfs_search_result_received == 1
        # ... while the XML side reports only a fetch error: no download,
        # no cache hit (fresh or stale) and no received document.
        assert pydov.hooks[0].count_xml_received == 0
        assert pydov.hooks[0].count_xml_cache_hit == 0
        assert pydov.hooks[0].count_xml_downloaded == 0
        assert pydov.hooks[0].count_xml_stale_hit == 0
        assert pydov.hooks[0].count_xml_fetch_error == 1
        assert pydov.hooks[0].count_meta_received > 0
        assert pydov.hooks[0].count_inject_meta_response > 0
@pytest.mark.online
@pytest.mark.skipif(not ServiceCheck.service_ok(),
|
#!/usr/bin/env python3
from AbstractAPIReader import AbstractAPIReader
from Smartvel import Smartvel
from smartvel_auth import TOKEN
class APIReaderSmartvel(AbstractAPIReader):
    """API reader that produces a Smartvel iterable for a given endpoint."""

    def __init__(self, token=TOKEN, endpoint='events'):
        # Token and endpoint are held until the iterable is requested.
        self._token = token
        self._endpoint = endpoint

    def get_iterable(self):
        """Return a Smartvel iterator over the configured endpoint."""
        return Smartvel(self._token, self._endpoint)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from os.path import splitext
import yaml
def _matching_checks(cfg, unit_ext):
    """Return the checks mapped to ``unit_ext`` in ``cfg``.

    ``cfg`` is a config dict carrying 'mappings' (list of
    {'extension': ..., 'templates': [...]}) and 'templates' (dict of named
    check sets).  Returns the merged checks of every template mapped to the
    extension, or an empty dict when nothing matches.
    """
    matching_templates = list()
    for mapping in cfg.get('mappings'):
        # Mapping extensions are stored without the leading dot.
        if mapping.get('extension') == unit_ext[1:]:
            matching_templates = mapping.get('templates')
    extracted_checks = dict()
    if matching_templates:
        templates = cfg.get('templates')
        for matching_template in matching_templates:
            # Guard against a missing/None 'templates' section (previously
            # this crashed with TypeError for dir-level configs).
            if templates and matching_template in templates:
                extracted_checks.update(templates.get(matching_template))
    return extracted_checks


def build_plan(discovery_result, config=None):
    """ Extract content and determine which checks need to be run

    Checks are assigned in the following order:
        1. <filename>.fval
        2. <dir>.fval
        3. default config

    Note: As soon as a matching set of tests is found, the build is complete.
    Merging/inheritance is not currently supported.

    Args:
        discovery_result: The paths discovered
        config: Options provided in configuration file and
            through command line options

    Returns:
        list: The checks to perform against the discovered paths
    """
    logger = config['logger']
    plan = list()
    for item in discovery_result:
        fval_path = item.get('fval_path')
        dir_fval_path = item.get('dir_fval_path')
        unit_path = item.get('unit_path')
        fval_dir_config = None
        unit_ext = splitext(unit_path)[1]
        unit_tests = list()
        # If a 'unit specific' fval file was found, then load its checks
        if fval_path:
            try:
                with open(fval_path.encode('utf-8')) as fval_file:
                    # SECURITY(review): yaml.load without an explicit Loader
                    # can construct arbitrary objects; prefer yaml.safe_load
                    # if fval files are not fully trusted.
                    unit_tests = yaml.load(fval_file.read())
            except IOError:
                logger.debug('No unit specific file found for: {0}'.format(fval_path))
        # If a fval file was found in the directory, then load its checks
        elif dir_fval_path:
            try:
                with open(
                        dir_fval_path.encode(
                            'utf-8'), 'r') as dir_fval_file:
                    fval_dir_config = yaml.load(dir_fval_file.read())
            except IOError:
                # BUG FIX: this previously logged fval_path (always None in
                # this branch) instead of the path that failed to open.
                logger.debug('No dir specific file found for: {0}'.format(dir_fval_path))
        # If no tests (specific to the unit or the directory) are found,
        # then fall back to master config
        if not unit_tests and fval_dir_config and fval_dir_config.get('mappings'):
            unit_tests = _matching_checks(fval_dir_config, unit_ext)
        elif not unit_tests and (config.get('all') or dir_fval_path) and config.get('mappings'):
            unit_tests = _matching_checks(config, unit_ext)
        if unit_tests:
            plan.append(dict(unit_path=unit_path, unit_checks=unit_tests))
    return plan
|
import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
    """Register the 'hooded crystal snake' creature spawn template with the
    spawn service.  Values mirror the other creature templates in this
    directory; setter call order is kept as-is."""
    mobileTemplate = MobileTemplate()

    # Identity and difficulty.
    mobileTemplate.setCreatureName('crystal_snake_hooded')
    mobileTemplate.setLevel(62)
    mobileTemplate.setDifficulty(Difficulty.NORMAL)
    mobileTemplate.setMinSpawnDistance(4)
    mobileTemplate.setMaxSpawnDistance(8)
    mobileTemplate.setDeathblow(False)
    mobileTemplate.setScale(1)
    # Harvestable resources.
    mobileTemplate.setMeatType("Carnivore Meat")
    mobileTemplate.setMeatAmount(5)
    mobileTemplate.setHideType("Scaley Hide")
    mobileTemplate.setHideAmount(2)
    # Social/AI behaviour: aggressive and attackable, assists its group.
    mobileTemplate.setSocialGroup("crystal snake")
    mobileTemplate.setAssistRange(12)
    mobileTemplate.setStalker(False)
    mobileTemplate.setOptionsBitmask(Options.AGGRESSIVE | Options.ATTACKABLE)

    # Visual appearance object template(s).
    templates = Vector()
    templates.add('object/mobile/shared_crystal_snake.iff')
    mobileTemplate.setTemplates(templates)

    # Natural (unarmed) weapon dealing kinetic damage.
    weaponTemplates = Vector()
    weapontemplate = WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic')
    weaponTemplates.add(weapontemplate)
    mobileTemplate.setWeaponTemplateVector(weaponTemplates)

    # Special attacks available to the AI.
    attacks = Vector()
    attacks.add('bm_bite_4')
    attacks.add('bm_hamstring_4')
    attacks.add('bm_puncture_2')
    mobileTemplate.setDefaultAttack('creatureMeleeAttack')
    mobileTemplate.setAttacks(attacks)

    core.spawnService.addMobileTemplate('hooded_crystal_snake', mobileTemplate)
    return
l(-3, -7)) == "3.0/7.0"
assert rcode(x + Rational(3, 7)) == "x + 3.0/7.0"
assert rcode(Rational(3, 7)*x) == "(3.0/7.0)*x"
def test_rcode_Integer():
    # Integers are printed verbatim in R source form.
    assert rcode(Integer(67)) == "67"
    assert rcode(Integer(-1)) == "-1"
def test_rcode_functions():
    # Powers use R's ^ operator; Min/Max map to R's min()/max().
    assert rcode(sin(x) ** cos(x)) == "sin(x)^cos(x)"
    assert rcode(factorial(x) + gamma(y)) == "factorial(x) + gamma(y)"
    assert rcode(beta(Min(x, y), Max(x, y))) == "beta(min(x, y), max(x, y))"
def test_rcode_inline_function():
    # implemented_function bodies are expanded inline by the printer; an
    # indexed assignment additionally generates the surrounding R for-loop.
    x = symbols('x')
    g = implemented_function('g', Lambda(x, 2*x))
    assert rcode(g(x)) == "2*x"
    g = implemented_function('g', Lambda(x, 2*x/Catalan))
    assert rcode(
        g(x)) == "Catalan = %s;\n2*x/Catalan" % Catalan.n()
    A = IndexedBase('A')
    i = Idx('i', symbols('n', integer=True))
    g = implemented_function('g', Lambda(x, x*(1 + x)*(2 + x)))
    res=rcode(g(A[i]), assign_to=A[i])
    ref=(
        "for (i in 1:n){\n"
        "   A[i] = (A[i] + 1)*(A[i] + 2)*A[i];\n"
        "}"
    )
    assert res == ref
def test_rcode_exceptions():
    # These functions all have direct R equivalents (no printer errors).
    assert rcode(ceiling(x)) == "ceiling(x)"
    assert rcode(Abs(x)) == "abs(x)"
    assert rcode(gamma(x)) == "gamma(x)"
def test_rcode_user_functions():
    # user_functions entries may be a plain name or [(predicate, name)]
    # pairs dispatched on the argument's properties.
    x = symbols('x', integer=False)
    n = symbols('n', integer=True)
    custom_functions = {
        "ceiling": "myceil",
        "Abs": [(lambda x: not x.is_integer, "fabs"), (lambda x: x.is_integer, "abs")],
    }
    assert rcode(ceiling(x), user_functions=custom_functions) == "myceil(x)"
    assert rcode(Abs(x), user_functions=custom_functions) == "fabs(x)"
    assert rcode(Abs(n), user_functions=custom_functions) == "abs(n)"
def test_rcode_boolean():
    # Logical connectives use R operator syntax (&, |, !); note the operand
    # ordering the printer produces for mixed and/or expressions.
    assert rcode(True) == "True"
    assert rcode(S.true) == "True"
    assert rcode(False) == "False"
    assert rcode(S.false) == "False"
    assert rcode(x & y) == "x & y"
    assert rcode(x | y) == "x | y"
    assert rcode(~x) == "!x"
    assert rcode(x & y & z) == "x & y & z"
    assert rcode(x | y | z) == "x | y | z"
    assert rcode((x & y) | z) == "z | x & y"
    assert rcode((x | y) & z) == "z & (x | y)"
def test_rcode_Relational():
    # Relational operators translate one-to-one into R.
    from sympy import Eq, Ne, Le, Lt, Gt, Ge
    assert rcode(Eq(x, y)) == "x == y"
    assert rcode(Ne(x, y)) == "x != y"
    assert rcode(Le(x, y)) == "x <= y"
    assert rcode(Lt(x, y)) == "x < y"
    assert rcode(Gt(x, y)) == "x > y"
    assert rcode(Ge(x, y)) == "x >= y"
def test_rcode_Piecewise():
    # Piecewise maps to nested ifelse() calls; without a True (default)
    # branch the uncovered case becomes NA.
    expr = Piecewise((x, x < 1), (x**2, True))
    res=rcode(expr)
    ref="ifelse(x < 1,x,x^2)"
    assert res == ref
    tau=Symbol("tau")
    res=rcode(expr,tau)
    ref="tau = ifelse(x < 1,x,x^2);"
    assert res == ref
    expr = 2*Piecewise((x, x < 1), (x**2, x<2), (x**3,True))
    assert rcode(expr) == "2*ifelse(x < 1,x,ifelse(x < 2,x^2,x^3))"
    res = rcode(expr, assign_to='c')
    assert res == "c = 2*ifelse(x < 1,x,ifelse(x < 2,x^2,x^3));"
    # Check that Piecewise without a True (default) condition error
    #expr = Piecewise((x, x < 1), (x**2, x > 1), (sin(x), x > 0))
    #raises(ValueError, lambda: rcode(expr))
    expr = 2*Piecewise((x, x < 1), (x**2, x<2))
    assert(rcode(expr))== "2*ifelse(x < 1,x,ifelse(x < 2,x^2,NA))"
def test_rcode_sinc():
    # sinc has no R builtin; it is rewritten as a guarded sin(x)/x.
    from sympy import sinc
    expr = sinc(x)
    res = rcode(expr)
    ref = "ifelse(x != 0,sin(x)/x,1)"
    assert res == ref
def test_rcode_Piecewise_deep():
    # Piecewise nested inside larger expressions still becomes ifelse().
    p = rcode(2*Piecewise((x, x < 1), (x + 1, x < 2), (x**2, True)))
    assert p == "2*ifelse(x < 1,x,ifelse(x < 2,x + 1,x^2))"
    expr = x*y*z + x**2 + y**2 + Piecewise((0, x < 0.5), (1, True)) + cos(z) - 1
    p = rcode(expr)
    ref="x^2 + x*y*z + y^2 + ifelse(x < 0.5,0,1) + cos(z) - 1"
    assert p == ref
    ref="c = x^2 + x*y*z + y^2 + ifelse(x < 0.5,0,1) + cos(z) - 1;"
    p = rcode(expr, assign_to='c')
    assert p == ref
def test_rcode_ITE():
    # ITE (if-then-else) prints exactly like a two-branch Piecewise.
    expr = ITE(x < 1, y, z)
    p = rcode(expr)
    ref="ifelse(x < 1,y,z)"
    assert p == ref
def test_rcode_settings():
    # Unknown printer settings must be rejected.
    raises(TypeError, lambda: rcode(sin(x), method="garbage"))
def test_rcode_Indexed():
    # Exercises RCodePrinter._print_Indexed directly; _not_r collects
    # symbols needing declaration and must remain empty here.
    from sympy.tensor import IndexedBase, Idx
    from sympy import symbols
    n, m, o = symbols('n m o', integer=True)
    i, j, k = Idx('i', n), Idx('j', m), Idx('k', o)
    p = RCodePrinter()
    p._not_r = set()
    x = IndexedBase('x')[j]
    assert p._print_Indexed(x) == 'x[j]'
    A = IndexedBase('A')[i, j]
    assert p._print_Indexed(A) == 'A[i, j]'
    B = IndexedBase('B')[i, j, k]
    assert p._print_Indexed(B) == 'B[i, j, k]'
    assert p._not_r == set()
def test_rcode_Indexed_without_looking_for_contraction():
    # contract=False prints the indexed assignment as-is, without
    # generating a summation loop over the repeated index.
    len_y = 5
    y = IndexedBase('y', shape=(len_y,))
    x = IndexedBase('x', shape=(len_y,))
    Dy = IndexedBase('Dy', shape=(len_y-1,))
    i = Idx('i', len_y-1)
    e=Eq(Dy[i], (y[i+1]-y[i])/(x[i+1]-x[i]))
    code0 = rcode(e.rhs, assign_to=e.lhs, contract=False)
    assert code0 == 'Dy[i] = (y[%s] - y[i])/(x[%s] - x[i]);' % (i + 1, i + 1)
def test_rcode_loops_matrix_vector():
    # A contracted index (j) produces a zero-initialisation loop followed
    # by an accumulation loop: y = A %*% x written as explicit R loops.
    n, m = symbols('n m', integer=True)
    A = IndexedBase('A')
    x = IndexedBase('x')
    y = IndexedBase('y')
    i = Idx('i', m)
    j = Idx('j', n)
    s = (
        'for (i in 1:m){\n'
        '   y[i] = 0;\n'
        '}\n'
        'for (i in 1:m){\n'
        '   for (j in 1:n){\n'
        '      y[i] = A[i, j]*x[j] + y[i];\n'
        '   }\n'
        '}'
    )
    c = rcode(A[i, j]*x[j], assign_to=y[i])
    assert c == s
def test_dummy_loops():
    # Dummy symbols print with their dummy_index suffix so distinct
    # dummies with the same display name stay distinguishable.
    # the following line could also be
    # [Dummy(s, integer=True) for s in 'im']
    # or [Dummy(integer=True) for s in 'im']
    i, m = symbols('i m', integer=True, cls=Dummy)
    x = IndexedBase('x')
    y = IndexedBase('y')
    i = Idx(i, m)
    expected = (
        'for (i_%(icount)i in 1:m_%(mcount)i){\n'
        '   y[i_%(icount)i] = x[i_%(icount)i];\n'
        '}'
    ) % {'icount': i.label.dummy_index, 'mcount': m.dummy_index}
    code = rcode(x[i], assign_to=y[i])
    assert code == expected
def test_rcode_loops_add():
    # Mixing a contracted term with plain elementwise terms: the non-loop
    # part initialises y[i], then the contraction loop accumulates into it.
    from sympy.tensor import IndexedBase, Idx
    from sympy import symbols
    n, m = symbols('n m', integer=True)
    A = IndexedBase('A')
    x = IndexedBase('x')
    y = IndexedBase('y')
    z = IndexedBase('z')
    i = Idx('i', m)
    j = Idx('j', n)
    s = (
        'for (i in 1:m){\n'
        '   y[i] = x[i] + z[i];\n'
        '}\n'
        'for (i in 1:m){\n'
        '   for (j in 1:n){\n'
        '      y[i] = A[i, j]*x[j] + y[i];\n'
        '   }\n'
        '}'
    )
    c = rcode(A[i, j]*x[j] + x[i] + z[i], assign_to=y[i])
    assert c == s
def test_rcode_loops_multiple_contractions():
    # Three contracted indices (j, k, l) nest three accumulation loops
    # inside the free-index loop over i.
    from sympy.tensor import IndexedBase, Idx
    from sympy import symbols
    n, m, o, p = symbols('n m o p', integer=True)
    a = IndexedBase('a')
    b = IndexedBase('b')
    y = IndexedBase('y')
    i = Idx('i', m)
    j = Idx('j', n)
    k = Idx('k', o)
    l = Idx('l', p)
    s = (
        'for (i in 1:m){\n'
        '   y[i] = 0;\n'
        '}\n'
        'for (i in 1:m){\n'
        '   for (j in 1:n){\n'
        '      for (k in 1:o){\n'
        '         for (l in 1:p){\n'
        '            y[i] = a[i, j, k, l]*b[j, k, l] + y[i];\n'
        '         }\n'
        '      }\n'
        '   }\n'
        '}'
    )
    c = rcode(b[j, k, l]*a[i, j, k, l], assign_to=y[i])
    assert c == s
def test_rcode_loops_addfactor():
    # A sum of indexed factors multiplying a contracted term keeps the sum
    # intact inside the innermost accumulation statement.
    from sympy.tensor import IndexedBase, Idx
    from sympy import symbols
    n, m, o, p = symbols('n m o p', integer=True)
    a = IndexedBase('a')
    b = IndexedBase('b')
    c = IndexedBase('c')
    y = IndexedBase('y')
    i = Idx('i', m)
    j = Idx('j', n)
    k = Idx('k', o)
    l = Idx('l', p)
    s = (
        'for (i in 1:m){\n'
        '   y[i] = 0;\n'
        '}\n'
        'for (i in 1:m){\n'
        '   for (j in 1:n){\n'
        '      for (k in 1:o){\n'
        '         for (l in 1:p){\n'
        '            y[i] = (a[i, j, k, l] + b[i, j, k, l])*c[j, k, l] + y[i];\n'
        '         }\n'
        '      }\n'
        '   }\n'
        '}'
    )
    c = rcode((a[i, j, k, l] + b[i, j, k, l])*c[j, k, l], assign_to=y[i])
    assert c == s
def test_rcode_loops_multiple_terms():
from sympy.tensor import Inde |
from corrdb.common.models import UserModel
from corrdb.common.models import ProfileModel
from corrdb.common import get_or_create
import hashlib
import datetime
import simplejson as json
import os
import re
def password_check(password):
    """
    Verify the strength of 'password'.

    Returns a boolean: True when the password is strong, False otherwise
    (FIX: the docstring previously claimed a dict was returned).

    A password is considered strong if it has:
        12 characters length or more
        1 digit or more
        1 symbol or more
        1 uppercase letter or more
        1 lowercase letter or more
    """
    # calculating the length
    length_error = len(password) < 12
    # searching for digits
    digit_error = re.search(r"\d", password) is None
    # searching for uppercase
    uppercase_error = re.search(r"[A-Z]", password) is None
    # searching for lowercase
    lowercase_error = re.search(r"[a-z]", password) is None
    # searching for symbols (raw string: '\;' was an invalid escape before;
    # the accepted character set is unchanged)
    symbol_error = not any(i in r"]\;',./!@#$%^&*()_+-=]" for i in password)
    # overall result
    password_ok = not ( length_error or digit_error or uppercase_error or lowercase_error or symbol_error )
    return password_ok
def check_admin(email=None):
    """
    Check that admin account does not already exist.

    Returns True when admin creation should be blocked: the email already
    belongs to an admin, any admin exists at all, or no email was given.
    """
    if not email:
        # Fake admin existence to avoid attempt to create admin with void email.
        return True
    account = UserModel.objects(email=email).first()
    if account and account.group == "admin":
        return True
    existing_admin = UserModel.objects(group="admin").first()
    if existing_admin:
        # We only want to allow the creation of one admin
        # Only the original admin can add new admins.
        # Once created another admin cannot be added this way
        # for security purposes.
        print("Admins already exist!")
        return True
    return False
def create_admin(email, password, fname, lname):
    """
    Creates the first admin user.

    Returns True when both the admin account and its profile were newly
    created, False otherwise (weak password, or either record pre-existed).
    """
    if not password_check(password):
        return False
    hash_pwd = hashlib.sha256(('CoRRPassword_%s'%password).encode("ascii")).hexdigest()
    api_token = hashlib.sha256(('CoRRToken_%s_%s'%(email, str(datetime.datetime.utcnow()))).encode("ascii")).hexdigest()
    (account, created) = get_or_create(document=UserModel, created_at=str(datetime.datetime.utcnow()), email=email, group='admin', api_token=api_token)
    if not created:
        return False
    account.password = hash_pwd
    account.save()
    (profile_model, created) = get_or_create(document=ProfileModel, created_at=str(datetime.datetime.utcnow()), user=account, fname=fname, lname=lname)
    return bool(created)
content = {}
# Loading admin user account information.
# The instance admin should make sure to securely backup this file.
# FIX: the two branches were byte-for-byte duplicates except for the path;
# choose the path once and run a single code path.  The fallback path is
# opened without an existence check (matching the original behaviour) and
# raises IOError when absent.
_CRED_PATH = "/home/corradmin/credentials/tmp_admin.json"
if not os.path.isfile(_CRED_PATH):
    _CRED_PATH = "/tmp/tmp_admin.json"
with open(_CRED_PATH, "r") as admin_stuff:
    content = json.loads(admin_stuff.read())
try:
    if not check_admin(content['admin-email']):
        print("Creating an admin account!")
        create_admin(content['admin-email'], content['admin-password'],
                     content['admin-fname'], content['admin-lname'])
except Exception:
    # FIX: was a bare ``except:`` which also swallowed SystemExit and
    # KeyboardInterrupt; still best-effort, logging only.
    print("An error occured!")
|
"""
Copyright (c) 2012-2020 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT | ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from base_console import BaseConsole
from share_iscsi_console import ShareIscsiConsole
from rest_util import api_call
class IscsiConsole(BaseConsole):
    """Interactive console for controlling the iSCSI service via the API."""

    def __init__(self, prompt):
        BaseConsole.__init__(self)
        self.prompt = prompt + " Iscsi>"
        self.url = BaseConsole.url + "sm/services/iscsi/"

    def do_status(self, args):
        """Fetch and print the current iSCSI service status."""
        print(api_call(self.url))

    def put_wrapper(self, args, command):
        """Issue a PUT carrying ``command`` and print the service reply."""
        payload = {
            "command": command,
        }
        print(api_call(self.url, data=payload, calltype="put"))

    def do_start(self, args):
        return self.put_wrapper(args, "start")

    def do_stop(self, args):
        return self.put_wrapper(args, "stop")

    def do_share(self, args):
        """Open a share-specific console; extra words run as a one-shot
        command, otherwise an interactive loop is started."""
        tokens = args.split()
        if not tokens:
            return
        sub_console = ShareIscsiConsole(tokens[0])
        if len(tokens) > 1:
            sub_console.onecmd(" ".join(tokens[1:]))
        else:
            sub_console.cmdloop()
|
import lldb
from lldbsuite.test.decorators i | mport *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestCase(TestBase):
    # Checks that struct field offsets honour alignas() in expression
    # evaluation, without ever launching the inferior.
    mydir = TestBase.compute_mydir(__file__)

    @no_debug_info_test
    def test(self):
        self.build()
        # Create a target only; the expression below is evaluated statically.
        self.dbg.CreateTarget(self.getBuildArtifact("a.out"))
        # The offset of f2 should be 8 because of `alignas(8)`.
        self.expect_expr("(intptr_t)&d3g.f2 - (intptr_t)&d3g", result_value="8")
|
#!/usr/bin/env python
# NOTE: this is a Python 2 script (print statements); it validates and
# pre-processes a BCLDB payment remittance CSV for MySQL import.
'''
headderTemplate: an immutale tuple listing, in order, the expected headders
'''
headderTemplate = (
    "Payee Name: ",
    "Payee ID: ",
    "Payee Site: ",
    "Payment Number: ",
    "Payment Date: ",
)
'''
headderConverters: a dictionary key'd on the Headder title. the value is a lambda to format data values
'''
# All converters are currently identity; date conversion is still a TODO
# (see comment further down).
headderConverters = {
    "Payee Name: " : (lambda x: x),
    "Payee ID: " : (lambda x: x),
    "Payee Site: " : (lambda x: x),
    "Payment Number: " : (lambda x: x),
    "Payment Date: " : (lambda x: x),
}
'''
Perform some preliminary validataion of the input file
'''
csvfile = 'BCLDB_Payment_Remittance_74596_2014-7-31.csv'
#f = open ('BCLDB_Payment_Remittance_75249_2014-8-11.csv', 'r')
#f = open ('BCLDB_Payment_Remittance_73976_2014-7-15.csv', 'r')
f = open (csvfile, 'r')
headders = ()
# Only the first five lines carry the header block; column 0 of each line
# is the header label.  NOTE(review): the file handle is never closed and
# is reopened below — acceptable for a one-shot script.
for l in f.readlines()[0:5]:
    # make a list and slice the first value out and append to another tuple
    headders = headders + (''.join(l).replace('"','').strip().split(',')[0],)
'''
validate the input file has the correct number of data headders
'''
if (len(headderTemplate) == len(headders)) :
    print "File has same number of headders"
else:
    print "File has different headders"
    raise NameError('wrong number of headders')
'''
validate that the input file has the correct headders in the correct order
'''
# NOTE(review): this loop rebinds ``f`` (previously the file object) to a
# header string; the file is reopened afterwards.
for f, b in zip(headderTemplate,headders):
    if (f != b) :
        raise NameError('Headder Problems f != b: {0}, {1}'.format(f, b))
print "File has correct Headder in correct order"
'''
Everything seems ok so proceed with processing the file so that it is ready for import
into MySQL.
'''
# preproces file for import and write new file
# convert Transation Date and Batch Date to usable date types.
f = open (csvfile, 'r')
data = []
for h, l in zip(headders,f.readlines()[0:5]):
    # parse out the data from the line
    d = ''.join(l).replace('"','').strip().split(',')[1]
    # apply any formatting from he headderConverters and append to data
    data = data + [','.join([ headderConverters[h](d) ])]
    # print ','.join([ headderConverters[h](d) ])
print ','.join(data)
|
from Foundation import *
from PyObjCTools.TestSupport import *
class TestNSXMLNodeOptions (TestCase):
    # Pins the numeric values of the NSXMLNode/NSXMLDocument option bitmask
    # constants exposed by the PyObjC Foundation bindings.
    def testConstants(self):
        self.assertEqual(NSXMLNodeOptionsNone, 0)
        self.assertEqual(NSXMLNodeIsCDATA, 1 << 0)
        self.assertEqual(NSXMLNodeExpandEmptyElement, 1 << 1)
        self.assertEqual(NSXMLNodeCompactEmptyElement, 1 << 2)
        self.assertEqual(NSXMLNodeUseSingleQuotes, 1 << 3)
        self.assertEqual(NSXMLNodeUseDoubleQuotes, 1 << 4)
        self.assertEqual(NSXMLDocumentTidyHTML, 1 << 9)
        self.assertEqual(NSXMLDocumentTidyXML, 1 << 10)
        self.assertEqual(NSXMLDocumentValidate, 1 << 13)
        self.assertEqual(NSXMLNodeLoadExternalEntitiesAlways, 1 << 14)
        self.assertEqual(NSXMLNodeLoadExternalEntitiesSameOriginOnly, 1 << 15)
        self.assertEqual(NSXMLNodeLoadExternalEntitiesNever, 1 << 19)
        self.assertEqual(NSXMLDocumentXInclude, 1 << 16)
        self.assertEqual(NSXMLNodePrettyPrint, 1 << 17)
        self.assertEqual(NSXMLDocumentIncludeContentTypeDeclaration, 1 << 18)
        self.assertEqual(NSXMLNodePreserveNamespaceOrder, 1 << 20)
        self.assertEqual(NSXMLNodePreserveAttributeOrder, 1 << 21)
        self.assertEqual(NSXMLNodePreserveEntities, 1 << 22)
        self.assertEqual(NSXMLNodePreservePrefixes, 1 << 23)
        self.assertEqual(NSXMLNodePreserveCDATA, 1 << 24)
        self.assertEqual(NSXMLNodePreserveWhitespace, 1 << 25)
        self.assertEqual(NSXMLNodePreserveDTD, 1 << 26)
        self.assertEqual(NSXMLNodePreserveCharacterReferences, 1 << 27)
        # Composite masks are defined in terms of the flags above.
        self.assertEqual(NSXMLNodePreserveEmptyElements, (
            NSXMLNodeExpandEmptyElement | NSXMLNodeCompactEmptyElement))
        self.assertEqual(NSXMLNodePreserveQuotes, (NSXMLNodeUseSingleQuotes | NSXMLNodeUseDoubleQuotes))
        # Compare only the low 32 bits: PreserveAll carries high filler bits.
        self.assertEqual(NSXMLNodePreserveAll & 0xFFFFFFFF, 0xFFFFFFFF & (
            NSXMLNodePreserveNamespaceOrder |
            NSXMLNodePreserveAttributeOrder |
            NSXMLNodePreserveEntities |
            NSXMLNodePreservePrefixes |
            NSXMLNodePreserveCDATA |
            NSXMLNodePreserveEmptyElements |
            NSXMLNodePreserveQuotes |
            NSXMLNodePreserveWhitespace |
            NSXMLNodePreserveDTD |
            NSXMLNodePreserveCharacterReferences |
            0xFFF00000))
if __name__ == "__main__":
    # Run through the PyObjC test-support harness when executed directly.
    main()
|
from contextlib import contextmanager
import json
import mock
# Mock tastypi API.
class ProjectData(object):
    """Stub project endpoint: every call yields a fresh empty payload."""

    def get(self):
        return {}
def mock_version(repo):
    """Return a MockVersion class whose ``get`` serves canned version and
    project payloads, with ``repo`` substituted as the project's repo URL."""
    class MockVersion(object):
        def __init__(self, x=None):
            pass

        def put(self, x=None):
            # Echo back whatever was "stored".
            return x

        def get(self, **kwargs):
            # SCIENTIST DOG
            # Canned API fixtures below; parsed from JSON rather than
            # written as dict literals so they mirror real API responses.
            version = json.loads("""
                {
                    "active": false,
                    "built": false,
                    "id": "12095",
                    "identifier": "remotes/origin/zip_importing",
                    "resource_uri": "/api/v1/version/12095/",
                    "slug": "zip_importing",
                    "uploaded": false,
                    "verbose_name": "zip_importing"
                }""")

            project = json.loads("""
                {
                    "absolute_url": "/projects/docs/",
                    "analytics_code": "",
                    "copyright": "",
                    "default_branch": "",
                    "default_version": "latest",
                    "description": "Make docs.readthedocs.org work :D",
                    "django_packages_url": "",
                    "documentation_type": "sphinx",
                    "id": "2599",
                    "modified_date": "2012-03-12T19:59:09.130773",
                    "name": "docs",
                    "project_url": "",
                    "pub_date": "2012-02-19T18:10:56.582780",
                    "repo": "git://github.com/rtfd/readthedocs.org",
                    "repo_type": "git",
                    "requirements_file": "",
                    "resource_uri": "/api/v1/project/2599/",
                    "slug": "docs",
                    "subdomain": "http://docs.readthedocs.org/",
                    "suffix": ".rst",
                    "theme": "default",
                    "use_virtualenv": false,
                    "users": [
                        "/api/v1/user/1/"
                    ],
                    "version": ""
                }""")
            # Cross-link the fixtures and inject the caller's repo URL.
            version['project'] = project
            project['repo'] = repo
            if 'slug' in kwargs:
                # Listing-style request: wrap in an 'objects' envelope.
                return {'objects': [version], 'project': project}
            else:
                return version
    return MockVersion
class MockApi(object):
    """Aggregates the stubbed API surface used by the build tests."""

    def __init__(self, repo):
        # ``version`` is a class, mirroring the real client's attribute.
        self.version = mock_version(repo)

    def project(self, x):
        return ProjectData()
@contextmanager
def mock_api(repo):
    """Context manager installing ``MockApi`` in place of every API client.

    BUG FIX: the original chained the four patchers with ``and`` inside a
    single ``with`` statement; ``and`` evaluates to its last operand, so
    only ``api_v1`` was ever actually patched.  All four patchers are now
    entered.
    """
    api_mock = MockApi(repo)
    with mock.patch('readthedocs.restapi.client.api', api_mock), \
            mock.patch('readthedocs.api.client.api', api_mock), \
            mock.patch('readthedocs.projects.tasks.api_v2', api_mock), \
            mock.patch('readthedocs.projects.tasks.api_v1', api_mock):
        yield api_mock
|
# URL routes for the snippets "base" app.
# NOTE(review): django.conf.urls.defaults was removed in Django 1.6 —
# presumably this project pins an older Django; confirm before upgrading.
from django.conf.urls.defaults import patterns, url
from snippets.base import views
urlpatterns = patterns('',
    url(r'^$', views.index, name='base.index'),
    # Snippet fetch URL: every client attribute is encoded as a path segment.
    url(r'^(?P<startpage_version>[^/]+)/(?P<name>[^/]+)/(?P<version>[^/]+)/'
        '(?P<appbuildid>[^/]+)/(?P<build_target>[^/]+)/(?P<locale>[^/]+)/'
        '(?P<channel>[^/]+)/(?P<os_version>[^/]+)/(?P<distribution>[^/]+)/'
        '(?P<distribution_version>[^/]+)/$', views.fetch_snippets,
        name='view_snippets'),
    # Admin-only preview endpoints.
    url(r'^admin/base/snippet/preview/', views.preview_empty,
        name='base.admin.preview_empty'),
    url(r'^admin/base/snippet/(\d+)/preview/', views.preview_snippet,
        name='base.admin.preview_snippet'),
    url(r'^admin/base/snippettemplate/(\d+)/variables/',
        views.admin_template_json, name='base.admin.template_json'),
)
|
def check_vivado(args):
    """Locate the Xilinx Vivado toolchain.

    Returns ``(True, found_message)`` when the ``vivado`` executable is on
    PATH or under the default per-version install tree (in which case PATH is
    extended in-process), and ``(False, error, download_hint)`` otherwise.
    """
    vivado_path = get_command("vivado")
    if vivado_path is None:  # was `== None`; identity test is the idiom
        # Not on PATH: scan the default Vivado install directory, which
        # contains one subdirectory per installed version.
        if os.name == 'nt':
            base_dir = r"C:\Xilinx\Vivado"
        else:
            base_dir = "/opt/Xilinx/Vivado"
        if os.path.exists(base_dir):
            # `entry` instead of `file`, which shadows a builtin.
            for entry in os.listdir(base_dir):
                bin_dir = os.path.join(base_dir, entry, "bin")
                if os.path.exists(os.path.join(bin_dir, "vivado")):
                    # Make the tool callable by subsequent subprocesses.
                    os.environ["PATH"] += os.pathsep + bin_dir
                    vivado_path = bin_dir
                    break
    if vivado_path is None:
        return (False, "toolchain not found in your PATH", "download it from https://www.xilinx.com/support/download.html")
    return (True, "found at {}".format(vivado_path))
|
#! /usr/bin/env vpython3
# Copyright 2017 The Chromium Authors. All rights reserv | ed.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=protected-access
import tempfile
import shutil
import unittest
from pylib.base import output_manager
from pylib.base import output_manager_test_case
from pylib.output import local_output_manager
class LocalOutputManagerTest(output_manager_test_case.OutputManagerTestCase):
    """Exercises LocalOutputManager against a throwaway temp directory."""

    def setUp(self):
        # Fresh directory per test so runs never interfere with each other.
        self._output_dir = tempfile.mkdtemp()
        self._output_manager = local_output_manager.LocalOutputManager(
            self._output_dir)

    def tearDown(self):
        # Drop everything the manager wrote during the test.
        shutil.rmtree(self._output_dir)

    def testUsableTempFile(self):
        self.assertUsableTempFile(
            self._output_manager._CreateArchivedFile(
                'test_file', 'test_subdir', output_manager.Datatype.TEXT))
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
''' Various kinds of layout components.
'''
from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
from ..core.enums import SizingMode
from ..core.has_props import abstract
from ..core.properties import Bool, Enum, Int, Instance, List, Seq, String
from ..core.validation import warning
from ..core.validation.warnings import BOTH_CHILD_AND_ROOT, EMPTY_LAYOUT
from ..model import Model
@abstract
class LayoutDOM(Model):
    ''' An abstract base class for layout components.

    Declares the sizing and visibility properties shared by every
    layout-capable model (plots, rows, columns, widget boxes).

    '''
    # Pixel dimensions; both optional and interpreted according to sizing_mode.
    width = Int(help="""
    An optional width for the component (in pixels).
    """)
    height = Int(help="""
    An optional height for the component (in pixels).
    """)
    disabled = Bool(False, help="""
    Whether the widget will be disabled when rendered. If ``True``,
    the widget will be greyed-out, and not respond to UI events.
    """)
    sizing_mode = Enum(SizingMode, default="fixed", help="""
    How the item being displayed should size itself. Possible values are
    ``"fixed"``, ``"scale_width"``, ``"scale_height"``, ``"scale_both"``, and
    ``"stretch_both"``.
    ``"stretch_both"`` elements are completely responsive (independently in width and height) and
    will resize to occupy all available space, even if this changes the aspect ratio of the element.
    This is sometimes called outside-in, and is a typical behavior for desktop applications.
    ``"fixed"`` elements are not responsive. They will retain their original width and height
    regardless of any subsequent browser window resize events.
    ``"scale_width"`` elements will responsively resize to fit to the width available, *while
    maintaining the original aspect ratio*. This is a typical behavior for modern websites. For a
    ``Plot``, the aspect ratio ``plot_width/plot_height`` is maintained.
    ``"scale_height"`` elements will responsively resize to fit to the height available, *while
    maintaining the original aspect ratio*. For a ``Plot``, the aspect ratio
    ``plot_width/plot_height`` is maintained. A plot with ``"scale_height"`` mode needs
    to be wrapped in a ``Row`` or ``Column`` to be responsive.
    ``"scale_both"`` elements will responsively resize to for both the width and height available,
    *while maintaining the original aspect ratio*.
    """)
    # List in order for in-place changes to trigger changes, ref: https://github.com/bokeh/bokeh/issues/6841
    css_classes = List(String, help="""
    A list of css class names to add to this DOM element. Note: the class names are
    simply added as-is, no other guarantees are provided.
    It is also permissible to assign from tuples, however these are adapted -- the
    property will always contain a list.
    """).accepts(Seq(String), lambda x: list(x))
class Spacer(LayoutDOM):
    ''' A container for space used to fill an empty spot in a row or column.

    Purely visual: adds no children of its own, only occupies layout space.
    '''
class WidgetBox(LayoutDOM):
    ''' A container for widgets that are part of a layout.

    Children may be given positionally or via the ``children`` keyword,
    but not both.
    '''

    def __init__(self, *args, **kwargs):
        if len(args) > 0 and "children" in kwargs:
            raise ValueError("'children' keyword cannot be used with positional arguments")
        if len(args) > 0:
            # Positional children are normalized into the keyword form.
            kwargs["children"] = list(args)
        super(WidgetBox, self).__init__(**kwargs)

    @warning(EMPTY_LAYOUT)
    def _check_empty_layout(self):
        from itertools import chain
        if not list(chain(self.children)):
            return str(self)

    @warning(BOTH_CHILD_AND_ROOT)
    def _check_child_is_also_root(self):
        # A model that is both a document root and a layout child is a
        # configuration mistake worth warning about.
        offenders = [str(child) for child in self.children
                     if child.document is not None and child in child.document.roots]
        return ", ".join(offenders) if offenders else None

    children = List(Instance('bokeh.models.widgets.Widget'), help="""
    The list of widgets to put in the layout box.
    """)
@abstract
class Box(LayoutDOM):
    ''' Abstract base class for Row and Column. Do not use directly.

    '''

    def __init__(self, *args, **kwargs):
        if len(args) > 0 and "children" in kwargs:
            raise ValueError("'children' keyword cannot be used with positional arguments")
        if len(args) > 0:
            kwargs["children"] = list(args)
        # Promote bare widgets to WidgetBox containers before Model init.
        kwargs["children"] = self._wrap_children(kwargs.get("children", []))
        super(Box, self).__init__(**kwargs)

    def _wrap_children(self, children):
        ''' Wrap any Widgets of a list of child layouts in a WidgetBox.

        This allows for the convenience of just spelling Row(button1, button2).
        '''
        from .widgets.widget import Widget
        wrapped = []
        for child in children:
            if isinstance(child, Widget):
                # Mirror the widget's own sizing so the wrapper is invisible.
                child = WidgetBox(
                    children=[child],
                    sizing_mode=child.sizing_mode,
                    width=child.width,
                    height=child.height,
                    disabled=child.disabled
                )
            wrapped.append(child)
        return wrapped

    @warning(EMPTY_LAYOUT)
    def _check_empty_layout(self):
        from itertools import chain
        if not list(chain(self.children)):
            return str(self)

    @warning(BOTH_CHILD_AND_ROOT)
    def _check_child_is_also_root(self):
        offenders = [str(child) for child in self.children
                     if child.document is not None and child in child.document.roots]
        return ", ".join(offenders) if offenders else None

    # TODO: consider restricting children to Row/Column/Plot/WidgetBox to
    # prevent adding a plain widget directly into a box, which sometimes
    # works and sometimes looks disastrous.
    children = List(Instance(LayoutDOM), help="""
    The list of children, which can be other components including plots, rows, columns, and widgets.
    """)
class Row(Box):
    ''' Lay out child components in a single horizontal row.

    Children can be specified as positional arguments, as a single argument
    that is a sequence, or using the ``children`` keyword argument.
    '''
class Column(Box):
    ''' Lay out child components in a single vertical column.

    Children can be specified as positional arguments, as a single argument
    that is a sequence, or using the ``children`` keyword argument.
    '''
|
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import logging
import os
import shutil
from pants.backend.jvm.subsystems.scoverage_platform import ScoveragePlatform
from pants.backend.jvm.tasks.coverage.cobertura import Cobertura
from pants.backend.jvm.tasks.coverage.engine import NoCoverage
from pants.backend.jvm.tasks.coverage.jacoco import Jacoco
from pants.backend.jvm.tasks.coverage.scoverage import Scoverage
from pants.subsystem.subsystem import Subsystem
from pants.util.dirutil import safe_mkdir
from pants.util.strutil import safe_shlex_split
logger = logging.getLogger(__name__)
class CodeCoverageSettings:
    """A class containing settings for code coverage tasks."""

    def __init__(
        self,
        options,
        context,
        workdir,
        tool_classpath,
        confs,
        log,
        copy2=shutil.copy2,
        copytree=shutil.copytree,
        is_file=os.path.isfile,
        safe_md=safe_mkdir,
    ):
        self.options = options
        self.context = context
        self.workdir = workdir
        self.tool_classpath = tool_classpath
        self.confs = confs
        self.log = log
        self.coverage_dir = os.path.join(self.workdir, "coverage")
        # Each option entry may carry several shell-quoted JVM flags.
        self.coverage_jvm_options = [
            flag
            for jvm_option in options.coverage_jvm_options
            for flag in safe_shlex_split(jvm_option)
        ]
        self.coverage_open = options.coverage_open
        self.coverage_force = options.coverage_force
        # Injected collaborators to make unit testing cleaner.
        self.copy2 = copy2
        self.copytree = copytree
        self.is_file = is_file
        self.safe_makedir = safe_md

    @classmethod
    def from_task(cls, task, workdir=None):
        """Build settings straight from a task's options and context."""
        return cls(
            options=task.get_options(),
            context=task.context,
            workdir=workdir or task.workdir,
            tool_classpath=task.tool_classpath,
            confs=task.confs,
            log=task.context.log,
        )
class CodeCoverage(Subsystem):
    """Manages setup and construction of JVM code coverage engines."""
    options_scope = "coverage"
    @classmethod
    def subsystem_dependencies(cls):
        # Each engine factory is declared as a dependent subsystem so that
        # global_instance() works in get_coverage_engine below.
        return super().subsystem_dependencies() + (
            Cobertura.Factory,
            Jacoco.Factory,
            Scoverage.Factory,
        )
    # TODO(jtrobec): move these to subsystem scope after deprecating
    @staticmethod
    def register_junit_options(register, register_jvm_tool):
        # Registers the junit-scoped --coverage* options on the given registrar.
        register("--coverage", type=bool, fingerprint=True, help="Collect code coverage data.")
        register(
            "--coverage-processor",
            advanced=True,
            fingerprint=True,
            choices=["cobertura", "jacoco", "scoverage"],
            default=None,
            help="Which coverage processor to use if --coverage is enabled. If this option is "
            "unset but coverage is enabled implicitly or explicitly, defaults to 'cobertura'. "
            "If this option is explicitly set, implies --coverage. If this option is set to "
            "scoverage, then first scoverage MUST be enabled by passing option "
            "--scoverage-enable-scoverage.",
        )
        # We need to fingerprint this even though it nominally UI-only affecting option since the
        # presence of this option alone can implicitly flag on `--coverage`.
        register(
            "--coverage-open",
            type=bool,
            fingerprint=True,
            help="Open the generated HTML coverage report in a browser. Implies --coverage ",
        )
        register(
            "--coverage-jvm-options",
            advanced=True,
            type=list,
            fingerprint=True,
            help="JVM flags to be added when running the coverage processor. For example: "
            "{flag}=-Xmx4g {flag}=-Xms2g".format(flag="--coverage-jvm-options"),
        )
        register(
            "--coverage-force",
            advanced=True,
            type=bool,
            help="Attempt to run the reporting phase of coverage even if tests failed "
            "(defaults to False, as otherwise the coverage results would be unreliable).",
        )
        # register options for coverage engines
        # TODO(jtrobec): get rid of these calls when engines are dependent subsystems
        Cobertura.register_junit_options(register, register_jvm_tool)
    class InvalidCoverageEngine(Exception):
        """Indicates an invalid coverage engine type was selected."""
    def get_coverage_engine(self, task, output_dir, all_targets, execute_java):
        # Picks and constructs the coverage engine implied by the task's
        # options: raises InvalidCoverageEngine on contradictory scoverage
        # settings, returns NoCoverage when coverage is not requested.
        options = task.get_options()
        enable_scoverage = ScoveragePlatform.global_instance().get_options().enable_scoverage
        processor = options.coverage_processor
        # Scoverage must be globally enabled before it can be selected, and
        # once enabled it is the only valid engine.
        if processor == "scoverage" and not enable_scoverage:
            raise self.InvalidCoverageEngine(
                "Cannot set processor to scoverage without first enabling "
                "scoverage (by passing --scoverage-enable-scoverage option)"
            )
        if enable_scoverage:
            if processor not in (None, "scoverage"):
                raise self.InvalidCoverageEngine(
                    f"Scoverage is enabled. "
                    f"Cannot use {processor} as the engine. Set engine to scoverage "
                    f"(--test-junit-coverage-processor=scoverage)"
                )
            processor = "scoverage"
        # An explicit processor choice or a flagged --coverage-open implies
        # coverage even when --coverage itself is unset.
        if options.coverage or processor or options.is_flagged("coverage_open"):
            settings = CodeCoverageSettings.from_task(task, workdir=output_dir)
            if processor in ("cobertura", None):
                return Cobertura.Factory.global_instance().create(
                    settings, all_targets, execute_java
                )
            elif processor == "jacoco":
                return Jacoco.Factory.global_instance().create(settings, all_targets, execute_java)
            elif processor == "scoverage":
                return Scoverage.Factory.global_instance().create(
                    settings, all_targets, execute_java
                )
            else:
                # NB: We should never get here since the `--coverage-processor` is restricted by `choices`,
                # but for clarity.
                raise self.InvalidCoverageEngine(
                    "Unknown and unexpected coverage processor {!r}!".format(
                        options.coverage_processor
                    )
                )
        else:
            return NoCoverage()
|
from importlib import import_module
import math
import os
from PIL import Image, ImageDraw, ImageColor, ImageFont, ImageFilter
from misago.conf import settings
from misago.users.avatars import store
def set_avatar(user):
    """Render a dynamic avatar for *user* and persist it via the store.

    The drawer callable is resolved from the dotted path configured in
    ``settings.MISAGO_DYNAMIC_AVATAR_DRAWER``.
    """
    module_path, _, function_name = settings.MISAGO_DYNAMIC_AVATAR_DRAWER.rpartition('.')
    drawer = getattr(import_module(module_path), function_name)
    store.store_new_avatar(user, drawer(user))
"""
Default drawer
"""
def draw_default(user):
    """Draw the default dynamic avatar: colored background plus initial."""
    # Render at the largest configured size; smaller sizes are downscaled
    # elsewhere.
    edge = max(settings.MISAGO_AVATARS_SIZES)
    canvas = Image.new("RGBA", (edge, edge), 0)
    canvas = draw_avatar_bg(user, canvas)
    return draw_avatar_flavour(user, canvas)
# Background palette cycled by user pk in draw_avatar_bg.
# NOTE(review): '#0288D1' and '#0288d1' are the same color listed twice —
# presumably one entry was meant to be a different shade; confirm.
COLOR_WHEEL = ('#d32f2f', '#c2185b', '#7b1fa2', '#512da8',
               '#303f9f', '#1976d2', '#0288D1', '#0288d1',
               '#0097a7', '#00796b', '#388e3c', '#689f38',
               '#afb42b', '#fbc02d', '#ffa000', '#f57c00',
               '#e64a19')
COLOR_WHEEL_LEN = len(COLOR_WHEEL)
def draw_avatar_bg(user, image):
    """Fill *image* with a background color picked deterministically
    from COLOR_WHEEL based on the user's primary key."""
    image_size = image.size
    # BUG FIX: the original ``user.pk - COLOR_WHEEL_LEN * (user.pk / COLOR_WHEEL_LEN)``
    # relied on Python 2 integer division; under Python 3 ``/`` yields a float
    # and indexing the tuple with it raises TypeError. The modulo operator is
    # the intended (and equivalent) computation.
    color_index = user.pk % COLOR_WHEEL_LEN
    main_color = COLOR_WHEEL[color_index]
    rgb = ImageColor.getrgb(main_color)
    bg_drawer = ImageDraw.Draw(image)
    bg_drawer.rectangle([(0, 0), image_size], rgb)
    return image
# Bundled TTF used for the avatar initial; ships next to this module.
FONT_FILE = os.path.join(os.path.dirname(__file__), 'font.ttf')
def draw_avatar_flavour(user, image):
    """Draw the first character of the username roughly centered on *image*."""
    string = user.username[0]
    image_size = image.size[0]
    # Glyph sized to 70% of the avatar edge.
    size = int(image_size * 0.7)
    font = ImageFont.truetype(FONT_FILE, size=size)
    # NOTE(review): ImageFont.getsize() was removed in Pillow 10 — confirm the
    # pinned Pillow version or migrate to getbbox()/getlength().
    text_size = font.getsize(string)
    text_pos = ((image_size - text_size[0]) / 2,
                (image_size - text_size[1]) / 2)
    writer = ImageDraw.Draw(image)
    writer.text(text_pos, string, font=font)
    return image
"""
Some utils for drawring avatar programmatically
"""
CHARS = 'qwertyuiopasdfghjklzxcvbnm1234567890'
def string_to_int(string):
value = 0
for p, c in enumerate(string.lower()):
value += p * (CHARS.find(c))
return value
|
from concurrent. | futures import ThreadPoolExecutor
class SingletonThreadPoolExecutor(ThreadPoolExecutor):
    """Abstract singleton thread pool.

    Do not instantiate this class directly; derive a subclass instead.
    The first construction of a subclass creates the shared executor;
    every later construction returns that same object.
    """

    def __new__(cls, max_workers=None, thread_name_prefix=None):
        if cls is SingletonThreadPoolExecutor:
            # The base class itself is abstract by convention.
            raise NotImplementedError
        existing = getattr(cls, '_object', None)
        if existing is not None:
            return existing
        # NOTE(review): the singleton is a plain ThreadPoolExecutor, not an
        # instance of ``cls`` — subclass methods are unavailable on it, and
        # ``__init__`` is therefore never invoked on the returned object.
        cls._object = ThreadPoolExecutor(
            max_workers=max_workers,
            thread_name_prefix=thread_name_prefix
        )
        return cls._object
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright 2016 Twitter. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in co | mpliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
''' pmaphandler.py '''
import json
import tornado.web
from heron.shel | l.src.python import utils
class PmapHandler(tornado.web.RequestHandler):
    """
    Responsible for reporting memory map of a process given its pid.
    """

    # pylint: disable=attribute-defined-outside-init
    @tornado.web.asynchronous
    def get(self, pid):
        ''' get method '''
        # Run ``pmap`` for the pid and relay its output as a JSON body.
        # NOTE(review): '-pXX' looks unusual — pmap documents '-p', '-X' and
        # '-XX' as separate switches; confirm the intended flag.
        body = utils.str_cmd(['pmap', '-pXX', pid], None, None)
        self.content_type = 'application/json'
        self.write(json.dumps(body))
        self.finish()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import matplotlib.pyplot as plt
import matplotlib.pyplot as plt2
from numpy import *
from scipy import interpolate
from matplotlib import rcParams, rcParamsDefault
import argparse
from nema_common import *
# Image file extension for all saved plots; overwritten from --outputformat
# in the __main__ block below.
outputformat = ""
def plot_rates(geometry,float_activity,N_true,N_dsca,N_psca,N_acci,time):
    """Plot true/dsca/psca/acci event rates vs. activity for one geometry.

    N_* are event counts and ``time`` the acquisition time per point, so
    N/time/1000. converts counts to kcps. Saves
    <workdir_NECR><geometry>_rates.<outputformat> and restores rcParams.
    """
    rcParams['font.size'] = 20
    rcParams['legend.fontsize'] = 16
    fig, axs = plt2.subplots(nrows=1, ncols=1, sharex=True)
    plt2.subplots_adjust(left=0.2, right=0.95, top=0.95, bottom=0.17)
    plt2.plot(float_activity,N_true/time/1000.,'o-',label="true", markersize=4)
    plt2.plot(float_activity,N_dsca/time/1000.,'o-',label="dsca", markersize=4)
    plt2.plot(float_activity,N_psca/time/1000.,'o-',label="psca", markersize=4)
    plt2.plot(float_activity,N_acci/time/1000.,'o-',label="acci", markersize=4)
    plt2.legend(loc=2)
    plt2.xlim(0,90)
    plt2.ylim(ymin=0)
    plt2.xlabel("Activity concentration [kBq/cc]")
    plt2.ylabel("Rate [kcps]")
    plt2.savefig(workdir_NECR + geometry + "_rates." + outputformat)
    plt2.clf()
    plt2.close()
    # Restore defaults so later plots are not affected by the rcParams above.
    rcParams.update(rcParamsDefault)
def plot_necrs(float_activities, NECRs, colors, labels, necr_type, lstyles):
    """Overlay NECR curves for all geometries and save one summary figure.

    Python 2 only (uses ``xrange``).
    NOTE(review): relies on the module-level NECR_sin_max set by the
    __main__ block for the y-limit — confirm call order before reuse.
    """
    fig, axs = plt.subplots(nrows=1, ncols=1, sharex=True)
    for i in xrange(len(NECRs)):
        plt.plot(float_activities[i], NECRs[i], lstyles[i], color=colors[i], label=labels[i], markersize=4)
    rcParams.update(rcParamsDefault)
    rcParams['legend.fontsize'] = 11
    rcParams['font.size'] = 20
    FONTSIZE = 20
    plt.subplots_adjust(left=0.2, right=0.95, top=0.95, bottom=0.17)
    plt.legend(loc=1)
    plt.xlim(0,90)
    plt.ylim(0,1.1*NECR_sin_max)
    plt.xticks(fontsize=FONTSIZE)
    plt.yticks(fontsize=FONTSIZE)
    plt.xlabel("Activity concentration [kBq/cc]", fontsize=FONTSIZE)
    plt.ylabel("NECR [kcps]", fontsize=FONTSIZE)
    plt.savefig(workdir_NECR + "NECR_all_geometries_" + necr_type + '.' + outputformat)
    plt.clf()
    plt.close()
def calculate_reduction_for_necr_simulations(necr_simulations):
    """For every geometry/activity, count coincidences above/below the
    2nd-level selection cut and append the counters to
    second_lvl_selection.txt (one line per activity).

    Python 2 only (print statement below).
    """
    for g in geometries_NECR:
        sls_file = workdir_NECR + g + "/second_lvl_selection.txt"
        # Start from a clean file for this geometry.
        if os.path.exists(sls_file):
            os.system('rm ' + sls_file)
        for a in activities_NECR:
            coincidences_file = necr_simulations + "/" + g + "_" + a + "_NECR_COINCIDENCES_short"
            tmp = loadtxt(coincidences_file)
            # Column layout inferred from the names below — presumably
            # x, y, (z), t for each of the two hits; confirm against the
            # coincidence-file writer.
            posX1 = tmp[:,0]
            posY1 = tmp[:,1]
            times1 = tmp[:,3]
            posX2 = tmp[:,4]
            posY2 = tmp[:,5]
            times2 = tmp[:,7]
            [tim_diffs, ang_diffs] = calculate_differences(times1, times2, posX1, posY1, posX2, posY2)
            [counter_above, counter_below] = calculate_counters(tim_diffs, ang_diffs)
            with open(sls_file, "a") as myfile:
                myfile.write("{0}\t{1}\t{2}\n".format(counter_above, counter_below, counter_above+counter_below))
            print g + "\t" + a + "\t" + str(counter_above) + "\t" + str(counter_below) + "\t" + str(counter_above+counter_below)
def plot_reduction_for_necr_simulations():
    """Plot the 2nd-level-selection reduction [%] vs. activity for every
    geometry that has a second_lvl_selection.txt, and save one figure.

    Line style encodes scanner length, color encodes the number of layers.
    """
    rcParams['font.size'] = 24
    rcParams['legend.fontsize'] = 18
    activities = []
    for a in activities_NECR:
        activities.append(float(a)/22000.*1000) # in kBq/cc
    new_activities = linspace(activities[0],activities[-1],100)
    fig = plt.figure(figsize=(8, 6))
    ax = fig.add_subplot(111)
    plt.ylim(ymin=0,ymax=80)
    plt.xlim(xmin=0,xmax=90)
    for g in geometries_NECR:
        lab = ""
        c = ""
        l = ""
        if "1lay" in g:
            lab += "1 layer, "
            c = 'k'
        elif "2lay" in g:
            lab += "2 layers, "
            c = 'r'
        if "L020" in g:
            lab += "L = 20 cm"
            l = '--'
        elif "L050" in g:
            lab += "L = 50 cm"
            l = '-'
        elif "L100" in g:
            lab += "L = 100 cm"
            l = '-.'
        elif "L200" in g:
            lab += "L = 200 cm"
            l = ':'
        sls_file = workdir_NECR + g + "/second_lvl_selection.txt"
        if os.path.exists(sls_file):
            tmp = loadtxt(sls_file)
            counter_above = tmp[:,0]
            counter_below = tmp[:,1]
            reduction = counter_below/(counter_above+counter_below)*100.
            # Smooth the curve with a spline before plotting.
            new_reduction = interpolate.splev(new_activities, interpolate.splrep(activities, reduction, s=5), der=0)
            plt.plot(new_activities, new_reduction, linestyle=l, color=c, label=lab)
    plt.legend(loc=4)
    plt.xlabel("Activity concentration [kBq/cc]")
    plt.ylabel("Reduction [%]")
    # BUG FIX: the original concatenated "second_lvl_selection" + outputformat
    # without the '.' separator (yielding e.g. "second_lvl_selectionpng");
    # every other savefig in this script joins the extension with '.'.
    plt.savefig(workdir_NECR + "second_lvl_selection." + outputformat, bbox_inches='tight')
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Plot NECR.',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-of', '--outputformat',
type=str,
default="png",
help='output format of images')
parser.add_argument('-r', '--reduction',
| dest='reduction',
action='store_true',
help='set if you want to calculate and plot the reduction ' +
'given by the 2nd level of the reduction method in case ' +
'of the NECR simulations (must be used with -ns option)')
parser.add_argument('-ns', '--necr-simulations',
dest='necr_simulations',
type=str,
| help='path to the base directory of the NECR simulations')
args = parser.parse_args()
outputformat = args.outputformat
if args.reduction:
create_work_directories()
calculate_reduction_for_necr_simulations(args.necr_simulations)
plot_reduction_for_necr_simulations()
else:
plt.subplots(nrows=1, ncols=1, sharex=True)
float_activities = []
NECRs_sin = []
NECRs_ctr = []
colors = []
labels = []
lstyles = []
for geometry in geometries_NECR:
tmp = loadtxt(workdir_NECR + geometry + "/necr_dependency.txt")
float_activity = tmp[:,0]
SF_sin = tmp[:,1]
SF_crt = tmp[:,2]
NECR_sin = tmp[:,3]
NECR_sin_max = max(NECR_sin)
NECR_ctr = tmp[:,4]
T = tmp[:,5]
S = tmp[:,6]
N_true = tmp[:,7]
N_dsca = tmp[:,8]
N_psca = tmp[:,9]
N_acci = tmp[:,10]
time = tmp[:,11]
plot_rates(geometry,float_activity,N_true,N_dsca,N_psca,N_acci,time)
new_label = ""
if "1lay" in geometry:
linestyle='o-'
new_label += "1 layer"
else:
linestyle = 'o--'
new_label += "2 layers"
if "L020" in geometry:
datacolor = 'r'
new_label += ", L = 20 cm"
elif "L050" in geometry:
datacolor = 'b'
new_label += ", L = 50 cm"
elif "L100" in geometry:
datacolor = 'y'
new_label += ", L = 100 cm"
elif "L200" in geometry:
datacolor = 'g'
new_label += ", L = 200 cm"
float_activities.append(float_activity)
NECRs_sin.append(NECR_sin)
NECRs_ctr.append(NECR_ctr)
colors.append(datacolor)
labels.append(new_label)
lstyles.append(linestyle)
plot_necrs(float_activities, NECRs_sin, colors, labels, "sin", lstyles)
plot_necrs(float_activities, NECRs_ctr, colors, labels, "ctr", lstyles)
|
# Appears to be waf's generated build lock data: the waf version plus the
# tool list loaded during configure. Presumably regenerated by
# `waf configure` — confirm before editing by hand.
version = 0x105016
tools = [{'tool': 'ar', 'tooldir': None, 'funs': None}, {'tool': 'cc', 'tooldir': None, 'funs': None}, {'tool': 'gcc', 'tooldir': None, 'funs': None}, {'tool': 'compiler_cc', 'tooldir': None, 'funs': None}, {'tool': 'cxx', 'tooldir': None, 'funs': None}, {'tool': 'gxx', 'tooldir': None, 'funs': None}, {'tool': 'compiler_cxx', 'tooldir': None, 'funs': None}, {'tool': 'node_addon', 'tooldir': None, 'funs': None}]
|
__author__ = 'Bohdan Mushkevych'
import json
from synergy.db.model.freerun_process_entry import FreerunProcessEntry
from synergy.db.dao.freerun_process_dao import FreerunProcessDao
from synergy.mx.base_request_handler import valid_action_request
from syner | gy.mx.abstract_action_handler import AbstractActionHandler
from synergy.scheduler.scheduler_constants import STATE_MACHINE_FREERUN
class FreerunActionHandler(AbstractActionHandler):
    """MX action handler for freerun process entries: CRUD plus UOW control."""

    def __init__(self, request, **values):
        super(FreerunActionHandler, self).__init__(request, **values)
        self.process_name = self.request_arguments.get('process_name')
        self.entry_name = self.request_arguments.get('entry_name')
        self.freerun_process_dao = FreerunProcessDao(self.logger)
        # Both identifiers are required to address a freerun entry.
        self.is_request_valid = bool(self.process_name and self.entry_name)
        if self.is_request_valid:
            self.process_name = self.process_name.strip()
            self.entry_name = self.entry_name.strip()
            self.is_requested_state_on = self.request_arguments.get('is_on') == 'on'

    @AbstractActionHandler.thread_handler.getter
    def thread_handler(self):
        handler_key = (self.process_name, self.entry_name)
        return self.scheduler.freerun_handlers[handler_key]

    @AbstractActionHandler.process_entry.getter
    def process_entry(self):
        return self.thread_handler.process_entry

    @AbstractActionHandler.uow_id.getter
    def uow_id(self):
        return self.process_entry.related_unit_of_work

    @valid_action_request
    def cancel_uow(self):
        freerun_state_machine = self.scheduler.timetable.state_machines[STATE_MACHINE_FREERUN]
        freerun_state_machine.cancel_uow(self.process_entry)
        return self.reply_ok()

    @valid_action_request
    def get_event_log(self):
        return {'event_log': self.process_entry.event_log}

    def _parsed_arguments(self):
        """Decode and JSON-parse the optional 'arguments' request field.

        Returns {} when the field is empty; shared by create/update.
        """
        arguments = self.request_arguments['arguments']
        if not arguments:
            return {}
        if isinstance(arguments, bytes):
            arguments = arguments.decode('unicode-escape')
        return json.loads(arguments)

    @valid_action_request
    def create_entry(self):
        process_entry = FreerunProcessEntry()
        process_entry.process_name = self.process_name
        process_entry.entry_name = self.entry_name
        process_entry.arguments = self._parsed_arguments()
        process_entry.description = self.request_arguments['description']
        process_entry.is_on = self.is_requested_state_on
        process_entry.trigger_frequency = self.request_arguments['trigger_frequency']
        self.freerun_process_dao.update(process_entry)
        self.scheduler._register_process_entry(process_entry, self.scheduler.fire_freerun_worker)
        return self.reply_ok()

    @valid_action_request
    def delete_entry(self):
        handler_key = (self.process_name, self.entry_name)
        self.thread_handler.deactivate()
        self.freerun_process_dao.remove(handler_key)
        del self.scheduler.freerun_handlers[handler_key]
        self.logger.info(f'MX: Deleted FreerunThreadHandler for {handler_key}')
        return self.reply_ok()

    @valid_action_request
    def update_entry(self):
        # BUG FIX: capture change flags BEFORE mutating the entry. The
        # original compared process_entry.is_on to the requested state AFTER
        # overwriting it below, so the comparison was always False and the
        # trigger was never (de)activated on a state change. The interval
        # flag already followed the capture-first pattern.
        is_interval_changed = self.process_entry.trigger_frequency != self.request_arguments['trigger_frequency']
        is_state_changed = self.process_entry.is_on != self.is_requested_state_on
        self.process_entry.arguments = self._parsed_arguments()
        self.process_entry.description = self.request_arguments['description']
        self.process_entry.is_on = self.is_requested_state_on
        self.process_entry.trigger_frequency = self.request_arguments['trigger_frequency']
        self.freerun_process_dao.update(self.process_entry)
        if is_interval_changed:
            self.change_interval()
        if is_state_changed:
            if self.is_requested_state_on:
                self.activate_trigger()
            else:
                self.deactivate_trigger()
        return self.reply_ok()
|
# Compute the area of an axis-aligned rectangle given its coordinates
# x1, x2, y1, y2. (Original comment in Spanish; translated.)
# Python 2 script: uses the print statement and raw_input.
from math import pi
# NOTE(review): `pi` is unused in this exercise — likely left over from a
# sibling exercise in the series.
print 'Ejercicio 6'
print '-'*60
x1 = float(raw_input('Introduce x1: '))
x2 = float(raw_input('Introduce x2: '))
y1 = float(raw_input('Introduce y1: '))
y2 = float(raw_input('Introduce y2: '))
# Width and height from the coordinate differences; negative if x2 < x1 etc.
base= x2-x1
altura= y2-y1
print 'El area del rectangulo es: ', base * altura
raw_input('Pulse la tecla enter para finalizar')
|
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn import tree
from subprocess import call
# https://archive.ics.uci.edu/ml/machine-learning-databases/mushroom/agaricus-lepiota.names
#
# Load the mushroom dataset into dataframe 'X'. Header information is on the
# dataset's website at the UCI ML Repo; the dataset encodes missing values
# as '?'.
# FIXED column-name typos from the original: ' veil-type' (leading space) and
# 'spore-print-colo' (missing trailing 'r').
X = pd.read_csv('Datasets/agaricus-lepiota.data', names=['label', 'cap-shape', 'cap-surface', 'cap-color',
                                                         'bruises', 'odor', 'gill-attachment',
                                                         'gill-spacing', 'gill-size', 'gill-color',
                                                         'stalk-shape', 'stalk-root',
                                                         'stalk-surface-above-ring',
                                                         'stalk-surface-below-ring', 'stalk-color-above-ring',
                                                         'stalk-color-below-ring', 'veil-type', 'veil-color',
                                                         'ring-number', 'ring-type', 'spore-print-color', 'population',
                                                         'habitat'], header=None)
# INFO: An easy way to show which rows have nans in them
# print X[pd.isnull(X).any(axis=1)]
#
# Drop any row with a NaN ('?' is this dataset's NA marker).
# np.nan instead of np.NaN: the NaN alias was removed in NumPy 2.0.
X.replace(to_replace='?', value=np.nan, inplace=True)
X.dropna(axis=0, inplace=True)
print(X.shape)
#
# Copy the labels out of the dataset into 'y', then remove them from X.
# Encode the labels with .map(): edible 'e' -> 1, poisonous 'p' -> 0.
# (The original comment mentioned canadian/kama/rosa — a copy-paste from the
# wheat-seeds exercise.)
X['label'] = X['label'].map({'e': 1, 'p': 0})
y = X['label'].copy()
X.drop(labels=['label'], axis=1, inplace=True)
#
# One-hot encode the entire (all-categorical) dataset.
X = pd.get_dummies(X)
#
# Split into train/test: 30% test with random_state 7 for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=7)
#
# Create a decision-tree classifier, train it, and score it on the test set.
model = tree.DecisionTreeClassifier()
model.fit(X_train, y_train)
score = model.score(X_test, y_test)
print('High-Dimensionality Score: %f' % round((score * 100), 3))
#
# Output a .DOT file, then render it to .PNG with graphviz
# (`brew install graphviz`, or use http://webgraphviz.com/).
# Pass the fitted estimator itself: exporting the private `model.tree_`
# attribute is not part of sklearn's public export_graphviz API.
tree.export_graphviz(model, out_file='tree.dot', feature_names=X.columns)
|
<Group: Roll>",
],
ordered=False
)
    def test_retrieve_forward_m2m_items(self):
        # Forward traversal works through the intermediary (through) model.
        self.assertQuerysetEqual(
            self.roll.members.all(), [
                "<Person: Bob>",
            ]
        )
    def test_cannot_use_setattr_on_reverse_m2m_with_intermediary_model(self):
        # Reverse direct assignment must be rejected when a through model exists.
        msg = (
            "Cannot set values on a ManyToManyField which specifies an "
            "intermediary model. Use m2m_through_regress.Membership's Manager "
            "instead."
        )
        with self.assertRaisesMessage(AttributeError, msg):
            self.bob.group_set.set([])
    def test_cannot_use_setattr_on_forward_m2m_with_intermediary_model(self):
        # Forward direct assignment must be rejected when a through model exists.
        msg = (
            "Cannot set values on a ManyToManyField which specifies an "
            "intermediary model. Use m2m_through_regress.Membership's Manager "
            "instead."
        )
        with self.assertRaisesMessage(AttributeError, msg):
            self.roll.members.set([])
    def test_cannot_use_create_on_m2m_with_intermediary_model(self):
        # .create() is unavailable on the forward manager with a through model.
        with self.assertRaises(AttributeError):
            self.rock.members.create(name="Anne")
    def test_cannot_use_create_on_reverse_m2m_with_intermediary_model(self):
        # .create() is unavailable on the reverse manager with a through model.
        with self.assertRaises(AttributeError):
            self.bob.group_set.create(name="Funk")
    def test_retrieve_reverse_m2m_items_via_custom_id_intermediary(self):
        # Reverse traversal through an intermediary that uses a custom id field.
        self.assertQuerysetEqual(
            self.frank.group_set.all(), [
                "<Group: Rock>",
                "<Group: Roll>",
            ],
            ordered=False
        )
    def test_retrieve_forward_m2m_items_via_custom_id_intermediary(self):
        # Forward traversal through an intermediary that uses a custom id field.
        self.assertQuerysetEqual(
            self.roll.user_members.all(), [
                "<User: frank>",
            ]
        )
    def test_join_trimming_forwards(self):
        "Check that we don't involve too many copies of the intermediate table when doing a join. Refs #8046, #8254"
        # Filtering by a through-model field must not duplicate result rows.
        self.assertQuerysetEqual(
            self.rock.members.filter(membership__price=50), [
                "<Person: Jim>",
            ]
        )
def test_join_trimming_reverse(se | lf):
self.assertQuerysetEqual(
self.bob.group_set.filter(membership__price=50), [
"<Group: Roll>",
]
)
class M2MThroughSerializationTestCase(TestCase):
    """Serialization of models linked through an explicit intermediary."""
    @classmethod
    def setUpTestData(cls):
        # One person and one group joined by an explicit Membership row.
        cls.bob = Person.objects.create(name="Bob")
        cls.roll = Group.objects.create(name="Roll")
        cls.bob_roll = Membership.objects.create(person=cls.bob, group=cls.roll)
    def test_serialization(self):
        "m2m-through models aren't serialized as m2m fields. Refs #8134"
        # The pks are interpolated so the test is independent of autonumbering.
        pks = {"p_pk": self.bob.pk, "g_pk": self.roll.pk, "m_pk": self.bob_roll.pk}
        out = StringIO()
        management.call_command("dumpdata", "m2m_through_regress", format="json", stdout=out)
        self.assertJSONEqual(
            out.getvalue().strip(),
            '[{"pk": %(m_pk)s, "model": "m2m_through_regress.membership", "fields": {"person": %(p_pk)s, "price": '
            '100, "group": %(g_pk)s}}, {"pk": %(p_pk)s, "model": "m2m_through_regress.person", "fields": {"name": '
            '"Bob"}}, {"pk": %(g_pk)s, "model": "m2m_through_regress.group", "fields": {"name": "Roll"}}]'
            % pks
        )
        out = StringIO()
        management.call_command("dumpdata", "m2m_through_regress", format="xml",
                                indent=2, stdout=out)
        self.assertXMLEqual(out.getvalue().strip(), """
<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
  <object pk="%(m_pk)s" model="m2m_through_regress.membership">
    <field to="m2m_through_regress.person" name="person" rel="ManyToOneRel">%(p_pk)s</field>
    <field to="m2m_through_regress.group" name="group" rel="ManyToOneRel">%(g_pk)s</field>
    <field type="IntegerField" name="price">100</field>
  </object>
  <object pk="%(p_pk)s" model="m2m_through_regress.person">
    <field type="CharField" name="name">Bob</field>
  </object>
  <object pk="%(g_pk)s" model="m2m_through_regress.group">
    <field type="CharField" name="name">Roll</field>
  </object>
</django-objects>
""".strip() % pks)
class ToFieldThroughTests(TestCase):
    def setUp(self):
        """Create one linked car/driver pair plus decoys so pks do not align."""
        self.car = Car.objects.create(make="Toyota")
        self.driver = Driver.objects.create(name="Ryan Briscoe")
        CarDriver.objects.create(car=self.car, driver=self.driver)
        # We are testing if wrong objects get deleted due to using wrong
        # field value in m2m queries. So, it is essential that the pk
        # numberings do not match.
        # Create one intentionally unused driver to mix up the autonumbering
        self.unused_driver = Driver.objects.create(name="Barney Gumble")
        # And two intentionally unused cars.
        self.unused_car1 = Car.objects.create(make="Trabant")
        self.unused_car2 = Car.objects.create(make="Wartburg")
def test_to_field(self):
self.assertQuerysetEqual(
self.car.drivers.all(),
["<Driver: Ryan Briscoe>"]
)
def test_to_field_reverse(self):
self.assertQuerysetEqual(
self.driver.car_set.all(),
["<Car: Toyota>"]
)
def test_to_field_clear_reverse(self):
self.driver.car_set.clear()
self.assertQuerysetEqual(
self.driver.car_set.all(), [])
def test_to_field_clear(self):
self.car.drivers.clear()
self.assertQuerysetEqual(
self.car.drivers.all(), [])
# Low level tests for _add_items and _remove_items. We test these methods
# because .add/.remove aren't available for m2m fields with through, but
# through is the only way to set to_field currently. We do want to make
# sure these methods are ready if the ability to use .add or .remove with
# to_field relations is added some day.
def test_add(self):
self.assertQuerysetEqual(
self.car.drivers.all(),
["<Driver: Ryan Briscoe>"]
)
# Yikes - barney is going to drive...
self.car.drivers._add_items('car', 'driver', self.unused_driver)
self.assertQuerysetEqual(
self.car.drivers.all(),
["<Driver: Barney Gumble>", "<Driver: Ryan Briscoe>"]
)
def test_add_null(self):
nullcar = Car.objects.create(make=None)
with self.assertRaises(ValueError):
nullcar.drivers._add_items('car', 'driver', self.unused_driver)
def test_add_related_null(self):
nulldriver = Driver.objects.create(name=None)
with self.assertRaises(ValueError):
self.car.drivers._add_items('car', 'driver', nulldriver)
def test_add_reverse(self):
car2 = Car.objects.create(make="Honda")
self.assertQuerysetEqual(
self.driver.car_set.all(),
["<Car: Toyota>"]
)
self.driver.car_set._add_items('driver', 'car', car2)
self.assertQuerysetEqual(
self.driver.car_set.all(),
["<Car: Toyota>", "<Car: Honda>"],
ordered=False
)
def test_add_null_reverse(self):
nullcar = Car.objects.create(make=None)
with self.assertRaises(ValueError):
self.driver.car_set._add_items('driver', 'car', nullcar)
def test_add_null_reverse_related(self):
nulldriver = Driver.objects.create(name=None)
with self.assertRaises(ValueError):
nulldriver.car_set._add_items('driver', 'car', self.car)
def test_remove(self):
self.assertQuerysetEqual(
self.car.drivers.all(),
["<Driver: Ryan Briscoe>"]
)
self.car.drivers._remove_items('car', 'driver', self.driver)
self.assertQuerysetEqual(
self.car.drivers.all(), [])
def test_remove_reverse(self):
self.assertQuerysetEqual(
self.driver.car_set.all(), |
from config | import configs
from onelove import create_app
config = | configs['development']
app = create_app(config)
|
# -*- coding: utf-8 -*-
import logging
from flask import Flask, redirect, request, jsonify, render_template, session
import requests
import fbvoting.pagebuilders.buildhome
import fbvoting.pagebuilders.buildoverview
import fbvoting.pagebuilders.buildprofile
import fbvoting.pagebuilders.buildfriends
import fbvoting.pagebuilders.buildrank
import fbvoting.pagebuilders.buildvote
import fbvoting.pagebuilders.builderrors
import fbvoting.pagebuilders.ajaxfeedback
import fbvoting.pagebuilders.jsonchart
import fbvoting.pagebuilders.commons
from fbvoting.mylogging import report, report_view
from fbvoting.apis.youtube import youtube_search
from fbvoting.lib import add_GET_params, url_for, as_full_url
from fbvoting.serverutils import refresh_token
from fbvoting.serverutils import configure_app
from fbvoting.admininterface import activate_admin_interface
from fbvoting.apis.fb import store_oauth_token
logger = logging.getLogger(__name__)
# Reuse a single pooled session for all outbound HTTP calls.
requests = requests.session()
app, debugged_app = configure_app(Flask(__name__))


def route(url):
    """Shorthand decorator: register *url* for both GET and POST requests.

    Idiom fix: was a lambda assignment (PEP 8 E731); a def keeps the same
    interface while giving the helper a proper name in tracebacks.
    """
    return app.route(url, methods=['GET', 'POST'])


activate_admin_interface(route)
#### HANDLING LOGIN ####
@route('/auth')
def store_auth_token():
    # Persist the OAuth token that the client-side Facebook login posts back.
    return store_oauth_token(request.form.get('token'))
#### PAGES ####
@route('/')
@report_view
def index():
    """Landing page."""
    return fbvoting.pagebuilders.buildhome.build_home()
@route('/ajax/intro')
@report_view
def home_intro():
    """Intro fragment loaded asynchronously by the home page."""
    return fbvoting.pagebuilders.buildhome.build_intro()
@route('/profile')
@refresh_token
@report_view
def egoboost():
    """Current user's profile page."""
    return fbvoting.pagebuilders.buildprofile.build_profile()
@route('/friends')
@refresh_token
@report_view
def friends():
    """Friends overview page."""
    return fbvoting.pagebuilders.buildfriends.build_friends()
@route('/votes')
@refresh_token
@report_view
def overview():
    """Overview page listing the user's votes.

    Consistency fix: the decorator order was reversed relative to every other
    token-refreshing view (egoboost, friends, vote, ...); they all apply
    @refresh_token outermost.
    """
    return fbvoting.pagebuilders.buildoverview.build_overview()
@route('/votes/<category>')
@refresh_token
@report_view
def vote(category):
    """Voting page for one category."""
    return fbvoting.pagebuilders.buildvote.build_vote(category)
@app.route('/savevote', methods=["POST"])
def savevote():
    """Persist a submitted vote, then redirect to the next page."""
    params = fbvoting.pagebuilders.buildvote.savevote(request.form)
    target = request.args.get('next', url_for('overview'))
    if not target.startswith('https'):
        target = as_full_url(target)
    target = add_GET_params(target, params)
    token = request.args.get('token')
    if token is not None:
        # Propagate the token so the next request stays authenticated.
        target = add_GET_params(target, {'token': token})
    report.mark('saved-vote')
    return redirect(target)
@route('/recommendation')
@refresh_token
@report_view
def recommendation():
    """Recommendations overview page."""
    return fbvoting.pagebuilders.buildrank.build_recommendation_overview()
@route('/chart')
@refresh_token
@report_view
def chart():
    """Charts overview page."""
    return fbvoting.pagebuilders.buildrank.build_chart_overview()
@route('/chart/<category>')
@refresh_token
@report_view
def category_chart(category):
    """Chart page for one category, with an optional free-text query."""
    query = request.args.get('query')
    if query:
        return fbvoting.pagebuilders.buildrank.build_category_chart_from_query(category, query)
    page = int(request.args.get('page', 0))
    playfirst = bool(request.args.get('playfirst'))
    return fbvoting.pagebuilders.buildrank.build_category_chart(category, page, playfirst=playfirst)
@route('/recommendation/<category>')
@refresh_token
@report_view
def category_recommendation(category):
    """Recommendation page for one category, paginated."""
    page = int(request.args.get('page', 0))
    playfirst = bool(request.args.get('playfirst'))
    return fbvoting.pagebuilders.buildrank.build_category_recommendation(category, page,
                                                                        playfirst=playfirst)
@route('/about')
@report_view
def about():
    """Static 'about' page, rendered without Facebook integration."""
    context = fbvoting.pagebuilders.commons.get_base_data()
    context.update(active_section='about', activate_fb=False)
    return render_template('about.html', **context)
#### ajax ####
@route('/ajax/check-token/')
def check_token_in_cookies():
    # True iff the session already holds a Facebook OAuth token.
    return jsonify({'results': 'oauth_token' in session})
@route('/ajax/musicbrainz/log/update')
def musicbrainz_logger():
    # NOTE(review): fbvoting.apis.musicbrainz is not imported by name in this
    # module; presumably one of the fbvoting imports above pulls it in -- verify.
    fbvoting.apis.musicbrainz.log_update()
    return "OK\n"
@route('/ajax/musicbrainz/check/')
def musicbrainz_check_if_exist():
    # NOTE(review): reads request.form even though the route also accepts GET;
    # confirm callers POST the fields this expects.
    return jsonify({'results': fbvoting.pagebuilders.buildvote.check_with_suggestion(request.form)})
@route('/ajax/musicbrainz/search/artist')
def musicbrainz_search_artist():
    """Autocomplete endpoint: artist suggestions for the current query."""
    query = request.args.get('q', '')
    suggestions = fbvoting.apis.musicbrainz.search_artists(query)
    return jsonify({'query': query, 'suggestions': suggestions})
@route('/ajax/musicbrainz/search/song')
def musicbrainz_search_song():
    """Autocomplete endpoint: song suggestions for query, artist and category."""
    query = request.args.get('q', '')
    suggestions = fbvoting.apis.musicbrainz.search_songs(
        query,
        request.args.get('artist'),
        category=request.args.get('category', '')
    )
    return jsonify({'query': query, 'suggestions': suggestions})
@route('/ajax/youtube/search')
def ajax_youtube_search():
    # NOTE(review): 'max-results' arrives as a string from the query string
    # (the default 4 is an int); presumably youtube_search tolerates both --
    # verify.
    return jsonify({ 'results': youtube_search(
        request.args['q'],
        max_results=request.args.get('max-results', 4)
        ) } )
@route('/ajax/rank/<rank_type>/<category>')
def ajax_ranks(rank_type, category):
    """Return one page of ranking entries for the given rank type/category."""
    page = int(request.args.get('page', 0))
    results = fbvoting.pagebuilders.jsonchart.get_ranks(rank_type, category, page=page)
    return jsonify({'results': results})
@route('/feedback/put/<category>/<song>')
def feedback(category, song):
    # Store a feedback rating for (category, song); values come from the query string.
    return fbvoting.pagebuilders.ajaxfeedback.put_feedback(category, song, request.args)
@route('/feedback/get/<category>/<song>')
def get_rating(category, song):
    # Fetch the stored rating for (category, song).
    return fbvoting.pagebuilders.ajaxfeedback.get_rating(category, song)
## ERROR PAGES ##
# Bug fix: the original lambda captured `error_code` by reference (late
# binding), so after the loop BOTH handlers rendered the last code (500).
# Binding the code as a default argument freezes it per iteration.
for error_code in (404, 500):
    app.error_handler_spec[None][error_code] = (
        lambda _, code=error_code: fbvoting.pagebuilders.builderrors.build_error(code)
    )
###############
logger.info("FBVoting server is now ready.")
# NOTE(review): fbvoting.conf is not imported by name here; presumably one of
# the fbvoting imports above makes the submodule reachable -- verify.
if fbvoting.conf.DEBUG:
    logger.warn("We are in DEBUG mode.")
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=fbvoting.conf.PORT)
|
#
# Copyright (C) 2007, One Laptop Per Child
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
import logging
import dbus
from dbus.gobject_service import ExportedGObject
import base64
# D-Bus identifiers for the shared WebActivity tube.
SERVICE = "org.laptop.WebActivity"
IFACE = SERVICE
PATH = "/org/laptop/WebActivity"
_logger = logging.getLogger('messenger')
class Messenger(ExportedGObject):
    """D-Bus object exported on a telepathy tube that keeps the
    participants' shared-link lists synchronised."""
    def __init__(self, tube, is_initiator, model):
        ExportedGObject.__init__(self, tube, PATH)
        self.tube = tube
        self.is_initiator = is_initiator
        # Bus names of the current tube participants.
        self.members = []
        # Becomes True once our signal receiver has been registered.
        self.entered = False
        self.model = model
        self.bus_name = None
        self.tube.watch_participants(self.participant_change_cb)
    def participant_change_cb(self, added, removed):
        """Track joins/leaves; on the first callback register our signal
        receiver and, when joining, sync links with existing members."""
        _logger.debug('Participants change add=%s rem=%s'
            %(added, removed))
        for handle, bus_name in added:
            _logger.debug('Add member handle=%s bus_name=%s'
                %(str(handle), str(bus_name)))
            self.members.append(bus_name)
        for handle in removed:
            _logger.debug('Remove member %r', handle)
            try:
                self.members.remove(self.tube.participants[handle])
            except ValueError:
                _logger.debug('Remove member %r - already absent', handle)
        if not self.entered:
            self.tube.add_signal_receiver(self._add_link_receiver, '_add_link',
                IFACE, path=PATH,
                sender_keyword='sender',
                byte_arrays=True)
            self.bus_name = self.tube.get_unique_name()
            if self.is_initiator:
                _logger.debug('Initialising a new shared browser, I am %s .'
                    %self.tube.get_unique_name())
            else:
                # sync with other members
                _logger.debug('Joined I am %s .'%self.bus_name)
                for member in self.members:
                    if member != self.bus_name:
                        _logger.debug('Get info from %s' %member)
                        self.tube.get_object(member, PATH).sync_with_members(
                            self.model.get_links_ids(), dbus_interface=IFACE,
                            reply_handler=self.reply_sync, error_handler=lambda
                            e:self.error_sync(e, 'transfering file'))
        self.entered = True
    def reply_sync(self, a_ids, sender):
        """Reply handler for sync_with_members: push the peer every link of
        ours that it does not already have."""
        # Drop the '' padding element appended by sync_with_members.
        a_ids.pop()
        for link in self.model.data['shared_links']:
            if link['hash'] not in a_ids:
                self.tube.get_object(sender, PATH).send_link(
                    link['hash'], link['url'], link['title'], link['color'],
                    link['owner'], link['thumb'], link['timestamp'])
    def error_sync(self, e, when):
        # Log D-Bus call failures from the sync round-trip.
        _logger.error('Error %s: %s'%(when, e))
    @dbus.service.method(dbus_interface=IFACE, in_signature='as',
        out_signature='ass', sender_keyword='sender')
    def sync_with_members(self, b_ids, sender=None):
        '''Sync with members '''
        # Drop the '' padding appended by the caller (see below).
        b_ids.pop()
        # links the caller wants from me
        for link in self.model.data['shared_links']:
            if link['hash'] not in b_ids:
                self.tube.get_object(sender, PATH).send_link(
                    link['hash'], link['url'], link['title'], link['color'],
                    link['owner'], link['thumb'], link['timestamp'])
        a_ids = self.model.get_links_ids()
        # Pad with '' -- presumably so the D-Bus 'as' array is never empty;
        # receivers pop() it back off.
        a_ids.append('')
        # links I want from the caller
        return (a_ids, self.bus_name)
    @dbus.service.method(dbus_interface=IFACE, in_signature='ssssssd',
        out_signature='')
    def send_link(self, identifier, url, title, color, owner, buf, timestamp):
        '''Send link'''
        a_ids = self.model.get_links_ids()
        if identifier not in a_ids:
            # buf carries the base64-encoded thumbnail image.
            thumb = base64.b64decode(buf)
            self.model.add_link(url, title, thumb, owner, color, timestamp)
    @dbus.service.signal(IFACE, signature='sssssd')
    def _add_link(self, url, title, color, owner, thumb, timestamp):
        '''Signal to send the link information (add)'''
        _logger.debug('Add Link: %s '%url)
    def _add_link_receiver(self, url, title, color, owner, buf, timestamp,
        sender=None):
        '''Member sent a link'''
        handle = self.tube.bus_name_to_handle[sender]
        # Ignore the copy of the signal we emitted ourselves.
        if self.tube.self_handle != handle:
            thumb = base64.b64decode(buf)
            self.model.add_link(url, title, thumb, owner, color, timestamp)
            _logger.debug('Added link: %s to linkbar.'%(url))
|
#
# Copyright 2013/2015 Matthew Nottingham
#
# This file is part of GroundStation
#
# GroundStation is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GroundStation is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR | PURPOSE. | See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GroundStation; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import ephem
import GnuRadio2
class ReceiverEvent():
    """A scheduled radio-receive window.

    On construction, a single-shot timer is armed to fire at the ephem
    start *time*; it then records for *duration* minutes at the stored
    frequency/bandwidth.

    NOTE(review): QtCore is referenced but never imported in this module,
    and QTimer(self) requires a QObject parent -- confirm how this class is
    instantiated in practice.
    """
    def __init__(self, time, duration, frequency, bandwidth):
        self.freq = frequency
        self.duration = 60.0 * duration # Lets keep it in seconds.
        self.bandwidth = bandwidth
        self.channels = []
        # ephem times are in days; convert the delta to milliseconds.
        self.timerInterval = (time - ephem.now()) * 24.0 * 60.0 * 60.0 * 1000
        self.timer = QtCore.QTimer(self)
        self.timer.timeout.connect(self.startReceiving)
        self.timer.setSingleShot(True)
        self.timer.start(self.timerInterval)
    def addChannel(self, channel):
        """Attach a channel to be notified when receiving starts."""
        self.channels.append(channel)
    def startReceiving(self):
        """Timer callback: start the receiver and re-arm the timer to stop."""
        self.timer.stop()
        # Bug fix: disconnect startReceiving before re-pointing the timer at
        # stopReceiving; otherwise the second timeout would call BOTH slots
        # and restart the receiver.
        self.timer.timeout.disconnect(self.startReceiving)
        self.timer.timeout.connect(self.stopReceiving)
        self.timer.start(self.duration * 1000)
        for c in self.channels:
            c.startCountdown()
        # Bug fix: the original referenced undefined locals 'frequency' and
        # 'bandwidth' (NameError at runtime); use the values from __init__.
        self.rx = GnuRadio2.Receiver(self.freq, self.bandwidth)
        self.rx.start()
    def stopReceiving(self):
        """Stop the underlying GNU Radio receiver."""
        # Loop through all the channels and make sure they're dead?...
        self.rx.stop()
|
from discord.ext import commands
from math import floor
from datetime import datetime
from util.arguments import Arguments
from shlex import split
class Vorago:
    """Discord cog that reports the current and next Vorago rotation."""
    def __init__(self, bot):
        self.bot = bot
    @commands.command(aliases=['rago'],
                      description='Shows the current rotation of Vorago and what the next one will be.')
    async def vorago(self, *, msg=''):
        """Command entry point: parse optional flags, then report the rotation."""
        parser = Arguments(allow_abbrev=False, prog='vorago')
        parser.add_argument('-H', '--hard-mode', action='store_true', help='Shows hardmode rotations.')
        try:
            args = parser.parse_args(split(msg))
        except SystemExit:
            # argparse exits on --help or bad usage; reply with help instead.
            await self.bot.say('```%s```' % parser.format_help())
            return
        except Exception as e:
            await self.bot.say('```%s```' % str(e))
            return
        await self.execute(args)
    async def execute(self, args):
        """Compute the current/next rotation from UTC time and send the reply."""
        rotations = (
            {
                'type': 'Ceiling Collapse',
                'unlock': 'Torso of Omens',
                'p10': ('Team Split', 'Green Bomb'),
                'p11': ('Team Split', 'Vitalis')
            },
            {
                'type': 'Scopulus',
                'unlock': 'Helm of Omens',
                'p10': ('Purple Bomb', 'Team Split'),
                'p11': ('Purple Bomb', 'Vitalis')
            },
            {
                'type': 'Vitalis',
                'unlock': 'Legs of Omens',
                'p10': ('Vitalis', 'Purple Bomb'),
                'p11': ('Vitalis', 'bleeds')
            },
            {
                'type': 'Green Bomb',
                'unlock': 'Boots of Omens',
                'p10': ('Green Bomb', 'Vitalis'),
                'p11': ('Green Bomb', 'Team Split')
            },
            {
                'type': 'Team Split',
                'unlock': 'Maul of Omens',
                'p10': ('Team Split', 'Team Split'),
                'p11': ('Team Split', 'Purple Bomb')
            },
            {
                'type': 'The End',
                'unlock': 'Gloves of Omens',
                'p10': ('Purple Bomb', 'Bleeds'),
                'p11': ('Purple Bomb', 'Vitalis')
            }
        )
        # Each rotation lasts 7 days; the "- 6" anchors the weekly cycle to
        # its known start day relative to the Unix epoch.
        ms = round(datetime.utcnow().timestamp() * 1000)
        current = floor((((floor(floor(ms / 1000) / (24 * 60 * 60))) - 6) % (7 * len(rotations))) / 7)
        days_until = 7 - ((floor((ms / 1000) / (24 * 60 * 60))) - 6) % (7 * len(rotations)) % 7
        # Renamed from `next` to avoid shadowing the builtin.
        upcoming = current + 1 if current + 1 < len(rotations) else 0
        # Bug fixes: "Curent" typo, and the plural suffix was inverted
        # (it printed "1 days" / "2 day").
        m = '**Current Rotation**: %s.\n' % rotations[current]['type']
        m += '**Next Rotation**: %s in `%s` day%s.' % (rotations[upcoming]['type'], days_until,
                                                       '' if days_until == 1 else 's')
        # Adding hard mode information
        if args.hard_mode:
            m += '\n\n**__Hard mode__**:\n'
            m += '**Phase 10**: %s + %s.\n' % rotations[current]['p10']
            m += '**Phase 11**: %s + %s.\n' % rotations[current]['p11']
            m += '**Unlock**: %s' % rotations[current]['unlock']
        await self.bot.say(m)
def setup(bot):
    # discord.py extension hook: register the cog with the bot.
    bot.add_cog(Vorago(bot))
|
## This file is part of Scapy
## See http://www.secdev.org/projects/scapy for more informations
## Copyright (C) Philippe Biondi <phil@secdev.org>
## This program is published under a GPLv2 license
import re
from dadict import DADict
from error import log_loading
############
## Consts ##
############
# Placeholder / broadcast MAC addresses (6 raw bytes each).
ETHER_ANY = "\x00"*6
ETHER_BROADCAST = "\xff"*6
# Ethernet protocol numbers.
ETH_P_ALL = 3
ETH_P_IP = 0x800
ETH_P_ARP = 0x806
ETH_P_IPV6 = 0x86dd
# From net/if_arp.h
ARPHDR_ETHER = 1
ARPHDR_METRICOM = 23
ARPHDR_PPP = 512
ARPHDR_LOOPBACK = 772
ARPHDR_TUN = 65534
# From net/ipv6.h on Linux (+ Additions)
IPV6_ADDR_UNICAST = 0x01
IPV6_ADDR_MULTICAST = 0x02
IPV6_ADDR_CAST_MASK = 0x0F
IPV6_ADDR_LOOPBACK = 0x10
IPV6_ADDR_GLOBAL = 0x00
IPV6_ADDR_LINKLOCAL = 0x20
IPV6_ADDR_SITELOCAL = 0x40 # deprecated since Sept. 2004 by RFC 3879
IPV6_ADDR_SCOPE_MASK = 0xF0
#IPV6_ADDR_COMPATv4 = 0x80 # deprecated; i.e. ::/96
#IPV6_ADDR_MAPPED = 0x1000 # i.e.; ::ffff:0.0.0.0/96
IPV6_ADDR_6TO4 = 0x0100 # Added to have more specific info (should be 0x0101 ?)
IPV6_ADDR_UNSPECIFIED = 0x10000
# Maximum transmission unit, in bytes.
MTU = 1600
# file parsing to get some values :
def load_protocols(filename):
spaces = re.compile("[ \t]+|\n")
dct = DADict(_name=filename)
try:
| for l in open(filename):
try:
shrp = l.find("#")
if shrp >= 0:
l = l[:shrp]
l = l.strip()
if not l:
continue
lt = tuple(re.split(spaces, l))
| if len(lt) < 2 or not lt[0]:
continue
dct[lt[0]] = int(lt[1])
except Exception,e:
log_loading.info("Couldn't parse file [%s]: line [%r] (%s)" % (filename,l,e))
except IOError:
log_loading.info("Can't open /etc/protocols file")
return dct
IP_PROTOS=load_protocols("/etc/protocols")
def load_ethertypes(filename):
spaces = re.compile("[ \t]+|\n")
dct = DADict(_name=filename)
try:
f=open(filename)
for l in f:
try:
shrp = l.find("#")
if shrp >= 0:
l = l[:shrp]
l = l.strip()
if not l:
continue
lt = tuple(re.split(spaces, l))
if len(lt) < 2 or not lt[0]:
continue
dct[lt[0]] = int(lt[1], 16)
except Exception,e:
log_loading.info("Couldn't parse file [%s]: line [%r] (%s)" % (filename,l,e))
f.close()
except IOError,msg:
pass
return dct
ETHER_TYPES=load_ethertypes("/etc/ethertypes")
def load_services(filename):
spaces = re.compile("[ \t]+|\n")
tdct=DADict(_name="%s-tcp"%filename)
udct=DADict(_name="%s-udp"%filename)
try:
f=open(filename)
for l in f:
try:
shrp = l.find("#")
if shrp >= 0:
l = l[:shrp]
l = l.strip()
if not l:
continue
lt = tuple(re.split(spaces, l))
if len(lt) < 2 or not lt[0]:
continue
if lt[1].endswith("/tcp"):
tdct[lt[0]] = int(lt[1].split('/')[0])
elif lt[1].endswith("/udp"):
udct[lt[0]] = int(lt[1].split('/')[0])
except Exception,e:
log_loading.warning("Couldn't file [%s]: line [%r] (%s)" % (filename,l,e))
f.close()
except IOError:
log_loading.info("Can't open /etc/services file")
return tdct,udct
TCP_SERVICES,UDP_SERVICES=load_services("/etc/services")
class ManufDA(DADict):
    """DADict mapping OUI prefixes ("AA:BB:CC") to (short, long) vendor names."""
    def fixname(self, val):
        # Identity override -- presumably DADict normalises key names by
        # default and we want them kept verbatim; verify against DADict.
        return val
    def _get_manuf_couple(self, mac):
        """Return the (short, long) vendor pair for *mac*, or (mac, mac) if unknown."""
        oui = ":".join(mac.split(":")[:3]).upper()
        return self.__dict__.get(oui,(mac,mac))
    def _get_manuf(self, mac):
        # Long vendor name.
        return self._get_manuf_couple(mac)[1]
    def _get_short_manuf(self, mac):
        # Short vendor name.
        return self._get_manuf_couple(mac)[0]
    def _resolve_MAC(self, mac):
        """Replace the OUI part of *mac* with the short vendor name when known."""
        oui = ":".join(mac.split(":")[:3]).upper()
        if oui in self:
            return ":".join([self[oui][0]]+ mac.split(":")[3:])
        return mac
def load_manuf(filename):
    """Parse a vendor OUI file (appears to follow the Wireshark 'manuf'
    format: OUI, short name, then optionally '# long name') into a ManufDA."""
    try:
        manufdb=ManufDA(_name=filename)
        for l in open(filename):
            try:
                l = l.strip()
                if not l or l.startswith("#"):
                    continue
                oui,shrt=l.split()[:2]
                i = l.find("#")
                if i < 0:
                    # No trailing comment: reuse the short name as long name.
                    lng=shrt
                else:
                    # Skip "# " -- assumes exactly one space after the hash.
                    lng = l[i+2:]
                manufdb[oui] = shrt,lng
            except Exception,e:
                log_loading.warning("Couldn't parse one line from [%s] [%r] (%s)" % (filename, l, e))
    except IOError:
        #log_loading.warning("Couldn't open [%s] file" % filename)
        pass
    return manufdb
#####################
## knowledge bases ##
#####################
class KnowledgeBase:
    """Lazily-initialised knowledge base backed by a file.

    Subclasses override lazy_init() to actually parse self.filename.
    """
    def __init__(self, filename):
        self.filename = filename
        self.base = None
    def lazy_init(self):
        # Default implementation: an empty base.
        self.base = ""
    def reload(self, filename = None):
        """Re-run lazy_init(), optionally switching files; if initialisation
        leaves the base unset, restore the previous one."""
        if filename is not None:
            self.filename = filename
        previous = self.base
        self.base = None
        self.lazy_init()
        if self.base is None:
            self.base = previous
    def get_base(self):
        """Return the base, initialising it on first access."""
        if self.base is None:
            self.lazy_init()
        return self.base
|
import numpy as np
import scipy.spatial.distance as dist
from matplotlib.backends.backend_pdf import PdfPages
import matplotlib.pyplot as plt
import matplotlib.lines as mplines
import scipy.cluster.hierarchy as clust
import os
def kabsch(coord, ref, app):
    """Kabsch alignment: find the optimal rotation mapping *coord* onto
    *ref* (both assumed centred) and return *app* rotated by it."""
    cov = np.dot(np.transpose(coord), ref)
    V, S, W = np.linalg.svd(cov)
    # A negative determinant product means the SVD produced a reflection;
    # flip the last singular vector to keep a proper rotation.
    if (np.linalg.det(V) * np.linalg.det(W)) < 0.0:
        S[-1] = -S[-1]
        V[:, -1] = -V[:, -1]
    rotation = np.dot(V, W)
    return np.dot(app, rotation)
def rmsd(coord, ref):
    """Root-mean-square deviation over all array elements.

    NOTE(review): this averages over every coordinate component (3N values),
    not per-atom distances, so it differs from the conventional per-atom
    RMSD by a factor of sqrt(3); downstream use is purely comparative.
    """
    return np.sqrt(np.mean((coord - ref) ** 2))
#colors = [(1,.4,.4),(.4,.4,1),(.4,1,.4),(1,.4,1),(.4,1,1),(1,.7,.4),(1,.4,.7)]
# Palette cycled (mod 8) over the spheres written by writepym().
colors = [(0,.6,.6),(1,0,.5),(1,1,.2),(1,1,.2),(.8,.4,0),(.6,1,1),(.8,0,.8),(0,.9,0),(0,.6,.6),(1,0,.5),(1,1,.2),(1,1,.2),(.8,.4,0),(.6,1,1),(.8,0,.8),(0,.9,0),(0,.6,.6),(1,0,.5),(1,1,.2),(1,1,.2),(.8,.4,0),(.6,1,1),(.8,0,.8),(0,.9,0)]
def writepym(i,coords,radii):
    """Write a PyMOL CGO script "<i>.pym" drawing one coloured sphere per
    coordinate in *coords*, with the matching radius from *radii*.

    Fix: the file handle is now managed with a context manager so it is
    closed even if a write fails (the original never closed it on error).
    """
    pymfilename= i + ".pym"
    with open(pymfilename, "w") as pymfile:
        pymfile.write('from pymol.cgo import *'+ '\n')
        pymfile.write('from pymol import cmd'+ '\n')
        pymfile.write('from pymol.vfont import plain' + '\n' + 'data={}' + '\n' + "curdata=[]" + '\n')
        for item in enumerate(coords):
            # One CGO group per sphere: COLOR then SPHERE(x, y, z, r).
            pymfile.write("k='Protein" + str(item[0]) + " geometry'" +'\n'+ "if not k in data.keys():" +'\n'+" data[k]=[]"+'\n'+'curdata=['+'\n'+'COLOR,' + str(colors[item[0]%8][0])+","+str(colors[item[0]%8][1])+","+ str(colors[item[0]%8][2])+"," + '\n' + 'SPHERE,'+ str(item[1][0])+ ','+ str(item[1][1])+',' + str(item[1][2])+','+ str(radii[item[0]]) +'\n')
            pymfile.write("]"+"\n"+"k='Protein" + str(item[0]) + " geometry'" + '\n' + "if k in data.keys():" + "\n" + " data[k]= data[k]+curdata"+'\n'+"else:" +'\n' +" data[k]= curdata"+"\n")
        pymfile.write("for k in data.keys():" + "\n" + " cmd.load_cgo(data[k], k, 1)" +"\n"+ "data= {}")
files=os.listdir(".")
#refs=[x for x in files if x.endswith('1k90_refcoords.npy')]
np.set_printoptions(threshold=1000000)
pdf = PdfPages("corrected.pdf")
# Read the pairwise distance matrix (discard row and column labels).
#fname = "corrected-res.csv"
distmat = np.load("rmsdmat.npy")
# Calculate the mean of the pairwise similarities.
# Strict lower triangle: each unordered pair counted exactly once.
ii = np.tril_indices(distmat.shape[0], -1)
pwise = distmat[ii]
mdist = np.mean(pwise)
print(mdist)
#print(pwise)
# Generate a histogram of the pairwise similarities.
plt.clf()
plt.hist(pwise, 20, color='lightblue')
plt.xlabel("Similarity")#, size=17)
plt.ylabel("Frequency")#, size=17)
pdf.savefig()
# Do the clustering
# Average-linkage hierarchical clustering on the pairwise matrix.
h = clust.average(distmat)
# Plot the dendrogram
plt.figure(figsize=(16,10))
plt.figure(linewidth=100.0)
plt.clf()
ax = plt.axes()
# Hide all spines except the left one for a cleaner dendrogram frame.
for pos in 'right','bottom','top':
    ax.spines[pos].set_color('none')
ax.xaxis.set_ticks_position('none')
ax.yaxis.set_ticks_position('left')
ax.spines['left'].set_position(('outward', 10))
x=clust.dendrogram(h)
#plt.getp(x)
pdf.savefig()
pdf.close()
#ll = clust.leaves_list(h)
#print(len(ll))
tree = clust.to_tree(h)
#print(tree)
#ctree = clust.cut_tree(h, height = 150)
#print(np.shape(ctree))
ctree = clust.cut_tree(h, n_clusters = 2)
leaves = clust.leaves_list(h)
#print(np.shape(ctree))
ctree = np.reshape(ctree, len(leaves))
#print(np.shape(leaves))
#print(np.shape(ctree))
#print(np.vstack((leaves,ctree)))
files=os.listdir(".")
files=[x for x in files if x.startswith('tetramer_model_')]
print(le | n(files))
n_clusters = np.max(ctree) + 1
#print(n_clusters)
clusters = [[] for i in range(n_clusters)]
CCC = np.array([2,3,10,11,18,19])
AC3 = np.array([0,2,3,8,10,11,16,18,19])
#MDFG = np.array([4,5,6,7,12,13,14,15,20,21,22,23])
##actually MD
MDFG = np.array([4,5,12,13,20,21])
# Assign each model to its flat cluster.
# NOTE(review): cut_tree returns labels in original observation order, yet
# ctree is indexed by the enumerate counter while iterating leaves; confirm
# that ctree[i] (not ctree[leaf]) is the intended pairing.
for i, leaf in enumerate(leaves):
    cluster = ctree[i]
    # NOTE(review): goodmodels0.npy is re-loaded on every iteration; could be
    # hoisted out of the loop.
    structure = np.load("goodmodels0.npy")[i]
    # print(len(clusters))
    # print(cluster)
    clusters[cluster].append(structure)
rmsdlist = []
coordlist = []
# NOTE(review): this loop variable shadows the scipy.cluster.hierarchy
# import alias `clust`; the module is unreachable after this point.
for clust in clusters:
    l = len(clust)
    # Pick a reference structure roughly in the middle of the cluster.
    av = round(l / 2, -1)
    av = int(av)
    crmsdlist = []
    alignedcoordlist = []
    for o,st in enumerate(clust):
        strmsdlist = []
        # Centre the CCC beads and the full structure independently.
        stCst = st[CCC]
        stC = stCst - np.mean(stCst, axis = 0)
        st3 = st - np.mean(st, axis = 0)
        #ik = i[np.array([2,7,12])]
        #ikm = ik - np.mean(ik, axis = 0)
        #im = i - np.mean(i, axis = 0)
        #print(i)
        # Align every other structure onto this one using the CCC beads,
        # then score the whole-structure RMSD.
        for st2 in clust:
            st2Cst = st2[CCC]
            st2C = st2Cst - np.mean(st2Cst, axis = 0)
            st23 = st2 - np.mean(st2Cst, axis = 0)
            k = kabsch(st2C, stC, st23)
            k = k - np.mean(k, axis =0)
            #r2 = rmsd(k[np.array([3,4,8,9,13,14])], st3[np.array([3,4,8,9,13,14])])
            r = rmsd(k, st3)
            #print(r, r2)
            #r = rmsd(st, k)
            strmsdlist.append(r)
            # Keep the coordinates aligned onto the middle reference only.
            if o == av:
                alignedcoordlist.append(k)
            #print(r)
            #jm = j - np.mean(j, axis = 0)
            #jk = j[np.array([2,7,12])]
            #jkm = jk - np.mean(jk, axis = 0)
            #k = kabsch(jkm, ikm, jm)
            #k = k - np.mean(k, axis =0)
            #r = rmsd(k[np.array([3,4,8,9,13,14])], im[np.array([3,4,8,9,13,14])])
            #r2 = rmsd(k[np.array([2,7,12])], im[np.array([2,7,12])])
            #print(i)
            #print(r, r2)
            #rmsdlist1.append(r)
        crmsdlist.append(strmsdlist)
        #print(alignedcoordlist)
    rmsdlist.append(crmsdlist)
    coordlist.append(alignedcoordlist)
radii = np.load("radii.npy")
clustcoords = []
# Write a PyMOL file for each cluster's mean structure and save its members.
for i,item in enumerate(coordlist):
    print(np.shape(item))
    mean = np.mean(item, axis = 0)
    med = round(len(item)/2)
    writepym("cluster_mean_"+str(i), mean, radii)
    #writepym("cluster_med_"+str(i), item[med],radii)
    #print(item))
    np.save("cluster_"+str(i)+".npy", item)
    #print("std ", np.std(item, axis = 0))
    clustcoords.append(mean)
np.save("clust_av_coordsn.npy",clustcoords)
m = []
# Report the mean intra-cluster RMSD per cluster and overall.
for cl in rmsdlist:
    mean = np.mean(cl)
    m.append(mean)
    print(mean)
print(np.mean(m))
|
turn None
return cls(_type, _name)
class Git():
REPO_DIR = os.getcwd()
REPO_TOP_DIR = os.getcwd()
REF_MAP = {}
REV_HEAD = None
# local uncommitted changes
LUC_SHA1 = "0000000000000000000000000000000000000000"
# local changes checked
LCC_SHA1 = "0000000000000000000000000000000000000001"
    @staticmethod
    def available():
        # Git support requires the git binary to have been located.
        return GitProcess.GIT_BIN is not None
    @staticmethod
    def run(args, text=None):
        """Spawn git with *args* in the current repo directory."""
        return GitProcess(Git.REPO_DIR, args, text)
@staticmethod
def checkOutput(args, text=None):
process = Git.run(args, text)
data = process.communicate()[0]
if process.returncode != 0:
return None
return data
@staticmethod
def repoTopLevelDir(directory):
"""get top level repo directory
if @directory is not a repository, None returned"""
if not os.path.isdir(directory):
return None
if not os.path.exists(directory):
return None
args = ["rev-parse", "--show-toplevel"]
process = GitProcess(directory, args)
realDir = process.communicate()[0]
if process.returncode != 0:
return None
return realDir.decode("utf-8").replace("\n", "")
    @staticmethod
    def refs():
        """Return {sha1: [Ref, ...]} for all refs, or None when none found.

        Each per-sha1 list is kept sorted via bisect.insort.
        """
        args = ["show-ref", "-d"]
        data = Git.checkOutput(args)
        if not data:
            return None
        lines = data.decode("utf-8").split('\n')
        refMap = defaultdict(list)
        for line in lines:
            ref = Ref.fromRawString(line)
            if not ref:
                continue
            # show-ref prints "<sha1> <refname>"; the hash is the first 40 chars.
            sha1 = line[0:40]
            bisect.insort(refMap[sha1], ref)
        return refMap
@staticmethod
def revHead():
args = ["rev-parse", "HEAD"]
data = Git.checkOutput(args)
if not data:
return None
return data.decode("utf-8").rstrip('\n')
@staticmethod
def branches():
args = ["branch", "-a"]
data = Git.checkOutput(args)
if not data:
return None
return data.decode("utf-8").split('\n')
    @staticmethod
    def commitSummary(sha1):
        """Return {sha1, subject, date, author, email} for a commit, or None."""
        # %x01 is an improbable separator byte, so subjects containing
        # ordinary punctuation split cleanly.
        fmt = "%h%x01%s%x01%ad%x01%an%x01%ae"
        args = ["show", "-s",
                "--pretty=format:{0}".format(fmt),
                "--date=short", sha1]
        data = Git.checkOutput(args)
        if not data:
            return None
        parts = data.decode("utf-8").split("\x01")
        return {"sha1": parts[0],
                "subject": parts[1],
                "date": parts[2],
                "author": parts[3],
                "email": parts[4]}
@staticmethod
def abbrevCommit(sha1):
args = ["show", "-s", "--pretty=format:%h", sha1]
data = Git.checkOutput(args)
if not data:
return sha1[:7]
return data.rstrip().decode("utf-8")
    @staticmethod
    def commitSubject(sha1):
        # NOTE(review): unlike the sibling helpers this returns raw bytes and
        # may return None (checkOutput failure) -- confirm callers expect that.
        args = ["show", "-s", "--pretty=format:%s", sha1]
        data = Git.checkOutput(args)
        return data
    @staticmethod
    def commitRawDiff(sha1, filePath=None, gitArgs=None):
        """Return the raw patch bytes for *sha1*, or None.

        The two local-change sentinels select diff-index (staged) and
        diff-files (unstaged); anything else is treated as a real commit.
        """
        if sha1 == Git.LCC_SHA1:
            args = ["diff-index", "--cached", "HEAD"]
        elif sha1 == Git.LUC_SHA1:
            args = ["diff-files"]
        else:
            args = ["diff-tree", "-r", "--root", sha1]
        args.extend(["-p", "--textconv", "--submodule",
                     "-C", "--no-commit-id", "-U3"])
        if gitArgs:
            args.extend(gitArgs)
        if filePath:
            args.append("--")
            args.append(filePath)
        data = Git.checkOutput(args)
        if not data:
            return None
        return data
    @staticmethod
    def externalDiff(branchDir, commit, path=None, tool=None):
        """Launch `git difftool` for *commit* (fire-and-forget).

        NOTE(review): the GitProcess result is never waited on or checked, so
        failures are silent -- confirm this is intentional.
        """
        args = ["difftool", "--no-prompt"]
        if commit.sha1 == Git.LUC_SHA1:
            # Unstaged local changes: plain difftool against the worktree.
            pass
        elif commit.sha1 == Git.LCC_SHA1:
            args.append("--cached")
        else:
            args.append("{0}^..{0}".format(commit.sha1))
        if tool:
            args.append("--tool={}".format(tool))
        if path:
            args.append("--")
            args.append(path)
        cwd = branchDir if branchDir else Git.REPO_DIR
        process = GitProcess(cwd, args)
@staticmethod
def conflictFiles():
args = ["diff", "--name-only",
"--diff-filter=U",
"-no-color"]
data = Git.checkOutput(args)
if not data:
return None
return data.rstrip(b'\n').decode("utf-8").split('\n')
@staticmethod
def gitDir():
args = ["rev-parse", "--git-dir"]
data = Git.checkOutput(args)
if not data:
return None
return data.rstrip(b'\n').decode("utf-8")
@staticmethod
def gitPath(name):
dir = Git.gitDir()
if not dir:
return None
if dir[-1] != '/' and dir[-1] != '\\':
dir += '/'
return dir + name
    @staticmethod
    def mergeBranchName():
        """return the current merge branch name"""
        # TODO: is there a better way?
        path = Git.gitPath("MERGE_MSG")
        if not os.path.exists(path):
            return None
        name = None
        with open(path, "r") as f:
            # First line looks like: Merge branch 'xyz' of ...
            line = f.readline()
            m = re.match("Merge.* '(.*)'.*", line)
            if m:
                name = m.group(1)
        # likely a sha1
        if name and re.match("[a-f0-9]{7,40}", name):
            # Map the hash back to a remote branch that contains it.
            data = Git.checkOutput(["branch", "--remotes",
                                    "--contains", name])
            if data:
                data = data.rstrip(b'\n')
                if data:
                    # might have more than one branch
                    name = data.decode("utf-8").split('\n')[0].strip()
        return name
@staticmethod
def resolveBy(ours, path):
args = ["checkout",
"--ours" if ours else "--theirs",
path]
process = Git.run(args)
process.communicate()
if process.returncode != 0:
return False
args = ["add", path]
process = Git.run(args)
process.communicate()
return True if process.returncode == 0 else False
@staticmethod
def undoMerge(path):
"""undo a merge on the @path"""
if not path:
return False
args = ["checkout", "-m", path]
process = Git.run(args)
process.communicate()
return process.returncode == 0
@staticmethod
def hasLocalChanges(branch, cached=False):
# A remote branch should never have local changes
if branch.startswith("remotes/"):
return False
dir = Git.branchDir(branch)
# only branch checked out can have local changes
if not dir:
return False
args = ["diff", "--quiet"]
if cached:
args.append("--cached")
process = GitProcess(dir, args)
process.communicate()
return process.returncode == 1
@staticmethod
def branchDir(branch):
"""returned the branch directory if it checked out
otherwise returned an empty string"""
if not branch or branch.startswith("remotes/"):
return ""
# Use the repo dir directly
# since we are unable to get two detached branch
if branch.startswith("(HEAD detached"):
return Git.REPO_DIR
args = ["worktree", "list"]
data = Git.checkOutput(args)
if not data:
return ""
worktree_re = re.compile(
r"(\S+)\s+[a-f0-9]+\s+(\[(\S+)\]|\(detached HEAD\))$")
worktrees = data.rstrip(b'\n').decode("utf8").split('\n')
for wt in worktrees:
m = worktree_re.fullmatch(wt)
if not m:
print("Oops! Wrong format for worktree:", wt)
elif m.group(3) == branch:
return m.group(1)
return ""
@staticmethod
def generateDiff(sha1, filePath):
data = Git.commitRawDiff(sha1)
if not data:
return False
w |
aise NotImplementedError()
#
# The following code defines write methods for each of the
# primitive protobuf types.
#
class DoubleWriteMethod(WriteMethod):
    """Generates the writer for a scalar proto ``double`` field."""

    def _encoder_fn(self) -> str:
        return 'WriteDouble'

    def params(self) -> List[Tuple[str, str]]:
        # Single C++ argument: the double to encode.
        return [('double', 'value')]
class PackedDoubleWriteMethod(PackedWriteMethod):
    """Generates the writer for a packed repeated ``double`` field."""

    def _encoder_fn(self) -> str:
        return 'WritePackedDouble'

    def params(self) -> List[Tuple[str, str]]:
        return [('std::span<const double>', 'values')]
class FloatWriteMethod(WriteMethod):
    """Generates the writer for a scalar proto ``float`` field."""

    def _encoder_fn(self) -> str:
        return 'WriteFloat'

    def params(self) -> List[Tuple[str, str]]:
        return [('float', 'value')]
class PackedFloatWriteMethod(PackedWriteMethod):
    """Generates the writer for a packed repeated ``float`` field."""

    def _encoder_fn(self) -> str:
        return 'WritePackedFloat'

    def params(self) -> List[Tuple[str, str]]:
        return [('std::span<const float>', 'values')]
class Int32WriteMethod(WriteMethod):
    """Generates the writer for a scalar proto ``int32`` field."""

    def _encoder_fn(self) -> str:
        return 'WriteInt32'

    def params(self) -> List[Tuple[str, str]]:
        return [('int32_t', 'value')]
class PackedInt32WriteMethod(PackedWriteMethod):
    """Generates the writer for a packed repeated ``int32`` field."""
    # Fix: a stray '|' had corrupted the docstring line into a syntax error.

    def params(self) -> List[Tuple[str, str]]:
        return [('std::span<const int32_t>', 'values')]

    def _encoder_fn(self) -> str:
        return 'WritePackedInt32'
class Sint32WriteMethod(WriteMethod):
    """Generates the writer for a scalar proto ``sint32`` field."""
    # Fix: the class name was corrupted to "Sint32 | WriteMethod"
    # (a syntax error); restored to Sint32WriteMethod to match the
    # Int32/Sfixed32/... naming pattern used by the sibling classes.

    def params(self) -> List[Tuple[str, str]]:
        return [('int32_t', 'value')]

    def _encoder_fn(self) -> str:
        return 'WriteSint32'
class PackedSint32WriteMethod(PackedWriteMethod):
    """Generates the writer for a packed repeated ``sint32`` field."""

    def _encoder_fn(self) -> str:
        return 'WritePackedSint32'

    def params(self) -> List[Tuple[str, str]]:
        return [('std::span<const int32_t>', 'values')]
class Sfixed32WriteMethod(WriteMethod):
    """Generates the writer for a scalar proto ``sfixed32`` field."""

    def _encoder_fn(self) -> str:
        return 'WriteSfixed32'

    def params(self) -> List[Tuple[str, str]]:
        return [('int32_t', 'value')]
class PackedSfixed32WriteMethod(PackedWriteMethod):
    """Generates the writer for a packed repeated ``sfixed32`` field."""

    def _encoder_fn(self) -> str:
        return 'WritePackedSfixed32'

    def params(self) -> List[Tuple[str, str]]:
        return [('std::span<const int32_t>', 'values')]
class Int64WriteMethod(WriteMethod):
    """Generates the writer for a scalar proto ``int64`` field."""

    def _encoder_fn(self) -> str:
        return 'WriteInt64'

    def params(self) -> List[Tuple[str, str]]:
        return [('int64_t', 'value')]
class PackedInt64WriteMethod(PackedWriteMethod):
    """Generates the writer for a packed repeated ``int64`` field."""

    def _encoder_fn(self) -> str:
        return 'WritePackedInt64'

    def params(self) -> List[Tuple[str, str]]:
        return [('std::span<const int64_t>', 'values')]
class Sint64WriteMethod(WriteMethod):
    """Generates the writer for a scalar proto ``sint64`` field."""

    def _encoder_fn(self) -> str:
        return 'WriteSint64'

    def params(self) -> List[Tuple[str, str]]:
        return [('int64_t', 'value')]
class PackedSint64WriteMethod(PackedWriteMethod):
    """Generates the writer for a packed repeated ``sint64`` field."""

    def _encoder_fn(self) -> str:
        return 'WritePackedSint64'

    def params(self) -> List[Tuple[str, str]]:
        return [('std::span<const int64_t>', 'values')]
class Sfixed64WriteMethod(WriteMethod):
    """Generates the writer for a scalar proto ``sfixed64`` field."""

    def _encoder_fn(self) -> str:
        return 'WriteSfixed64'

    def params(self) -> List[Tuple[str, str]]:
        return [('int64_t', 'value')]
class PackedSfixed64WriteMethod(PackedWriteMethod):
    """Generates the writer for a packed repeated ``sfixed64`` field."""

    def params(self) -> List[Tuple[str, str]]:
        return [('std::span<const int64_t>', 'values')]

    def _encoder_fn(self) -> str:
        # Fix: was 'WritePackedSfixed4', a typo for the 64-bit encoder,
        # matching the WritePackedSfixed32 naming of the 32-bit variant.
        return 'WritePackedSfixed64'
class Uint32WriteMethod(WriteMethod):
    """Generates the writer for a scalar proto ``uint32`` field."""

    def _encoder_fn(self) -> str:
        return 'WriteUint32'

    def params(self) -> List[Tuple[str, str]]:
        return [('uint32_t', 'value')]
class PackedUint32WriteMethod(PackedWriteMethod):
    """Generates the writer for a packed repeated ``uint32`` field."""

    def _encoder_fn(self) -> str:
        return 'WritePackedUint32'

    def params(self) -> List[Tuple[str, str]]:
        return [('std::span<const uint32_t>', 'values')]
class Fixed32WriteMethod(WriteMethod):
    """Generates the writer for a scalar proto ``fixed32`` field."""

    def _encoder_fn(self) -> str:
        return 'WriteFixed32'

    def params(self) -> List[Tuple[str, str]]:
        return [('uint32_t', 'value')]
class PackedFixed32WriteMethod(PackedWriteMethod):
    """Generates the writer for a packed repeated ``fixed32`` field."""

    def _encoder_fn(self) -> str:
        return 'WritePackedFixed32'

    def params(self) -> List[Tuple[str, str]]:
        return [('std::span<const uint32_t>', 'values')]
class Uint64WriteMethod(WriteMethod):
    """Generates the writer for a scalar proto ``uint64`` field."""

    def _encoder_fn(self) -> str:
        return 'WriteUint64'

    def params(self) -> List[Tuple[str, str]]:
        return [('uint64_t', 'value')]
class PackedUint64WriteMethod(PackedWriteMethod):
    """Generates the writer for a packed repeated ``uint64`` field."""

    def _encoder_fn(self) -> str:
        return 'WritePackedUint64'

    def params(self) -> List[Tuple[str, str]]:
        return [('std::span<const uint64_t>', 'values')]
class Fixed64WriteMethod(WriteMethod):
    """Generates the writer for a scalar proto ``fixed64`` field."""

    def _encoder_fn(self) -> str:
        return 'WriteFixed64'

    def params(self) -> List[Tuple[str, str]]:
        return [('uint64_t', 'value')]
class PackedFixed64WriteMethod(PackedWriteMethod):
    """Generates the writer for a packed repeated ``fixed64`` field."""

    def _encoder_fn(self) -> str:
        return 'WritePackedFixed64'

    def params(self) -> List[Tuple[str, str]]:
        return [('std::span<const uint64_t>', 'values')]
class BoolWriteMethod(WriteMethod):
    """Generates the writer for a scalar proto ``bool`` field."""

    def _encoder_fn(self) -> str:
        return 'WriteBool'

    def params(self) -> List[Tuple[str, str]]:
        return [('bool', 'value')]
class BytesWriteMethod(WriteMethod):
    """Generates the writer for a proto ``bytes`` field."""

    def _encoder_fn(self) -> str:
        return 'WriteBytes'

    def params(self) -> List[Tuple[str, str]]:
        return [('std::span<const std::byte>', 'value')]
class StringLenWriteMethod(WriteMethod):
    """Generates the writer for a proto ``string`` field given a pointer
    and an explicit length."""

    def _encoder_fn(self) -> str:
        return 'WriteString'

    def params(self) -> List[Tuple[str, str]]:
        return [('const char*', 'value'), ('size_t', 'len')]
class StringWriteMethod(WriteMethod):
    """Generates the writer for a proto ``string`` field taking a
    ``std::string_view``."""

    def _encoder_fn(self) -> str:
        return 'WriteString'

    def params(self) -> List[Tuple[str, str]]:
        return [('std::string_view', 'value')]
class EnumWriteMethod(WriteMethod):
    """Generates the writer for a proto enum field."""

    def params(self) -> List[Tuple[str, str]]:
        return [(self._relative_type_namespace(), 'value')]

    def in_class_definition(self) -> bool:
        # The enum writer is emitted inline in the class definition.
        return True

    def body(self) -> List[str]:
        # Enums are written as uint32 after a cast.
        call = 'return WriteUint32(' \
            '{}, static_cast<uint32_t>(value));'.format(self.field_cast())
        return [call]

    def _encoder_fn(self) -> str:
        # Never used: body() emits the encoder call directly.
        raise NotImplementedError()
# Mapping of protobuf field types to their method definitions.
PROTO_FIELD_WRITE_METHODS: Dict[int, List] = {
descriptor_pb2.FieldDescriptorProto.TYPE_DOUBLE:
[DoubleWriteMethod, PackedDoubleWriteMethod],
descriptor_pb2.FieldDescriptorProto.TYPE_FLOAT:
[FloatWriteMethod, |
#!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example sets a bid modifier for the mobile platform on given campaign.
To get campaigns, run get_campaigns.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
from googleads import adwords

# Placeholders: replace with real values before running.
# Fix: CAMPAIGN_ID was corrupted by a stray " | " inside the literal.
CAMPAIGN_ID = 'INSERT_CAMPAIGN_ID_HERE'
BID_MODIFIER = '1.5'
def main(client, campaign_id, bid_modifier):
  """Sets a mobile platform bid modifier on the given campaign.

  Args:
    client: an initialized AdWordsClient instance.
    campaign_id: the id of the campaign to update.
    bid_modifier: the bid modifier to apply to the mobile platform.
  """
  # Initialize appropriate service.
  # Fix: the keyword argument was corrupted ("version | =") — restored.
  campaign_criterion_service = client.GetService(
      'CampaignCriterionService', version='v201502')

  # Create mobile platform. The ID can be found in the documentation.
  # https://developers.google.com/adwords/api/docs/appendix/platforms
  mobile = {
      'xsi_type': 'Platform',
      'id': '30001'
  }

  # Create campaign criterion with modified bid.
  campaign_criterion = {
      'campaignId': campaign_id,
      'criterion': mobile,
      'bidModifier': bid_modifier
  }

  # Create operations.
  operations = [
      {
          'operator': 'SET',
          'operand': campaign_criterion
      }
  ]

  # Make the mutate request.
  result = campaign_criterion_service.mutate(operations)

  # Display the resulting campaign criteria.
  for campaign_criterion in result['value']:
    print ('Campaign criterion with campaign id \'%s\' and criterion id \'%s\' '
           'was updated with bid modifier \'%s\'.'
           % (campaign_criterion['campaignId'],
              campaign_criterion['criterion']['id'],
              campaign_criterion['bidModifier']))
if __name__ == '__main__':
  # Load credentials from the "googleads.yaml" file and run the example.
  main(adwords.AdWordsClient.LoadFromStorage(), CAMPAIGN_ID, BID_MODIFIER)
|
# -*- coding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from openerp import models, fields
class StockMove(models.Model):
    """Extends stock.move with a link to its main project."""
    _inherit = 'stock.move'

    # Fix: the field name was corrupted by a stray " | " ("mai | n_project_id").
    # The project this stock move belongs to.
    main_project_id = fields.Many2one('project.project',
                                      string="Main Project")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.