try:
theme = settings.get_theme()
except:
print "ERROR: File now needs to be run in the web2py environment in order to pick up which theme to build"
exit()
import os
import sys
import shutil
SCRIPTPATH = os.path.join(request.folder, "static", "scripts", "tools")
os.chdir(SCRIPTPATH)
sys.path.append("./")
import getopt
import jsmin, mergejs
import re
def mergeCSS(inputFilenames, outputFilename):
output = ""
for inputFilename in inputFilenames:
output += open(inputFilename, "r").read()
open(outputFilename, "w").write(output)
return outputFilename
def cleanline(theLine):
""" Kills line breaks, tabs, and double spaces """
p = re.compile("(\n|\r|\t|\f|\v)+")
m = p.sub("", theLine)
# Kills double spaces
p = re.compile("( )+")
m = p.sub(" ", m)
# Removes last semicolon before }
p = re.compile("(; }|;})+")
m = p.sub("}", m)
# Removes space before {
p = re.compile("({ )+")
m = p.sub("{", m)
# Removes all comments
p = re.compile("/\*([^*]|[\r\n]|(\*+([^*/]|[\r\n])))*\*+/")
m = p.sub("", m)
# Strip off the Charset
p = re.compile("@CHARSET .*;")
m = p.sub("", m)
# Strip spaces before the {
p = re.compile(" {")
m = p.sub("{", m)
# Strip space after :
p = re.compile(": ")
m = p.sub(":", m)
# Strip space after ,
p = re.compile(", ")
m = p.sub(",", m)
# Strip space after ;
p = re.compile("; ")
m = p.sub(";", m)
return m
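
# For example (illustrative): cleanline("a,  b { color : red ; }")
# returns "a,b{color:red}".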
def compressCSS(inputFilename, outputFilename):
    # Read a list of lines; iterating over read() would loop per character
    theFile = open(inputFilename, "r").readlines()
    output = ""
    for line in theFile:
        output = output + cleanline(line)
# Once more, clean the entire file string
_output = cleanline(output)
open(outputFilename, "w").write(_output)
return
def dojs(dogis = False, warnings = True):
""" Minifies the JavaScript """
# Do we have local version of the Closure Compiler available?
use_compressor = "jsmin" # Fallback
try:
import closure
use_compressor = "closure"
print "using local Closure Compiler"
except Exception, E:
print "No closure (%s)" % E
print "Download from http://closure-compiler.googlecode.com/files/compiler-latest.zip"
try:
import closure_ws
use_compressor = "closure_ws"
print "Using Closure via Web Service - limited to files < 1Mb!"
except ImportError:
print "No closure_ws"
if use_compressor == "closure":
if not warnings:
closure.extra_params = "--warning_level QUIET"
minimize = closure.minimize
elif use_compressor == "closure_ws":
minimize = closure_ws.minimize
elif use_compressor == "jsmin":
minimize = jsmin.jsmin
sourceDirectory = ".."
configFilename = "sahana.js.cfg"
outputFilename = "S3.min.js"
# Merge JS files
print "Merging Core libraries."
merged = mergejs.run(sourceDirectory, None, configFilename)
# Compress JS files
print "Compressing - JS"
minimized = minimize(merged)
# Add license
print "Adding license file."
minimized = open("license.txt").read() + minimized
# Print to output files
print "Writing to %s." % outputFilename
open(outputFilename, "w").write(minimized)
# Remove old JS files
print "Deleting %s." % outputFilename
try:
os.remove("../S3/%s" % outputFilename)
except:
pass
# Move new JS files
print "Moving new JS files"
shutil.move(outputFilename, "../S3")
# dataTables
print "Compressing dataTables"
sourceDirectorydataTables = ".."
configFilenamedataTables = "sahana.js.dataTables.cfg"
outputFilenamedataTables = "s3.dataTables.min.js"
mergeddataTables = mergejs.run(sourceDirectorydataTables,
None,
configFilenamedataTables)
minimizeddataTables = minimize(mergeddataTables)
open(outputFilenamedataTables, "w").write(minimizeddataTables)
try:
os.remove("../S3/%s" % outputFilenamedataTables)
except:
pass
shutil.move(outputFilenamedataTables, "../S3")
# Vulnerability
print "Compressing Vulnerability"
sourceDirectoryVulnerability = ".."
configFilenameVulnerability = "sahana.js.vulnerability.cfg"
outputFilenameVulnerability = "s3.vulnerability.min.js"
mergedVulnerability = mergejs.run(sourceDirectoryVulnerability,
None,
configFilenameVulnerability)
minimizedVulnerability = minimize(mergedVulnerability)
open(outputFilenameVulnerability, "w").write(minimizedVulnerability)
try:
os.remove("../S3/%s" % outputFilenameVulnerability)
except:
pass
shutil.move(outputFilenameVulnerability, "../S3")
print "Compressing Vulnerability GIS"
sourceDirectoryVulnerability = "../../themes/Vulnerability/js"
configFilenameVulnerability = "sahana.js.vulnerability_gis.cfg"
outputFilenameVulnerability = "OpenLayers.js"
mergedVulnerability = mergejs.run(sourceDirectoryVulnerability,
None,
configFilenameVulnerability)
minimizedVulnerability = minimize(mergedVulnerability)
open(outputFilenameVulnerability, "w").write(minimizedVulnerability)
try:
os.remove("../../themes/Vulnerability/js/%s" % outputFilenameVulnerability)
except:
pass
shutil.move(outputFilenameVulnerability, "../../themes/Vulnerability/js")
# Single scripts
for filename in [
"contacts",
"embed_component",
"inline_component",
"locationselector.widget",
"popup",
"report",
"select_person",
"timeline",
]:
print "Compressing s3.%s.js" % filename
inputFilename = os.path.join("..", "S3", "s3.%s.js" % filename)
outputFilename = "s3.%s.min.js" % filename
input = open(inputFilename, "r").read()
minimized = minimize(input)
open(outputFilename, "w").write(minimized)
try:
os.remove("../S3/%s" % outputFilename)
except:
pass
shutil.move(outputFilename, "../S3")
if dogis:
sourceDirectoryGIS = "../S3"
sourceDirectoryOpenLayers = "../gis/openlayers/lib"
sourceDirectoryOpenLayersExten = "../gis"
sourceDirectoryMGRS = "../gis"
sourceDirectoryGeoExt = "../gis/GeoExt/lib"
sourceDirectoryGeoExtux = "../gis/GeoExt/ux"
sourceDirectoryGxp = "../gis/gxp"
#sourceDirectoryGeoExplorer = "../gis/GeoExplorer"
configFilenameGIS = "sahana.js.gis.cfg"
configFilenameOpenLayers = "sahana.js.ol.cfg"
configFilenameOpenLayersExten = "sahana.js.ol_exten.cfg"
configFilenameMGRS = "sahana.js.mgrs.cfg"
configFilenameGeoExt = "sahana.js.geoext.cfg"
configFilenameGeoExtux = "sahana.js.geoextux.cfg"
configFilenameGxpMin = "sahana.js.gxp.cfg"
configFilenameGxpFull = "sahana.js.gxpfull.cfg"
#configFilenameGeoExplorer = "sahana.js.geoexplorer.cfg"
outputFilenameGIS = "s3.gis.min.js"
outputFilenameOpenLayers = "OpenLayers.js"
outputFilenameMGRS = "MGRS.min.js"
outputFilenameGeoExt = "GeoExt.js"
outputFilenameGxp = "gxp.js"
#outputFilenameGeoExplorer = "GeoExplorer.js"
# Merge GIS JS Files
print "Merging GIS scripts."
mergedGIS = mergejs.run(sourceDirectoryGIS,
None,
configFilenameGIS)
print "Merging OpenLayers libraries."
mergedOpenLayers = mergejs.run(sourceDirectoryOpenLayers,
None,
configFilenameOpenLayers)
mergedOpenLayersExten = mergejs.run(sourceDirectoryOpenLayersExten,
None,
configFilenameOpenLayersExten)
print "Merging MGRS libraries."
mergedMGRS = mergejs.run(sourceDirectoryMGRS,
None,
configFilenameMGRS)
print "Merging GeoExt libraries."
mergedGeoExt = mergejs.run(sourceDirectoryGeoExt,
None,
configFilenameGeoExt)
mergedGeoExtux = mergejs.run(sourceDirectoryGeoExtux,
None,
configFilenameGeoExtux)
print "Merging gxp libraries."
mergedGxpMin = mergejs.run(sourceDirectoryGxp,
None,
configFilenameGxpMin)
mergedGxpFull = mergejs.run(sourceDirectoryGxp,
None,
configFilenameGxpFull)
#print "Merging GeoExplorer libraries."
#mergedGeoExplorer = mergejs.run(sourceDirectoryGeoExplorer,
# None,
# configFilenameGeoExplorer)
# Compress JS files
print "Compressing - GIS JS"
minimizedGIS = minimize(mergedGIS)
print "Compressing - OpenLayers JS"
if use_compressor == "closure_ws":
# Limited to files < 1Mb!
minimizedOpenLayers = jsmin.jsmin("%s\n%s" % (mergedOpenLayers,
mergedOpenLayersExten))
else:
minimizedOpenLayers = minimize("%s\n%s" % (mergedOpenLayers,
mergedOpenLayersExten))
print "Compressing - MGRS JS"
minimizedMGRS = minimize(mergedMGRS)
print "Compressing - GeoExt JS"
minimizedGeoExt = minimize("%s\n%s\n%s" % (mergedGeoExt,
mergedGeoExtux,
mergedGxpMin))
print "Compressing - gxp JS"
minimizedGxp = minimize(mergedGxpFull)
#print "Compressing - GeoExplorer JS"
#minimizedGeoExplorer = minimize(mergedGeoExplorer)
# Add license
#minimizedGIS = open("license.gis.txt").read() + minimizedGIS
# Print to output files
print "Writing to %s." % outputFilenameGIS
open(outputFilenameGIS, "w").write(minimizedGIS)
print "Writing to %s." % outputFilenameOpenLayers
open(outputFilenameOpenLayers, "w").write(minimizedOpenLayers)
print "Writing to %s." % outputFilenameMGRS
open(outputFilenameMGRS, "w").write(minimizedMGRS)
print "Writing to %s." % outputFilenameGeoExt
open(outputFilenameGeoExt, "w").write(minimizedGeoExt)
print "Writing to %s." % outputFilenameGxp
open(outputFilenameGxp, "w").write(minimizedGxp)
#print "Writing to %s." % outputFilenameGeoExplorer
#open(outputFilenameGeoExplorer, "w").write(minimizedGeoExplorer)
# Move new JS files
print "Deleting %s." % outputFilenameGIS
try:
os.remove("../S3/%s" % outputFilenameGIS)
except:
pass
print "Moving new GIS JS files"
shutil.move(outputFilenameGIS, "../S3")
print "Deleting %s." % outputFilenameOpenLayers
try:
os.remove("../gis/%s" % outputFilenameOpenLayers)
except:
pass
print "Moving new OpenLayers JS files"
shutil.move(outputFilenameOpenLayers, "../gis")
print "Deleting %s." % outputFilenameMGRS
try:
os.remove("../gis/%s" % outputFilenameMGRS)
except:
pass
print "Moving new MGRS JS files"
shutil.move(outputFilenameMGRS, "../gis")
print "Deleting %s." % outputFilenameGeoExt
try:
os.remove("../gis/%s" % outputFilenameGeoExt)
except:
pass
print "Moving new GeoExt JS files"
shutil.move(outputFilenameGeoExt, "../gis")
print "Deleting %s." % outputFilenameGxp
try:
os.remove("../gis/%s" % outputFilenameGxp)
except:
pass
print "Moving new gxp JS files"
shutil.move(outputFilenameGxp, "../gis")
#print "Deleting %s." % outputFilenameGeoExplorer
#try:
# os.remove("../gis/%s" % outputFilenameGeoExplorer)
#except:
# pass
#print "Moving new GeoExplorer JS files"
#shutil.move(outputFilenameGeoExplorer, "../gis")
def docss():
""" Compresses the CSS files """
listCSS = []
theme = settings.get_theme()
print "Using theme %s" % theme
css_cfg = os.path.join("..", "..", "..", "private", "templates", theme, "css.cfg")
f = open(css_cfg, "r")
files = f.readlines()
f.close()
for file in files[:-1]:
p = re.compile("(\n|\r|\t|\f|\v)+")
file = p.sub("", file)
listCSS.append("../../styles/%s" % file)
outputFilenameCSS = "eden.min.css"
# Merge CSS files
print "Merging Core styles."
mergedCSS = mergeCSS(listCSS, outputFilenameCSS)
# Compress CSS files
print "Writing to %s." % outputFilenameCSS
compressCSS(mergedCSS, outputFilenameCSS)
# Move files to correct locations
print "Deleting %s." % outputFilenameCSS
try:
os.remove("../../themes/%s/%s" % (theme, outputFilenameCSS))
except:
pass
print "Moving new %s." % outputFilenameCSS
shutil.move(outputFilenameCSS, "../../themes/%s" % theme)
def main(argv):
try:
parameter1 = argv[0]
except:
parameter1 = "ALL"
try:
if(argv[1] == "DOGIS"):
parameter2 = True
else:
parameter2 = False
except:
parameter2 = True
closure_warnings = True
if "NOWARN" in argv:
closure_warnings = False
if parameter1 in ("ALL", "NOWARN"):
dojs(warnings=closure_warnings)
docss()
else:
if parameter1 == "CSS":
docss()
else:
dojs(parameter2, warnings=closure_warnings)
docss()
print "Done."
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
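
# Hypothetical invocation (paths and app name are assumptions; the script must
# run inside the web2py environment so that settings/request exist):
#   python web2py.py -S eden -M -R applications/eden/static/scripts/tools/build.sahana.py -A CSS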
|
"""
Consider this game: Write 8 blanks on a sheet of paper. Randomly pick a digit 0-9. After seeing the digit, choose one
of the 8 blanks to place that digit in. Randomly choose another digit (with replacement) and then choose one of the 7
remaining blanks to place it in. Repeat until you've filled all 8 blanks. You win if the 8 digits written down are in
order from smallest to largest.
Write a program that plays this game by itself and determines whether it won or not. Run it 1 million times and post
your probability of winning.
Assigning digits to blanks randomly lets you win about 0.02% of the time. Here's a python script that wins about 10.3%
of the time. Can you do better?
import random
def trial():
indices = range(8) # remaining unassigned indices
s = [None] * 8 # the digits in their assigned places
while indices:
d = random.randint(0,9) # choose a random digit
index = indices[int(d*len(indices)/10)] # assign it an index
s[index] = str(d)
indices.remove(index)
return s == sorted(s)
print sum(trial() for _ in range(1000000))
thanks to cosmologicon for the challenge at /r/dailyprogrammer_ideas ..
link [http://www.reddit.com/r/dailyprogrammer_ideas/comments/s30be/intermediate_digitassigning_game/]
"""
import random
import itertools
def que_sort(data):
# print(data)
return all(b >= a for a, b in zip(data, itertools.islice(data, 1, None)))
TRIALS = 1000000  # the challenge asks for 1 million runs
win = 0
for a in range(TRIALS):
    l = [None] * 8
    p = list(range(8))
    while p:
        d = random.randint(0, 9)
        # i = random.choice(p)
        # Place the digit proportionally among the remaining blanks:
        # e.g. with 8 blanks left, digit 7 maps to index int(7 * 8 / 10) == 5
        i = int(d * len(p) / 10)
        l[p[i]] = d
        p.pop(i)
    if que_sort(l):
        win += 1
print('{}/{} - {}%'.format(win, TRIALS, win / TRIALS * 100))
|
definition = {
"where": "?subj a foaf:Organization .",
"fields": {
"name": {
"where": "?subj rdfs:label ?obj ."
}
}
}
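# A hypothetical illustration of what this definition could expand to, assuming
# the consuming library builds one SPARQL query per field:
#   SELECT ?subj ?obj
#   WHERE { ?subj a foaf:Organization . ?subj rdfs:label ?obj . }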
|
import sys
import re
re_valid_email = re.compile(r'^[-_0-9a-zA-Z]+@[0-9a-zA-Z]+\.[0-9a-zA-Z]{1,3}$')
def valid_email(s):
    return re_valid_email.search(s) is not None
N = int(raw_input().strip())
A = []
for i in range(N):
A += [ str(raw_input().strip()) ]
A.sort()
V = filter(valid_email, A)
print V
|
import argparse
from nltk.corpus import brown
import requests
import arrow
import json
parser = argparse.ArgumentParser()
parser.add_argument('host')
args = parser.parse_args()
def create_new_novel():
url = 'http://{host}/api/novel'.format(host=args.host)
response = requests.post(url, json={'title': 'Test Novel {}'.format(arrow.utcnow())})
return json.loads(response.text)['id']
def create_new_chapter(novel_id):
url = 'http://{host}/api/chapter'.format(host=args.host)
chapter_title = 'Chapter {}'.format(arrow.utcnow())
response = requests.post(url, json={'title': chapter_title, 'novel_id': novel_id})
return json.loads(response.text)['id']
def post_example_text_to_chapter(chapter_id, host):
url = 'http://{host}/api/novel_token'.format(host=host)
words = brown.words(categories=['news'])
for ordinal, word in enumerate(words):
if ordinal > 1000:
break
requests.post(url, json={'token': word.lower(), 'ordinal': ordinal, 'chapter_id': chapter_id})
if __name__ == '__main__':
novel_id = create_new_novel()
chapter_id = create_new_chapter(novel_id)
post_example_text_to_chapter(chapter_id, args.host)
|
from __future__ import unicode_literals
from django.core.exceptions import ValidationError
from django.forms import models
from djanban.apps.hourly_rates.models import HourlyRate
from django import forms
class HourlyRateForm(models.ModelForm):
class Meta:
model = HourlyRate
fields = ["name", "start_date", "end_date", "amount", "is_active"]
widgets = {
'start_date': forms.SelectDateWidget(),
'end_date': forms.SelectDateWidget(empty_label=u"Until now"),
}
def __init__(self, *args, **kwargs):
super(HourlyRateForm, self).__init__(*args, **kwargs)
    def clean(self):
        cleaned_data = super(HourlyRateForm, self).clean()
        start_date = cleaned_data.get("start_date")
        end_date = cleaned_data.get("end_date")
        # Guard against a missing start_date before comparing
        if start_date and end_date and start_date > end_date:
            raise ValidationError(u"Start date can't be greater than end date")
        return cleaned_data
class DeleteHourlyRateForm(forms.Form):
confirmed = forms.BooleanField(label=u"Please confirm you really want to do this action", required=True)
|
"""
The daemon that calls auto_copy.py upon optical disc insertion
"""
import signal
import sys
import time
sys.path.append('/usr/local/bin')
import auto_copy
SIGNAL_RECEIVED = False
def run_daemon(config):
"""
    Run the daemon
config: configParser object
"""
signal.signal(signal.SIGUSR1, signal_handler)
while True:
time.sleep(1)
global SIGNAL_RECEIVED
if SIGNAL_RECEIVED:
auto_copy.auto_copy(config)
SIGNAL_RECEIVED = False
def signal_handler(dump1, dump2):
global SIGNAL_RECEIVED
SIGNAL_RECEIVED = True
if __name__ == "__main__":
main_config = auto_copy.read_config('/etc/auto_copy.yml')
auto_copy.setup_logging(main_config)
run_daemon(main_config)
|
import json
import os
import unittest
from monty.json import MontyDecoder
from pymatgen.apps.battery.conversion_battery import ConversionElectrode
from pymatgen.apps.battery.insertion_battery import InsertionElectrode
from pymatgen.apps.battery.plotter import VoltageProfilePlotter
from pymatgen.core.composition import Composition
from pymatgen.entries.computed_entries import ComputedEntry
from pymatgen.util.testing import PymatgenTest
class VoltageProfilePlotterTest(unittest.TestCase):
def setUp(self):
entry_Li = ComputedEntry("Li", -1.90753119)
with open(os.path.join(PymatgenTest.TEST_FILES_DIR, "LiTiO2_batt.json")) as f:
entries_LTO = json.load(f, cls=MontyDecoder)
self.ie_LTO = InsertionElectrode.from_entries(entries_LTO, entry_Li)
with open(os.path.join(PymatgenTest.TEST_FILES_DIR, "FeF3_batt.json")) as fid:
entries = json.load(fid, cls=MontyDecoder)
self.ce_FF = ConversionElectrode.from_composition_and_entries(Composition("FeF3"), entries)
def testName(self):
plotter = VoltageProfilePlotter(xaxis="frac_x")
plotter.add_electrode(self.ie_LTO, "LTO insertion")
plotter.add_electrode(self.ce_FF, "FeF3 conversion")
self.assertIsNotNone(plotter.get_plot_data(self.ie_LTO))
self.assertIsNotNone(plotter.get_plot_data(self.ce_FF))
def testPlotly(self):
plotter = VoltageProfilePlotter(xaxis="frac_x")
plotter.add_electrode(self.ie_LTO, "LTO insertion")
plotter.add_electrode(self.ce_FF, "FeF3 conversion")
fig = plotter.get_plotly_figure()
self.assertEqual(fig.layout.xaxis.title.text, "Atomic Fraction of Li")
plotter = VoltageProfilePlotter(xaxis="x_form")
plotter.add_electrode(self.ce_FF, "FeF3 conversion")
fig = plotter.get_plotly_figure()
self.assertEqual(fig.layout.xaxis.title.text, "x in Li<sub>x</sub>FeF3")
plotter.add_electrode(self.ie_LTO, "LTO insertion")
fig = plotter.get_plotly_figure()
self.assertEqual(fig.layout.xaxis.title.text, "x Workion Ion per Host F.U.")
if __name__ == "__main__":
unittest.main()
|
from abc import ABCMeta, abstractmethod
class AbstractAuthenticator(metaclass=ABCMeta):
def __init__(self):
"""
Every authenticator has to have a name
:param name:
"""
super().__init__()
@abstractmethod
def authorise_transaction(self, customer):
"""
Decide whether to authorise transaction.
Note that all relevant information can be obtained from the customer.
:param customer: the customer making a transaction
:return: boolean, whether or not to authorise the transaction
"""
|
from django.conf.urls import url, include
urlpatterns = [
url(r'^postcode-lookup/', include('django_postcode_lookup.urls')),
]
|
import sys
[_, ms, _, ns] = list(sys.stdin)
ms = set(int(m) for m in ms.split(' '))
ns = set(int(n) for n in ns.split(' '))
# ^ is the symmetric-difference operator for sets
print(*sorted(ms ^ ns), sep='\n')
|
import os
import numpy as np
class Dataset(object):
"""
This class represents a dataset and consists of a list of SongData along with some metadata about the dataset
"""
def __init__(self, songs_data=None):
if songs_data is None:
self.songs_data = []
else:
self.songs_data = songs_data
def add_song(self, song_data):
self.songs_data.append(song_data)
def songs(self):
for s in self.songs_data:
yield s
@property
def num_features(self):
if len(self.songs_data):
return self.songs_data[0].X.shape[1]
@property
def size(self):
return len(self.songs_data)
def __repr__(self):
return ', '.join([s.name for s in self.songs()])
class SongData(object):
"""
This class holds features, labels, and metadata for a song.
"""
def __init__(self, audio_path, label_path):
if not os.path.isfile(audio_path):
raise IOError("Audio file at %s does not exist" % audio_path)
if label_path and not os.path.isfile(label_path):
raise IOError("MIDI file at %s does not exist" % label_path)
self.audio_path = audio_path
self.label_path = label_path
"""
x [num_samples,] is the samples of the song
"""
@property
def x(self):
return self.__x
@x.setter
def x(self, x):
self.__x = x
"""
X [num_frames x num_features] is the feature matrix for the song
"""
@property
def X(self):
return self.__X
@X.setter
def X(self, X):
if hasattr(self, 'Y') and self.Y.shape[0] != X.shape[0]:
raise ValueError("Number of feature frames must equal number of label frames")
self.__X = X
"""
Y [num_frames x num_pitches] is the label matrix for the song
"""
@property
def Y(self):
return self.__Y
@Y.setter
def Y(self, Y):
if hasattr(self, 'X') and self.X.shape[0] != Y.shape[0]:
raise ValueError("Number of label frames must equal number of feature frames")
self.__Y = Y
@property
def num_pitches(self):
if hasattr(self, 'Y'):
return np.shape(self.Y)[1]
return 0
@property
def num_features(self):
if hasattr(self, 'X'):
return self.X.shape[1]
@property
def num_frames(self):
if hasattr(self, 'X'):
return self.X.shape[0]
@property
def name(self):
return os.path.splitext(os.path.split(self.audio_path)[-1])[0]
|
import gevent
import time
def doit(i):
print "do it:%s" % (i)
gevent.sleep(2)
print "done:%s" %(i)
t2 = time.time()
threads = {}
for i in range(5):
t = gevent.spawn(doit, i)
threads[i] = t
#print dir(t)
gevent.sleep(1)
print threads
print threads[3].dead
threads[3].kill()
print threads[3].dead
del threads[3]
threads[2].kill()
print threads
print time.time() - t2
for i in threads:
print threads[ i ].dead
gevent.sleep(3)
print time.time() - t2
for i in threads:
print threads[ i ].dead
|
import sys
import os
import time
import numpy
import cv2
import cv2.cv as cv
from PIL import Image
sys.path.insert(0, os.path.join(
os.path.dirname(os.path.dirname(os.path.dirname(__file__)))))
from picture.util import define
from picture.util.system import POINT
from picture.util.log import LOG as L
THRESHOLD = 0.96
class PatternMatch(object):
def __init__(self):
pass
@classmethod
    def __patternmatch(cls, reference, target):
L.info("reference : %s" % reference)
img_rgb = cv2.imread(reference)
img_gray = cv2.cvtColor(img_rgb, cv2.COLOR_BGR2GRAY)
template = cv2.imread(target, 0)
w, h = template.shape[::-1]
res = cv2.matchTemplate(img_gray,template,cv2.TM_CCOEFF_NORMED)
loc = numpy.where( res >= THRESHOLD)
result = None
for pt in zip(*loc[::-1]):
result = POINT(pt[0], pt[1], w, h)
return result
@classmethod
    def bool(cls, reference, target):
result = PatternMatch.__patternmatch(reference, target)
if result is None:
return False
else:
return True
@classmethod
    def coordinate(cls, reference, target):
return PatternMatch.__patternmatch(reference, target)
if __name__ == "__main__":
pmc = PatternMatch()
print pmc.bool(os.path.join(define.APP_TMP,"screen.png"),
os.path.join(define.APP_TMP,"login.png"))
|
import sys

def output_gpx(points, output_filename):
"""
Output a GPX file with latitude and longitude from the points DataFrame.
"""
from xml.dom.minidom import getDOMImplementation
def append_trkpt(pt, trkseg, doc):
trkpt = doc.createElement('trkpt')
trkpt.setAttribute('lat', '%.8f' % (pt['lat']))
trkpt.setAttribute('lon', '%.8f' % (pt['lon']))
trkseg.appendChild(trkpt)
doc = getDOMImplementation().createDocument(None, 'gpx', None)
trk = doc.createElement('trk')
doc.documentElement.appendChild(trk)
trkseg = doc.createElement('trkseg')
trk.appendChild(trkseg)
points.apply(append_trkpt, axis=1, trkseg=trkseg, doc=doc)
with open(output_filename, 'w') as fh:
doc.writexml(fh, indent=' ')
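
# The written file looks roughly like this (illustrative, with minidom's indenting):
# <gpx><trk><trkseg>
#   <trkpt lat="49.28000000" lon="-123.00000000"/>
#   ...
# </trkseg></trk></gpx>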
def main():
    # get_data, smooth, and distance are defined elsewhere in the exercise
    points = get_data(sys.argv[1])
print('Unfiltered distance: %0.2f' % (distance(points),))
smoothed_points = smooth(points)
print('Filtered distance: %0.2f' % (distance(smoothed_points),))
output_gpx(smoothed_points, 'out.gpx')
if __name__ == '__main__':
main()
|
import json
import urllib
import time
import datetime
def CalculateDistance(Origin=False, Destination=False, Method="driving", TimeUnits="Minutes", DistUnits="Miles"):
    # This is the start of a distance matrix url
    base = "https://maps.googleapis.com/maps/api/distancematrix/json?"
    # Converts the variables to the required format
    urlorigin = "origins=%s&" % (Origin)
    urldestination = "destinations=%s&" % (Destination)
    urlmethod = "mode=%s&" % (Method)
    if DistUnits == "Kilometers" or DistUnits == "Meters":
        urlunits = "units=metric&"
    else:
        urlunits = "units=imperial&"
    # Constructs the completed url (the encode/decode round-trips were redundant)
    url = base + urlorigin + urldestination + urlmethod + urlunits + "language=en-EN&sensor=false"
    # Interprets the json data received
    try:
        result = json.load(urllib.urlopen(url))
    except:
        return 'ERROR', 'ERROR'
#Reads the status code and takes the appropriate action
if result["status"] == "OK":
if result["rows"][0]["elements"][0]["status"] == "OK":
time = result["rows"][0]["elements"][0]["duration"]["value"]
distance = result["rows"][0]["elements"][0]["distance"]["value"]
if TimeUnits == "Minutes":
time = time/60.0
elif TimeUnits == "Hours":
time = time/3600.0
if DistUnits == "Kilometres":
distance = distance/1000.0
elif DistUnits == "Yards":
distance = distance*1.0936133
elif DistUnits == "Miles":
distance = distance*0.000621371192
return time,distance
else:
return result["rows"][0]["elements"][0]["status"],result["rows"][0]["elements"][0]["status"]
    else:
        # Return a pair to match the other code paths
        return result["status"], result["status"]
|
from datetime import date
NTESTS = 1
PREV_DAYS = 10
PERCENT_UP = 0.01
PERCENT_DOWN = 0.01
PERIOD = 'Hourly' # [5-min, 15-min, 30-min, Hourly, 2-hour, 6-hour, 12-hour, Daily, Weekly]
MARKET = 'bitstampUSD'
YEAR_START = 2011
MONTH_START = 9
DAY_START = 13
DATE_START = date(YEAR_START, MONTH_START, DAY_START)
DATE_END = date.today()
URL_DATA_BASE = 'http://bitcoincharts.com/charts/chart.json?'
|
from rest_framework import test, status
from waldur_core.structure.models import CustomerRole, ProjectRole
from waldur_core.structure.tests import factories as structure_factories
from . import factories
class ServiceProjectLinkPermissionTest(test.APITransactionTestCase):
def setUp(self):
self.users = {
'owner': structure_factories.UserFactory(),
'admin': structure_factories.UserFactory(),
'manager': structure_factories.UserFactory(),
'no_role': structure_factories.UserFactory(),
'not_connected': structure_factories.UserFactory(),
}
# a single customer
self.customer = structure_factories.CustomerFactory()
self.customer.add_user(self.users['owner'], CustomerRole.OWNER)
        # that has two users connected: admin and manager
self.connected_project = structure_factories.ProjectFactory(customer=self.customer)
self.connected_project.add_user(self.users['admin'], ProjectRole.ADMINISTRATOR)
self.connected_project.add_user(self.users['manager'], ProjectRole.MANAGER)
# has defined a service and connected service to a project
self.service = factories.OpenStackServiceFactory(customer=self.customer)
self.service_project_link = factories.OpenStackServiceProjectLinkFactory(
project=self.connected_project,
service=self.service)
# the customer also has another project with users but without a permission link
self.not_connected_project = structure_factories.ProjectFactory(customer=self.customer)
self.not_connected_project.add_user(self.users['not_connected'], ProjectRole.ADMINISTRATOR)
self.not_connected_project.save()
self.url = factories.OpenStackServiceProjectLinkFactory.get_list_url()
def test_anonymous_user_cannot_grant_service_to_project(self):
response = self.client.post(self.url, self._get_valid_payload())
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_user_can_connect_service_and_project_he_owns(self):
user = self.users['owner']
self.client.force_authenticate(user=user)
service = factories.OpenStackServiceFactory(customer=self.customer)
project = structure_factories.ProjectFactory(customer=self.customer)
payload = self._get_valid_payload(service, project)
response = self.client.post(self.url, payload)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_admin_cannot_connect_new_service_and_project_if_he_is_project_admin(self):
user = self.users['admin']
self.client.force_authenticate(user=user)
service = factories.OpenStackServiceFactory(customer=self.customer)
project = self.connected_project
payload = self._get_valid_payload(service, project)
response = self.client.post(self.url, payload)
# the new service should not be visible to the user
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertDictContainsSubset(
{'service': ['Invalid hyperlink - Object does not exist.']}, response.data)
def test_user_cannot_revoke_service_and_project_permission_if_he_is_project_manager(self):
user = self.users['manager']
self.client.force_authenticate(user=user)
url = factories.OpenStackServiceProjectLinkFactory.get_url(self.service_project_link)
response = self.client.delete(url)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def _get_valid_payload(self, service=None, project=None):
return {
'service': factories.OpenStackServiceFactory.get_url(service),
'project': structure_factories.ProjectFactory.get_url(project)
}
|
from email.mime.text import MIMEText
from jinja2 import Environment, FileSystemLoader
from datetime import datetime as dt
import os
import six
import smtplib
SECRET_SANTA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'templates')
j2env = Environment(loader=FileSystemLoader(SECRET_SANTA_DIR),
trim_blocks=False)
class SantaMail(object):
"""
The SantaMail object is used to send email. This class will load email
templates that should be sent out (the master list email and the email
for each Secret Santa.
Templates will be loaded from the template directory and is configurable
via the template_master and template_santa configuration variables.
"""
REQUIRED_PARAMS = ['author', 'email', 'smtp', 'username', 'password']
def __init__(self, author, email, smtp, username, password,
template_master="master.tmpl", template_santa="santa.tmpl"):
self.author = author
self.email = email
self.smtp = smtp
self.username = username
self.password = password
self.template_master = template_master
self.template_santa = template_santa
def send(self, pairings):
"""
Sends the emails out to the secret santa participants.
The secret santa host (the user configured to send the email from)
will receive a copy of the master list.
Each Secret Santa will receive an email with the contents of the
template_santa template.
"""
for pair in pairings:
self._send_to_secret_santa(pair)
self._send_master_list(pairings)
def _do_send(self, toaddr, body, subject):
try:
msg = MIMEText(body)
msg['Subject'] = subject
msg['From'] = self.email
msg['To'] = toaddr
server = smtplib.SMTP(self.smtp)
server.starttls()
server.login(self.username, self.password)
server.sendmail(self.email, [toaddr], msg.as_string())
server.quit()
        except Exception as e:
            print("Error sending email to %s: %s" % (toaddr, e))
def _send_to_secret_santa(self, pair):
"""
Sends an email to the secret santa pairing.
"""
(giver, receiver) = pair
template = j2env.get_template(self.template_santa)
body = template.render(giver=giver, receiver=receiver)
year = dt.utcnow().year
subject = ('Your %s Farmer Family Secret Santa Match' % year)
self._do_send(giver.email, body, subject)
def _send_master_list(self, pairings):
"""
Sends an email to the game master.
"""
pair_list = []
for pair in pairings:
(giver, recipient) = pair
pair_list.append("%s -> %s" % (giver.name, recipient.name))
template = j2env.get_template(self.template_master)
body = template.render(pairs=pair_list)
year = dt.utcnow().year
subject = ('%s Farmer Family Secret Santa Master List' % year)
self._do_send(self.email, body, subject)
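
# A minimal usage sketch (credentials and pairing objects are hypothetical;
# each pair member is expected to expose .name and .email):
#   santa = SantaMail("Host", "host@example.com", "smtp.example.com:587",
#                     "user", "password")
#   santa.send([(alice, bob), (bob, alice)])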
|
"""
Collect the elasticsearch stats for the local node
 * urllib2
"""
import urllib2
import re
try:
import json
json # workaround for pyflakes issue #13
except ImportError:
import simplejson as json
import diamond.collector
RE_LOGSTASH_INDEX = re.compile('^(.*)-\d\d\d\d\.\d\d\.\d\d$')
class ElasticSearchCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(ElasticSearchCollector,
self).get_default_config_help()
config_help.update({
'host': "",
'port': "",
'stats': "Available stats: \n"
+ " - jvm (JVM information) \n"
+ " - thread_pool (Thread pool information) \n"
+ " - indices (Individual index stats)\n",
'logstash_mode': "If 'indices' stats are gathered, remove "
+ "the YYYY.MM.DD suffix from the index name "
+ "(e.g. logstash-adm-syslog-2014.01.03) and use that "
+ "as a bucket for all 'day' index stats.",
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(ElasticSearchCollector, self).get_default_config()
config.update({
'host': '127.0.0.1',
'port': 9200,
'path': 'elasticsearch',
'stats': ['jvm', 'thread_pool', 'indices'],
'logstash_mode': False,
})
return config
def _get(self, path):
url = 'http://%s:%i/%s' % (
self.config['host'], int(self.config['port']), path)
try:
response = urllib2.urlopen(url)
except Exception, err:
self.log.error("%s: %s", url, err)
return False
try:
return json.load(response)
except (TypeError, ValueError):
self.log.error("Unable to parse response from elasticsearch as a"
+ " json object")
return False
def _copy_one_level(self, metrics, prefix, data, filter=lambda key: True):
for key, value in data.iteritems():
if filter(key):
metric_path = '%s.%s' % (prefix, key)
self._set_or_sum_metric(metrics, metric_path, value)
def _copy_two_level(self, metrics, prefix, data, filter=lambda key: True):
for key1, d1 in data.iteritems():
self._copy_one_level(metrics, '%s.%s' % (prefix, key1), d1, filter)
def _index_metrics(self, metrics, prefix, index):
        if self.config['logstash_mode']:
            # Remove the YYYY.MM.DD bit from logstash indices.
            # This way we keep using the same metric naming and do not pollute
            # our metrics system (e.g. Graphite) with new metrics every day.
m = RE_LOGSTASH_INDEX.match(prefix)
if m:
prefix = m.group(1)
                # keep a tally of the number of indexes
self._set_or_sum_metric(metrics,
'%s.indexes_in_group' % prefix, 1)
self._add_metric(metrics, '%s.docs.count' % prefix, index,
['docs', 'count'])
self._add_metric(metrics, '%s.docs.deleted' % prefix, index,
['docs', 'deleted'])
self._add_metric(metrics, '%s.datastore.size' % prefix, index,
['store', 'size_in_bytes'])
# publish all 'total' and 'time_in_millis' stats
self._copy_two_level(
metrics, prefix, index,
lambda key: key.endswith('total') or key.endswith('time_in_millis'))
def _add_metric(self, metrics, metric_path, data, data_path):
"""If the path specified by data_path (a list) exists in data,
add to metrics. Use when the data path may not be present"""
current_item = data
for path_element in data_path:
current_item = current_item.get(path_element)
if current_item is None:
return
self._set_or_sum_metric(metrics, metric_path, current_item)
def _set_or_sum_metric(self, metrics, metric_path, value):
"""If we already have a datapoint for this metric, lets add
the value. This is used when the logstash mode is enabled."""
if metric_path in metrics:
metrics[metric_path] += value
else:
metrics[metric_path] = value
def collect(self):
if json is None:
self.log.error('Unable to import json')
return {}
result = self._get('_nodes/_local/stats?all=true')
if not result:
return
metrics = {}
node = result['nodes'].keys()[0]
data = result['nodes'][node]
#
# http connections to ES
metrics['http.current'] = data['http']['current_open']
#
# indices
indices = data['indices']
metrics['indices.docs.count'] = indices['docs']['count']
metrics['indices.docs.deleted'] = indices['docs']['deleted']
metrics['indices.datastore.size'] = indices['store']['size_in_bytes']
transport = data['transport']
metrics['transport.rx.count'] = transport['rx_count']
metrics['transport.rx.size'] = transport['rx_size_in_bytes']
metrics['transport.tx.count'] = transport['tx_count']
metrics['transport.tx.size'] = transport['tx_size_in_bytes']
# elasticsearch < 0.90RC2
if 'cache' in indices:
cache = indices['cache']
self._add_metric(metrics, 'cache.bloom.size', cache,
['bloom_size_in_bytes'])
self._add_metric(metrics, 'cache.field.evictions', cache,
['field_evictions'])
self._add_metric(metrics, 'cache.field.size', cache,
['field_size_in_bytes'])
metrics['cache.filter.count'] = cache['filter_count']
metrics['cache.filter.evictions'] = cache['filter_evictions']
metrics['cache.filter.size'] = cache['filter_size_in_bytes']
self._add_metric(metrics, 'cache.id.size', cache,
['id_cache_size_in_bytes'])
# elasticsearch >= 0.90RC2
if 'filter_cache' in indices:
cache = indices['filter_cache']
metrics['cache.filter.evictions'] = cache['evictions']
metrics['cache.filter.size'] = cache['memory_size_in_bytes']
self._add_metric(metrics, 'cache.filter.count', cache, ['count'])
# elasticsearch >= 0.90RC2
if 'id_cache' in indices:
cache = indices['id_cache']
self._add_metric(metrics, 'cache.id.size', cache,
['memory_size_in_bytes'])
# elasticsearch >= 0.90
if 'fielddata' in indices:
fielddata = indices['fielddata']
self._add_metric(metrics, 'fielddata.size', fielddata,
['memory_size_in_bytes'])
self._add_metric(metrics, 'fielddata.evictions', fielddata,
['evictions'])
#
# process mem/cpu (may not be present, depending on access restrictions)
self._add_metric(metrics, 'process.cpu.percent', data,
['process', 'cpu', 'percent'])
self._add_metric(metrics, 'process.mem.resident', data,
['process', 'mem', 'resident_in_bytes'])
self._add_metric(metrics, 'process.mem.share', data,
['process', 'mem', 'share_in_bytes'])
self._add_metric(metrics, 'process.mem.virtual', data,
['process', 'mem', 'total_virtual_in_bytes'])
#
# filesystem (may not be present, depending on access restrictions)
if 'fs' in data and 'data' in data['fs'] and data['fs']['data']:
fs_data = data['fs']['data'][0]
self._add_metric(metrics, 'disk.reads.count', fs_data,
['disk_reads'])
self._add_metric(metrics, 'disk.reads.size', fs_data,
['disk_read_size_in_bytes'])
self._add_metric(metrics, 'disk.writes.count', fs_data,
['disk_writes'])
self._add_metric(metrics, 'disk.writes.size', fs_data,
['disk_write_size_in_bytes'])
#
# jvm
if 'jvm' in self.config['stats']:
jvm = data['jvm']
mem = jvm['mem']
for k in ('heap_used', 'heap_committed', 'non_heap_used',
'non_heap_committed'):
metrics['jvm.mem.%s' % k] = mem['%s_in_bytes' % k]
for pool, d in mem['pools'].iteritems():
pool = pool.replace(' ', '_')
metrics['jvm.mem.pools.%s.used' % pool] = d['used_in_bytes']
metrics['jvm.mem.pools.%s.max' % pool] = d['max_in_bytes']
metrics['jvm.threads.count'] = jvm['threads']['count']
gc = jvm['gc']
collection_count = 0
collection_time_in_millis = 0
for collector, d in gc['collectors'].iteritems():
metrics['jvm.gc.collection.%s.count' % collector] = d[
'collection_count']
collection_count += d['collection_count']
metrics['jvm.gc.collection.%s.time' % collector] = d[
'collection_time_in_millis']
collection_time_in_millis += d['collection_time_in_millis']
# calculate the totals, as they're absent in elasticsearch > 0.90.10
if 'collection_count' in gc:
metrics['jvm.gc.collection.count'] = gc['collection_count']
else:
metrics['jvm.gc.collection.count'] = collection_count
k = 'collection_time_in_millis'
if k in gc:
metrics['jvm.gc.collection.time'] = gc[k]
else:
metrics['jvm.gc.collection.time'] = collection_time_in_millis
#
# thread_pool
if 'thread_pool' in self.config['stats']:
self._copy_two_level(metrics, 'thread_pool', data['thread_pool'])
#
# network
self._copy_two_level(metrics, 'network', data['network'])
if 'indices' in self.config['stats']:
#
# individual index stats
result = self._get('_stats?clear=true&docs=true&store=true&'
+ 'indexing=true&get=true&search=true')
if not result:
return
_all = result['_all']
self._index_metrics(metrics, 'indices._all', _all['primaries'])
if 'indices' in _all:
indices = _all['indices']
elif 'indices' in result: # elasticsearch >= 0.90RC2
indices = result['indices']
else:
return
for name, index in indices.iteritems():
self._index_metrics(metrics, 'indices.%s' % name,
index['primaries'])
for key in metrics:
self.publish(key, metrics[key])
|
class VecEnv(object):
"""
An abstract asynchronous, vectorized environment.
"""
def __init__(self, num_envs, observation_space, action_space):
self.num_envs = num_envs
self.observation_space = observation_space
self.action_space = action_space
def reset(self):
"""
Reset all the environments and return an array of
observations, or a tuple of observation arrays.
If step_async is still doing work, that work will
be cancelled and step_wait() should not be called
until step_async() is invoked again.
"""
pass
def step_async(self, actions):
"""
Tell all the environments to start taking a step
with the given actions.
Call step_wait() to get the results of the step.
You should not call this if a step_async run is
already pending.
"""
raise NotImplementedError()
def step_wait(self):
"""
Wait for the step taken with step_async().
Returns (obs, rews, dones, infos):
- obs: an array of observations, or a tuple of
arrays of observations.
- rews: an array of rewards
- dones: an array of "episode done" booleans
- infos: a sequence of info objects
"""
raise NotImplementedError()
def close(self):
"""
Clean up the environments' resources.
"""
raise NotImplementedError()
def step(self, actions):
self.step_async(actions)
return self.step_wait()
    def render(self, mode='human'):
        # logger is assumed to be provided by the surrounding package
        # (e.g. `from baselines import logger` in OpenAI Baselines)
        logger.warn('Render not defined for %s' % self)
def seed(self, i):
raise NotImplementedError()
@property
def unwrapped(self):
if isinstance(self, VecEnvWrapper):
return self.venv.unwrapped
else:
return self
class CloudpickleWrapper(object):
"""
Uses cloudpickle to serialize contents (otherwise multiprocessing tries to use pickle)
"""
def __init__(self, x):
self.x = x
def __getstate__(self):
import cloudpickle
return cloudpickle.dumps(self.x)
def __setstate__(self, ob):
import pickle
self.x = pickle.loads(ob)
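
# A typical usage sketch (the worker function and Process wiring are assumed,
# as in subprocess-based VecEnv implementations):
#   from multiprocessing import Process, Pipe
#   def worker(remote, env_fn_wrapper):
#       env = env_fn_wrapper.x()  # the closure survives pickling via cloudpickle
#       ...
#   remote, work_remote = Pipe()
#   Process(target=worker, args=(work_remote, CloudpickleWrapper(make_env))).start()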
|
EGA2RGB = [
(0x00, 0x00, 0x00),
(0x00, 0x00, 0xAA),
(0x00, 0xAA, 0x00),
(0x00, 0xAA, 0xAA),
(0xAA, 0x00, 0x00),
(0xAA, 0x00, 0xAA),
(0xAA, 0x55, 0x00),
(0xAA, 0xAA, 0xAA),
(0x55, 0x55, 0x55),
(0x55, 0x55, 0xFF),
(0x55, 0xFF, 0x55),
(0x55, 0xFF, 0xFF),
(0xFF, 0x55, 0x55),
(0xFF, 0x55, 0xFF),
(0xFF, 0xFF, 0x55),
(0xFF, 0xFF, 0xFF),
]
def load_shapes():
shapes = []
    data = open("ULT/SHAPES.EGA", "rb").read()  # binary read; avoids shadowing builtin 'bytes'
    for i in range(256):
        shape = []
        for j in range(16):
            for k in range(8):
                d = ord(data[k + 8 * j + 128 * i])
a, b = divmod(d, 16)
shape.append(EGA2RGB[a])
shape.append(EGA2RGB[b])
shapes.append(shape)
return shapes
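
# Each byte packs two 4-bit EGA pixels: for example divmod(0x1F, 16) == (1, 15),
# i.e. colors 1 (blue) and 15 (white) from the palette above.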
|
from django.apps import AppConfig
class BallerShotCallerConfig(AppConfig):
name = 'baller_shot_caller'
|
import json
from util import d
import os
__home = os.path.expanduser("~").replace('\\', '/') + "/PixelWeb/"
BASE_SERVER_CONFIG = d({
"id":"server_config",
"display": "server_config",
"preconfig": False,
"presets":[],
"params": [{
"id": "external_access",
"label": "Allow External Access",
"type": "bool",
"default": True,
"help":"On: Other computers on your network can access PixelWeb. Off: LocalHost access only."
},{
"id": "port",
"label": "Server Port",
"type": "int",
"default": 8080,
"help":"Port to listen on."
},{
"id": "load_defaults",
"label": "Load Last Config on Start",
"type": "bool",
"default": False,
"help":"Load last driver/controller configuration on application start."
},
{
"id": "show_debug",
"label": "Show Debug in Console",
"type": "bool",
"default": False,
"help":"Show BiblioPixel debug in server console (not in main UI)."
},{
"id": "mod_dirs",
"label": "Module Directories",
"type": "str_multi",
"default": [],
"help":"Directories from which to load modules (animations, drivers, controllers, pre-configs).",
"replace": {"\\":"/"}
},
{
"id": "off_anim_time",
"label": "All Off Timeout",
"type": "int",
"default": 10,
"min": 0,
"max": 3600,
"help":"Keep display off when not running an animation by actively turning all pixels off every X seconds. Set to 0 to disable."
},]
})
def setHome(home):
global __home
__home = home
def genDefaultConfig(params):
c = {}
for p in params:
p = d(p)
c[p.id] = p.default
return c
def initConfig():
try:
if not os.path.exists(__home):
print "Creating {}".format(__home)
os.makedirs(__home)
except:
print "Failed to initialize PixelWeb config!"
def readConfig(file, key = None, path=None):
if not path:
path = __home
data = {}
try:
with open(path + "/" + file + ".json", "r") as fp:
data = json.load(fp, encoding='utf-8')
if key:
if key in data:
data = data[key]
else:
data = {}
except Exception, e:
pass
return d(data)
def writeConfig(file, data, key = None, path=None):
if not path:
path = __home
base = data
if key:
base = readConfig(file, path=path)
base[key] = data
with open(path + "/" + file + ".json", "w") as fp:
json.dump(base, fp, indent=4, sort_keys=True)
def paramsToDict(params):
data = {}
for p in params:
if "default" not in p:
p.default = None
data[p.id] = p.default
return data
def readServerConfig():
data = readConfig("config", path=__home)
base = paramsToDict(BASE_SERVER_CONFIG.params)
if len(data.keys()) == 0:
data = paramsToDict(BASE_SERVER_CONFIG.params)
elif len(data.keys()) != len(base.keys()):
data.upgrade(base)
return d(data)
def writeServerConfig(data):
writeConfig("config", data)
def upgradeServerConfig():
b = genDefaultConfig(BASE_SERVER_CONFIG.params)
cfg = readServerConfig()
cfg.upgrade(b)
writeServerConfig(cfg)
|
"""
Created on Mon Sep 29 21:25:13 2014
@author: 27182_000
"""
ans = 1
for n in range(999,1,-1):
for m in range(999,1,-1):
num = n*m
if str(num) == str(num)[::-1] and num > ans:
ans = num
print ans
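# Known result for cross-checking: 913 * 993 = 906609 is the largest
# palindrome made from two 3-digit factors, so this should print 906609.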
|
from django.http import HttpResponse
from django.shortcuts import render
def index(request):
return HttpResponse('Page content')
def custom(request):
return render(request, 'custom.html', {})
|
from __future__ import unicode_literals
from django.db import models, migrations
from django.utils.timezone import utc
import datetime
class Migration(migrations.Migration):
dependencies = [
('content', '0009_auto_20150829_1417'),
]
operations = [
migrations.CreateModel(
name='Message',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('msg_subject', models.CharField(max_length=255, verbose_name='Subject')),
('msg_text', models.TextField(verbose_name='Text')),
('msg_author', models.EmailField(max_length=75, verbose_name='From')),
('recv_date', models.DateTimeField(editable=False, verbose_name='Date Received', default=datetime.datetime(2015, 10, 19, 4, 10, 29, 712166, tzinfo=utc))),
],
options={
},
bases=(models.Model,),
),
migrations.AlterField(
model_name='event',
name='pub_date',
field=models.DateTimeField(editable=False, verbose_name='Date Published', default=datetime.datetime(2015, 10, 19, 4, 10, 29, 711232, tzinfo=utc)),
preserve_default=True,
),
migrations.AlterField(
model_name='post',
name='pub_date',
field=models.DateTimeField(editable=False, verbose_name='Date Published', default=datetime.datetime(2015, 10, 19, 4, 10, 29, 711716, tzinfo=utc)),
preserve_default=True,
),
]
|
'''
Manage Ruby gem packages. (see https://rubygems.org/ )
'''
from pyinfra.api import operation
from pyinfra.facts.gem import GemPackages
from .util.packaging import ensure_packages
@operation
def packages(packages=None, present=True, latest=False, state=None, host=None):
'''
Add/remove/update gem packages.
+ packages: list of packages to ensure
+ present: whether the packages should be installed
+ latest: whether to upgrade packages without a specified version
Versions:
Package versions can be pinned like gem: ``<pkg>:<version>``.
Example:
.. code:: python
# Note: Assumes that 'gem' is installed.
gem.packages(
name='Install rspec',
packages=['rspec'],
)
'''
yield ensure_packages(
host, packages, host.get_fact(GemPackages), present,
install_command='gem install',
uninstall_command='gem uninstall',
upgrade_command='gem update',
version_join=':',
latest=latest,
)
|
import sys, os
sys.path.insert(0, os.path.abspath('../'))
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.doctest']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'partpy'
copyright = u'2013, Taylor "Nekroze" Lawson'
version = '1.2'
release = '1.2.4'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = 'default'
html_static_path = ['_static']
htmlhelp_basename = 'partpydoc'
latex_elements = {
}
latex_documents = [
('index', 'partpy.tex', u'partpy Documentation',
u'Taylor "Nekroze" Lawson', 'manual'),
]
man_pages = [
('index', 'partpy', u'partpy Documentation',
[u'Taylor "Nekroze" Lawson'], 1)
]
texinfo_documents = [
('index', 'partpy', u'partpy Documentation',
u'Taylor "Nekroze" Lawson', 'partpy', 'One line description of project.',
'Miscellaneous'),
]
epub_title = u'partpy'
epub_author = u'Taylor "Nekroze" Lawson'
epub_publisher = u'Taylor "Nekroze" Lawson'
epub_copyright = u'2013, Taylor "Nekroze" Lawson'
|
import pytest
from clustaar.authorize.conditions import TrueCondition
@pytest.fixture
def condition():
return TrueCondition()
class TestCall(object):
def test_returns_true(self, condition):
assert condition({})
|
from ulnoiot import *
from ulnoiot.shield.onboardled import blue
blue.high() # make sure it's off (it's reversed)
button("b1", d6, pullup=False, threshold=2)
run(5)
|
from __future__ import absolute_import
from . import common
from .. import rw
from ..glossary import DEFAULT_TIMEOUT
from .base import BaseMessage
class CancelMessage(BaseMessage):
__slots__ = BaseMessage.__slots__ + (
'ttl',
'tracing',
'why',
)
def __init__(self, ttl=DEFAULT_TIMEOUT, tracing=None, why=None, id=0):
super(CancelMessage, self).__init__(id)
self.ttl = ttl
self.tracing = tracing or common.Tracing(0, 0, 0, 0)
self.why = why or ''
cancel_rw = rw.instance(
CancelMessage,
('ttl', rw.number(4)), # ttl:4
('tracing', common.tracing_rw), # tracing:24
('why', rw.len_prefixed_string(rw.number(2))), # why:2
)
|
import pygame
import os
from color import *
from pygame.locals import *
class Score(pygame.sprite.Sprite):
    def __init__(self, score, player, width, height):
        super(Score, self).__init__()
self.score = int(score)
self.color = None
self.player = player
self.bossHeight = height
self.bossWidth = width
self.size = 70
self.update()
def update(self):
self.score = int(self.score)
self.whatColor()
self.score = str(self.score)
scoreFont = pygame.font.Font('./fonts/Dearest.ttf', self.size)
        # We need the int for the comparisons in 'self.whatColor',
        # and 'scoreFont.render' only takes 'str' as an argument
self.surface = scoreFont.render(self.score, True, self.color)
self.rect = self.surface.get_rect()
if self.player == 1:
self.rect.center = (55, self.bossHeight - 50)
elif self.player == -1:
self.rect.center = (self.bossWidth - 55, self.bossHeight - 50)
def whatColor(self):
self.size = 80
if self.score < 6:
self.color = white
elif self.score < 8:
self.color = aqua
elif self.score < 10:
self.color = blueGreen
else:
self.color = lime
self.size = 100
def updateScore(self, score):
self.score = score
    def __repr__(self):
        return "<Score of player %s = %s>" % (self.player, self.score)
|
from __future__ import unicode_literals
from django.db import models
from django.utils.timezone import now, timedelta
Q = models.Q
class LogisticJob(models.Model):
LOCK_FOR = (
(60*15, '15 minutes'),
(60*30, '30 minutes'),
(60*45, '45 minutes'),
(60*60, '1 hour'),
(60*60*3, '3 hours'),
(60*60*6, '6 hours'),
(60*60*9, '9 hours'),
(60*60*12, '12 hours'),
(60*60*18, '18 hours'),
(60*60*24, '24 hours'),
)
RESOURCE = (
('wood', 'Wood'),
('stone', 'Stone'),
('food', 'Food'),
# ('cole', 'Cole'),
)
    SPEED = (
        ('-1', 'No horses'),
        ('1001', 'Gold horses (test)'),
        ('1004', 'Ruby horses 1 (test)'),
        ('1007', 'Ruby horses 2 (test)'),
    )
player = models.ForeignKey("gge_proxy_manager.Player", related_name='logistic_jobs')
castle = models.ForeignKey("gge_proxy_manager.Castle", related_name='outgoing_logistic_jobs')
receiver = models.ForeignKey("gge_proxy_manager.Castle", related_name='incoming_logistic_jobs')
speed = models.CharField(max_length=5, choices=SPEED)
is_active = models.BooleanField(default=True)
resource = models.CharField(max_length=6, choices=RESOURCE)
gold_limit = models.PositiveIntegerField(null=True, blank=True, default=None)
resource_limit = models.PositiveIntegerField()
lock_for = models.PositiveIntegerField(choices=LOCK_FOR, default=60*45)
locked_till = models.DateTimeField(default=now, db_index=True)
class Meta:
app_label = 'gge_proxy_manager'
def delay(self):
self.locked_till = now() + timedelta(seconds=self.lock_for)
self.save()
def last_succeed(self):
from .log import LogisticLog
log = LogisticLog.objects.filter(castle=self.castle,
receiver=self.receiver,
resource=self.resource).order_by('-sent').first()
if log:
return log.sent
return None
class ProductionJob(models.Model):
player = models.ForeignKey("gge_proxy_manager.Player", related_name='production_jobs')
castle = models.ForeignKey("gge_proxy_manager.Castle", related_name='production_jobs')
unit = models.ForeignKey("gge_proxy_manager.Unit")
    valid_until = models.PositiveIntegerField(null=True, blank=True, default=None,
                                              help_text='Up to which quantity the job remains valid')
is_active = models.BooleanField(default=True)
gold_limit = models.PositiveIntegerField(null=True, blank=True, default=None)
food_balance_limit = models.IntegerField(null=True, blank=True, default=None)
wood_limit = models.PositiveIntegerField(null=True, blank=True, default=None)
stone_limit = models.PositiveIntegerField(null=True, blank=True, default=None)
    burst_mode = models.BooleanField(default=False, help_text='Ignores the food balance')
locked_till = models.DateTimeField(default=now, db_index=True)
last_fault_reason = models.CharField(null=True, default=None, max_length=128)
last_fault_date = models.DateTimeField(default=None, null=True)
class Meta:
app_label = 'gge_proxy_manager'
def last_succeed(self):
from .log import ProductionLog
log = ProductionLog.objects.filter(castle=self.castle, unit=self.unit).order_by('-produced').first()
if log:
return log.produced
return None
|
"""Parse ISI journal abbreviations website."""
try:
from html.parser import HTMLParser
except ImportError:
from HTMLParser import HTMLParser
class ISIJournalParser(HTMLParser):
"""Parser for ISI Web of Knowledge journal abbreviation pages.
**Note:**
Due to the ISI pages containing malformed html one must call
the :py:meth:`ISIJournalParser.finalize` method once
parsing is complete to ensure all entries are read correctly.
"""
def __init__(self):
HTMLParser.__init__(self)
self.journal_names = []
self.journal_abbreviations = []
self.parser_state = None
self.data_entities = None
def handle_starttag(self, tag, attrs):
if tag not in ('dd', 'dt'):
return
self._storedata()
self.parser_state = tag
self.data_entities = []
def handle_data(self, data):
if self.parser_state in ('dd', 'dt'):
self.data_entities.append(data)
def _storedata(self):
if self.data_entities and self.parser_state:
if self.parser_state == 'dt':
self.journal_names.append(''.join(self.data_entities).strip())
elif self.parser_state == 'dd':
self.journal_abbreviations.append(''.join(self.data_entities).strip())
def finalize(self):
"""Ensures all data is stored.
This method must be called when parsing is complete.
"""
self._storedata()
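
# A minimal usage sketch (the HTML fragment is hypothetical):
#   parser = ISIJournalParser()
#   parser.feed("<dl><dt>Journal of Examples</dt><dd>J. Example</dd></dl>")
#   parser.finalize()  # required: flushes the last pending entry
#   print(parser.journal_names, parser.journal_abbreviations)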
|
import glob
import os
import pandas as pd
class CTD(object):
    """Builds SVM train/predict data files from per-stock CSV price history."""
def __init__(self):
self.format_l = []
self.td_l = []
self.iternum = 0
self.formatname = ""
def feature(self,index):
format_l = self.format_l
        # Relative change of column 1 plus relative change of column 4
        # between rows index+1 and index+3
        feature = (
            (float(format_l[index + 1][1]) - float(format_l[index + 3][1]))
            / float(format_l[index + 1][1])
            + (float(format_l[index + 1][4]) - float(format_l[index + 3][4]))
            / float(format_l[index + 1][4])
        )
if (feature == 0):
feature = 0.0001
return feature
def format(self,path):
a = path.split('/')
self.formatname = a[2]
with open(path, 'r') as f:
a = f.read()
f = a.split('\n')
f.pop(0)
self.iternum = len(f)-3
for a in range(len(f)):
a = f[a].split(',')
a.pop(0)
self.format_l.append(a)
def trainData(self):
for index in range(self.iternum):
try:
format_l = self.format_l
classify = (float(format_l[index][3])-float(format_l[index+1][3]))/float(format_l[index+1][3])*100
feature = self.feature(index)
a = ['0']+format_l[index+1]+format_l[index+2]+format_l[index+3]+[feature]
self.td_l.append(a)
except:
pass
def storage_csv(self):
rowname=['classify','feature','1-open','1-high','1-low','1-close','1-volume','1-adj close','2-open','2-high','2-low','2-close','2-volume','2-adj close','3-open','3-high','3-low','3-close','3-volume','3-adj close']
df = pd.DataFrame(self.td_l,columns=rowname)
with open('./traindata/td_'+self.formatname+'.csv', 'w') as f:
df.to_csv(f)
            print('td_'+self.formatname+'.csv is created!')
def storage_txt(self,pathname):
with open('./predict/data/'+pathname,'ab') as f:
for a in self.td_l:
b = str(a[0])+'\t'
for c in range(1,20):
d = str(c)+':'+str(a[c])+'\t'
b += d
f.write(b+'\n')
def run(self):
path = './stock/*'
paths=glob.glob(path)
for index,path in enumerate(paths,1):
print(index)
self.format_l = []
self.td_l = []
self.format(path)
self.trainData()
path = path.split('/')
pathname = path[2]
self.storage_txt(pathname)
print os.popen("./bin/svm-scale -s predict_scale_model ./predict/data/"+pathname+" > ./predict/scale/"+pathname+"predict_data.scale").read()
print os.popen("./bin/rvkde --best --predict --classify -v ./train/scale/"+pathname+"train_data.scale -V ./predict/scale/"+pathname+"predict_data.scale > ./predict/result/"+pathname+"predict_result").read()
def main():
ctd = CTD()
ctd.run()
if __name__ == '__main__' :
main()
|
"""
Running the template pre-processor standalone.
Input: Templated Antimony model (stdin)
Output: Expanded Antimony model (stdout)
"""
import fileinput
import os
import sys
directory = os.path.dirname(os.path.abspath(__file__))
path = os.path.join(directory, "TemplateSB")
sys.path.append(path)
from template_processor import TemplateProcessor
template_stg = ''
for line in fileinput.input():
template_stg += "\n" + line
processor = TemplateProcessor(template_stg)
expanded_stg = processor.do()
sys.stdout.write(expanded_stg)
|
from bottle import route, default_app
app = default_app()
data = {
"id": 78874,
"seriesName": "Firefly",
"aliases": [
"Serenity"
],
"banner": "graphical/78874-g3.jpg",
"seriesId": "7097",
"status": "Ended",
"firstAired": "2002-09-20",
"network": "FOX (US)",
"networkId": "",
"runtime": "45",
"genre": [
"Drama",
"Science-Fiction"
],
"overview": "In the far-distant future, Captain Malcolm \"Mal\" Reynolds is a renegade former brown-coat sergeant, now turned smuggler & rogue, "
"who is the commander of a small spacecraft, with a loyal hand-picked crew made up of the first mate, Zoe Warren; the pilot Hoban \"Wash\" Washburn; "
"the gung-ho grunt Jayne Cobb; the engineer Kaylee Frye; the fugitives Dr. Simon Tam and his psychic sister River. "
"Together, they travel the far reaches of space in search of food, money, and anything to live on.",
"lastUpdated": 1486759680,
"airsDayOfWeek": "",
"airsTime": "",
"rating": "TV-14",
"imdbId": "tt0303461",
"zap2itId": "EP00524463",
"added": "",
"addedBy": None,
"siteRating": 9.5,
"siteRatingCount": 472,
}
@route('/api')
def api():
return data
|
import RPi.GPIO as GPIO
from common.adafruit.Adafruit_MCP230xx.Adafruit_MCP230xx import Adafruit_MCP230XX
class Relay(object):
    _mcp23017_chip = {}  # Conceivably we could have up to 8 of these, since a bus can hold up to 8 MCP chips.
def __init__(self, mcp_pin, i2c_address=0x27):
"""
Initialize a relay
:param mcp_pin: BCM gpio number that is connected to a relay
:return:
"""
self.ON = 0
self.OFF = 1
self._i2c_address = i2c_address
self._mcp_pin = mcp_pin
if GPIO.RPI_REVISION == 1:
i2c_busnum = 0
else:
i2c_busnum = 1
        if self._i2c_address not in self._mcp23017_chip:
self._mcp23017_chip[self._i2c_address] = Adafruit_MCP230XX(busnum=i2c_busnum, address=self._i2c_address, num_gpios=16)
self._relay = self._mcp23017_chip[self._i2c_address]
self._relay.config(self._mcp_pin, self._relay.OUTPUT)
self._relay.output(self._mcp_pin, self.OFF)
self.state = self.OFF
def set_state(self, state):
"""
Set the state of the relay. relay.ON, relay.OFF
:param state:
:return:
"""
if state == self.ON:
self._relay.output(self._mcp_pin, self.ON)
self.state = self.ON
elif state == self.OFF:
self._relay.output(self._mcp_pin, self.OFF)
self.state = self.OFF
def toggle(self):
"""
Toggle the state of a relay
:return:
"""
if self.state == self.ON:
self._relay.output(self._mcp_pin, self.OFF)
self.state = self.OFF
else:
self._relay.output(self._mcp_pin, self.ON)
self.state = self.ON
def get_state(self):
return self.state
if __name__ == '__main__':
import time
pause = .15
for pin in range(16):
print("Pin: %s" % pin)
r = Relay(pin)
r.set_state(r.ON)
time.sleep(pause)
r.set_state(r.OFF)
time.sleep(pause)
r.toggle()
time.sleep(pause)
r.toggle()
time.sleep(pause)
r1 = Relay(10)
r2 = Relay(2)
r3 = Relay(15)
r1.set_state(r1.ON)
print(r1._mcp_pin)
r2.set_state(r2.ON)
print(r2._mcp_pin)
r3.set_state(r3.ON)
print(r3._mcp_pin)
time.sleep(1)
r1.set_state(r1.OFF)
r2.set_state(r2.OFF)
r3.set_state(r3.OFF)
|
import os
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_bcrypt import Bcrypt
from flask_sockets import Sockets
app = Flask(__name__, static_folder="../static/dist", template_folder="../static")
if os.environ.get('PRODUCTION'):
app.config.from_object('config.ProductionConfig')
else:
app.config.from_object('config.TestingConfig')
db = SQLAlchemy(app)
bcrypt = Bcrypt(app)
sockets = Sockets(app)
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('job_board', '0004_jobpost_is_from_recruiting_agency'),
]
operations = [
migrations.AlterField(
model_name='jobpost',
name='location',
field=models.CharField(choices=[('CH', 'Chicago'), ('CT', 'Chicago and Temporarily Remote'), ('CR', 'Chicago and Remote'), ('RO', 'Remote Only')], default='CH', help_text='ChiPy is a locally based group. Position must not move candidate out of the Chicago area. Working remote or commuting is acceptable. Any position requiring relocation out of the Chicago land is out of scope of the mission of the group.', max_length=2),
),
]
|
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn import tree
from subprocess import call
X = pd.read_csv('Datasets/agaricus-lepiota.data', header=None,
                names=['label', 'cap-shape', 'cap-surface', 'cap-color',
                       'bruises', 'odor', 'gill-attachment',
                       'gill-spacing', 'gill-size', 'gill-color',
                       'stalk-shape', 'stalk-root',
                       'stalk-surface-above-ring', 'stalk-surface-below-ring',
                       'stalk-color-above-ring', 'stalk-color-below-ring',
                       'veil-type', 'veil-color', 'ring-number', 'ring-type',
                       'spore-print-color', 'population', 'habitat'])
X.replace(to_replace='?', value=np.NaN, inplace=True)
X.dropna(axis=0, inplace=True)
print(X.shape)
X['label'] = X['label'].map({'e': 1, 'p': 0})
y = X['label'].copy()
X.drop(labels=['label'], axis=1, inplace=True)
X = pd.get_dummies(X)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=7)
model = tree.DecisionTreeClassifier()
model.fit(X_train, y_train)
score = model.score(X_test, y_test)
print('High-Dimensionality Score: %f' % round((score * 100), 3))
tree.export_graphviz(model.tree_, out_file='tree.dot', feature_names=X.columns)
|
import os
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
Base = declarative_base()
class DBConnector():
    '''
    Connector for a database where every row holds the details of what one
    employee was paid for an entire month.
    '''
@classmethod
def get_session(cls):
database_path = os.environ["SQL_DATABASE"]
engine = create_engine(database_path)
session = sessionmaker(bind=engine)()
return session
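# A minimal usage sketch, assuming the SQL_DATABASE environment variable holds
# a valid SQLAlchemy URL (the sqlite path below is hypothetical):
#
#     os.environ["SQL_DATABASE"] = "sqlite:///payroll.db"
#     session = DBConnector.get_session()
#     ...  # run queries through the session
#     session.close()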
|
import sys
import urllib.parse
import urllib.request
def main():
search = sys.argv[1]
url = 'http://rarbg.to/torrents.php?order=seeders&by=DESC&search='
url = url + search
print(url)
req = urllib.request.Request(url, headers={'User-Agent' : "Magic Browser"})
resp = urllib.request.urlopen(req)
respData = resp.read()
if __name__ == '__main__':
main()
|
"""
Module containing classes for HTTP client/server interactions
"""
try:
from urllib.error import HTTPError, URLError
from urllib.parse import urlencode
except ImportError:
from urllib2 import HTTPError, URLError
from urllib import urlencode
import socket
from pyowm.exceptions import api_call_error, unauthorized_error, not_found_error
from pyowm.webapi25.configuration25 import ROOT_API_URL
class WeatherHttpClient(object):
    """
    An HTTP client class for the OWM web API. The class can leverage a
    caching mechanism
    :param API_key: a Unicode object representing the OWM web API key
    :type API_key: Unicode
    :param cache: an *OWMCache* concrete instance that will be used to
        cache OWM web API responses.
    :type cache: an *OWMCache* concrete instance
    :param subscription_type: the type of OWM web API subscription to be wrapped.
        The value is used to pick the proper API subdomain for HTTP calls.
        Defaults to: 'free'
    :type subscription_type: str
    """
    API_SUBSCRIPTION_SUBDOMAINS = {
        'free': 'api',
        'pro': 'pro'
    }
def __init__(self, API_key, cache, subscription_type='free'):
self._API_key = API_key
self._cache = cache
self._API_root_URL = ROOT_API_URL % \
(self.API_SUBSCRIPTION_SUBDOMAINS[subscription_type],)
def _lookup_cache_or_invoke_API(self, cache, API_full_url, timeout):
cached = cache.get(API_full_url)
if cached:
return cached
else:
try:
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
response = urlopen(API_full_url, None, timeout)
except HTTPError as e:
if '401' in str(e):
raise unauthorized_error.UnauthorizedError('Invalid API key')
if '404' in str(e):
raise not_found_error.NotFoundError('The resource was not found')
if '502' in str(e):
raise api_call_error.BadGatewayError(str(e), e)
except URLError as e:
raise api_call_error.APICallError(str(e), e)
else:
data = response.read().decode('utf-8')
cache.set(API_full_url, data)
return data
def call_API(self, API_endpoint_URL, params_dict,
timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
"""
Invokes a specific OWM web API endpoint URL, returning raw JSON data.
:param API_endpoint_URL: the API endpoint to be invoked
:type API_endpoint_URL: str
:param params_dict: a dictionary containing the query parameters to be
used in the HTTP request (given as key-value couples in the dict)
:type params_dict: dict
:param timeout: how many seconds to wait for connection establishment
(defaults to ``socket._GLOBAL_DEFAULT_TIMEOUT``)
:type timeout: int
:returns: a string containing raw JSON data
:raises: *APICallError*
"""
url = self._build_full_URL(API_endpoint_URL, params_dict)
return self._lookup_cache_or_invoke_API(self._cache, url, timeout)
def _build_full_URL(self, API_endpoint_URL, params_dict):
"""
Adds the API key and the query parameters dictionary to the specified
API endpoint URL, returning a complete HTTP request URL.
:param API_endpoint_URL: the API endpoint base URL
:type API_endpoint_URL: str
:param params_dict: a dictionary containing the query parameters to be
used in the HTTP request (given as key-value couples in the dict)
:type params_dict: dict
:param API_key: the OWM web API key
:type API_key: str
:returns: a full string HTTP request URL
"""
        url = self._API_root_URL + API_endpoint_URL
params = params_dict.copy()
if self._API_key is not None:
params['APPID'] = self._API_key
return self._build_query_parameters(url, params)
def _build_query_parameters(self, base_URL, params_dict):
"""
Turns dictionary items into query parameters and adds them to the base
URL
:param base_URL: the base URL whom the query parameters must be added
to
:type base_URL: str
:param params_dict: a dictionary containing the query parameters to be
used in the HTTP request (given as key-value couples in the dict)
:type params_dict: dict
:returns: a full string HTTP request URL
"""
return base_URL + '?' + urlencode(params_dict)
def __repr__(self):
return "<%s.%s - cache=%s>" % \
(__name__, self.__class__.__name__, repr(self._cache))
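# A minimal usage sketch, assuming a valid OWM API key and a cache instance
# (pyowm ships a no-op cache; the endpoint below is illustrative):
#
#     from pyowm.caches.nullcache import NullCache
#     client = WeatherHttpClient('my-API-key', NullCache())
#     raw_json = client.call_API(
#         'http://api.openweathermap.org/data/2.5/weather', {'q': 'London,uk'})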
|
import unittest
import itertools
class TestWorld(object):
def __init__(self, **kw):
self.__dict__.update(kw)
self.components = self
self.entities = set()
self.new_entity_id = itertools.count().__next__
self.new_entity_id() # skip id 0
for comp in list(kw.values()):
comp.world = self
class TestComponent(dict):
def __init__(self):
self.entities = set()
def set(self, entity):
data = TestData()
self[entity] = data
self.entities.add(entity)
return data
def remove(self, entity):
del self[entity]
class TestData(object):
attr = 'deadbeef'
def __init__(self, **kw):
self.__dict__.update(kw)
class EntityTestCase(unittest.TestCase):
def test_repr(self):
from grease import Entity
entity = Entity(TestWorld())
self.assertTrue(repr(entity).startswith(
'<Entity id: %s of TestWorld' % entity.entity_id),
('<Entity id: %s of TestWorld' % entity.entity_id, repr(entity)))
def test_accessor_getattr_for_nonexistant_component(self):
from grease import Entity
comp = TestComponent()
world = TestWorld(test=comp)
entity = Entity(world)
self.assertTrue(entity not in comp)
self.assertRaises(AttributeError, getattr, entity, 'foo')
def test_accessor_getattr_for_non_member_entity(self):
from grease import Entity
comp = TestComponent()
world = TestWorld(test=comp)
entity = Entity(world)
accessor = entity.test
self.assertFalse(entity in comp)
self.assertRaises(AttributeError, getattr, accessor, 'attr')
def test_accessor_getattr_for_member_entity(self):
from grease import Entity
comp = TestComponent()
world = TestWorld(test=comp)
entity = Entity(world)
comp.set(entity)
self.assertTrue(entity in comp)
self.assertEqual(entity.test.attr, 'deadbeef')
def test_accessor_setattr_adds_non_member_entity(self):
from grease import Entity
comp = TestComponent()
world = TestWorld(test=comp)
entity = Entity(world)
self.assertFalse(entity in comp)
entity.test.attr = 'foobar'
self.assertEqual(entity.test.attr, 'foobar')
self.assertTrue(entity in comp)
def test_accessor_setattr_for_member_entity(self):
from grease import Entity
comp = TestComponent()
world = TestWorld(test=comp)
entity = Entity(world)
comp.set(entity)
self.assertNotEqual(entity.test.attr, 'spam')
entity.test.attr = 'spam'
self.assertTrue(entity in comp)
self.assertEqual(entity.test.attr, 'spam')
def test_eq(self):
from grease import Entity
world = TestWorld()
e1 = Entity(world)
e2 = Entity(world)
self.assertNotEqual(e1, e2)
e2.entity_id = e1.entity_id
self.assertEqual(e1, e2)
otherworld = TestWorld()
e3 = Entity(otherworld)
self.assertNotEqual(e1, e3)
self.assertNotEqual(e2, e3)
e3.entity_id = e1.entity_id
self.assertNotEqual(e1, e3)
self.assertNotEqual(e2, e3)
def test_delattr(self):
from grease import Entity
comp = TestComponent()
world = TestWorld(test=comp)
entity = Entity(world)
comp.set(entity)
self.assertTrue(entity in comp)
del entity.test
self.assertFalse(entity in comp)
def test_entity_id(self):
from grease import Entity
world = TestWorld()
entity1 = Entity(world)
entity2 = Entity(world)
self.assertTrue(entity1.entity_id > 0)
self.assertTrue(entity2.entity_id > 0)
self.assertNotEqual(entity1.entity_id, entity2.entity_id)
def test_delete_exists(self):
from grease import Entity
world = TestWorld()
self.assertEqual(world.entities, set())
entity1 = Entity(world)
entity2 = Entity(world)
self.assertEqual(world.entities, set([entity1, entity2]))
self.assertTrue(entity1.exists)
self.assertTrue(entity2.exists)
entity1.delete()
self.assertEqual(world.entities, set([entity2]))
self.assertFalse(entity1.exists)
self.assertTrue(entity2.exists)
entity2.delete()
self.assertEqual(world.entities, set())
self.assertFalse(entity1.exists)
self.assertFalse(entity2.exists)
def test_entity_subclass_slots(self):
from grease import Entity
class NewEntity(Entity):
pass
world = TestWorld()
entity = NewEntity(world)
self.assertRaises(AttributeError, setattr, entity, 'notanattr', 1234)
def test_entity_subclass_cant_have_slots(self):
from grease import Entity
self.assertRaises(TypeError,
type, 'Test', (Entity,), {'__slots__': ('foo', 'bar')})
def test_entity_subclass_init(self):
from grease import Entity
stuff = []
class TestEntity(Entity):
def __init__(self, world, other):
stuff.append(world)
stuff.append(other)
world = TestWorld()
TestEntity(world, self)
self.assertEqual(stuff, [world, self])
class EntityComponentAccessorTestCase(unittest.TestCase):
def test_getattr(self):
from grease.entity import EntityComponentAccessor
from grease import Entity
world = TestWorld()
entity = Entity(world)
component = {entity: TestData(foo=5)}
accessor = EntityComponentAccessor(component, entity)
self.assertEqual(accessor.foo, 5)
self.assertRaises(AttributeError, getattr, accessor, 'bar')
entity2 = Entity(world)
accessor = EntityComponentAccessor(component, entity2)
self.assertRaises(AttributeError, getattr, accessor, 'foo')
self.assertRaises(AttributeError, getattr, accessor, 'bar')
def test_setattr_member_entity(self):
from grease.entity import EntityComponentAccessor
from grease import Entity
world = TestWorld()
entity = Entity(world)
data = TestData(foo=5)
accessor = EntityComponentAccessor({entity: data}, entity)
self.assertEqual(data.foo, 5)
accessor.foo = 66
self.assertEqual(data.foo, 66)
accessor.bar = '!!'
self.assertEqual(data.bar, '!!')
def test_setattr_nonmember_entity(self):
from grease.entity import EntityComponentAccessor
from grease import Entity
world = TestWorld()
entity = Entity(world)
component = TestComponent()
accessor = EntityComponentAccessor(component, entity)
self.assertRaises(AttributeError, getattr, entity, 'baz')
self.assertTrue(entity not in component)
accessor.baz = 1000
self.assertTrue(entity in component)
self.assertEqual(accessor.baz, 1000)
self.assertEqual(component[entity].baz, 1000)
def test_truthiness(self):
from grease.entity import EntityComponentAccessor
from grease import Entity
world = TestWorld()
entity = Entity(world)
component = TestComponent()
accessor = EntityComponentAccessor(component, entity)
self.assertFalse(accessor)
component[entity] = 456
self.assertTrue(accessor)
if __name__ == '__main__':
unittest.main()
|
from django.db import models
from django.contrib.sites.models import Site
class Link(models.Model):
url = models.URLField(max_length=512)
site = models.ForeignKey(Site, on_delete=models.SET_NULL, null=True)
request_times = models.PositiveIntegerField(default=0)
updated = models.DateTimeField(auto_now=True)
created = models.DateTimeField(auto_now_add=True)
def __str__(self):
return '{}-{}'.format(self.pk, self.url)
class RateLimit(models.Model):
ip = models.GenericIPAddressField(unique=True)
start_time = models.DateTimeField()
count = models.PositiveIntegerField(default=0)
def __str__(self):
return self.ip
|
class InvalidValueState(ValueError):
pass
|
import numpy as np
import sys
import scipy
from scipy import stats
data_file = sys.argv[1]
data = np.loadtxt(data_file)
slope, intercept, r_value, p_value, std_err = stats.linregress(data[499:2499,0], data[499:2499,1])
nf = open('linear_reg.dat', 'w')
nf.write("Linear Regression for data between %5d ps (frame: 499) and %5d ps (frame 2499) \n" %(data[499][0], data[2499][0]))
nf.write("slope: %10.5E Angstrom^2 ps^-1 \n" %(slope))
nf.write("intercept: %10.5E Angstrom^2\n" %(intercept))
nf.write("R^2: %10.5f \n" %(r_value**2))
nf.write('Diffusion coeff: %10.5E Angstrom^2 ps^-1 \n' %(slope/6.0))
nf.write('Diffusion coeff: %10.5E m^2 s^-1 \n' %(slope*10**(-8)/6.0))
nf.close()
|
import sys
import os
import urllib.error
import urllib.request
import path_utils
def download_url(source_url, target_path):
if os.path.exists(target_path):
return False, "Target path [%s] already exists" % target_path
contents = None
try:
with urllib.request.urlopen(source_url) as f:
contents = f.read().decode("utf8")
except urllib.error.HTTPError as httpex:
return False, "Downloading failed: [%s]" % httpex
with open(target_path, "w") as f:
f.write(contents)
return True, None
def puaq():
print("Usage: %s source_url target_path" % path_utils.basename_filtered(__file__))
sys.exit(1)
if __name__ == "__main__":
if len(sys.argv) < 3:
puaq()
source_url = sys.argv[1]
target_path = sys.argv[2]
v, r = download_url(source_url, target_path)
if not v:
print(r)
sys.exit(1)
|
import os
import numpy as np
from plantcv.plantcv.threshold import binary as binary_threshold
from plantcv.plantcv import params
from plantcv.plantcv import fatal_error
from plantcv.plantcv._debug import _debug
import pandas as pd
from plotnine import ggplot, aes, geom_line, labels, scale_color_manual
def _hist_gray(gray_img, bins, lower_bound, upper_bound, mask=None):
""" Prepare the ready to plot histogram data
Inputs:
gray_img = grayscale image to analyze
bins = divide the data into n evenly spaced bins
lower_bound = the lower bound of the bins (x-axis min value)
upper_bound = the upper bound of the bins (x-axis max value)
mask = binary mask, calculate histogram from masked area only (default=None)
Returns:
bin_labels = an array of histogram bin labels
hist_percent = an array of histogram represented by percent values
hist_gray_data = an array of histogram (original values)
:param gray_img: numpy.ndarray
:param bins: int
:param lower_bound: int
:param upper_bound: int
:param mask: numpy.ndarray
:return bin_labels: numpy.ndarray
:return hist_percent: numpy.ndarray
:return hist_gray_data: numpy.ndarray
"""
params.device += 1
debug = params.debug
# Apply mask if one is supplied
if mask is not None:
min_val = np.min(gray_img)
pixels = len(np.where(mask > 0)[0])
# apply plant shaped mask to image
params.debug = None
mask1 = binary_threshold(mask, 0, 255, 'light')
mask1 = (mask1 / 255)
masked = np.where(mask1 != 0, gray_img, min_val - 5000)
else:
pixels = gray_img.shape[0] * gray_img.shape[1]
masked = gray_img
params.debug = debug
# Store histogram data
hist_gray_data, hist_bins = np.histogram(masked, bins, (lower_bound, upper_bound))
# make hist percentage for plotting
hist_percent = (hist_gray_data / float(pixels)) * 100
# use middle value of every bin as bin label
bin_labels = np.array([np.average([hist_bins[i], hist_bins[i+1]]) for i in range(0, len(hist_bins) - 1)])
return bin_labels, hist_percent, hist_gray_data
# hist_data = pd.DataFrame({'pixel intensity': bin_labels, 'proportion of pixels (%)': hist_percent})
# return hist_data
def histogram(img, mask=None, bins=100, lower_bound=None, upper_bound=None, title=None, hist_data=False):
"""Plot histograms of each input image channel
Inputs:
img = an RGB or grayscale image to analyze
mask = binary mask, calculate histogram from masked area only (default=None)
bins = divide the data into n evenly spaced bins (default=100)
lower_bound = the lower bound of the bins (x-axis min value) (default=None)
upper_bound = the upper bound of the bins (x-axis max value) (default=None)
title = a custom title for the plot (default=None)
hist_data = return the frequency distribution data if True (default=False)
Returns:
fig_hist = histogram figure
hist_df = dataframe with histogram data, with columns "pixel intensity" and "proportion of pixels (%)"
:param img: numpy.ndarray
:param mask: numpy.ndarray
:param bins: int
:param lower_bound: int
:param upper_bound: int
:param title: str
:param hist_data: bool
:return fig_hist: plotnine.ggplot.ggplot
:return hist_df: pandas.core.frame.DataFrame
"""
if not isinstance(img, np.ndarray):
fatal_error("Only image of type numpy.ndarray is supported input!")
if len(img.shape) < 2:
fatal_error("Input image should be at least a 2d array!")
if mask is not None:
masked = img[np.where(mask > 0)]
img_min, img_max = np.nanmin(masked), np.nanmax(masked)
else:
img_min, img_max = np.nanmin(img), np.nanmax(img)
# for lower / upper bound, if given, use the given value, otherwise, use the min / max of the image
lower_bound = lower_bound if lower_bound is not None else img_min
upper_bound = upper_bound if upper_bound is not None else img_max
if len(img.shape) > 2:
if img.shape[2] == 3:
b_names = ['blue', 'green', 'red']
else:
b_names = [str(i) for i in range(img.shape[2])]
if len(img.shape) == 2:
bin_labels, hist_percent, hist_ = _hist_gray(img, bins=bins, lower_bound=lower_bound, upper_bound=upper_bound,
mask=mask)
hist_df = pd.DataFrame(
{'pixel intensity': bin_labels, 'proportion of pixels (%)': hist_percent, 'hist_count': hist_,
'color channel': ['0' for _ in range(len(hist_percent))]})
else:
# Assumption: RGB image
# Initialize dataframe column arrays
px_int = np.array([])
prop = np.array([])
hist_count = np.array([])
channel = []
for (b, b_name) in enumerate(b_names):
bin_labels, hist_percent, hist_ = _hist_gray(img[:, :, b], bins=bins, lower_bound=lower_bound,
upper_bound=upper_bound, mask=mask)
# Append histogram data for each channel
px_int = np.append(px_int, bin_labels)
prop = np.append(prop, hist_percent)
hist_count = np.append(hist_count, hist_)
channel = channel + [b_name for _ in range(len(hist_percent))]
# Create dataframe
hist_df = pd.DataFrame(
{'pixel intensity': px_int, 'proportion of pixels (%)': prop, 'hist_count': hist_count,
'color channel': channel})
fig_hist = (ggplot(data=hist_df,
mapping=aes(x='pixel intensity', y='proportion of pixels (%)', color='color channel'))
+ geom_line())
if title is not None:
fig_hist = fig_hist + labels.ggtitle(title)
if len(img.shape) > 2 and img.shape[2] == 3:
fig_hist = fig_hist + scale_color_manual(['blue', 'green', 'red'])
# Plot or print the histogram
_debug(visual=fig_hist, filename=os.path.join(params.debug_outdir, str(params.device) + '_hist.png'))
if hist_data is True:
return fig_hist, hist_df
return fig_hist
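# A minimal usage sketch (the file name is hypothetical): read an image with
# plantcv and plot its histogram, also collecting the underlying dataframe.
#
#     from plantcv import plantcv as pcv
#     img, path, fname = pcv.readimage("plant.png")
#     fig_hist, hist_df = histogram(img, bins=100, hist_data=True)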
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('basicviz', '0002_auto_20160717_1939'),
]
operations = [
migrations.AlterField(
model_name='document',
name='name',
field=models.CharField(unique=True, max_length=32),
),
]
|
"""This module contains the classes underlying SoCo's caching system."""
from __future__ import unicode_literals
import threading
from time import time
from . import config
from .compat import dumps
class _BaseCache(object):
"""An abstract base class for the cache."""
# pylint: disable=no-self-use, unused-argument
def __init__(self, *args, **kwargs):
super().__init__()
self._cache = {}
#: `bool`: whether the cache is enabled
self.enabled = True
def put(self, item, *args, **kwargs):
"""Put an item into the cache."""
raise NotImplementedError
def get(self, *args, **kwargs):
"""Get an item from the cache."""
raise NotImplementedError
def delete(self, *args, **kwargs):
"""Delete an item from the cache."""
raise NotImplementedError
def clear(self):
"""Empty the whole cache."""
raise NotImplementedError
class NullCache(_BaseCache):
"""A cache which does nothing.
Useful for debugging.
"""
def put(self, item, *args, **kwargs):
"""Put an item into the cache."""
def get(self, *args, **kwargs):
"""Get an item from the cache."""
return None
def delete(self, *args, **kwargs):
"""Delete an item from the cache."""
def clear(self):
"""Empty the whole cache."""
class TimedCache(_BaseCache):
"""A simple thread-safe cache for caching method return values.
    The cache key is generated from the given ``*args`` and ``**kwargs``.
Items are expired from the cache after a given period of time.
Example:
>>> from time import sleep
>>> cache = TimedCache()
>>> cache.put("item", 'some', kw='args', timeout=3)
>>> # Fetch the item again, by providing the same args and kwargs.
>>> assert cache.get('some', kw='args') == "item"
>>> # Providing different args or kwargs will not return the item.
>>> assert not cache.get('some', 'otherargs') == "item"
>>> # Waiting for less than the provided timeout does not cause the
>>> # item to expire.
>>> sleep(2)
>>> assert cache.get('some', kw='args') == "item"
>>> # But waiting for longer does.
>>> sleep(2)
>>> assert not cache.get('some', kw='args') == "item"
Warning:
At present, the cache can theoretically grow and grow, since entries
are not automatically purged, though in practice this is unlikely
since there are not that many different combinations of arguments in
the places where it is used in SoCo, so not that many different
cache entries will be created. If this becomes a problem,
use a thread and timer to purge the cache, or rewrite this to use
LRU logic!
"""
def __init__(self, default_timeout=0):
"""
Args:
default_timeout (int): The default number of seconds after
which items will be expired.
"""
super().__init__()
#: `int`: The default caching expiry interval in seconds.
self.default_timeout = default_timeout
# A thread lock for the cache
self._cache_lock = threading.Lock()
def get(self, *args, **kwargs):
"""Get an item from the cache for this combination of args and kwargs.
Args:
*args: any arguments.
**kwargs: any keyword arguments.
Returns:
object: The object which has been found in the cache, or `None` if
no unexpired item is found. This means that there is no point
storing an item in the cache if it is `None`.
"""
if not self.enabled:
return None
# Look in the cache to see if there is an unexpired item. If there is
# we can just return the cached result.
cache_key = self.make_key(args, kwargs)
# Lock and load
with self._cache_lock:
if cache_key in self._cache:
expirytime, item = self._cache[cache_key]
if expirytime >= time():
return item
else:
# An expired item is present - delete it
del self._cache[cache_key]
# Nothing found
return None
def put(self, item, *args, **kwargs):
"""Put an item into the cache, for this combination of args and kwargs.
Args:
*args: any arguments.
**kwargs: any keyword arguments. If ``timeout`` is specified as one
of the keyword arguments, the item will remain available
for retrieval for ``timeout`` seconds. If ``timeout`` is
`None` or not specified, the ``default_timeout`` for this
cache will be used. Specify a ``timeout`` of 0 (or ensure that
the ``default_timeout`` for this cache is 0) if this item is
not to be cached.
"""
if not self.enabled:
return
# Check for a timeout keyword, store and remove it.
timeout = kwargs.pop("timeout", None)
if timeout is None:
timeout = self.default_timeout
cache_key = self.make_key(args, kwargs)
# Store the item, along with the time at which it will expire
with self._cache_lock:
self._cache[cache_key] = (time() + timeout, item)
def delete(self, *args, **kwargs):
"""Delete an item from the cache for this combination of args and
kwargs."""
cache_key = self.make_key(args, kwargs)
with self._cache_lock:
try:
del self._cache[cache_key]
except KeyError:
pass
def clear(self):
"""Empty the whole cache."""
with self._cache_lock:
self._cache.clear()
@staticmethod
def make_key(*args, **kwargs):
"""Generate a unique, hashable, representation of the args and kwargs.
Args:
*args: any arguments.
**kwargs: any keyword arguments.
Returns:
str: the key.
"""
# This is not entirely straightforward, since args and kwargs may
# contain mutable items and unicode. Possibilities include using
# __repr__, frozensets, and code from Py3's LRU cache. But pickle
# works, and although it is not as fast as some methods, it is good
# enough at the moment
cache_key = dumps((args, kwargs))
return cache_key
class Cache(NullCache):
"""A factory class which returns an instance of a cache subclass.
A `TimedCache` is returned, unless `config.CACHE_ENABLED` is `False`,
in which case a `NullCache` will be returned.
"""
def __new__(cls, *args, **kwargs):
if config.CACHE_ENABLED:
new_cls = TimedCache
else:
new_cls = NullCache
instance = super(Cache, cls).__new__(new_cls)
instance.__init__(*args, **kwargs)
return instance
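# A minimal usage sketch: the factory returns a TimedCache unless caching has
# been disabled globally via soco.config.
#
#     from soco import config
#     config.CACHE_ENABLED = True
#     cache = Cache(default_timeout=10)  # -> TimedCache
#     config.CACHE_ENABLED = False
#     cache = Cache()                    # -> NullCache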
|
from logika import IGRALEC_R, IGRALEC_Y, PRAZNO, NEODLOCENO, NI_KONEC, MAKSIMALNO_STEVILO_POTEZ, nasprotnik
from five_logika import Five_logika
from powerup_logika import Powerup_logika, POWER_STOLPEC, POWER_ZETON, POWER_2X_NW, POWER_2X_W
from pop10_logika import Pop10_logika
from pop_logika import Pop_logika
import random
class AlphaBeta:
    # Alpha-beta pruning algorithm
    def __init__(self, globina):
        self.globina = globina  # How deep do we search?
        self.prekinitev = False  # Do we want to interrupt the algorithm?
        self.igra = None  # Object representing the game
        self.jaz = None  # Which player are we playing?
        self.poteza = None  # The move is stored here once we find it
    def prekini(self):
        '''Method called by the GUI when it has to stop thinking, because
        the user closed the window or started a new game.'''
        self.prekinitev = True
    def izracunaj_potezo(self, igra):
        '''Compute a move for the current state of the given game.'''
        # This method is called from a worker thread
        self.igra = igra
        self.jaz = self.igra.na_potezi
        self.prekinitev = False  # The main thread sets this to True if we have to stop
        self.poteza = None  # The move is stored here once we find it
        # Run alphabeta
        (poteza, vrednost) = self.alphabeta(self.globina, -AlphaBeta.NESKONCNO, AlphaBeta.NESKONCNO, True)
        self.jaz = None
        self.igra = None
        if not self.prekinitev:
            # We were not interrupted, so we play the move
            self.poteza = poteza
    def uredi_poteze(self, poteze):
        '''Return an ordered list of moves, which is then used in alphabeta.'''
        urejene_poteze = []  # Ordered list of moves
        if isinstance(self.igra, Five_logika):
            # We are playing Five in a Row
            zeljen_vrstni_red = [1, 4, 7]  # Preferred order when all moves are available
            zeljen_vrstni_red = random.sample(zeljen_vrstni_red, 3)
            for i in range(1, 3):
                dodajamo = [4-i, 4+i]  # Moves we want to add
                dodajamo = random.sample(dodajamo, 2)
                for j in dodajamo:
                    zeljen_vrstni_red.append(j)
        elif isinstance(self.igra, Powerup_logika):
            # We are playing the Power Up game
            # Add double moves without the possibility of a win
            # We add these first, because if we added the ones with a winning
            # chance first, the (easy) algorithm would use them up right at the start
            zeljen_vrstni_red = [74]
            for i in range(1, 4):
                zeljen_vrstni_red += random.sample([74+i, 74-i], 2)
            # Add double moves with a possible win
            zeljen_vrstni_red.append(84)
            for i in range(1, 4):
                zeljen_vrstni_red += random.sample([84+i, 84-i], 2)
            # Add 'ordinary' moves
            zeljen_vrstni_red.append(4)
            for i in range(1, 4):
                zeljen_vrstni_red += random.sample([4+i, 4-i], 2)
            # Add moves that trample the column below them
            zeljen_vrstni_red.append(14)
            for i in range(1, 4):
                zeljen_vrstni_red += random.sample([14+i, 14-i], 2)
            # Add moves that remove an opponent's token
            zeljen_vrstni_red += random.sample([24+7*i for i in range(6)], 6)
            for i in range(1, 4):
                dodajamo = [24+i+7*j for j in range(6)] + [24-i+7*j for j in range(6)]
                zeljen_vrstni_red += random.sample(dodajamo, 12)
        elif isinstance(self.igra, Pop10_logika):
            # We are playing the Pop 10 game
            if self.igra.faza == 1:
                # We are in the token-removal phase
                zeljen_vrstni_red = random.sample([18, 68, 25, 75], 4)  # The two central squares
                dodajamo = [10, 11, 12, 17, 19, 24, 26, 31, 32, 33]
                dodajamo += [50+i for i in dodajamo]
                zeljen_vrstni_red += random.sample(dodajamo, len(dodajamo))
                dodajamo = [i for i in range(2, 7)] + [i for i in range(37, 42)] + [9+7*i for i in range(4)] + [13+7*i for i in range(4)]
                dodajamo += [50+i for i in dodajamo]
                zeljen_vrstni_red += random.sample(dodajamo, len(dodajamo))
                dodajamo = [1+7*i for i in range(6)] + [7+7*i for i in range(6)]
                dodajamo += [50+i for i in dodajamo]
                zeljen_vrstni_red += random.sample(dodajamo, len(dodajamo))
            else:
                # We are in the token-adding phase (can be phase 0 or 2)
                zeljen_vrstni_red = [4]
                for i in range(1, 4):
                    zeljen_vrstni_red += random.sample([4+i, 4-i], 2)
        else:
            # We are playing Four in a Row or Pop Out
            zeljen_vrstni_red = [4, -4]  # Preferred order when all moves are available
            for i in range(1, 4):
                dodajamo = [4-i, -4+i, 4+i, -4-i]  # Moves we want to add
                dodajamo = random.sample(dodajamo, 4)
                for j in dodajamo:
                    zeljen_vrstni_red.append(j)
        for i in zeljen_vrstni_red:
            if i in poteze:
                # The move is available, so add it
                urejene_poteze.append(i)
            else:
                # The move is not available
                continue
        return urejene_poteze
    # Game values
    ZMAGA = 10**5
    NESKONCNO = ZMAGA + 1  # More than a win
    def vrednost_pozicije(self):
        '''Return an estimate of the position's value.'''
        vrednost = 0
        if self.igra is None:
            # In case it happened that no game was selected
            return vrednost
        elif self.igra.na_potezi is None:
            # The game is over
            # We should never get here, because of the if statements in alphabeta
            return vrednost
        else:
            delez = 0.8  # Factor by which a loss is worth less to us than a gain
            tocke = [0, 0]  # The players' scores are stored here as [R, Y]
            # First check which type of game we have
            if isinstance(self.igra, Five_logika):
                # We have Five in a Row, so we have winning (edge) fours
                # as well as fives; fetch them below
                stirke_R = self.igra.stirke_R
                stirke_Y = self.igra.stirke_Y
                petke = self.igra.petke
                # Go through all fours & fives and score them appropriately
                # Fours / fives containing tokens of both players are worth 0 points
                # Empty fives are worth 0.1 points
                # Fours are worth 0.2 + a/5 points, where a is the number of tokens
                # in the four, if the player is the right color for that four.
                # Fives are worth a/5 points, where a is the number of tokens in the five.
                for s in stirke_R:  # Fours available to red
                    ((i1,j1),(i2,j2),(i3,j3),(i4,j4)) = s
                    stirka = [self.igra.polozaj[i1][j1], self.igra.polozaj[i2][j2],
                              self.igra.polozaj[i3][j3], self.igra.polozaj[i4][j4]]
                    if IGRALEC_Y in stirka:
                        continue
                    else:
                        tocke[0] += 0.2 + stirka.count(IGRALEC_R) / 5
                for s in stirke_Y:  # Fours available to yellow
                    ((i1,j1),(i2,j2),(i3,j3),(i4,j4)) = s
                    stirka = [self.igra.polozaj[i1][j1], self.igra.polozaj[i2][j2],
                              self.igra.polozaj[i3][j3], self.igra.polozaj[i4][j4]]
                    if IGRALEC_R in stirka:
                        continue
                    else:
                        tocke[1] += 0.2 + stirka.count(IGRALEC_Y) / 5
                for p in petke:
                    ((i1,j1),(i2,j2),(i3,j3),(i4,j4),(i5,j5)) = p
                    petka = [self.igra.polozaj[i1][j1], self.igra.polozaj[i2][j2],
                             self.igra.polozaj[i3][j3], self.igra.polozaj[i4][j4],
                             self.igra.polozaj[i5][j5]]
                    barve = list(set(petka))
                    if len(barve) == 2:
                        if PRAZNO in barve:
                            # The five contains tokens of only one color
                            b = list(set(barve) - set([PRAZNO]))[0]
                            if b == IGRALEC_R:
                                tocke[0] += petka.count(b) / 5
                            else:
                                tocke[1] += petka.count(b) / 5
                        else:
                            # The five contains both red and yellow tokens
                            continue
                    elif barve == [PRAZNO]:
                        # The five is empty
                        tocke[0] += 0.1
                        tocke[1] += 0.1
                    else:
                        # The five contains both yellow and red tokens
                        continue
            elif isinstance(self.igra, Pop10_logika):
                # Our goal here is to have as many fours as possible and, more
                # importantly, to remove as many tokens as possible
                vrednost_tocke = AlphaBeta.ZMAGA / 30  # So we can never score > ZMAGA without winning. This is effectively the value of a winning four.
                for s in self.igra.stirke:
                    ((i1,j1),(i2,j2),(i3,j3),(i4,j4)) = s
                    stirka = [self.igra.polozaj[i1][j1], self.igra.polozaj[i2][j2],
                              self.igra.polozaj[i3][j3], self.igra.polozaj[i4][j4]]
                    tocke[0] += stirka.count(IGRALEC_R) / 4 / (10-self.igra.odstranjeni[0])
                    tocke[1] += stirka.count(IGRALEC_Y) / 4 / (10-self.igra.odstranjeni[1])
                vrednost_razlike_ods = (self.igra.odstranjeni[0] - self.igra.odstranjeni[1]) * 3  # Value of the difference in removed tokens
                if self.jaz == IGRALEC_R:
                    vrednost += (tocke[0] - delez*tocke[1] + vrednost_razlike_ods) * vrednost_tocke
                elif self.jaz == IGRALEC_Y:
                    vrednost += (tocke[1] - delez*tocke[0] - vrednost_razlike_ods) * vrednost_tocke
                vrednost *= 0.984**(max(self.igra.stevilo_potez - 42, 0)) / 10
                return vrednost
            else:
                # We have the normal, PopOut or PowerUp game
                # Now go through all possible winning fours and score
                # them appropriately
                # Fours that already contain tokens of both players are worth 0 points
                # Empty fours are worth 0.1 points
                # The rest are worth a/4 points, where a is the number of tokens in the four
                for s in self.igra.stirke:
                    ((i1,j1),(i2,j2),(i3,j3),(i4,j4)) = s
                    stirka = [self.igra.polozaj[i1][j1], self.igra.polozaj[i2][j2],
                              self.igra.polozaj[i3][j3], self.igra.polozaj[i4][j4]]
                    barve = list(set(stirka))
                    # barve will have length 2 or 3; if it had length 1,
                    # the game would already be over
                    if len(barve) == 2:
                        if PRAZNO in barve:
                            # The four contains tokens of only one color
                            b = list(set(barve) - set([PRAZNO]))[0]
                            if b == IGRALEC_R:
                                tocke[0] += stirka.count(b) / 4
                            else:
                                tocke[1] += stirka.count(b) / 4
                        else:
                            continue
                    elif barve == [PRAZNO]:
                        # The four is empty
                        tocke[0] += 0.1
                        tocke[1] += 0.1
                    else:
                        # The four contains both yellow and red tokens
                        continue
            if self.jaz == IGRALEC_R:
                vrednost += (tocke[0] - delez*tocke[1]) / 69 * 0.1 * AlphaBeta.ZMAGA
            else:
                vrednost += (tocke[1] - delez*tocke[0]) / 69 * 0.1 * AlphaBeta.ZMAGA
            if isinstance(self.igra, Pop_logika):
                k = 0.984**self.igra.stevilo_potez
            elif isinstance(self.igra, Powerup_logika):
                k = 1 - self.igra.stevilo_potez / (2*58)
            else:
                k = 1 - self.igra.stevilo_potez / (2*6*7)
            vrednost *= k
            return vrednost
    def alphabeta(self, globina, alpha, beta, maksimiziramo):
        '''Main AlphaBeta method.
        Returns the winning move and its value if it finds one, otherwise (None, 0).'''
        if self.prekinitev:
            # We were told to stop
            return (None, 0)
        (zmagovalec, stirka) = self.igra.stanje_igre()
        if zmagovalec in (IGRALEC_R, IGRALEC_Y, NEODLOCENO):
            if isinstance(self.igra, Pop10_logika):
                k = 0.984**(max(self.igra.stevilo_potez - 42, 0))
            elif isinstance(self.igra, Pop_logika):
                k = 0.984**self.igra.stevilo_potez
            elif isinstance(self.igra, Powerup_logika):
                k = 1 - self.igra.stevilo_potez / (2*58)  # Where 58 is the max number of moves in this game
            else:
                k = 1 - self.igra.stevilo_potez / (2*6*7)
            # The game is over, return its value
            if zmagovalec == self.jaz:
                return (None, AlphaBeta.ZMAGA * k)
            elif zmagovalec == nasprotnik(self.jaz):
                return (None, -AlphaBeta.ZMAGA * k)
            else:
                return (None, 0)
        elif zmagovalec == NI_KONEC:
            # The game is not over
            if globina == 0:
                return (None, self.vrednost_pozicije())
            else:
                # Perform one step of the alphabeta method
                if maksimiziramo:
                    # Maximizing
                    najboljsa_poteza = None
                    for p in self.uredi_poteze(self.igra.veljavne_poteze()):
                        self.igra.povleci_potezo(p, True)
                        if (p > 70 and isinstance(self.igra, Powerup_logika)) or (isinstance(self.igra, Pop10_logika) and self.igra.faza == 2):
                            # We have a double move
                            for p2 in self.uredi_poteze(self.igra.veljavne_poteze()):
                                self.igra.povleci_potezo(p2, True)
                                vrednost = self.alphabeta(max(globina-2, 0), alpha, beta, not maksimiziramo)[1]
                                self.igra.razveljavi()
                                if vrednost > alpha:
                                    najboljsa_poteza = [p, p2]
                                    alpha = vrednost
                                if najboljsa_poteza is None:
                                    najboljsa_poteza = [p, p2]
                                if beta <= alpha:
                                    break
                            self.igra.razveljavi()
                            if beta <= alpha:
                                break
                        else:
                            vrednost = self.alphabeta(globina-1, alpha, beta, not maksimiziramo)[1]
                            self.igra.razveljavi()
                            if vrednost > alpha:
                                najboljsa_poteza = p
                                alpha = vrednost
                            if najboljsa_poteza is None:
                                najboljsa_poteza = p
                            if beta <= alpha:
                                break
                else:
                    # Minimizing
                    najboljsa_poteza = None
                    for p in self.uredi_poteze(self.igra.veljavne_poteze()):
                        self.igra.povleci_potezo(p, True)
                        if (p > 70 and isinstance(self.igra, Powerup_logika)) or (isinstance(self.igra, Pop10_logika) and self.igra.faza == 2):
                            # We have a double move
                            for p2 in self.uredi_poteze(self.igra.veljavne_poteze()):
                                self.igra.povleci_potezo(p2, True)
                                vrednost = self.alphabeta(max(globina-2, 0), alpha, beta, not maksimiziramo)[1]
                                self.igra.razveljavi()
                                if vrednost < beta:
                                    najboljsa_poteza = [p, p2]
                                    beta = vrednost
                                if najboljsa_poteza is None:
                                    najboljsa_poteza = [p, p2]
                                if beta <= alpha:
                                    break
                            self.igra.razveljavi()
                            if beta <= alpha:
                                break
                        else:
                            vrednost = self.alphabeta(globina-1, alpha, beta, not maksimiziramo)[1]
                            self.igra.razveljavi()
                            if vrednost < beta:
                                najboljsa_poteza = p
                                beta = vrednost
                            if najboljsa_poteza is None:
                                najboljsa_poteza = p
                            if beta <= alpha:
                                break
                assert (najboljsa_poteza is not None), 'alphabeta: computed move is None, veljavne_poteze={0}, globina={1}'.format(self.igra.veljavne_poteze(), globina)
                return (najboljsa_poteza, alpha if maksimiziramo else beta)
        else:
            assert False, 'alphabeta: undefined game state'
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import os.path as osp
import numpy as np
from easydict import EasyDict as edict
__C = edict()
cfg = __C
__C.TRAIN = edict()
__C.TRAIN.LEARNING_RATE = 0.001
__C.TRAIN.MOMENTUM = 0.9
__C.TRAIN.WEIGHT_DECAY = 0.0005
__C.TRAIN.GAMMA = 0.1
__C.TRAIN.STEPSIZE = 30000
__C.TRAIN.CACHE_PATH = None
__C.TRAIN.DISPLAY = 10
__C.TRAIN.DOUBLE_BIAS = True
__C.TRAIN.TRUNCATED = False
__C.TRAIN.BIAS_DECAY = False
__C.TRAIN.USE_GT = False
__C.TRAIN.ASPECT_GROUPING = False
__C.TRAIN.SNAPSHOT_KEPT = 3
__C.TRAIN.SUMMARY_INTERVAL = 180
__C.TRAIN.SCALES = (600,)
__C.TRAIN.MAX_SIZE = 1000
__C.TRAIN.IMS_PER_BATCH = 1
__C.TRAIN.BATCH_SIZE = 128
__C.TRAIN.FG_FRACTION = 0.25
__C.TRAIN.FG_THRESH = 0.5
__C.TRAIN.BG_THRESH_HI = 0.5
__C.TRAIN.BG_THRESH_LO = 0.1
__C.TRAIN.USE_FLIPPED = True
__C.TRAIN.BBOX_REG = True
__C.TRAIN.BBOX_THRESH = 0.5
__C.TRAIN.SNAPSHOT_ITERS = 5000
__C.TRAIN.SNAPSHOT_PREFIX = 'res101_faster_rcnn'
__C.TRAIN.BBOX_NORMALIZE_TARGETS = True
__C.TRAIN.BBOX_INSIDE_WEIGHTS = (1.0, 1.0, 1.0, 1.0)
__C.TRAIN.BBOX_NORMALIZE_TARGETS_PRECOMPUTED = True
__C.TRAIN.BBOX_NORMALIZE_MEANS = (0.0, 0.0, 0.0, 0.0)
__C.TRAIN.BBOX_NORMALIZE_STDS = (0.1, 0.1, 0.2, 0.2)
__C.TRAIN.PROPOSAL_METHOD = 'gt'
__C.TRAIN.HAS_RPN = True
__C.TRAIN.RPN_POSITIVE_OVERLAP = 0.7
__C.TRAIN.RPN_NEGATIVE_OVERLAP = 0.3
__C.TRAIN.RPN_CLOBBER_POSITIVES = False
__C.TRAIN.RPN_FG_FRACTION = 0.5
__C.TRAIN.RPN_BATCHSIZE = 256
__C.TRAIN.RPN_NMS_THRESH = 0.7
__C.TRAIN.RPN_PRE_NMS_TOP_N = 12000
__C.TRAIN.RPN_POST_NMS_TOP_N = 2000
__C.TRAIN.RPN_BBOX_INSIDE_WEIGHTS = (1.0, 1.0, 1.0, 1.0)
__C.TRAIN.RPN_POSITIVE_WEIGHT = -1.0
__C.TRAIN.USE_ALL_GT = True
__C.TEST = edict()
__C.TEST.SCALES = (600,)
__C.TEST.MAX_SIZE = 1000
__C.TEST.NMS = 0.3
__C.TEST.SVM = False
__C.TEST.BBOX_REG = True
__C.TEST.HAS_RPN = False
__C.TEST.PROPOSAL_METHOD = 'gt'
__C.TEST.RPN_NMS_THRESH = 0.7
__C.TEST.RPN_PRE_NMS_TOP_N = 6000
__C.TEST.RPN_POST_NMS_TOP_N = 300
__C.TEST.MODE = 'nms'
__C.TEST.RPN_TOP_N = 5000
__C.RESNET = edict()
__C.RESNET.MAX_POOL = False
__C.RESNET.FIXED_BLOCKS = 1
__C.RESNET.BN_TRAIN = False
__C.DEDUP_BOXES = 1. / 16.
__C.PIXEL_MEANS = np.array([[[102.9801, 115.9465, 122.7717]]])
__C.RNG_SEED = 3
__C.EPS = 1e-14
__C.ROOT_DIR = osp.abspath(osp.join(osp.dirname(__file__), '..', '..'))
__C.DATA_DIR = osp.abspath(osp.join(__C.ROOT_DIR, 'data'))
__C.MATLAB = 'matlab'
__C.EXP_DIR = 'default'
__C.USE_GPU_NMS = True
__C.GPU_ID = 0
__C.POOLING_MODE = 'crop'
__C.POOLING_SIZE = 7
__C.ANCHOR_SCALES = [8,16,32]
__C.ANCHOR_RATIOS = [0.5,1,2]
def get_output_dir(imdb, weights_filename):
"""Return the directory where experimental artifacts are placed.
If the directory does not exist, it is created.
A canonical path is built using the name from an imdb and a network
(if not None).
"""
outdir = osp.abspath(osp.join(__C.ROOT_DIR, 'output', __C.EXP_DIR, imdb.name))
if weights_filename is None:
weights_filename = 'default'
outdir = osp.join(outdir, weights_filename)
if not os.path.exists(outdir):
os.makedirs(outdir)
return outdir
def get_output_tb_dir(imdb, weights_filename):
"""Return the directory where tensorflow summaries are placed.
If the directory does not exist, it is created.
A canonical path is built using the name from an imdb and a network
(if not None).
"""
outdir = osp.abspath(osp.join(__C.ROOT_DIR, 'tensorboard', __C.EXP_DIR, imdb.name))
if weights_filename is None:
weights_filename = 'default'
outdir = osp.join(outdir, weights_filename)
if not os.path.exists(outdir):
os.makedirs(outdir)
return outdir
def _merge_a_into_b(a, b):
"""Merge config dictionary a into config dictionary b, clobbering the
options in b whenever they are also specified in a.
"""
if type(a) is not edict:
return
for k, v in a.items():
# a must specify keys that are in b
if k not in b:
raise KeyError('{} is not a valid config key'.format(k))
# the types must match, too
old_type = type(b[k])
if old_type is not type(v):
if isinstance(b[k], np.ndarray):
v = np.array(v, dtype=b[k].dtype)
else:
raise ValueError(('Type mismatch ({} vs. {}) '
'for config key: {}').format(type(b[k]),
type(v), k))
# recursively merge dicts
if type(v) is edict:
try:
_merge_a_into_b(a[k], b[k])
except:
print(('Error under config key: {}'.format(k)))
raise
else:
b[k] = v
def cfg_from_file(filename):
"""Load a config file and merge it into the default options."""
import yaml
with open(filename, 'r') as f:
        yaml_cfg = edict(yaml.safe_load(f))
_merge_a_into_b(yaml_cfg, __C)
def cfg_from_list(cfg_list):
"""Set config keys via list (e.g., from command line)."""
from ast import literal_eval
assert len(cfg_list) % 2 == 0
for k, v in zip(cfg_list[0::2], cfg_list[1::2]):
key_list = k.split('.')
d = __C
for subkey in key_list[:-1]:
assert subkey in d
d = d[subkey]
subkey = key_list[-1]
assert subkey in d
try:
value = literal_eval(v)
except:
# handle the case when v is a string literal
value = v
assert type(value) == type(d[subkey]), \
'type {} does not match original type {}'.format(
type(value), type(d[subkey]))
d[subkey] = value
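# A minimal usage sketch: override config values from a flat key/value list,
# as one might pass on a command line (keys must already exist in __C).
#
#     cfg_from_list(['TRAIN.BATCH_SIZE', '64', 'TEST.NMS', '0.4'])
#     assert cfg.TRAIN.BATCH_SIZE == 64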
|
__VERSION__="ete2-2.2rev1056"
from PyQt4 import QtCore, QtGui
class Ui_OpenNewick(object):
def setupUi(self, OpenNewick):
OpenNewick.setObjectName("OpenNewick")
OpenNewick.resize(569, 353)
self.comboBox = QtGui.QComboBox(OpenNewick)
self.comboBox.setGeometry(QtCore.QRect(460, 300, 81, 23))
self.comboBox.setObjectName("comboBox")
self.widget = QtGui.QWidget(OpenNewick)
self.widget.setGeometry(QtCore.QRect(30, 10, 371, 321))
self.widget.setObjectName("widget")
self.retranslateUi(OpenNewick)
QtCore.QMetaObject.connectSlotsByName(OpenNewick)
def retranslateUi(self, OpenNewick):
OpenNewick.setWindowTitle(QtGui.QApplication.translate("OpenNewick", "Dialog", None, QtGui.QApplication.UnicodeUTF8))
|
from collections import Counter
from os.path import splitext
import matplotlib.pyplot as plt
from arcapix.fs.gpfs import ListProcessingRule, ManagementPolicy
def type_sizes(file_list):
c = Counter()
for f in file_list:
        # Key by file extension (splitext returns a (root, ext) tuple)
        c.update({splitext(f.name)[1]: f.filesize})
return c
p = ManagementPolicy()
r = p.rules.new(ListProcessingRule, 'types', type_sizes)
result = p.run('mmfs1')['types']
plt.pie(list(result.values()), labels=list(result.keys()), autopct='%1.1f%%')
plt.axis('equal')
plt.show()
|
import timeit
def insertion_sort(nums):
"""Insertion Sort."""
for index in range(1, len(nums)):
val = nums[index]
left_index = index - 1
while left_index >= 0 and nums[left_index] > val:
nums[left_index + 1] = nums[left_index]
left_index -= 1
nums[left_index + 1] = val
return nums
def insertion_sort_tuples(nums):
"""Insertion Sort."""
for index in range(1, len(nums)):
val = nums[index]
left_index = index - 1
while left_index >= 0 and nums[left_index][1] > val[1]:
nums[left_index + 1] = nums[left_index]
left_index -= 1
nums[left_index + 1] = val
return nums
if __name__ == '__main__':
print("""
    The insertion sort algorithm works through the list one item at a time, comparing each value
    to its left-hand neighbors and shifting larger items right until the value is in place.
Here are the best and worst case scenarios:
Input (Worst Case Scenario):
lst_one = [x for x in range(0, 2000)]
lst_one.reverse()
""")
lst_one = [x for x in range(0, 2000)]
lst_one.reverse()
    # Note: insertion_sort sorts lst_one in place, so only the first of the
    # 500 timed runs actually sees the reversed worst-case input.
    time1 = timeit.timeit('insertion_sort(lst_one)', setup="from __main__ import insertion_sort, lst_one", number=500)
print("""
Number of runs = 500
    Total time for 500 runs = {}
Input (Best Case Scenario):
lst_two = [x for x in range(0, 2000)]
""".format(time1))
lst_two = [x for x in range(0, 2000)]
time2 = timeit.timeit('insertion_sort(lst_two)', setup="from __main__ import insertion_sort, lst_two",number=500)
print("""
Number of runs = 500
    Total time for 500 runs = {}
""".format(time2))
|
from django.db import models
class Citizen(models.Model):
"""
The insurance users.
"""
name = models.CharField(max_length=50)
last_name = models.CharField(max_length=50)
# Contact information
email = models.EmailField()
phone = models.CharField(max_length=50)
# Citizen documents
CC = 'CC'
PASSPORT = 'PP'
document_choices = (
        (CC, 'CC'),
(PASSPORT, 'Passport')
)
document_type = models.CharField(max_length=5, choices=document_choices)
document_number = models.BigIntegerField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
|
import datetime
import typing
from . import helpers
from .tl import types, custom
Phone = str
Username = str
PeerID = int
Entity = typing.Union[types.User, types.Chat, types.Channel]
FullEntity = typing.Union[types.UserFull, types.messages.ChatFull, types.ChatFull, types.ChannelFull]
EntityLike = typing.Union[
Phone,
Username,
PeerID,
types.TypePeer,
types.TypeInputPeer,
Entity,
FullEntity
]
EntitiesLike = typing.Union[EntityLike, typing.Sequence[EntityLike]]
ButtonLike = typing.Union[types.TypeKeyboardButton, custom.Button]
MarkupLike = typing.Union[
types.TypeReplyMarkup,
ButtonLike,
typing.Sequence[ButtonLike],
typing.Sequence[typing.Sequence[ButtonLike]]
]
TotalList = helpers.TotalList
DateLike = typing.Optional[typing.Union[float, datetime.datetime, datetime.date, datetime.timedelta]]
LocalPath = str
ExternalUrl = str
BotFileID = str
FileLike = typing.Union[
LocalPath,
ExternalUrl,
BotFileID,
bytes,
typing.BinaryIO,
types.TypeMessageMedia,
types.TypeInputFile,
types.TypeInputFileLocation
]
try:
OutFileLike = typing.Union[
str,
typing.Type[bytes],
typing.BinaryIO
]
except TypeError:
OutFileLike = typing.Union[
str,
typing.BinaryIO
]
MessageLike = typing.Union[str, types.Message]
MessageIDLike = typing.Union[int, types.Message, types.TypeInputMessage]
ProgressCallback = typing.Callable[[int, int], None]
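# A minimal usage sketch (hypothetical signature): these aliases exist to
# annotate client methods, e.g.
#
#     def send_file(entity: 'EntityLike', file: 'FileLike',
#                   progress_callback: 'ProgressCallback' = None) -> 'types.Message':
#         ...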
|
__author__ = 'yfauser'
from tests.config import *
from nsxramlclient.client import NsxClient
import time
client_session = NsxClient(nsxraml_file, nsxmanager, nsx_username, nsx_password, debug=True)
def test_segment_pools():
### Test Segment ID Pool Operations
# Get all configured Segment Pools
get_segment_resp = client_session.read('vdnSegmentPools')
client_session.view_response(get_segment_resp)
# Add a Segment Pool
segments_create_body = client_session.extract_resource_body_example('vdnSegmentPools', 'create')
client_session.view_body_dict(segments_create_body)
segments_create_body['segmentRange']['begin'] = '11002'
segments_create_body['segmentRange']['end'] = '11003'
segments_create_body['segmentRange']['name'] = 'legacy'
create_response = client_session.create('vdnSegmentPools', request_body_dict=segments_create_body)
client_session.view_response(create_response)
time.sleep(5)
# Update the new Segment Pool:
update_segment_body = client_session.extract_resource_body_example('vdnSegmentPool', 'update')
update_segment_body['segmentRange']['name'] = 'PythonTest'
update_segment_body['segmentRange']['end'] = '11005'
client_session.update('vdnSegmentPool', uri_parameters={'segmentPoolId': create_response['objectId']},
request_body_dict=update_segment_body)
time.sleep(5)
# Display a specific Segment pool (the new one)
specific_segement_resp = client_session.read('vdnSegmentPool', uri_parameters={'segmentPoolId':
create_response['objectId']})
client_session.view_response(specific_segement_resp)
time.sleep(5)
# Delete new Segment Pool
client_session.delete('vdnSegmentPool', uri_parameters={'segmentPoolId': create_response['objectId']})
def test_mcast_pools():
### Test Multicast Pool Operations
# Add a multicast Pool
mcastpool_create_body = client_session.extract_resource_body_example('vdnMulticastPools', 'create')
client_session.view_body_dict(mcastpool_create_body)
mcastpool_create_body['multicastRange']['desc'] = 'Test'
mcastpool_create_body['multicastRange']['begin'] = '235.0.0.0'
mcastpool_create_body['multicastRange']['end'] = '235.1.1.1'
mcastpool_create_body['multicastRange']['name'] = 'legacy'
create_response = client_session.create('vdnMulticastPools', request_body_dict=mcastpool_create_body)
client_session.view_response(create_response)
# Get all configured Multicast Pools
get_mcast_pools = client_session.read('vdnMulticastPools')
client_session.view_response(get_mcast_pools)
time.sleep(5)
# Update the newly created mcast pool
mcastpool_update_body = client_session.extract_resource_body_example('vdnMulticastPool', 'update')
mcastpool_update_body['multicastRange']['end'] = '235.3.1.1'
mcastpool_update_body['multicastRange']['name'] = 'Python'
update_response = client_session.update('vdnMulticastPool', uri_parameters={'multicastAddresssRangeId':
create_response['objectId']},
request_body_dict=mcastpool_update_body)
client_session.view_response(update_response)
# display a specific Multicast Pool
get_mcast_pool = client_session.read('vdnMulticastPool', uri_parameters={'multicastAddresssRangeId':
create_response['objectId']})
client_session.view_response(get_mcast_pool)
# Delete new mcast pool
client_session.delete('vdnMulticastPool', uri_parameters={'multicastAddresssRangeId': create_response['objectId']})
|
import numpy as np
import jarvis.helpers.helpers as helpers
from data_cleaner import DataCleaner
def get_data(csv=None, sep='|'):
dataset = create_dataset(csv, sep)
inputs = DataCleaner().clean(dataset[:, 0:1])
outputs = format_targets(dataset[:, 1])
train_data, test_data = inputs[::2], inputs[1::2]
train_targets, test_targets = outputs[::2], outputs[1::2]
return [(train_data, train_targets), (test_data, test_targets)]
def create_dataset(csv, sep):
if csv:
return helpers.read_csv(csv, sep=sep).values
else:
data = []
for f in helpers.csvs():
for row in helpers.read_csv(f, sep=sep).values:
data.append(list(row))
return np.array(data)
def format_targets(target_list):
    # Map each target to the index of its action inside the actions array
    actions = helpers.get_actions()
    target_map = {action: index for index, action in enumerate(actions)}
    # Return a real list (not a lazy map) so callers can slice the result
    return [target_map[target] for target in target_list]
|
from jaspyx.visitor import BaseVisitor
class Return(BaseVisitor):
def visit_Return(self, node):
self.indent()
if node.value is not None:
self.output('return ')
self.visit(node.value)
else:
self.output('return')
self.finish()
|
import os
import os.path
from raiden.constants import RAIDEN_DB_VERSION
def database_from_privatekey(base_dir, app_number):
""" Format a database path based on the private key and app number. """
dbpath = os.path.join(base_dir, f"app{app_number}", f"v{RAIDEN_DB_VERSION}_log.db")
os.makedirs(os.path.dirname(dbpath))
return dbpath
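# A minimal usage sketch (hypothetical base directory):
#
#     dbpath = database_from_privatekey("/tmp/raiden-test", 0)
#     # -> /tmp/raiden-test/app0/v<RAIDEN_DB_VERSION>_log.db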
|
from django.apps import AppConfig
class ProxyConfig(AppConfig):
name = 'geoq.proxy'
verbose_name = 'GeoQ Proxy'
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import fnmatch
import os
import re
import sys
from setuptools import find_packages, setup, Command
from setuptools.command.install import install as InstallCommandBase
from setuptools.dist import Distribution
_VERSION = '1.8.0'
REQUIRED_PACKAGES = [
'absl-py >= 0.1.6',
'astor >= 0.6.0',
'gast >= 0.2.0',
'numpy >= 1.13.3',
'six >= 1.10.0',
'protobuf >= 3.4.0',
'tensorboard >= 1.8.0, < 1.9.0',
'termcolor >= 1.1.0',
]
if sys.byteorder == 'little':
# grpcio does not build correctly on big-endian machines due to lack of
# BoringSSL support.
# See https://github.com/tensorflow/tensorflow/issues/17882.
REQUIRED_PACKAGES.append('grpcio >= 1.8.6')
project_name = 'tensorflow'
if '--project_name' in sys.argv:
project_name_idx = sys.argv.index('--project_name')
project_name = sys.argv[project_name_idx + 1]
sys.argv.remove('--project_name')
sys.argv.pop(project_name_idx)
if sys.version_info.major == 3:
REQUIRED_PACKAGES.append('wheel >= 0.26')
else:
REQUIRED_PACKAGES.append('wheel')
# mock comes with unittest.mock for python3, need to install for python2
REQUIRED_PACKAGES.append('mock >= 2.0.0')
if 'tf_nightly' in project_name:
for i, pkg in enumerate(REQUIRED_PACKAGES):
if 'tensorboard' in pkg:
REQUIRED_PACKAGES[i] = 'tb-nightly >= 1.8.0a0, < 1.9.0a0'
break
if sys.version_info < (3, 4):
REQUIRED_PACKAGES.append('backports.weakref >= 1.0rc1')
REQUIRED_PACKAGES.append('enum34 >= 1.1.6')
CONSOLE_SCRIPTS = [
'freeze_graph = tensorflow.python.tools.freeze_graph:run_main',
'toco_from_protos = tensorflow.contrib.lite.toco.python.toco_from_protos:main',
'toco = tensorflow.contrib.lite.toco.python.toco_wrapper:main',
'saved_model_cli = tensorflow.python.tools.saved_model_cli:main',
# We need to keep the TensorBoard command, even though the console script
# is now declared by the tensorboard pip package. If we remove the
# TensorBoard command, pip will inappropriately remove it during install,
# even though the command is not removed, just moved to a different wheel.
'tensorboard = tensorboard.main:run_main',
]
if 'tf_nightly' in project_name:
CONSOLE_SCRIPTS.remove('tensorboard = tensorboard.main:run_main')
TEST_PACKAGES = [
'scipy >= 0.15.1',
]
class BinaryDistribution(Distribution):
def has_ext_modules(self):
return True
class InstallCommand(InstallCommandBase):
"""Override the dir where the headers go."""
def finalize_options(self):
ret = InstallCommandBase.finalize_options(self)
self.install_headers = os.path.join(self.install_purelib,
'tensorflow', 'include')
return ret
class InstallHeaders(Command):
"""Override how headers are copied.
The install_headers that comes with setuptools copies all files to
the same directory. But we need the files to be in a specific directory
hierarchy for -I <include_dir> to work correctly.
"""
description = 'install C/C++ header files'
user_options = [('install-dir=', 'd',
'directory to install header files to'),
('force', 'f',
'force installation (overwrite existing files)'),
]
boolean_options = ['force']
def initialize_options(self):
self.install_dir = None
self.force = 0
self.outfiles = []
def finalize_options(self):
self.set_undefined_options('install',
('install_headers', 'install_dir'),
('force', 'force'))
def mkdir_and_copy_file(self, header):
install_dir = os.path.join(self.install_dir, os.path.dirname(header))
# Get rid of some extra intervening directories so we can have fewer
# directories for -I
install_dir = re.sub('/google/protobuf_archive/src', '', install_dir)
# Copy eigen code into tensorflow/include.
# A symlink would do, but the wheel file that gets created ignores
# symlink within the directory hierarchy.
# NOTE(keveman): Figure out how to customize bdist_wheel package so
# we can do the symlink.
if 'external/eigen_archive/' in install_dir:
extra_dir = install_dir.replace('external/eigen_archive', '')
if not os.path.exists(extra_dir):
self.mkpath(extra_dir)
self.copy_file(header, extra_dir)
if not os.path.exists(install_dir):
self.mkpath(install_dir)
return self.copy_file(header, install_dir)
def run(self):
hdrs = self.distribution.headers
if not hdrs:
return
self.mkpath(self.install_dir)
for header in hdrs:
(out, _) = self.mkdir_and_copy_file(header)
self.outfiles.append(out)
def get_inputs(self):
return self.distribution.headers or []
def get_outputs(self):
return self.outfiles
def find_files(pattern, root):
"""Return all the files matching pattern below root dir."""
for path, _, files in os.walk(root):
for filename in fnmatch.filter(files, pattern):
yield os.path.join(path, filename)
matches = ['../' + x for x in find_files('*', 'external') if '.py' not in x]
so_lib_paths = [
i for i in os.listdir('.')
if os.path.isdir(i) and fnmatch.fnmatch(i, '_solib_*')
]
for path in so_lib_paths:
matches.extend(
['../' + x for x in find_files('*', path) if '.py' not in x]
)
if os.name == 'nt':
EXTENSION_NAME = 'python/_pywrap_tensorflow_internal.pyd'
else:
EXTENSION_NAME = 'python/_pywrap_tensorflow_internal.so'
headers = (list(find_files('*.h', 'tensorflow/core')) +
list(find_files('*.h', 'tensorflow/stream_executor')) +
list(find_files('*.h', 'google/protobuf_archive/src')) +
list(find_files('*', 'third_party/eigen3')) +
list(find_files('*', 'external/eigen_archive')))
setup(
name=project_name,
version=_VERSION.replace('-', ''),
description='TensorFlow helps the tensors flow',
long_description='',
url='https://www.tensorflow.org/',
author='Google Inc.',
author_email='opensource@google.com',
# Contained modules and scripts.
packages=find_packages(),
entry_points={
'console_scripts': CONSOLE_SCRIPTS,
},
headers=headers,
install_requires=REQUIRED_PACKAGES,
tests_require=REQUIRED_PACKAGES + TEST_PACKAGES,
# Add in any packaged data.
include_package_data=True,
package_data={
'tensorflow': [
EXTENSION_NAME,
] + matches,
},
zip_safe=False,
distclass=BinaryDistribution,
cmdclass={
'install_headers': InstallHeaders,
'install': InstallCommand,
},
# PyPI package information.
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
],
license='Apache 2.0',
keywords='tensorflow tensor machine learning',)
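# --- Build sketch (assumes a prepared pip-package tree, as produced by TF's
# build_pip_package workflow) ---
# The custom `--project_name` flag handled above lets the same source build
# differently-named distributions, e.g.:
#
#     python setup.py bdist_wheel --project_name tf_nightly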
|
'''
Created on Jun 16, 2014
@author: lwoydziak
'''
import pexpect
import sys
from dynamic_machine.cli_commands import assertResultNotEquals, Command
class SshCli(object):
LOGGED_IN = 0
def __init__(self, host, loginUser, debug = False, trace = False, log=None, port=22, pexpectObject=None):
self.pexpect = pexpect if not pexpectObject else pexpectObject
self.debug = debug
self.trace = trace
self.host = host
self._port = port
self._connection = None
self.modeList = []
self._log = log
self._bufferedCommands = None
self._bufferedMode = None
self._loginUser = loginUser
self._resetExpect()
def __del__(self):
self.closeCliConnectionTo()
def showOutputOnScreen(self):
self.debug = True
self.trace = True
self._log = None
self._setupLog()
def connectWithSsh(self):
self._debugLog("Establishing connection to " + self.host)
self._connection = self.pexpect.spawn(
'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no %s@%s -p %d' %
(self._loginUser.username, self.host, self._port))
if self._connection is None:
raise Exception("Unable to connect via SSH perhaps wrong IP!")
self._secure = True
self._setupLog()
self._loginUser.commandLine(self)
self.modeList = [self._loginUser]
def resetLoggingTo(self, log):
self._connection.logfile = log
def _setupLog(self):
if self.trace:
class Python3BytesToStdOut:
def write(self, s):
sys.stdout.buffer.write(s)
def flush(self):
sys.stdout.flush()
self._connection.logfile = Python3BytesToStdOut()
if self._log is not None:
self._connection.logfile = self._log
def loginSsh(self):
self._setupLog()
self._debugLog("Login in as "+self._loginUser.username)
try:
self._loginUser.sendPassword()
return True
except Exception as e:
self.forceCloseCliConnectionTo()
raise Exception('Exception ('+str(e)+') '+'Expected CLI response: "Password:"' + "\n Got: \n" + self._lastExpect())
def _exit_modes_beyond(self, thisMode):
if not self.modeList: return
while len(self.modeList) > thisMode + 1:
self.modeList.pop().exit()
def exitMode(self, mode):
if mode in self.modeList:
self.modeList.remove(mode)
def check_prereq(self, prereqMode = 0):
self._exit_modes_beyond(prereqMode)
if len(self.modeList) <= prereqMode:
raise Exception("Attempted to enter menu when prerequist mode was not entered, expected: %d" % prereqMode)
def execute_as(self, user):
self.check_prereq(self.LOGGED_IN)
self._exit_modes_beyond(self.LOGGED_IN)
user.commandLine(self)
user.login()
self.modeList.append(user)
return user
def closeCliConnectionTo(self):
        if self._connection is None:
return
self._exit_modes_beyond(-1)
self.modeList = []
self._debugLog("Exited all modes.")
self.forceCloseCliConnectionTo()
def forceCloseCliConnectionTo(self):
self.modeList = None
if self._connection:
self._debugLog("Closing connection.")
self._connection.close()
self._connection = None
def _debugLog(self, message):
if self.debug:
print(message)
def _resetExpect(self):
self.previousExpectLine = ""
if self._connection is not None and isinstance(self._connection.buffer, str):
self.previousExpectLine = self._connection.buffer
self._connection.buffer = ""
def _lastExpect(self):
constructLine = self.previousExpectLine
if self._connection is not None and isinstance(self._connection.before, str):
constructLine += self._connection.before
if self._connection is not None and isinstance(self._connection.after, str):
constructLine += self._connection.after
return constructLine
def send(self, command):
if self._bufferedCommands is None:
self._bufferedCommands = command
else:
self._bufferedCommands += "\n" + command
if self._bufferedMode is None:
self.flush()
else:
self._debugLog("Buffering command " + command)
def flush(self):
if self._bufferedCommands is None:
return
self._connection.sendline(str(self._bufferedCommands))
self._bufferedCommands = None
def buffering(self):
return self._bufferedMode
def bufferedMode(self, mode = True):
if mode is None:
self.flush()
self._bufferedMode = mode
def compareReceivedAgainst(self, pattern, timeout=-1, searchwindowsize=None, indexOfSuccessfulResult=0):
if self._bufferedMode is None:
index = self._connection.expect(pattern, timeout, searchwindowsize)
self._debugLog("\nLooking for " + str(pattern) + " Found ("+str(index)+")")
self._debugLog(self._lastExpect())
return index
else:
return indexOfSuccessfulResult
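# --- Usage sketch (the `user` object is hypothetical; it must provide
# `username`, `commandLine(cli)` and `sendPassword()` as used above) ---
#
#     cli = SshCli('192.0.2.10', user, debug=True)
#     cli.connectWithSsh()
#     cli.loginSsh()
#     cli.send('uname -a')
#     cli.closeCliConnectionTo()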
|
import argparse
import logging
import json
import os
from bootstrap import bootstrap
from library import execute_command, tempdir
LOGGER_BASENAME = '''_CI.test'''
LOGGER = logging.getLogger(LOGGER_BASENAME)
LOGGER.addHandler(logging.NullHandler())
def get_arguments():
parser = argparse.ArgumentParser(description='Accepts stages for testing')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('--lint', help='Test the lint stage of the template', action='store_true')
group.add_argument('--test', help='Test the test stage of the template', action='store_true')
group.add_argument('--build', help='Test the build stage of the template', action='store_true')
group.add_argument('--document', help='Test the document stage of the template', action='store_true')
args = parser.parse_args()
return args
def _test(stage):
from cookiecutter.main import cookiecutter
template = os.path.abspath('.')
context = os.path.abspath('cookiecutter.json')
with tempdir():
cookiecutter(template,
extra_context=json.loads(open(context).read()),
no_input=True)
os.chdir(os.listdir('.')[0])
        os.environ.pop('PIPENV_PIPFILE', None)  # tolerate the variable being unset
return execute_command(os.path.join('_CI', 'scripts', f'{stage}.py'))
def test(stage):
emojize = bootstrap()
exit_code = _test(stage)
success = not exit_code
if success:
LOGGER.info('%s Tested stage "%s" successfully! %s',
emojize(':white_heavy_check_mark:'),
stage,
emojize(':thumbs_up:'))
else:
LOGGER.error('%s Errors found testing stage "%s"! %s',
emojize(':cross_mark:'),
stage,
emojize(':crying_face:'))
raise SystemExit(exit_code)
if __name__ == '__main__':
args = get_arguments()
stage = next((argument for argument in ('lint', 'test', 'build', 'document')
if getattr(args, argument)), None)
test(stage)
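# Invocation examples (exactly one stage flag is required; assumes this
# script is saved as test.py):
#     python test.py --lint
#     python test.py --build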
|
from gramfuzz.fields import *
import names
TOP_CAT = "postal"
class PDef(Def):
cat = "postal_def"
class PRef(Ref):
cat = "postal_def"
EOL = "\n"
Def("postal_address",
PRef("name-part"), PRef("street-address"), PRef("zip-part"),
cat="postal")
PDef("name-part",
Ref("name", cat=names.TOP_CAT), EOL
)
PDef("street-address",
PRef("house-num"), PRef("street-name"), Opt(PRef("apt-num")), EOL,
sep=" ")
PDef("house-num", UInt)
PDef("street-name", Or(
"Sesame Street", "Yellow Brick Road", "Jump Street", "Evergreen Terrace",
"Elm Street", "Baker Street", "Paper Street", "Wisteria Lane",
"Coronation Street", "Rainey Street", "Spooner Street",
"0day Causeway", "Diagon Alley",
))
PDef("zip-part",
PRef("town-name"), ", ", PRef("state-code"), " ", PRef("zip-code"), EOL
)
PDef("apt-num",
UInt(min=0, max=10000), Opt(String(charset=String.charset_alpha_upper, min=1, max=2))
)
PDef("town-name", Or(
"Seoul", "São Paulo", "Bombay", "Jakarta", "Karachi", "Moscow",
"Istanbul", "Mexico City", "Shanghai", "Tokyo", "New York", "Bangkok",
"Beijing", "Delhi", "London", "HongKong", "Cairo", "Tehran", "Bogota",
"Bandung", "Tianjin", "Lima", "Rio de Janeiro" "Lahore", "Bogor",
"Santiago", "St Petersburg", "Shenyang", "Calcutta", "Wuhan", "Sydney",
"Guangzhou", "Singapore", "Madras", "Baghdad", "Pusan", "Los Angeles",
"Yokohama", "Dhaka", "Berlin", "Alexandria", "Bangalore", "Malang",
"Hyderabad", "Chongqing", "Ho Chi Minh City",
))
PDef("state-code", Or(
"AL", "AK", "AS", "AZ", "AR", "CA", "CO", "CT", "DE", "DC", "FL", "GA",
"GU", "HI", "ID", "IL", "IN", "IA", "KS", "KY", "LA", "ME", "MD", "MH",
"MA", "MI", "FM", "MN", "MS", "MO", "MT", "NE", "NV", "NH", "NJ", "NM",
"NY", "NC", "ND", "MP", "OH", "OK", "OR", "PW", "PA", "PR", "RI", "SC",
"SD", "TN", "TX", "UT", "VT", "VA", "VI", "WA", "WV", "WI", "WY",
))
PDef("zip-code",
String(charset="123456789",min=1,max=2), String(charset="0123456789",min=4,max=5),
Opt("-", String(charset="0123456789",min=4,max=5))
)
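# --- Usage sketch (based on the gramfuzz README; the grammar file path is
# a placeholder) ---
#
#     import gramfuzz
#     fuzzer = gramfuzz.GramFuzzer()
#     fuzzer.load_grammar("grams/postal.py")
#     print(fuzzer.gen(cat="postal", num=3))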
|
"""Combine logs from multiple bitcore nodes as well as the test_framework log.
This streams the combined log output to stdout. Use combine_logs.py > outputfile
to write to an outputfile."""
import argparse
from collections import defaultdict, namedtuple
import heapq
import itertools
import os
import re
import sys
TIMESTAMP_PATTERN = re.compile(r"^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{6}")
LogEvent = namedtuple('LogEvent', ['timestamp', 'source', 'event'])
def main():
"""Main function. Parses args, reads the log files and renders them as text or html."""
parser = argparse.ArgumentParser(usage='%(prog)s [options] <test temporary directory>', description=__doc__)
parser.add_argument('-c', '--color', dest='color', action='store_true', help='outputs the combined log with events colored by source (requires posix terminal colors. Use less -r for viewing)')
parser.add_argument('--html', dest='html', action='store_true', help='outputs the combined log as html. Requires jinja2. pip install jinja2')
args, unknown_args = parser.parse_known_args()
if args.color and os.name != 'posix':
print("Color output requires posix terminal colors.")
sys.exit(1)
if args.html and args.color:
print("Only one out of --color or --html should be specified")
sys.exit(1)
# There should only be one unknown argument - the path of the temporary test directory
if len(unknown_args) != 1:
print("Unexpected arguments" + str(unknown_args))
sys.exit(1)
log_events = read_logs(unknown_args[0])
print_logs(log_events, color=args.color, html=args.html)
def read_logs(tmp_dir):
"""Reads log files.
Delegates to generator function get_log_events() to provide individual log events
for each of the input log files."""
files = [("test", "%s/test_framework.log" % tmp_dir)]
for i in itertools.count():
logfile = "{}/node{}/regtest/debug.log".format(tmp_dir, i)
if not os.path.isfile(logfile):
break
files.append(("node%d" % i, logfile))
return heapq.merge(*[get_log_events(source, f) for source, f in files])
def get_log_events(source, logfile):
"""Generator function that returns individual log events.
Log events may be split over multiple lines. We use the timestamp
regex match as the marker for a new log event."""
try:
with open(logfile, 'r') as infile:
event = ''
timestamp = ''
for line in infile:
# skip blank lines
if line == '\n':
continue
# if this line has a timestamp, it's the start of a new log event.
time_match = TIMESTAMP_PATTERN.match(line)
if time_match:
if event:
yield LogEvent(timestamp=timestamp, source=source, event=event.rstrip())
event = line
timestamp = time_match.group()
# if it doesn't have a timestamp, it's a continuation line of the previous log.
else:
event += "\n" + line
# Flush the final event
yield LogEvent(timestamp=timestamp, source=source, event=event.rstrip())
except FileNotFoundError:
print("File %s could not be opened. Continuing without it." % logfile, file=sys.stderr)
def print_logs(log_events, color=False, html=False):
"""Renders the iterator of log events into text or html."""
if not html:
colors = defaultdict(lambda: '')
if color:
colors["test"] = "\033[0;36m" # CYAN
colors["node0"] = "\033[0;34m" # BLUE
colors["node1"] = "\033[0;32m" # GREEN
colors["node2"] = "\033[0;31m" # RED
colors["node3"] = "\033[0;33m" # YELLOW
colors["reset"] = "\033[0m" # Reset font color
for event in log_events:
print("{0} {1: <5} {2} {3}".format(colors[event.source.rstrip()], event.source, event.event, colors["reset"]))
else:
try:
import jinja2
except ImportError:
print("jinja2 not found. Try `pip install jinja2`")
sys.exit(1)
print(jinja2.Environment(loader=jinja2.FileSystemLoader('./'))
.get_template('combined_log_template.html')
.render(title="Combined Logs from testcase", log_events=[event._asdict() for event in log_events]))
if __name__ == '__main__':
main()
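# Invocation examples (from the module docstring and --help):
#     combine_logs.py <test temporary directory> > outputfile
#     combine_logs.py -c <test temporary directory> | less -r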
|
from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from rest_framework_swagger.views import get_swagger_view
from . import views, views_api
router = DefaultRouter()
router.register(r'election', views_api.ElectionInterface)
router.register(r'district', views_api.DistrictInterface)
router.register(r'municipality', views_api.MunicipalityInterface)
router.register(r'party', views_api.PartyInterface)
router.register(r'polling_station', views_api.PollingStationInterface)
router.register(r'list', views_api.ListInterface)
router.register(r'result', views_api.PollingStationResultInterface)
router.register(r'regional_electoral_district', views_api.RegionalElectoralDistrictInterface)
schema_view = get_swagger_view(title='Offene Wahlen API')
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^loaderio-eac9628bcae9be5601e1f3c62594d162.txt$', views.load_test, name='load_test'),
url(r'^api/', include(router.urls)),
url(r'^api/docs$', schema_view)
]
|
'''
Created on Jan 15, 2014
@author: Jose Borreguero
'''
from setuptools import setup
setup(
name = 'dsfinterp',
  packages = ['dsfinterp', 'dsfinterp.test'],
version = '0.1',
description = 'Cubic Spline Interpolation of Dynamics Structure Factors',
long_description = open('README.md').read(),
author = 'Jose Borreguero',
author_email = 'jose@borreguero.com',
url = 'https://github.com/camm-sns/dsfinterp',
download_url = 'http://pypi.python.org/pypi/dsfinterp',
keywords = ['AMBER', 'mdend', 'energy', 'molecular dynamics'],
classifiers = [
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Scientific/Engineering :: Physics',
],
)
|
from django.core.mail import EmailMultiAlternatives
from django.template import Context, Template
from django.template.loader import get_template
from helpers import ClientRouter, MailAssetsHelper, strip_accents
class UserMail:
"""
This class is responsible for firing emails for Users and Nonprofits
"""
from_email = 'Atados <site@atados.com.br>'
def __init__(self, user):
self.whole_user = user # This is the Nonprofit or Volunteer object
        self.user = user if type(user).__name__ == 'User' else user.user # This is the User object
self.global_context = {
"assets": {
"check": "https://s3.amazonaws.com/atados-us/images/check.png",
"iconFacebook": "https://s3.amazonaws.com/atados-us/images/icon-fb.png",
"iconInstagram": "https://s3.amazonaws.com/atados-us/images/icon-insta.png",
"logoAtadosSmall": "https://s3.amazonaws.com/atados-us/images/logo.small.png",
"logoAtadosSmall2": "https://s3.amazonaws.com/atados-us/images/mandala.png"
}
}
def sendEmail(self, template_name, subject, context, user_email=None):
text_content = get_template('email/{}.txt'.format(template_name)).render(context)
html_content = get_template('email/{}.html'.format(template_name)).render(context)
msg = EmailMultiAlternatives(subject, text_content, self.from_email, [user_email if user_email else self.user.email])
msg.attach_alternative(text_content, "text/plain")
msg.attach_alternative(html_content, "text/html")
return msg.send() > 0
def make_context(self, data):
context_data = self.global_context.copy()
context_data.update(data)
return Context(context_data)
def sendSignupConfirmation(self, site, token):
return self.sendEmail('emailVerification', 'Confirme seu email do Atados.', self.make_context({ 'token': token , 'site': site}))
class VolunteerMail(UserMail):
"""
This class contains all emails sent to volunteers
"""
def sendSignup(self):
"""
Email A/B from ruler
Sent when volunteer completes registration
"""
return self.sendEmail('volunteerSignup', 'Eba! Seu cadastro foi feito com sucesso', self.make_context({}))
def sendFacebookSignup(self): # pass by now
"""
Sent when volunteer completes registration from Facebook
"""
return self.sendEmail('volunteerFacebookSignup', 'Seja bem vindo ao Atados! \o/', self.make_context({}))
def sendAppliesToProject(self, project):
"""
Email for ruler C
Sent when volunteer applies to project
"""
return self.sendEmail('volunteerAppliesToProject', u'Você se inscreveu em uma vaga :)', self.make_context({'project': project}))
def askActInteractionConfirmation(self, project, volunteer):
"""
Email for ruler D
Sent when volunteer applies to project
"""
confirm_url = ClientRouter.mail_routine_monitoring_build_form_url(True, volunteer.user.email, project.nonprofit.name, "")
refute_url = ClientRouter.mail_routine_monitoring_build_form_url(False, volunteer.user.email, project.nonprofit.name, "")
return self.sendEmail('askActInteractionConfirmation', u'Acompanhamento de Rotina:)',
self.make_context({
'project': project,
'confirm_url': confirm_url,
'refute_url': refute_url
})
)
def sendAskAboutProjectExperience(self, apply):
"""
"""
subject = u"Como foi sua experiência com a Atados!"
feedback_form_url = ClientRouter.mail_ask_about_project_experience_url('volunteer', apply)
return self.sendEmail('volunteerAskAboutProjectExperience', subject, self.make_context({
'project_name': apply.project.name,
'feedback_form_url': feedback_form_url,
}), apply.volunteer.user.email)
#+ def sendAfterApply4Weeks(self): # new ruler
#+ """
#+ """
#+ context = Context({'user': self.user.name})
#+ return self.sendEmail('volunteerAfterApply4Weeks', '~ ~ ~ ~ ~', context)
#+ def send3DaysBeforePontual(self): # new ruler
#+ """
#+ """
#+ context = Context({'user': self.user.name})
#+ return self.sendEmail('volunteer3DaysBeforePontual', '~ ~ ~ ~ ~', context)
class NonprofitMail(UserMail):
"""
This class contains all emails sent to nonprofits
"""
def sendSignup(self):
"""
Email 1 from ruler
"""
return self.sendEmail('nonprofitSignup', 'Recebemos seu cadastro :)', self.make_context({
'review_profile_url': ClientRouter.edit_nonprofit_url(self.user.slug)
}))
def sendApproved(self):
"""
Email 2 from ruler
"""
return self.sendEmail('nonprofitApproved', 'Agora você tem um perfil no Atados', self.make_context({
'new_act_url': ClientRouter.new_act_url()
}))
def sendProjectPostingSuccessful(self, project):
"""
Email *NEW*
"""
return self.sendEmail('projectPostingSuccessful', 'Vaga criada com sucesso!', self.make_context({
'project': project,
'edit_project_url': ClientRouter.edit_project_url(project.slug)
}))
def sendProjectApproved(self, project):
"""
Email 3 from ruler
"""
return self.sendEmail('projectApproved', 'Publicamos a sua vaga de voluntariado', self.make_context({
'project': project,
'act_url': ClientRouter.view_act_url(project.slug)
}))
def sendGetsNotifiedAboutApply(self, apply, message):
"""
Email 4 from ruler
"""
try:
subject = u'Novo voluntário para o {}'.format(apply.project.name)
except UnicodeEncodeError:
subject = u'Novo voluntário para o {}'.format(strip_accents(apply.project.name))
return self.sendEmail('nonprofitGetsNotifiedAboutApply', subject, self.make_context({
'apply': apply,
'volunteer_message': message,
'answer_volunteer_url': ClientRouter.view_volunteer_url(apply.volunteer.user.slug)
}), apply.project.email)
def sendAskAboutProjectExperience(self, project):
"""
"""
subject = u"Nos conta como foi sua experiência com a Atados!"
act_url = ClientRouter.edit_project_url(project.slug)
feedback_form_url = ClientRouter.mail_ask_about_project_experience_url('nonprofit', project)
return self.sendEmail('nonprofitAskAboutProjectExperience', subject, self.make_context({
'project_name': project.name,
'feedback_form_url': feedback_form_url,
'act_url': act_url,
}), project.email)
#+ def send1MonthInactive(self):
#+ """
#+ """
#+ return self.sendEmail('nonprofit1MonthInactive', '~ ~ ~ ~ ~', self.make_context({
#+ 'name': self.user.name
#+ }))
#+ def sendPontual(self):
#+ """
#+ """
#+ return self.sendEmail('nonprofitPontual', '~ ~ ~ ~ ~', self.make_context({
#+ 'name': self.user.name
#+ }))
#+ def sendRecorrente(self):
#+ """
#+ """
#+ return self.sendEmail('nonprofitRecorrente', '~ ~ ~ ~ ~', self.make_context({
#+ 'name': self.user.name
#+ }))
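# --- Usage sketch (`volunteer` and `nonprofit` are hypothetical model
# instances wrapping a Django User, as expected by UserMail.__init__) ---
#
#     VolunteerMail(volunteer).sendSignup()
#     NonprofitMail(nonprofit).sendApproved()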
|
"""
flask.ext.babelex
~~~~~~~~~~~~~~~~~
Implements i18n/l10n support for Flask applications based on Babel.
:copyright: (c) 2013 by Serge S. Koval, Armin Ronacher and contributors.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import os
if os.environ.get('LC_CTYPE', '').lower() == 'utf-8':
os.environ['LC_CTYPE'] = 'en_US.utf-8'
from datetime import datetime
from flask import _request_ctx_stack
from babel import dates, numbers, support, Locale
from babel.support import NullTranslations
from werkzeug import ImmutableDict
try:
from pytz.gae import pytz
except ImportError:
from pytz import timezone, UTC
else:
timezone = pytz.timezone
UTC = pytz.UTC
from flask_babelex._compat import string_types
_DEFAULT_LOCALE = Locale.parse('en')
class Babel(object):
"""Central controller class that can be used to configure how
Flask-Babel behaves. Each application that wants to use Flask-Babel
has to create, or run :meth:`init_app` on, an instance of this class
after the configuration was initialized.
"""
default_date_formats = ImmutableDict({
'time': 'medium',
'date': 'medium',
'datetime': 'medium',
'time.short': None,
'time.medium': None,
'time.full': None,
'time.long': None,
'date.short': None,
'date.medium': None,
'date.full': None,
'date.long': None,
'datetime.short': None,
'datetime.medium': None,
'datetime.full': None,
'datetime.long': None,
})
def __init__(self, app=None, default_locale='en', default_timezone='UTC',
date_formats=None, configure_jinja=True, default_domain=None):
self._default_locale = default_locale
self._default_timezone = default_timezone
self._date_formats = date_formats
self._configure_jinja = configure_jinja
self.app = app
self._locale_cache = dict()
if default_domain is None:
self._default_domain = Domain()
else:
self._default_domain = default_domain
self.locale_selector_func = None
self.timezone_selector_func = None
if app is not None:
self.init_app(app)
def init_app(self, app):
"""Set up this instance for use with *app*, if no app was passed to
the constructor.
"""
self.app = app
app.babel_instance = self
if not hasattr(app, 'extensions'):
app.extensions = {}
app.extensions['babel'] = self
app.config.setdefault('BABEL_DEFAULT_LOCALE', self._default_locale)
app.config.setdefault('BABEL_DEFAULT_TIMEZONE', self._default_timezone)
if self._date_formats is None:
self._date_formats = self.default_date_formats.copy()
#: a mapping of Babel datetime format strings that can be modified
#: to change the defaults. If you invoke :func:`format_datetime`
#: and do not provide any format string Flask-Babel will do the
#: following things:
#:
#: 1. look up ``date_formats['datetime']``. By default ``'medium'``
#: is returned to enforce medium length datetime formats.
#: 2. ``date_formats['datetime.medium'] (if ``'medium'`` was
#: returned in step one) is looked up. If the return value
#: is anything but `None` this is used as new format string.
#: otherwise the default for that language is used.
self.date_formats = self._date_formats
if self._configure_jinja:
app.jinja_env.filters.update(
datetimeformat=format_datetime,
dateformat=format_date,
timeformat=format_time,
timedeltaformat=format_timedelta,
numberformat=format_number,
decimalformat=format_decimal,
currencyformat=format_currency,
percentformat=format_percent,
scientificformat=format_scientific,
)
app.jinja_env.add_extension('jinja2.ext.i18n')
app.jinja_env.install_gettext_callables(
lambda x: get_domain().get_translations().ugettext(x),
lambda s, p, n: get_domain().get_translations().ungettext(s, p, n),
newstyle=True
)
def localeselector(self, f):
"""Registers a callback function for locale selection. The default
behaves as if a function was registered that returns `None` all the
time. If `None` is returned, the locale falls back to the one from
the configuration.
        This has to return the locale as a string (e.g. ``'de_AT'``, ``'en_US'``)
"""
assert self.locale_selector_func is None, \
'a localeselector function is already registered'
self.locale_selector_func = f
return f
def timezoneselector(self, f):
"""Registers a callback function for timezone selection. The default
behaves as if a function was registered that returns `None` all the
time. If `None` is returned, the timezone falls back to the one from
the configuration.
        This has to return the timezone as a string (e.g. ``'Europe/Vienna'``)
"""
assert self.timezone_selector_func is None, \
'a timezoneselector function is already registered'
self.timezone_selector_func = f
return f
def list_translations(self):
"""Returns a list of all the locales translations exist for. The
list returned will be filled with actual locale objects and not just
strings.
.. versionadded:: 0.6
"""
dirname = os.path.join(self.app.root_path, 'translations')
if not os.path.isdir(dirname):
return []
result = []
for folder in os.listdir(dirname):
locale_dir = os.path.join(dirname, folder, 'LC_MESSAGES')
if not os.path.isdir(locale_dir):
continue
            # use any(): a lazy filter object would always be truthy on Python 3
            if any(x.endswith('.mo') for x in os.listdir(locale_dir)):
result.append(Locale.parse(folder))
if not result:
result.append(Locale.parse(self._default_locale))
return result
@property
def default_locale(self):
"""The default locale from the configuration as instance of a
`babel.Locale` object.
"""
return self.load_locale(self.app.config['BABEL_DEFAULT_LOCALE'])
@property
def default_timezone(self):
"""The default timezone from the configuration as instance of a
`pytz.timezone` object.
"""
return timezone(self.app.config['BABEL_DEFAULT_TIMEZONE'])
def load_locale(self, locale):
"""Load locale by name and cache it. Returns instance of a `babel.Locale`
object.
"""
rv = self._locale_cache.get(locale)
if rv is None:
self._locale_cache[locale] = rv = Locale.parse(locale)
return rv
def get_locale():
"""Returns the locale that should be used for this request as
`babel.Locale` object. This returns `None` if used outside of
a request. If flask-babel was not attached to the Flask application,
will return 'en' locale.
"""
ctx = _request_ctx_stack.top
if ctx is None:
return None
locale = getattr(ctx, 'babel_locale', None)
if locale is None:
babel = ctx.app.extensions.get('babel')
if babel is None:
locale = _DEFAULT_LOCALE
else:
if babel.locale_selector_func is not None:
rv = babel.locale_selector_func()
if rv is None:
locale = babel.default_locale
else:
locale = babel.load_locale(rv)
else:
locale = babel.default_locale
ctx.babel_locale = locale
return locale
def get_timezone():
"""Returns the timezone that should be used for this request as
`pytz.timezone` object. This returns `None` if used outside of
a request. If flask-babel was not attached to application, will
return UTC timezone object.
"""
ctx = _request_ctx_stack.top
    if ctx is None:
        return None
    tzinfo = getattr(ctx, 'babel_tzinfo', None)
if tzinfo is None:
babel = ctx.app.extensions.get('babel')
if babel is None:
tzinfo = UTC
else:
if babel.timezone_selector_func is None:
tzinfo = babel.default_timezone
else:
rv = babel.timezone_selector_func()
if rv is None:
tzinfo = babel.default_timezone
else:
if isinstance(rv, string_types):
tzinfo = timezone(rv)
else:
tzinfo = rv
ctx.babel_tzinfo = tzinfo
return tzinfo
def refresh():
"""Refreshes the cached timezones and locale information. This can
be used to switch a translation between a request and if you want
the changes to take place immediately, not just with the next request::
user.timezone = request.form['timezone']
user.locale = request.form['locale']
refresh()
flash(gettext('Language was changed'))
Without that refresh, the :func:`~flask.flash` function would probably
return English text and a now German page.
"""
ctx = _request_ctx_stack.top
for key in 'babel_locale', 'babel_tzinfo':
if hasattr(ctx, key):
delattr(ctx, key)
def _get_format(key, format):
"""A small helper for the datetime formatting functions. Looks up
format defaults for different kinds.
"""
babel = _request_ctx_stack.top.app.extensions.get('babel')
if babel is not None:
formats = babel.date_formats
else:
formats = Babel.default_date_formats
if format is None:
format = formats[key]
if format in ('short', 'medium', 'full', 'long'):
rv = formats['%s.%s' % (key, format)]
if rv is not None:
format = rv
return format
def to_user_timezone(datetime):
"""Convert a datetime object to the user's timezone. This automatically
happens on all date formatting unless rebasing is disabled. If you need
to convert a :class:`datetime.datetime` object at any time to the user's
timezone (as returned by :func:`get_timezone` this function can be used).
"""
if datetime.tzinfo is None:
datetime = datetime.replace(tzinfo=UTC)
tzinfo = get_timezone()
return tzinfo.normalize(datetime.astimezone(tzinfo))
def to_utc(datetime):
"""Convert a datetime object to UTC and drop tzinfo. This is the
opposite operation to :func:`to_user_timezone`.
"""
if datetime.tzinfo is None:
datetime = get_timezone().localize(datetime)
return datetime.astimezone(UTC).replace(tzinfo=None)
def format_datetime(datetime=None, format=None, rebase=True):
"""Return a date formatted according to the given pattern. If no
:class:`~datetime.datetime` object is passed, the current time is
assumed. By default rebasing happens which causes the object to
    be converted to the user's timezone (as returned by
:func:`to_user_timezone`). This function formats both date and
time.
The format parameter can either be ``'short'``, ``'medium'``,
    ``'long'`` or ``'full'`` (in which case the language's default for
that setting is used, or the default from the :attr:`Babel.date_formats`
mapping is used) or a format string as documented by Babel.
This function is also available in the template context as filter
named `datetimeformat`.
"""
format = _get_format('datetime', format)
return _date_format(dates.format_datetime, datetime, format, rebase)
def format_date(date=None, format=None, rebase=True):
"""Return a date formatted according to the given pattern. If no
:class:`~datetime.datetime` or :class:`~datetime.date` object is passed,
the current time is assumed. By default rebasing happens which causes
    the object to be converted to the user's timezone (as returned by
:func:`to_user_timezone`). This function only formats the date part
of a :class:`~datetime.datetime` object.
The format parameter can either be ``'short'``, ``'medium'``,
    ``'long'`` or ``'full'`` (in which case the language's default for
that setting is used, or the default from the :attr:`Babel.date_formats`
mapping is used) or a format string as documented by Babel.
This function is also available in the template context as filter
named `dateformat`.
"""
if rebase and isinstance(date, datetime):
date = to_user_timezone(date)
format = _get_format('date', format)
return _date_format(dates.format_date, date, format, rebase)
def format_time(time=None, format=None, rebase=True):
"""Return a time formatted according to the given pattern. If no
:class:`~datetime.datetime` object is passed, the current time is
assumed. By default rebasing happens which causes the object to
    be converted to the user's timezone (as returned by
:func:`to_user_timezone`). This function formats both date and
time.
The format parameter can either be ``'short'``, ``'medium'``,
    ``'long'`` or ``'full'`` (in which case the language's default for
that setting is used, or the default from the :attr:`Babel.date_formats`
mapping is used) or a format string as documented by Babel.
This function is also available in the template context as filter
named `timeformat`.
"""
format = _get_format('time', format)
return _date_format(dates.format_time, time, format, rebase)
def format_timedelta(datetime_or_timedelta, granularity='second'):
"""Format the elapsed time from the given date to now or the given
timedelta. This currently requires an unreleased development
version of Babel.
This function is also available in the template context as filter
named `timedeltaformat`.
"""
if isinstance(datetime_or_timedelta, datetime):
datetime_or_timedelta = datetime.utcnow() - datetime_or_timedelta
return dates.format_timedelta(datetime_or_timedelta, granularity,
locale=get_locale())
def _date_format(formatter, obj, format, rebase, **extra):
    """Internal helper that formats the date."""
    locale = get_locale()
    # Do not shadow caller-supplied kwargs; only add tzinfo when rebasing.
    if formatter is not dates.format_date and rebase:
        extra['tzinfo'] = get_timezone()
    return formatter(obj, format, locale=locale, **extra)
def format_number(number):
"""Return the given number formatted for the locale in request
:param number: the number to format
:return: the formatted number
:rtype: unicode
"""
locale = get_locale()
return numbers.format_number(number, locale=locale)
def format_decimal(number, format=None):
"""Return the given decimal number formatted for the locale in request
:param number: the number to format
:param format: the format to use
:return: the formatted number
:rtype: unicode
"""
locale = get_locale()
return numbers.format_decimal(number, format=format, locale=locale)
def format_currency(number, currency, format=None):
"""Return the given number formatted for the locale in request
:param number: the number to format
:param currency: the currency code
:param format: the format to use
:return: the formatted number
:rtype: unicode
"""
locale = get_locale()
return numbers.format_currency(
number, currency, format=format, locale=locale
)
def format_percent(number, format=None):
"""Return formatted percent value for the locale in request
:param number: the number to format
:param format: the format to use
:return: the formatted percent number
:rtype: unicode
"""
locale = get_locale()
return numbers.format_percent(number, format=format, locale=locale)
def format_scientific(number, format=None):
"""Return value formatted in scientific notation for the locale in request
:param number: the number to format
:param format: the format to use
:return: the formatted percent number
:rtype: unicode
"""
locale = get_locale()
return numbers.format_scientific(number, format=format, locale=locale)
class Domain(object):
"""Localization domain. By default will use look for tranlations in Flask application directory
and "messages" domain - all message catalogs should be called ``messages.mo``.
"""
def __init__(self, dirname=None, domain='messages'):
self.dirname = dirname
self.domain = domain
self.cache = dict()
def as_default(self):
"""Set this domain as default for the current request"""
ctx = _request_ctx_stack.top
if ctx is None:
raise RuntimeError("No request context")
ctx.babel_domain = self
def get_translations_cache(self, ctx):
"""Returns dictionary-like object for translation caching"""
return self.cache
def get_translations_path(self, ctx):
"""Returns translations directory path. Override if you want
to implement custom behavior.
"""
return self.dirname or os.path.join(ctx.app.root_path, 'translations')
def get_translations(self):
"""Returns the correct gettext translations that should be used for
this request. This will never fail and return a dummy translation
object if used outside of the request or if a translation cannot be
found.
"""
ctx = _request_ctx_stack.top
if ctx is None:
return NullTranslations()
locale = get_locale()
cache = self.get_translations_cache(ctx)
translations = cache.get(str(locale))
if translations is None:
dirname = self.get_translations_path(ctx)
translations = support.Translations.load(dirname,
locale,
domain=self.domain)
cache[str(locale)] = translations
return translations
def gettext(self, string, **variables):
"""Translates a string with the current locale and passes in the
given keyword arguments as mapping to a string formatting string.
::
gettext(u'Hello World!')
gettext(u'Hello %(name)s!', name='World')
"""
t = self.get_translations()
return t.ugettext(string) % variables
def ngettext(self, singular, plural, num, **variables):
"""Translates a string with the current locale and passes in the
given keyword arguments as mapping to a string formatting string.
The `num` parameter is used to dispatch between singular and various
plural forms of the message. It is available in the format string
as ``%(num)d`` or ``%(num)s``. The source language should be
English or a similar language which only has one plural form.
::
ngettext(u'%(num)d Apple', u'%(num)d Apples', num=len(apples))
"""
variables.setdefault('num', num)
t = self.get_translations()
return t.ungettext(singular, plural, num) % variables
def pgettext(self, context, string, **variables):
"""Like :func:`gettext` but with a context.
.. versionadded:: 0.7
"""
t = self.get_translations()
return t.upgettext(context, string) % variables
def npgettext(self, context, singular, plural, num, **variables):
"""Like :func:`ngettext` but with a context.
.. versionadded:: 0.7
"""
variables.setdefault('num', num)
t = self.get_translations()
return t.unpgettext(context, singular, plural, num) % variables
def lazy_gettext(self, string, **variables):
"""Like :func:`gettext` but the string returned is lazy which means
it will be translated when it is used as an actual string.
Example::
hello = lazy_gettext(u'Hello World')
@app.route('/')
def index():
return unicode(hello)
"""
from speaklater import make_lazy_string
return make_lazy_string(self.gettext, string, **variables)
def lazy_pgettext(self, context, string, **variables):
"""Like :func:`pgettext` but the string returned is lazy which means
it will be translated when it is used as an actual string.
.. versionadded:: 0.7
"""
from speaklater import make_lazy_string
return make_lazy_string(self.pgettext, context, string, **variables)
domain = Domain()
def get_domain():
"""Return the correct translation domain that is used for this request.
    This will return the default domain (e.g. "messages" in <approot>/translations)
if none is set for this request.
"""
ctx = _request_ctx_stack.top
if ctx is None:
return domain
try:
return ctx.babel_domain
except AttributeError:
pass
babel = ctx.app.extensions.get('babel')
if babel is not None:
d = babel._default_domain
else:
d = domain
ctx.babel_domain = d
return d
def gettext(*args, **kwargs):
return get_domain().gettext(*args, **kwargs)
_ = gettext
def ngettext(*args, **kwargs):
return get_domain().ngettext(*args, **kwargs)
def pgettext(*args, **kwargs):
return get_domain().pgettext(*args, **kwargs)
def npgettext(*args, **kwargs):
return get_domain().npgettext(*args, **kwargs)
def lazy_gettext(*args, **kwargs):
return get_domain().lazy_gettext(*args, **kwargs)
def lazy_pgettext(*args, **kwargs):
return get_domain().lazy_pgettext(*args, **kwargs)
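# --- Usage sketch (standard Flask-BabelEx wiring; `app` is an assumed
# Flask application) ---
#
#     from flask import Flask, request
#     app = Flask(__name__)
#     babel = Babel(app, default_locale='en')
#
#     @babel.localeselector
#     def select_locale():
#         return request.accept_languages.best_match(['de', 'en'])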
|
try:
    from tornado.websocket import WebSocketHandler
    import tornado.ioloop
    import tornado.web
tornadoAvailable = True
except ImportError:
class WebSocketHandler(object): pass
tornadoAvailable = False
from json import loads as fromJS, dumps as toJS
from threading import Thread
from Log import console
import Settings
from utils import *
PORT = Settings.PORT + 1
handlers = []
channels = {}
class WebSocket:
@staticmethod
def available():
return tornadoAvailable
@staticmethod
def start():
if WebSocket.available():
WSThread().start()
@staticmethod
def broadcast(data):
for handler in handlers:
handler.write_message(toJS(data))
@staticmethod
def sendChannel(channel, data):
        if 'channel' not in data:
data['channel'] = channel
for handler in channels.get(channel, []):
handler.write_message(toJS(data))
class WSThread(Thread):
def __init__(self):
Thread.__init__(self)
self.name = 'websocket'
self.daemon = True
def run(self):
app = tornado.web.Application([('/', WSHandler)])
app.listen(PORT, '0.0.0.0')
tornado.ioloop.IOLoop.instance().start()
class WSHandler(WebSocketHandler):
def __init__(self, *args, **kw):
super(WSHandler, self).__init__(*args, **kw)
self.channels = set()
def check_origin(self, origin):
return True
def open(self):
handlers.append(self)
console('websocket', "Opened")
def on_message(self, message):
console('websocket', "Message received: %s" % message)
try:
data = fromJS(message)
        except ValueError:  # ignore malformed JSON messages
            return
if 'subscribe' in data and isinstance(data['subscribe'], list):
addChannels = (set(data['subscribe']) - self.channels)
self.channels |= addChannels
for channel in addChannels:
if channel not in channels:
channels[channel] = set()
channels[channel].add(self)
if 'unsubscribe' in data and isinstance(data['unsubscribe'], list):
rmChannels = (self.channels & set(data['unsubscribe']))
self.channels -= rmChannels
for channel in rmChannels:
channels[channel].remove(self)
if len(channels[channel]) == 0:
del channels[channel]
def on_close(self):
for channel in self.channels:
channels[channel].remove(self)
if len(channels[channel]) == 0:
del channels[channel]
handlers.remove(self)
console('websocket', "Closed")
verbs = {
'status': "Status set",
'name': "Renamed",
'goal': "Goal set",
'assigned': "Reassigned",
'hours': "Hours updated",
}
from Event import EventHandler, addEventHandler
class ShareTaskChanges(EventHandler):
def newTask(self, handler, task):
WebSocket.sendChannel("backlog#%d" % task.sprint.id, {'type': 'new'}); #TODO
def taskUpdate(self, handler, task, field, value):
if field == 'assigned': # Convert set of Users to list of usernames
value = [user.username for user in value]
elif field == 'goal': # Convert Goal to goal ID
value = value.id if value else 0
description = ("%s by %s" % (verbs[field], task.creator)) if field in verbs else None
WebSocket.sendChannel("backlog#%d" % task.sprint.id, {'type': 'update', 'id': task.id, 'revision': task.revision, 'field': field, 'value': value, 'description': description, 'creator': task.creator.username})
addEventHandler(ShareTaskChanges())
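# --- Protocol sketch (derived from WSHandler.on_message above) ---
# Clients subscribe and unsubscribe by sending JSON messages such as:
#     {"subscribe": ["backlog#1"]}
#     {"unsubscribe": ["backlog#1"]}
# Messages broadcast via WebSocket.sendChannel arrive on subscribed sockets
# with a "channel" key added when the payload does not already carry one.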
|
"""
For detailed documentation and examples, see the README.
"""
import networkx, matplotlib.pyplot, scipy
import numpy as np
import os
import pyximport
if os.name == 'nt':
    if 'CPATH' in os.environ:
        # join with ';' (the Windows path-list separator) rather than concatenating
        os.environ['CPATH'] = os.environ['CPATH'] + ';' + np.get_include()
    else:
        os.environ['CPATH'] = np.get_include()
# XXX: we're assuming that MinGW is installed in C:\MinGW (default)
    # use raw strings: in a normal string literal '\b' would be a backspace
    if 'PATH' in os.environ:
        os.environ['PATH'] = os.environ['PATH'] + ';' + r'C:\MinGW\bin'
    else:
        os.environ['PATH'] = r'C:\MinGW\bin'
mingw_setup_args = { 'options': { 'build_ext': { 'compiler': 'mingw32' } } }
pyximport.install(setup_args=mingw_setup_args)
elif os.name == 'posix':
if 'CFLAGS' in os.environ:
os.environ['CFLAGS'] = os.environ['CFLAGS'] + ' -I' + np.get_include()
else:
os.environ['CFLAGS'] = ' -I' + np.get_include()
pyximport.install()
from yabn import *
__version__ = '0.1.0'
|
"""
make_loaddata.py
Convert ken_all.csv to loaddata
"""
import argparse
import csv
def merge_separated_line(args):
"""
    Yield one line at a time from the source CSV.
    If two (or more) consecutive lines share the same postalcode,
    merge them into a single line.
"""
def is_dup(line, buff):
""" lines is duplicated or not """
# same postalcode
if line[2] != buff[2]:
return False
# include choume and not
if line[11] != buff[11]:
return False
# line contains touten(kana)
if line[5].count(u'、') != 0:
return True
if buff[5].count(u'、') != 0:
return True
# line contains touten(kanji)
if line[8].count(u'、') != 0:
return True
if buff[8].count(u'、') != 0:
return True
return False
def merge(line, buff):
""" merge address of two lines """
new_buff = []
idx = 0
for element in line:
if element[:len(buff[idx])] != buff[idx]:
new_buff.append(u''.join([buff[idx], element]))
else:
new_buff.append(buff[idx])
idx += 1
return new_buff
line_buffer = []
ken_all = csv.reader(open(args.source))
for line in ken_all:
unicode_line = [unicode(s, 'utf8') for s in line]
if not(line_buffer):
line_buffer = unicode_line
continue
if is_dup(unicode_line, line_buffer):
line_buffer = merge(unicode_line, line_buffer)
else:
yield line_buffer
line_buffer = unicode_line
yield line_buffer
def parse_args():
    # parse arguments
    parser = argparse.ArgumentParser(description='Make loaddata of postalcode.')
    parser.add_argument('source', help='input file to convert')
    parser.add_argument('area', help='output data file for area codes')
    parser.add_argument('net', help='output data file for net codes')
    return parser.parse_args()
def main(args):
# converting main
Areadata = csv.writer(open(args.area, 'w'),
delimiter=',',
quoting=csv.QUOTE_NONE)
Netdata = csv.writer(open(args.net, 'w'),
delimiter=',',
quoting=csv.QUOTE_NONE)
for line in merge_separated_line(args):
zipcode = line[2]
if zipcode[5:7] != '00':
Areadata.writerow([s.encode('utf8') for s in line])
else:
Netdata.writerow([s.encode('utf8') for s in line])
if __name__ == '__main__':
args = parse_args()
main(args)
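# Invocation example (file names are placeholders):
#     python make_loaddata.py ken_all.csv area.csv net.csv
# writes rows whose zipcode does not end in '00' to area.csv and the rest
# to net.csv.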
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('events', '0003_auto_20171221_0336'),
]
operations = [
migrations.AlterField(
model_name='dailyproductivitylog',
name='source',
field=models.CharField(choices=[('api', 'Api'), ('ios', 'Ios'), ('android', 'Android'), ('mobile', 'Mobile'), ('web', 'Web'), ('user_excel', 'User_Excel'), ('text_message', 'Text_Message')], max_length=50),
),
migrations.AlterField(
model_name='sleeplog',
name='source',
field=models.CharField(choices=[('api', 'Api'), ('ios', 'Ios'), ('android', 'Android'), ('mobile', 'Mobile'), ('web', 'Web'), ('user_excel', 'User_Excel'), ('text_message', 'Text_Message')], max_length=50),
),
migrations.AlterField(
model_name='supplementlog',
name='source',
field=models.CharField(choices=[('api', 'Api'), ('ios', 'Ios'), ('android', 'Android'), ('mobile', 'Mobile'), ('web', 'Web'), ('user_excel', 'User_Excel'), ('text_message', 'Text_Message')], default='web', max_length=50),
),
migrations.AlterField(
model_name='useractivitylog',
name='source',
field=models.CharField(choices=[('api', 'Api'), ('ios', 'Ios'), ('android', 'Android'), ('mobile', 'Mobile'), ('web', 'Web'), ('user_excel', 'User_Excel'), ('text_message', 'Text_Message')], default='web', max_length=50),
),
migrations.AlterField(
model_name='usermoodlog',
name='source',
field=models.CharField(choices=[('api', 'Api'), ('ios', 'Ios'), ('android', 'Android'), ('mobile', 'Mobile'), ('web', 'Web'), ('user_excel', 'User_Excel'), ('text_message', 'Text_Message')], default='web', max_length=50),
),
]
|
import aaf
import os
from optparse import OptionParser
parser = OptionParser()
(options, args) = parser.parse_args()
if not args:
parser.error("not enough argements")
path = args[0]
name, ext = os.path.splitext(path)
f = aaf.open(path, 'r')
f.save(name + ".xml")
f.close()
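# Invocation example (assuming this script is saved as aaf2xml.py):
#     python aaf2xml.py movie.aaf
# writes movie.xml next to the input file.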
|
"""
telemetry full tests.
"""
import platform
import sys
from unittest import mock
import pytest
import wandb
def test_telemetry_finish(runner, live_mock_server, parse_ctx):
with runner.isolated_filesystem():
run = wandb.init()
run.finish()
ctx_util = parse_ctx(live_mock_server.get_ctx())
telemetry = ctx_util.telemetry
assert telemetry and 2 in telemetry.get("3", [])
def test_telemetry_imports_hf(runner, live_mock_server, parse_ctx):
with runner.isolated_filesystem():
run = wandb.init()
with mock.patch.dict("sys.modules", {"transformers": mock.Mock()}):
import transformers
run.finish()
ctx_util = parse_ctx(live_mock_server.get_ctx())
telemetry = ctx_util.telemetry
# hf in finish modules but not in init modules
assert telemetry and 11 not in telemetry.get("1", [])
assert telemetry and 11 in telemetry.get("2", [])
def test_telemetry_imports_catboost(runner, live_mock_server, parse_ctx):
with runner.isolated_filesystem():
with mock.patch.dict("sys.modules", {"catboost": mock.Mock()}):
import catboost
run = wandb.init()
run.finish()
ctx_util = parse_ctx(live_mock_server.get_ctx())
telemetry = ctx_util.telemetry
# catboost in both init and finish modules
assert telemetry and 7 in telemetry.get("1", [])
assert telemetry and 7 in telemetry.get("2", [])
@pytest.mark.skipif(
platform.system() == "Windows", reason="test suite does not build jaxlib on windows"
)
@pytest.mark.skipif(sys.version_info >= (3, 10), reason="jax has no py3.10 wheel")
def test_telemetry_imports_jax(runner, live_mock_server, parse_ctx):
with runner.isolated_filesystem():
import jax
wandb.init()
wandb.finish()
ctx_util = parse_ctx(live_mock_server.get_ctx())
telemetry = ctx_util.telemetry
        # jax is imported before init, so it shows up in both init and finish modules
assert telemetry and 12 in telemetry.get("1", [])
assert telemetry and 12 in telemetry.get("2", [])
def test_telemetry_run_organizing_init(runner, live_mock_server, parse_ctx):
with runner.isolated_filesystem():
wandb.init(name="test_name", tags=["my-tag"], config={"abc": 123}, id="mynewid")
wandb.finish()
ctx_util = parse_ctx(live_mock_server.get_ctx())
telemetry = ctx_util.telemetry
assert telemetry and 13 in telemetry.get("3", []) # name
assert telemetry and 14 in telemetry.get("3", []) # id
assert telemetry and 15 in telemetry.get("3", []) # tags
assert telemetry and 16 in telemetry.get("3", []) # config
def test_telemetry_run_organizing_set(runner, live_mock_server, parse_ctx):
with runner.isolated_filesystem():
run = wandb.init()
run.name = "test-name"
run.tags = ["tag1"]
wandb.config.update = True
run.finish()
ctx_util = parse_ctx(live_mock_server.get_ctx())
telemetry = ctx_util.telemetry
assert telemetry and 17 in telemetry.get("3", []) # name
assert telemetry and 18 in telemetry.get("3", []) # tags
assert telemetry and 19 in telemetry.get("3", []) # config update
|
'Check whether a file extension is allowed'
__author__ = 'Ellery'
from app import app
import datetime, random
from PIL import Image
import os
def allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1] in app.config.get('ALLOWED_EXTENSIONS')
def unique_name():
    now_time = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    # zero-pad the random suffix so the generated name always has the same width
    random_num = random.randint(0, 100)
    return now_time + str(random_num).zfill(3)
def image_thumbnail(filename):
filepath = os.path.join(app.config.get('UPLOAD_FOLDER'), filename)
im = Image.open(filepath)
w, h = im.size
    if w > h:
        im.thumbnail((106, 106 * h // w))
    else:
        im.thumbnail((106 * w // h, 106))
im.save(os.path.join(app.config.get('UPLOAD_FOLDER'),
os.path.splitext(filename)[0] + '_thumbnail' + os.path.splitext(filename)[1]))
def image_delete(filename):
thumbnail_filepath = os.path.join(app.config.get('UPLOAD_FOLDER'), filename)
filepath = thumbnail_filepath.replace('_thumbnail', '')
os.remove(filepath)
os.remove(thumbnail_filepath)
def cut_image(filename, box):
filepath = os.path.join(app.config.get('UPLOAD_AVATAR_FOLDER'), filename)
im = Image.open(filepath)
new_im = im.crop(box)
new_im.save(os.path.join(app.config.get('UPLOAD_AVATAR_FOLDER'), filename))
|
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
from SimPEG import Mesh, Utils
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as patches
from scipy.sparse import spdiags, csr_matrix, eye, kron, hstack, vstack, diags
import copy
from scipy.constants import mu_0
from SimPEG import SolverLU
from scipy.sparse.linalg import spsolve,splu
from SimPEG.EM import TDEM
from SimPEG.EM.Analytics.TDEM import hzAnalyticDipoleT,hzAnalyticCentLoopT
from scipy.interpolate import interp2d,LinearNDInterpolator
from scipy.special import ellipk,ellipe
def rectangular_plane_layout(mesh,corner, closed = False,I=1.):
"""
corner: sorted list of four corners (x,y,z)
2--3
| |
1--4
y
|
|--> x
Output:
Js
"""
Jx = np.zeros(mesh.nEx)
Jy = np.zeros(mesh.nEy)
Jz = np.zeros(mesh.nEz)
    indy1 = np.logical_and(
        np.logical_and(
            np.logical_and(mesh.gridEy[:, 0] >= corner[0, 0], mesh.gridEy[:, 0] <= corner[1, 0]),
            np.logical_and(mesh.gridEy[:, 1] >= corner[0, 1], mesh.gridEy[:, 1] <= corner[1, 1])),
        mesh.gridEy[:, 2] == corner[0, 2])
    indx1 = np.logical_and(
        np.logical_and(
            np.logical_and(mesh.gridEx[:, 0] >= corner[1, 0], mesh.gridEx[:, 0] <= corner[2, 0]),
            np.logical_and(mesh.gridEx[:, 1] >= corner[1, 1], mesh.gridEx[:, 1] <= corner[2, 1])),
        mesh.gridEx[:, 2] == corner[1, 2])
    indy2 = np.logical_and(
        np.logical_and(
            np.logical_and(mesh.gridEy[:, 0] >= corner[2, 0], mesh.gridEy[:, 0] <= corner[3, 0]),
            np.logical_and(mesh.gridEy[:, 1] <= corner[2, 1], mesh.gridEy[:, 1] >= corner[3, 1])),
        mesh.gridEy[:, 2] == corner[2, 2])
    if closed:
        indx2 = np.logical_and(
            np.logical_and(
                np.logical_and(mesh.gridEx[:, 0] >= corner[0, 0], mesh.gridEx[:, 0] <= corner[3, 0]),
                np.logical_and(mesh.gridEx[:, 1] >= corner[0, 1], mesh.gridEx[:, 1] <= corner[3, 1])),
            mesh.gridEx[:, 2] == corner[0, 2])
    else:
        indx2 = []
Jy[indy1] = -I
Jx[indx1] = -I
Jy[indy2] = I
Jx[indx2] = I
J = np.hstack((Jx,Jy,Jz))
J = J*mesh.edge
return J
def BiotSavart(locs,mesh,Js):
"""
Compute the magnetic field generated by current discretized on a mesh using Biot-Savart law
Input:
locs: observation locations
mesh: mesh on which the current J is discretized
Js: discretized source current in A-m (Finite Volume formulation)
Output:
B: magnetic field [Bx,By,Bz]
"""
c = mu_0/(4*np.pi)
    nwire = np.sum(Js != 0.)
    ind = np.where(Js != 0.)[0]
B = np.zeros([locs.shape[0],3])
gridE = np.vstack([mesh.gridEx,mesh.gridEy,mesh.gridEz])
for i in range(nwire):
# x wire
if ind[i]<mesh.nEx:
r = locs-gridE[ind[i]]
I = Js[ind[i]]*np.hstack([np.ones([locs.shape[0],1]),np.zeros([locs.shape[0],1]),np.zeros([locs.shape[0],1])])
cr = np.cross(I,r)
rsq = np.linalg.norm(r,axis=1)**3.
B = B + c*cr/rsq[:,None]
# y wire
elif ind[i]<mesh.nEx+mesh.nEy:
r = locs-gridE[ind[i]]
I = Js[ind[i]]*np.hstack([np.zeros([locs.shape[0],1]),np.ones([locs.shape[0],1]),np.zeros([locs.shape[0],1])])
cr = np.cross(I,r)
rsq = np.linalg.norm(r,axis=1)**3.
B = B + c*cr/rsq[:,None]
# z wire
elif ind[i]<mesh.nEx+mesh.nEy+mesh.nEz:
r = locs-gridE[ind[i]]
I = Js[ind[i]]*np.hstack([np.zeros([locs.shape[0],1]),np.zeros([locs.shape[0],1]),np.ones([locs.shape[0],1])])
cr = np.cross(I,r)
rsq = np.linalg.norm(r,axis=1)**3.
B = B + c*cr/rsq[:,None]
else:
print('error: index of J out of bounds (number of edges in the mesh)')
return B
def analytic_infinite_wire(obsloc,wireloc,orientation,I=1.):
"""
Compute the response of an infinite wire with orientation 'orientation'
    and current I at the observation locations obsloc
Output:
B: magnetic field [Bx,By,Bz]
"""
n,d = obsloc.shape
t,d = wireloc.shape
d = np.sqrt(np.dot(obsloc**2.,np.ones([d,t]))+np.dot(np.ones([n,d]),(wireloc.T)**2.)
- 2.*np.dot(obsloc,wireloc.T))
distr = np.amin(d, axis=1, keepdims = True)
idxmind = d.argmin(axis=1)
r = obsloc - wireloc[idxmind]
    B = (mu_0*I)/(2*np.pi*(distr**2.))*np.cross(orientation,r)
return B
def mag_dipole(m,obsloc):
"""
    Compute the response of an infinitesimal magnetic dipole at location (0,0,0)
    with vertical (Z) orientation and magnetic moment 'm'
    at the observation locations obsloc
Output:
B: magnetic field [Bx,By,Bz]
"""
loc = np.r_[[[0.,0.,0.]]]
n,d = obsloc.shape
t,d = loc.shape
d = np.sqrt(np.dot(obsloc**2.,np.ones([d,t]))+np.dot(np.ones([n,d]),(loc.T)**2.)
- 2.*np.dot(obsloc,loc.T))
d = d.flatten()
ind = np.where(d==0.)
d[ind] = 1e6
x = obsloc[:,0]
y = obsloc[:,1]
z = obsloc[:,2]
#orient = np.c_[[orientation for i in range(obsloc.shape[0])]]
Bz = (mu_0*m)/(4*np.pi*(d**3.))*(3.*((z**2.)/(d**2.))-1.)
By = (mu_0*m)/(4*np.pi*(d**3.))*(3.*(z*y)/(d**2.))
Bx = (mu_0*m)/(4*np.pi*(d**3.))*(3.*(x*z)/(d**2.))
B = np.vstack([Bx,By,Bz]).T
return B
def circularloop(a,obsloc,I=1.):
"""
From Simpson, Lane, Immer, Youngquist 2001
Compute the magnetic field B response of a current loop
of radius 'a' with intensity 'I'.
input:
a: radius in m
    obsloc: observation locations
Output:
B: magnetic field [Bx,By,Bz]
"""
x = np.atleast_2d(obsloc[:,0]).T
y = np.atleast_2d(obsloc[:,1]).T
z = np.atleast_2d(obsloc[:,2]).T
r = np.linalg.norm(obsloc,axis=1)
loc = np.r_[[[0.,0.,0.]]]
n,d = obsloc.shape
r2 = x**2.+y**2.+z**2.
rho2 = x**2.+y**2.
alpha2 = a**2.+r2-2*a*np.sqrt(rho2)
beta2 = a**2.+r2+2*a*np.sqrt(rho2)
k2 = 1-(alpha2/beta2)
lbda = x**2.-y**2.
C = mu_0*I/np.pi
Bx = ((C*x*z)/(2*alpha2*np.sqrt(beta2)*rho2))*\
((a**2.+r2)*ellipe(k2)-alpha2*ellipk(k2))
Bx[np.isnan(Bx)] = 0.
By = ((C*y*z)/(2*alpha2*np.sqrt(beta2)*rho2))*\
((a**2.+r2)*ellipe(k2)-alpha2*ellipk(k2))
By[np.isnan(By)] = 0.
Bz = (C/(2.*alpha2*np.sqrt(beta2)))*\
((a**2.-r2)*ellipe(k2)+alpha2*ellipk(k2))
Bz[np.isnan(Bz)] = 0.
#print(Bx.shape)
#print(By.shape)
#print(Bz.shape)
B = np.hstack([Bx,By,Bz])
return B
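# Sanity-check sketch (added for illustration, not part of the original
# module): on the loop axis (x = y = 0) the elliptic-integral expression in
# circularloop() reduces to the textbook result
#   Bz = mu_0 * I * a**2 / (2 * (a**2 + z**2)**1.5)
# (the on-axis x/y terms hit a 0/0 that the isnan guards above clean up).
if __name__ == '__main__':
    a, z = 1.0, 2.0
    obs = np.array([[0., 0., z]])
    Bz_num = circularloop(a, obs, I=1.)[0, 2]
    Bz_ref = mu_0 * 1. * a**2 / (2. * (a**2 + z**2)**1.5)
    print(Bz_num, Bz_ref)  # the two values should agree closely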
|
"""
Test the Multinet Class.
"""
import multinet as mn
import networkx as nx
class TestMultinet(object):
def test_build_multinet(self):
"""
Test building Multinet objects.
"""
mg = mn.Multinet()
assert mg.is_directed() == False
mg.add_edge(0, 1, 'L1')
mg.add_edge(0, 1, 'L2')
mg.add_edge(1, 0, 'L2')
mg.add_edge(1, 2, 'L2')
assert 'L1' in mg.layers()
assert 'L2' in mg.layers()
assert len(mg.edgelets) == 3
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 2
assert mg.number_of_layers() == 2
assert mg.number_of_edgelets() == 3
        # Removing a non-existent edgelet is a no-op.
mg.remove_edgelet(2, 3, 'L3')
mg.remove_edgelet(0, 1, 'L2')
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 2
assert mg.number_of_layers() == 2
assert mg.number_of_edgelets() == 2
mg.remove_edgelet(0, 1, 'L1')
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 1
assert mg.number_of_layers() == 2
assert mg.number_of_edgelets() == 1
assert len(mg.empty_layers()) == 1
mg.remove_empty_layers()
assert mg.number_of_layers() == 1
def test_aggregate_edge(self):
mg = mn.Multinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
assert mg[0][1][mg.cid]['L1'] == 5
assert mg[1][2][mg.cid]['L2'] == 6
mg.add_edge(0, 1, 'L1', weight=10)
assert mg[0][1][mg.cid]['L1'] == 10
mg.aggregate_edge(0, 1, 'L1', weight=5)
assert mg[0][1][mg.cid]['L1'] == 15
mg.aggregate_edge(2, 3, 'L2', weight=7)
assert mg[2][3][mg.cid]['L2'] == 7
def test_sub_layer(self):
mg = mn.Multinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
sg = mg.sub_layer('L1')
assert type(sg) == nx.Graph
assert sg.number_of_nodes() == 3
assert sg.number_of_edges() == 1
sg = mg.sub_layer('L2', remove_isolates=True)
assert type(sg) == nx.Graph
assert sg.number_of_nodes() == 2
assert sg.number_of_edges() == 1
def test_sub_layers(self):
mg = mn.Multinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
sg = mg.sub_layers(['L1', 'L2'])
assert type(sg) == mn.Multinet
assert sg.number_of_nodes() == 3
assert sg.number_of_edges() == 2
assert sg.number_of_layers() == 2
sg = mg.sub_layers(['L2', 'L3'], remove_isolates=True)
assert type(sg) == mn.Multinet
assert sg.number_of_nodes() == 2
assert sg.number_of_edges() == 1
assert sg.number_of_layers() == 2
def test_aggregated(self):
mg = mn.Multinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
ag = mg.aggregated()
assert type(ag) == nx.Graph
assert ag.number_of_nodes() == 3
assert ag.number_of_edges() == 2
assert ag[1][2]['weight'] == 8
assert ag[1][2]['nlayer'] == 2
def test_merge_layers(self):
mg = mn.Multinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
mg.merge_layers(['L1', 'L2'])
assert 'L1' not in mg.layers()
assert 'L2' not in mg.layers()
assert 'L1_L2' in mg.layers()
assert mg.number_of_layers() == 2
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 2
assert mg[0][1][mg.cid]['L1_L2'] == 5
assert mg[1][2][mg.cid]['L1_L2'] == 6
mg = mn.Multinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
mg.merge_layers(['L2', 'L3'], new_name='LN')
assert 'L2' not in mg.layers()
assert 'L3' not in mg.layers()
assert 'LN' in mg.layers()
assert mg.number_of_layers() == 2
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 2
assert mg[0][1][mg.cid]['L1'] == 5
assert mg[1][2][mg.cid]['LN'] == 8
def test_add_layer(self):
mg = mn.Multinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
sg = nx.Graph()
sg.add_edge(1, 2, weight=7)
sg.add_edge(2, 3)
mg.add_layer(sg, 'L3')
assert mg.number_of_nodes() == 4
assert mg.number_of_edges() == 3
assert mg.number_of_layers() == 3
assert mg[1][2][mg.cid]['L2'] == 6
assert mg[1][2][mg.cid]['L3'] == 7
assert mg[2][3][mg.cid]['L3'] == 1
def test_remove_layer(self):
mg = mn.Multinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
mg.remove_layer('L3')
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 2
assert mg.number_of_layers() == 2
mg = mn.Multinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
mg.remove_layer('L1')
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 1
assert mg.number_of_layers() == 2
class TestDiMultinet(object):
def test_build_dimultinet(self):
"""
        Test building DiMultinet objects.
"""
mg = mn.DiMultinet()
assert mg.is_directed() == True
mg.add_edge(0, 1, 'L1')
mg.add_edge(0, 1, 'L2')
mg.add_edge(1, 0, 'L2')
mg.add_edge(1, 2, 'L2')
assert 'L1' in mg.layers()
assert 'L2' in mg.layers()
assert len(mg.edgelets) == 4
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 3
assert mg.number_of_layers() == 2
assert mg.number_of_edgelets() == 4
        # Removing a non-existent edgelet is a no-op.
mg.remove_edgelet(2, 3, 'L3')
mg.remove_edgelet(0, 1, 'L2')
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 3
assert mg.number_of_layers() == 2
assert mg.number_of_edgelets() == 3
mg.remove_edgelet(0, 1, 'L1')
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 2
assert mg.number_of_layers() == 2
assert mg.number_of_edgelets() == 2
assert len(mg.empty_layers()) == 1
mg.remove_empty_layers()
assert mg.number_of_layers() == 1
def test_aggregate_edge(self):
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
assert mg[0][1][mg.cid]['L1'] == 5
assert mg[1][2][mg.cid]['L2'] == 6
mg.add_edge(0, 1, 'L1', weight=10)
assert mg[0][1][mg.cid]['L1'] == 10
mg.aggregate_edge(0, 1, 'L1', weight=5)
assert mg[0][1][mg.cid]['L1'] == 15
mg.aggregate_edge(2, 3, 'L2', weight=7)
assert mg[2][3][mg.cid]['L2'] == 7
def test_sub_layer(self):
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
sg = mg.sub_layer('L1')
assert type(sg) == nx.DiGraph
assert sg.number_of_nodes() == 3
assert sg.number_of_edges() == 1
sg = mg.sub_layer('L2', remove_isolates=True)
assert type(sg) == nx.DiGraph
assert sg.number_of_nodes() == 2
assert sg.number_of_edges() == 1
def test_sub_layers(self):
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
sg = mg.sub_layers(['L1', 'L2'])
assert type(sg) == mn.DiMultinet
assert sg.number_of_nodes() == 3
assert sg.number_of_edges() == 2
assert sg.number_of_layers() == 2
sg = mg.sub_layers(['L2', 'L3'], remove_isolates=True)
assert type(sg) == mn.DiMultinet
assert sg.number_of_nodes() == 2
assert sg.number_of_edges() == 1
assert sg.number_of_layers() == 2
def test_aggregated(self):
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
ag = mg.aggregated()
assert type(ag) == nx.DiGraph
assert ag.number_of_nodes() == 3
assert ag.number_of_edges() == 2
assert ag[1][2]['weight'] == 8
assert ag[1][2]['nlayer'] == 2
def test_merge_layers(self):
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
mg.merge_layers(['L1', 'L2'])
assert 'L1' not in mg.layers()
assert 'L2' not in mg.layers()
assert 'L1_L2' in mg.layers()
assert mg.number_of_layers() == 2
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 2
assert mg[0][1][mg.cid]['L1_L2'] == 5
assert mg[1][2][mg.cid]['L1_L2'] == 6
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
mg.merge_layers(['L2', 'L3'], new_name='LN')
assert 'L2' not in mg.layers()
assert 'L3' not in mg.layers()
assert 'LN' in mg.layers()
assert mg.number_of_layers() == 2
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 2
assert mg[0][1][mg.cid]['L1'] == 5
assert mg[1][2][mg.cid]['LN'] == 8
def test_add_layer(self):
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
sg = nx.Graph()
sg.add_edge(1, 2, weight=7)
sg.add_edge(2, 3)
mg.add_layer(sg, 'L3')
assert mg.number_of_nodes() == 4
assert mg.number_of_edges() == 3
assert mg.number_of_layers() == 3
assert mg[1][2][mg.cid]['L2'] == 6
assert mg[1][2][mg.cid]['L3'] == 7
assert mg[2][3][mg.cid]['L3'] == 1
def test_remove_layer(self):
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
mg.remove_layer('L3')
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 2
assert mg.number_of_layers() == 2
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(1, 2, 'L3', weight=2)
mg.remove_layer('L1')
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 1
assert mg.number_of_layers() == 2
def test_to_undirected(self):
mg = mn.DiMultinet()
mg.add_edge(0, 1, 'L1', weight=5)
mg.add_edge(1, 2, 'L2', weight=6)
mg.add_edge(2, 1, 'L3', weight=2)
assert mg.number_of_nodes() == 3
assert mg.number_of_edges() == 3
assert mg.number_of_layers() == 3
nmg = mg.to_undirected()
assert nmg.number_of_nodes() == 3
assert nmg.number_of_edges() == 2
assert nmg.number_of_layers() == 3
|
import hashlib
import unittest
from test import test_support
from test.test_support import _4G, precisionbigmemtest
def hexstr(s):
import string
h = string.hexdigits
r = ''
for c in s:
i = ord(c)
r = r + h[(i >> 4) & 0xF] + h[i & 0xF]
return r
class HashLibTestCase(unittest.TestCase):
supported_hash_names = ( 'md5', 'MD5', 'sha1', 'SHA1',
'sha224', 'SHA224', 'sha256', 'SHA256',
'sha384', 'SHA384', 'sha512', 'SHA512' )
def test_unknown_hash(self):
try:
hashlib.new('spam spam spam spam spam')
except ValueError:
pass
else:
self.assert_(0 == "hashlib didn't reject bogus hash name")
def test_hexdigest(self):
for name in self.supported_hash_names:
h = hashlib.new(name)
self.assert_(hexstr(h.digest()) == h.hexdigest())
def test_large_update(self):
aas = 'a' * 128
bees = 'b' * 127
cees = 'c' * 126
for name in self.supported_hash_names:
m1 = hashlib.new(name)
m1.update(aas)
m1.update(bees)
m1.update(cees)
m2 = hashlib.new(name)
m2.update(aas + bees + cees)
self.assertEqual(m1.digest(), m2.digest())
def check(self, name, data, digest):
# test the direct constructors
computed = getattr(hashlib, name)(data).hexdigest()
self.assert_(computed == digest)
# test the general new() interface
computed = hashlib.new(name, data).hexdigest()
self.assert_(computed == digest)
def test_case_md5_0(self):
self.check('md5', '', 'd41d8cd98f00b204e9800998ecf8427e')
def test_case_md5_1(self):
self.check('md5', 'abc', '900150983cd24fb0d6963f7d28e17f72')
def test_case_md5_2(self):
self.check('md5', 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789',
'd174ab98d277d9f5a5611c2c9f419d9f')
@precisionbigmemtest(size=_4G + 5, memuse=1)
def test_case_md5_huge(self, size):
if size == _4G + 5:
try:
self.check('md5', 'A'*size, 'c9af2dff37468ce5dfee8f2cfc0a9c6d')
except OverflowError:
pass # 32-bit arch
@precisionbigmemtest(size=_4G - 1, memuse=1)
def test_case_md5_uintmax(self, size):
if size == _4G - 1:
try:
self.check('md5', 'A'*size, '28138d306ff1b8281f1a9067e1a1a2b3')
except OverflowError:
pass # 32-bit arch
# use the three examples from Federal Information Processing Standards
# Publication 180-1, Secure Hash Standard, 1995 April 17
# http://www.itl.nist.gov/div897/pubs/fip180-1.htm
def test_case_sha1_0(self):
self.check('sha1', "",
"da39a3ee5e6b4b0d3255bfef95601890afd80709")
def test_case_sha1_1(self):
self.check('sha1', "abc",
"a9993e364706816aba3e25717850c26c9cd0d89d")
def test_case_sha1_2(self):
self.check('sha1', "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
"84983e441c3bd26ebaae4aa1f95129e5e54670f1")
def test_case_sha1_3(self):
self.check('sha1', "a" * 1000000,
"34aa973cd4c4daa4f61eeb2bdbad27316534016f")
# use the examples from Federal Information Processing Standards
# Publication 180-2, Secure Hash Standard, 2002 August 1
# http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf
def test_case_sha224_0(self):
self.check('sha224', "",
"d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f")
def test_case_sha224_1(self):
self.check('sha224', "abc",
"23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7")
def test_case_sha224_2(self):
self.check('sha224',
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
"75388b16512776cc5dba5da1fd890150b0c6455cb4f58b1952522525")
def test_case_sha224_3(self):
self.check('sha224', "a" * 1000000,
"20794655980c91d8bbb4c1ea97618a4bf03f42581948b2ee4ee7ad67")
def test_case_sha256_0(self):
self.check('sha256', "",
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
def test_case_sha256_1(self):
self.check('sha256', "abc",
"ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad")
def test_case_sha256_2(self):
self.check('sha256',
"abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq",
"248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1")
def test_case_sha256_3(self):
self.check('sha256', "a" * 1000000,
"cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0")
def test_case_sha384_0(self):
self.check('sha384', "",
"38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da"+
"274edebfe76f65fbd51ad2f14898b95b")
def test_case_sha384_1(self):
self.check('sha384', "abc",
"cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed"+
"8086072ba1e7cc2358baeca134c825a7")
def test_case_sha384_2(self):
self.check('sha384',
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmn"+
"hijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
"09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712"+
"fcc7c71a557e2db966c3e9fa91746039")
def test_case_sha384_3(self):
self.check('sha384', "a" * 1000000,
"9d0e1809716474cb086e834e310a4a1ced149e9c00f248527972cec5704c2a5b"+
"07b8b3dc38ecc4ebae97ddd87f3d8985")
def test_case_sha512_0(self):
self.check('sha512', "",
"cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce"+
"47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e")
def test_case_sha512_1(self):
self.check('sha512', "abc",
"ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a"+
"2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f")
def test_case_sha512_2(self):
self.check('sha512',
"abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmn"+
"hijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu",
"8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018"+
"501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909")
def test_case_sha512_3(self):
self.check('sha512', "a" * 1000000,
"e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973eb"+
"de0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b")
def test_main():
test_support.run_unittest(HashLibTestCase)
if __name__ == "__main__":
test_main()
|
from .sub_resource import SubResource
class VirtualNetworkPeering(SubResource):
"""Peerings in a virtual network resource.
:param id: Resource ID.
:type id: str
:param allow_virtual_network_access: Whether the VMs in the linked virtual
network space would be able to access all the VMs in local Virtual network
space.
:type allow_virtual_network_access: bool
:param allow_forwarded_traffic: Whether the forwarded traffic from the VMs
in the remote virtual network will be allowed/disallowed.
:type allow_forwarded_traffic: bool
:param allow_gateway_transit: If gateway links can be used in remote
virtual networking to link to this virtual network.
:type allow_gateway_transit: bool
:param use_remote_gateways: If remote gateways can be used on this virtual
network. If the flag is set to true, and allowGatewayTransit on remote
peering is also true, virtual network will use gateways of remote virtual
network for transit. Only one peering can have this flag set to true. This
flag cannot be set if virtual network already has a gateway.
:type use_remote_gateways: bool
:param remote_virtual_network: The reference of the remote virtual
network. The remote virtual network can be in the same or different region
(preview). See here to register for the preview and learn more
(https://docs.microsoft.com/en-us/azure/virtual-network/virtual-network-create-peering).
:type remote_virtual_network:
~azure.mgmt.network.v2017_11_01.models.SubResource
:param remote_address_space: The reference of the remote virtual network
address space.
:type remote_address_space:
~azure.mgmt.network.v2017_11_01.models.AddressSpace
:param peering_state: The status of the virtual network peering. Possible
values are 'Initiated', 'Connected', and 'Disconnected'. Possible values
include: 'Initiated', 'Connected', 'Disconnected'
:type peering_state: str or
~azure.mgmt.network.v2017_11_01.models.VirtualNetworkPeeringState
:param provisioning_state: The provisioning state of the resource.
:type provisioning_state: str
:param name: The name of the resource that is unique within a resource
group. This name can be used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource
is updated.
:type etag: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'allow_virtual_network_access': {'key': 'properties.allowVirtualNetworkAccess', 'type': 'bool'},
'allow_forwarded_traffic': {'key': 'properties.allowForwardedTraffic', 'type': 'bool'},
'allow_gateway_transit': {'key': 'properties.allowGatewayTransit', 'type': 'bool'},
'use_remote_gateways': {'key': 'properties.useRemoteGateways', 'type': 'bool'},
'remote_virtual_network': {'key': 'properties.remoteVirtualNetwork', 'type': 'SubResource'},
'remote_address_space': {'key': 'properties.remoteAddressSpace', 'type': 'AddressSpace'},
'peering_state': {'key': 'properties.peeringState', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, id=None, allow_virtual_network_access=None, allow_forwarded_traffic=None, allow_gateway_transit=None, use_remote_gateways=None, remote_virtual_network=None, remote_address_space=None, peering_state=None, provisioning_state=None, name=None, etag=None):
super(VirtualNetworkPeering, self).__init__(id=id)
self.allow_virtual_network_access = allow_virtual_network_access
self.allow_forwarded_traffic = allow_forwarded_traffic
self.allow_gateway_transit = allow_gateway_transit
self.use_remote_gateways = use_remote_gateways
self.remote_virtual_network = remote_virtual_network
self.remote_address_space = remote_address_space
self.peering_state = peering_state
self.provisioning_state = provisioning_state
self.name = name
self.etag = etag
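# Usage sketch (illustrative; not part of the generated SDK file): the model
# is constructed from keyword arguments, and _attribute_map describes how
# each attribute maps onto the wire format for the msrest (de)serializer.
#   peering = VirtualNetworkPeering(name='peer1', allow_forwarded_traffic=True)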
|
import sys
sys.path.append('../browser_interface/browser')
class BrowserFactory(object):
    def create(self, type, *args, **kwargs):
        # Dynamically import the module named by `type` and instantiate the
        # class of the same name defined inside it.
        return getattr(__import__(type), type)(*args, **kwargs)
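# Usage sketch (illustrative; assumes a module named 'firefox' exists on the
# appended browser path and defines a class also named 'firefox', which is
# the contract the dynamic lookup above relies on):
#   browser = BrowserFactory().create('firefox')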
|
import sqlite3
VERBOSE = 0
CTABLE_DOMAIN = '''
CREATE TABLE IF NOT EXISTS Domains(
did INTEGER PRIMARY KEY AUTOINCREMENT,
domain VARCHAR(64) UNIQUE,
indegree INTEGER,
outdegree INTEGER
)'''
CTABLE_WEBSITE = '''
CREATE TABLE IF NOT EXISTS Websites(
wid INTEGER PRIMARY KEY AUTOINCREMENT,
did INTEGER,
url VARCHAR(256) NOT NULL UNIQUE,
title VARCHAR(100),
visited bit,
FOREIGN KEY (did) REFERENCES Domains(did)
)'''
CTABLE_RULESETS = '''
CREATE TABLE IF NOT EXISTS Rulesets(
rid INTEGER PRIMARY KEY AUTOINCREMENT,
did INTEGER,
rules VARCHAR(512),
FOREIGN KEY (did) REFERENCES Domains(did)
)'''
class DatabaseHelper(object):
def __init__(self):
        '''Create the tables'''
self.conn = sqlite3.connect("./items.db")
if VERBOSE:
print 'Database connection OPEN.'
        # Domains table
        self.conn.execute(CTABLE_DOMAIN)
        # Websites table
        self.conn.execute(CTABLE_WEBSITE)
        # Rulesets table
        self.conn.execute(CTABLE_RULESETS)
self.conn.commit()
if VERBOSE:
cur = self.conn.cursor()
print 'Tables:',cur.execute("SELECT name FROM sqlite_master WHERE type = 'table'").fetchall()
def close(self):
        '''Close the database connection'''
if VERBOSE:
print 'Database connection CLOSE.'
self.conn.close()
def insertDomain(self, domain, indegree=0, outdegree=0):
        '''Insert a domain'''
cur = self.conn.cursor()
cur.execute("INSERT INTO Domains VALUES (NULL,?,?,?)", (domain, indegree, outdegree))
        # Flush to the database file
self.conn.commit()
def insertRuleset(self, ruleset, domain):
        '''Insert a robots.txt ruleset for a domain'''
cur = self.conn.cursor()
cur.execute("SELECT did FROM Domains WHERE domain=?", (domain,))
did = cur.fetchone()[0]
cur.execute("INSERT INTO Rulesets VALUES (NULL,?,?)",(did, ruleset))
        # Flush to the database file
self.conn.commit()
def insertWebsite(self, url, domain):
        '''Insert a webpage, mark it unvisited, and increase the indegree of its domain'''
cur = self.conn.cursor()
cur.execute("SELECT 1 FROM Domains WHERE domain=?", (domain,))
result = cur.fetchone()
if not result:
            # No record for this domain yet: create it with indegree 1
            if VERBOSE:
                print 'Spot Domain:',domain
            self.insertDomain(domain, indegree=1)
            cur.execute("SELECT did FROM Domains WHERE domain=?", (domain,))
            did = cur.fetchone()[0]
        else:
            did = result[0]
            # The domain record already exists: increase its indegree by 1
            cur.execute("UPDATE Domains SET indegree=indegree+1 WHERE domain=?", (domain,))
        cur.execute("INSERT INTO Websites VALUES (NULL,?,?,NULL,0)", (did, url,))
        # Flush to the database file
self.conn.commit()
def updateInfo(self, item, newlinks, oldlinks):
        '''Update the database after the crawler has finished a page'''
cur = self.conn.cursor()
cur.execute("SELECT wid,did FROM Websites WHERE url=?", (item['url'],))
wid, did = cur.fetchone()
        # Update the website record
        cur.execute("UPDATE Websites SET title=?,visited=1 WHERE wid=?", (item['title'], wid,))
        # The outdegree of the corresponding domain also needs updating
        cur.execute("UPDATE Domains SET outdegree=outdegree+? WHERE did=?", (len(item['links']), did,))
        # Update the records touched by every link on this page
        # Links the caller has determined to be new
for link,domain in newlinks:
self.insertWebsite(link, domain)
        # Links the caller has seen before
for link,domain in oldlinks:
            # Increase the indegree of the corresponding domain
            cur.execute("UPDATE Domains SET indegree=indegree+1 WHERE domain=?", (domain,))
        # Flush to the database file
self.conn.commit()
def robotsrulesetOfDomain(self, domain):
        '''Check whether the domain is in the database:
        no  --> (False, None)
        yes --> (True, the robots.txt rules stored in the database)
        '''
exist = False
cur = self.conn.cursor()
        # Does the domain exist?
        cur.execute("SELECT 1 FROM Domains WHERE domain=?", (domain,))
        if cur.fetchone():
            exist = True
        # If it does, what ruleset is stored for it?
cur.execute("SELECT rules FROM Domains,Rulesets "
"WHERE domain=? AND Domains.did=Rulesets.did"
,(domain,) )
ruleset = cur.fetchone()
return (exist, ruleset)
def rollback(self):
self.conn.rollback()
def showAll(self):
self.conn.commit()
cur = self.conn.cursor()
cur.execute("SELECT * FROM Domains")
print cur.fetchall()
cur.execute("SELECT * FROM Websites")
print cur.fetchall()
_dbcli = None
def getCliInstance():
global _dbcli
if not _dbcli:
_dbcli = DatabaseHelper()
return _dbcli
def test():
dbcli = getCliInstance()
# dbcli.insertDomain('jaysonhwang.com')
# dbcli.insertRuleset('test','jaysonhwang.com')
print dbcli.robotsrulesetOfDomain('www.zol.com')
print dbcli.robotsrulesetOfDomain('jayson.com')
dbcli.showAll()
dbcli.close()
if __name__ == '__main__':
test()
|
import re
from crossword import *
class Crossword2(Crossword):
def __init__(self):
self.grid = OpenGrid()
self.connected = {}
self.used_words = []
def copy(self):
copied = Crossword2()
copied.grid = self.grid.copy()
copied.connected = self.connected.copy()
copied.used_words = self.used_words[:]
return copied
def embed(self, pos, direction, word):
assert word not in self.used_words
super(Crossword2, self).embed(pos, direction, word)
self.used_words.append(word)
def all_disconnected_sequences(self):
'''
>>> c = Crossword2()
>>> c.embed((0, 0), HORIZONTAL, 'ANT')
>>> c.embed((0, 0), VERTICAL, 'ATOM')
>>> c.embed((1, 2), HORIZONTAL, 'IT')
>>> c.embed((3, 0), HORIZONTAL, 'MEET')
>>> c.dump()
_#____
#ANT#_
_T#IT#
_O____
#MEET#
_#____
>>> c.all_disconnected_sequences()
[((0, 2), 2, 'T'), ((1, 0), 2, 'T'), ((2, 0), 2, 'O'), ((0, 1), 1, 'N'), ((3, 1), 1, 'E'), ((0, 2), 1, 'TI'), ((0, 2), 1, 'TI.E'), ((3, 2), 1, 'E'), ((1, 3), 1, 'T'), ((1, 3), 1, 'T.T'), ((3, 3), 1, 'T')]
'''
sequences = []
        for pos, direction, length in (
                [((r, self.grid.colmin), HORIZONTAL, self.grid.width)
                 for r in range(self.grid.rowmin, self.grid.rowmax + 1)] +
                [((self.grid.rowmin, c), VERTICAL, self.grid.height)
                 for c in range(self.grid.colmin, self.grid.colmax + 1)]):
line = self.grid.get_word(pos, direction, length)
poslist = self.grid.poslist(pos, direction, length)
sequences += self.extract_sequences(line, poslist, direction)
return [(p, d, w) for (p, d, w) in sequences if not w.endswith('.')]
def extract_sequences(self, line, poslist, direction, idx=0, current_seq=None):
'''
>>> c = Crossword2()
>>> c.extract_sequences('ABC', [(0, 0), (0, 1), (0, 2)], HORIZONTAL)
[((0, 0), 2, 'ABC')]
>>> c.extract_sequences('_A_', [(0, 0), (0, 1), (0, 2)], HORIZONTAL)
[((0, 1), 2, 'A'), ((0, 1), 2, 'A.')]
>>> c.extract_sequences('A_C', [(0, 0), (0, 1), (0, 2)], HORIZONTAL)
[((0, 0), 2, 'A'), ((0, 0), 2, 'A.C'), ((0, 2), 2, 'C')]
>>> c.extract_sequences('A#C', [(0, 0), (0, 1), (0, 2)], HORIZONTAL)
[((0, 0), 2, 'A'), ((0, 2), 2, 'C')]
>>> c.extract_sequences('A_#B_C', [(0, 0), (0, 1), (0, 2), (0, 3), (0, 4), (0,5)], HORIZONTAL)
[((0, 0), 2, 'A'), ((0, 0), 2, 'A.'), ((0, 3), 2, 'B'), ((0, 3), 2, 'B.C'), ((0, 5), 2, 'C')]
>>> c.extract_sequences('A_B__C', [(0, 0), (0, 1), (0, 2), (0, 3), (0, 4), (0,5)], HORIZONTAL)
[((0, 0), 2, 'A'), ((0, 0), 2, 'A.B'), ((0, 2), 2, 'B'), ((0, 0), 2, 'A.B.'), ((0, 2), 2, 'B.'), ((0, 0), 2, 'A.B..C'), ((0, 2), 2, 'B..C'), ((0, 5), 2, 'C')]
'''
if not current_seq: current_seq = []
if idx >= len(line): return current_seq
c = line[idx]
pos = poslist[idx]
if c == FILLED:
return current_seq + self.extract_sequences(line, poslist, direction, idx + 1, [])
if c == EMPTY:
new_current_seq = [(p, d, s + '.') for (p, d, s) in current_seq]
return current_seq + self.extract_sequences(line, poslist, direction, idx + 1, new_current_seq)
if current_seq:
new_current_seq = [(p, d, s + c) for (p, d, s) in current_seq if not self.is_connected(poslist[idx - 1], pos)]
if any([s.endswith('.') for (p, d, s) in current_seq]):
new_current_seq.append((pos, direction, c))
return self.extract_sequences(line, poslist, direction, idx + 1, new_current_seq)
else:
new_current_seq = [(pos, direction, c)]
return self.extract_sequences(line, poslist, direction, idx + 1, new_current_seq)
def build_crossword2(words, monitor=False):
'''
>>> ans = list(build_crossword2(['ANT', 'ART', 'RAT']))
>>> ans[0].dump()
#ANT#
>>> ans[1].dump()
_#___
#ANT#
_R___
_T___
_#___
>>> ans[2].dump()
___#___
__#ANT#
___R___
#RAT#__
___#___
>>> ans[3].dump()
___#_
___R_
_#_A_
#ANT#
_R_#_
_T___
_#___
>>> ans[4].dump()
_#___
_R___
#ANT#
_T___
_#___
>>> ans[5].dump()
___#_
_#_A_
_R_R_
#ANT#
_T_#_
_#___
>>> ans[6].dump()
___#___
___R___
__#ANT#
#ART#__
___#___
>>> ans[7].dump()
___#_
___A_
___R_
#ANT#
___#_
>>> ans[8].dump()
___#__
_#RAT#
___R__
#ANT#_
___#__
>>> ans[9].dump()
___#_
_#_A_
_R_R_
#ANT#
_T_#_
_#___
>>> ans[10].dump()
___#___
___A___
__#RAT#
#ANT#__
___#___
>>> ans[11].dump()
___#_
___R_
___A_
#ANT#
___#_
>>> ans[12].dump()
___#__
_#ART#
___A__
#ANT#_
___#__
>>> ans[13].dump()
___#___
___R___
__#ART#
#ANT#__
___#___
>>> ans[14].dump()
___#_
___R_
_#_A_
#ANT#
_R_#_
_T___
_#___
>>> len(ans)
15
'''
crosswords = [Crossword2()]
crosswords[0].embed((0, 0), HORIZONTAL, words[0])
while True:
if not crosswords: break
crosswords = sorted(crosswords, key=lambda c: evaluate_crossword(c))
base = crosswords.pop(0)
if monitor:
print ('%d candidates...'%(len(crosswords)))
if isinstance(monitor, dict):
base.dump(empty=monitor['EMPTY'], filled=monitor['FILLED'])
else:
base.dump()
print ('')
try:
sequences = base.all_disconnected_sequences()
if is_valid_crossword(sequences):
yield base
candidates = generate_candidates(words, base, sequences)
crosswords += candidates
except ValueError:
# discard this base
pass
def is_valid_crossword(sequences):
return all([len(s) <= 1 or s.find('.') > -1 for _, _, s in sequences])
def generate_candidates(words, base, sequences):
fit_words = []
for sequence in sequences:
available_words = [w for w in words if w not in base.used_words]
fit_words_for_seq = [(p, d, w) for (p, d, w) in propose_words(sequence, available_words) if base.is_fit(p, d, w)]
_, _, s = sequence
if not fit_words_for_seq and len(s) > 1 and s.find('.') == -1:
# dead end; discard this base
raise ValueError('no candidates found')
fit_words += fit_words_for_seq
candidates = []
for p, d, w in fit_words:
copy = base.copy()
copy.embed(p, d, w)
candidates.append(copy)
return candidates
def propose_words(sequence, words):
(p, d, seq) = sequence
proposed_words = []
for word in words:
idx = 0
while True:
m = re.search(seq, word[idx:])
if not m: break
proposed_words.append((OpenGrid.pos_inc(p, -(m.start() + idx), d), d, word))
idx += m.start() + 1
return proposed_words
def evaluate_crossword(c):
# return -len(c.used_words)
return (c.grid.width + c.grid.height) * 1.0 / len(c.used_words) ** 2
# return (c.grid.width * c.grid.height) * 1.0 / sum([len(w) for w in c.used_words])
def pickup_crosswords(words, dump_option=None, monitor=False):
best = 9999
for c in build_crossword2(words, monitor=monitor):
if evaluate_crossword(c) < best:
if dump_option:
c.dump(empty=dump_option['EMPTY'], filled=dump_option['FILLED'])
else:
c.dump()
best = evaluate_crossword(c)
print ('score: %f'%(best))
print ('')
if __name__ == '__main__':
import doctest
doctest.testmod()
|
import math
class Point:
def __init__(self, x, y):
self.x = x
self.y = y
def rssToEstimatedDistance(rss):
freq = 2462 # freq of WiFi channel 6
origDBm = -20 # estimate this value
loss = abs(origDBm - rss)
dist = 10 ** ( ( loss + 27.55 - 20 * math.log10(freq) ) / 20 )
return dist
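# Worked example (sanity check of the free-space path-loss inversion above):
# with origDBm = -20 and freq = 2462 MHz, an RSS of -60 dBm gives a 40 dB loss
# and dist = 10 ** ((40 + 27.55 - 20*log10(2462)) / 20), roughly 0.97 m.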
def trilaterate(inSources, rss):
distances = []
distances.append( rssToEstimatedDistance(rss[0]) )
distances.append( rssToEstimatedDistance(rss[1]) )
distances.append( rssToEstimatedDistance(rss[2]) )
# find the three intersection points
tp1 = _findEqualPerp(inSources[0], inSources[1], distances[0], distances[1])
tp2 = _findEqualPerp(inSources[0], inSources[2], distances[0], distances[2])
tp3 = _findEqualPerp(inSources[1], inSources[2], distances[1], distances[2])
p = Point( (tp1.x + tp2.x + tp3.x) / 3, (tp1.y + tp2.y + tp3.y) / 3 )
return p
def _findEqualPerp(p1, p2, r1, r2):
    # swap points so that p1 is the leftmost point
    if p2.x < p1.x:
        p1, p2 = p2, p1
# compute the equation for the line
deltaX = p2.x - p1.x
deltaY = p2.y - p1.y
    if deltaX == 0:
        # vertical line: approximate an infinite slope with a large constant
        slope = 999999999
    else:
        slope = deltaY / deltaX
intercept = p2.y - slope * p2.x
# compute the constant multiplier
lineLen = math.sqrt((p2.x - p1.x)**2 + (p2.y - p1.y)**2)
c = lineLen / (r1 + r2)
posOnLine = c * r1
angle = math.atan(slope)
touchingPoint = Point(math.cos(angle) * posOnLine + p1.x, math.sin(angle) * posOnLine + p1.y)
return touchingPoint
def main():
a = Point(1, 6)
b = Point(2, 3)
c = Point(5, 7)
t = trilaterate([a,b,c], [2,3,5])
print(t.x)
print(t.y)
if __name__ == '__main__':
main()
|
import pytest
from swimlane.exceptions import ValidationError
def test_getattr_fallback(mock_record):
"""Verify cursor __getattr__ falls back to AttributeError for unknown cursor + list methods"""
with pytest.raises(AttributeError):
getattr(mock_record['Text List'], 'unknown_method')
def test_set_validation(mock_record):
"""Test directly setting a ListField value for validation"""
mock_record['Text List'] = ['text']
with pytest.raises(ValidationError):
mock_record['Text List'] = [123]
with pytest.raises(ValidationError):
mock_record['Text List'] = 123
with pytest.raises(ValidationError):
mock_record['Text List'] = 'text'
def test_modification_validation(mock_record):
"""Test calling list methods on cursor respects validation"""
mock_record['Text List'].append('text')
with pytest.raises(ValidationError):
mock_record['Text List'].append(123)
def test_numeric_range(mock_record):
"""Test item numeric range restrictions"""
key = 'Numeric List Range Limit'
mock_record[key] = [5]
with pytest.raises(ValidationError):
mock_record[key] = [3]
with pytest.raises(ValidationError):
mock_record[key] = [12]
def test_list_length_validation(mock_record):
"""List length validation check"""
key = 'Numeric List Range Limit'
mock_record[key] = [5, 6, 7]
with pytest.raises(ValidationError):
mock_record[key].append(8)
with pytest.raises(ValidationError):
mock_record[key] = []
def test_item_type_validation(mock_record):
"""Validate correct item type for text/numeric values"""
key = 'Numeric List Range Limit'
with pytest.raises(ValidationError):
mock_record[key] = ['text']
def test_min_max_word_validation(mock_record):
"""Validate against min/max word restrictions"""
key = 'Text List Word Limit'
with pytest.raises(ValidationError):
mock_record[key] = ['word ' * 10]
with pytest.raises(ValidationError):
mock_record[key] = ['word']
def test_min_max_char_validation(mock_record):
"""Min/max characters restriction validation"""
key = 'Text List Char Limit'
with pytest.raises(ValidationError):
mock_record[key] = ['defg', 'hijkl', 'mno pqr']
with pytest.raises(ValidationError):
mock_record[key] = ['']
def test_list_field_bulk_modify_value(mock_record):
"""Pass-through bulk_modify value"""
value = ['Test', 'Value']
assert mock_record.get_field('Text List').get_bulk_modify(value) == value
|
callback_functions = ["collision_enter", "collision_stay", "collision_exit"]
length_area_world = 75
raise_exception = False
from game import *
from gameobject import *
from contracts import *
from configuration import *
from component import *
from loader import *
from physics import *
from scene import *
from timeutils import *
from builtincomponents import *
from builtincomponents.camera import *
from builtincomponents.collider import *
from builtincomponents.sprite_renderer import *
from builtincomponents.transform import *
|
from functools import reduce
mask1 = mask2 = polyred = None
def setGF2(degree, irPoly):
"""Define parameters of binary finite field GF(2^m)/g(x)
- degree: extension degree of binary field
- irPoly: coefficients of irreducible polynomial g(x)
"""
def i2P(sInt):
"""Convert an integer into a polynomial"""
return [(sInt >> i) & 1
for i in reversed(range(sInt.bit_length()))]
global mask1, mask2, polyred
mask1 = mask2 = 1 << degree
mask2 -= 1
polyred = reduce(lambda x, y: (x << 1) + y, i2P(irPoly)[1:])
def multGF2(p1, p2):
"""Multiply two polynomials in GF(2^m)/g(x)"""
p = 0
while p2:
if p2 & 1:
p ^= p1
p1 <<= 1
if p1 & mask1:
p1 ^= polyred
p2 >>= 1
return p & mask2
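# Small worked example (sanity check, mirroring the classic GF(2^3) case):
# with g(x) = x^3 + x + 1,
#   setGF2(3, 0b1011); multGF2(0b111, 0b101) == 0b110
# i.e. (x^2 + x + 1)(x^2 + 1) = x^2 + x after reduction by g(x).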
if __name__ == "__main__":
    # Define the binary field GF(2^127) with g(x) = x^127 + x^63 + 1
    setGF2(127, 2**127 + 2**63 + 1)
    # Evaluate a product of two polynomials in GF(2^127)
    print("{:02x}".format(multGF2(0x3f7e0000000000000000000000000000, 0x3f7e00000000000000000000)))
|
from __future__ import unicode_literals
import unittest
import os
import sys
from flake8.api import legacy as engine
if sys.version_info[0] == 3:
unicode = str
if sys.version_info[:2] == (2, 6):
# Monkeypatch to make tests work on 2.6
    def assert_less(first, second, msg=None):
        assert first < second, msg
unittest.TestCase.assertLess = assert_less
class TestCodeComplexity(unittest.TestCase):
def test_flake8_conformance(self):
flake8style = engine.get_style_guide(
ignore=['E501'],
max_complexity=6
)
directory = 'flask_rollbar'
self.assertEqual(os.path.isdir(directory), True,
"Invalid test directory '%s'. You need to update test_flake8.py" % directory)
# Get all the files to check
files = []
for dirpath, dirnames, filenames in os.walk(directory):
for filename in [f for f in filenames if f.endswith(".py")]:
files += [os.path.join(dirpath, filename)]
result = flake8style.check_files(files)
self.assertEqual(result.total_errors, 0,
"Code found to be too complex or failing PEP8")
if __name__ == '__main__':
unittest.main()
|
import pyak
import yikbot
import time
yLocation = pyak.Location("42.270340", "-83.742224")
yb = yikbot.YikBot("yikBot", yLocation)
print "DEBUG: Registered yikBot with handle %s and id %s" % (yb.handle, yb.id)
print "DEBUG: Going to sleep, new yakkers must wait ~90 seconds before they can act"
time.sleep(90)
print "DEBUG: yikBot instance 90 seconds after initialization"
print vars(yb)
yb.boot()
|
import prosper.datareader.exceptions
import prosper.datareader._version
|
"""
Visualization module.
"""
import numpy as np
from matplotlib import animation
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
from mpl_toolkits.axes_grid1.inset_locator import zoomed_inset_axes
from mpl_toolkits.axes_grid1.inset_locator import mark_inset
from pca import create_handles
import warnings
warnings.filterwarnings('ignore')
def get_temp_markers(year, attacks):
"""
Gives all the information about the markers needed for the
year passed in argument.
"""
data_given_year = attacks[attacks['Year'] == year].reset_index()
num_markers = data_given_year.shape[0]
markers = np.zeros(num_markers, dtype=[('Longitude', float, 1),
('Latitude', float, 1),
('Size', float, 1),
('Color', float, 1)])
killed = data_given_year['Killed']
_MIN, _MAX, _MEDIAN = killed.min(), killed.max(), killed.median()
markers['Longitude'] = data_given_year['Longitude']
markers['Latitude'] = data_given_year['Latitude']
markers['Size'] = 10* np.abs(killed - _MEDIAN) + 1
markers['Color'] = (killed - _MIN)/(_MAX - _MIN)
return markers, _MAX
def world_view(attacks):
"""
Creates an animation where we see the evolution of the worldwide terrorist attacks
among the available years.
"""
fig = plt.figure(figsize=(10, 10))
cmap = plt.get_cmap('inferno')
# create the map
map = Basemap(projection='cyl')
map.drawmapboundary()
map.fillcontinents(color='lightgray', zorder=0)
# define the frame values (as 1993 is not contained in the database
# we have to remove it, otherwise we will have an empty frame)
frames = np.append(np.arange(1970, 1993), np.arange(1994, 2017))
# create the plot structure
temp_markers, _MAX = get_temp_markers(frames[0], attacks)
xs, ys = map(temp_markers['Longitude'], temp_markers['Latitude'])
scat = map.scatter(xs, ys, s=temp_markers['Size'], c=temp_markers['Color'], cmap=cmap, marker='o',
alpha=0.3, zorder=10)
year_text = plt.text(-170, 80, str(frames[0]),fontsize=15)
cbar = map.colorbar(scat, location='bottom')
cbar.set_label('number of killed people 0.0 = min [0] 1.0 = max [{}]' .format(_MAX))
plt.title('Activity of terrorism attacks from 1970 to 2016')
plt.savefig('world_view.pdf', bbox_inches='tight')
plt.show()
def update(year):
"""
Updates the content of each frame during the animation for
the year passed in argument.
"""
# retrieve necessary information from the markers
temp_markers, _MAX = get_temp_markers(year, attacks)
# update the map content
xs, ys = map(temp_markers['Longitude'], temp_markers['Latitude'])
scat.set_offsets(np.hstack((xs[:,np.newaxis], ys[:, np.newaxis])))
scat.set_color(cmap(temp_markers['Color']))
scat.set_sizes(temp_markers['Size'])
year_text.set_text(str(year))
cbar.set_label('number of killed people 0.0 = min [0] 1.0 = max [{}]' .format(_MAX))
return scat,
# create animation
ani = animation.FuncAnimation(fig, update, interval=1000, frames=frames, blit=True)
ani.save('visualization.mp4', writer = 'ffmpeg', fps=1, bitrate=-1)
plt.show()
def get_group_markers(attacks, group):
"""
Gives all the information about the markers for the
group passed in argument.
"""
data_given_group = attacks[attacks['Group'] == group]
num_markers = data_given_group.shape[0]
markers = np.zeros(num_markers, dtype=[('Longitude', float, 1),
('Latitude', float, 1),
('Size', float, 1),
('Color', float, 1)])
killed = data_given_group['Killed']
_MIN, _MAX, _MEDIAN = killed.min(), killed.max(), killed.median()
markers['Longitude'] = data_given_group['Longitude']
markers['Latitude'] = data_given_group['Latitude']
markers['Size'] = 10* np.abs(killed - _MEDIAN) + 1
markers['Color'] = (killed - _MIN)/(_MAX - _MIN)
return markers, _MAX
def zoom_taliban_intensity(attacks):
"""
Zooms in the particular location of the attacks perpetrated by the Taliban group
showing the intensity of the attacks.
"""
fig = plt.figure(figsize=(15,15))
ax = fig.add_subplot(111)
cmap = plt.get_cmap('inferno')
plt.title('Intensity of attacks perpetrated by the Taliban group\n')
# create the map
map = Basemap(projection='cyl',lat_0=0, lon_0=0)
map.drawmapboundary()
map.fillcontinents(color='lightgray', zorder=0)
# create the plot structure
temp_markers, _MAX = get_group_markers(attacks, 'Taliban')
xs, ys = map(temp_markers['Longitude'], temp_markers['Latitude'])
scat = map.scatter(xs, ys, s=temp_markers['Size'], c=temp_markers['Color'], cmap=cmap, marker='o',
alpha=0.3, zorder=10)
axins = zoomed_inset_axes(ax, 9, loc=2)
    axins.set_xlim(55, 75)
    axins.set_ylim(25, 40)
plt.xticks(visible=False)
plt.yticks(visible=False)
map2 = Basemap(llcrnrlon=55,llcrnrlat=25,urcrnrlon=75,urcrnrlat=40, ax=axins)
map2.drawmapboundary()
map2.fillcontinents(color='lightgray', zorder=0)
map2.drawcoastlines()
map2.drawcountries()
map2.scatter(xs, ys, s=temp_markers['Size']/5., c=cmap(temp_markers['Color']), alpha=0.5)
mark_inset(ax, axins, loc1=2, loc2=4, fc="none", ec="0.5")
plt.savefig('taliban_zoom_intensity.pdf', bbox_inches='tight')
plt.show()
def get_group_attack_types_markers(attacks, group):
"""
Gives the description of the attack types about the markers for the
group passed in argument.
"""
data_given_year = attacks[attacks['Group'] == group]
list_attack_type_unique = data_given_year['Attack_type'].unique().tolist()
list_attack_type = data_given_year['Attack_type'].tolist()
# assign each attack to the corresponding color
colors_attack_type = plt.cm.tab20(list(range(1,len(list_attack_type_unique)+1)))
label_color_dict_attack_type = dict(zip(list_attack_type_unique, colors_attack_type))
cvec_attack_type = [label_color_dict_attack_type[label] for label in list_attack_type]
num_markers = data_given_year.shape[0]
markers = np.zeros(num_markers, dtype=[('Longitude', float, 1),
('Latitude', float, 1),
('Size', float, 1),
('Color', float, 4)])
killed = data_given_year['Killed']
_MIN, _MAX, _MEDIAN = killed.min(), killed.max(), killed.median()
markers['Longitude'] = data_given_year['Longitude']
markers['Latitude'] = data_given_year['Latitude']
markers['Size'] = 100
markers['Color'] = np.array(cvec_attack_type)
return markers, label_color_dict_attack_type
def zoom_taliban_attack_types(attacks):
"""
Zooms in the particular location of the attacks perpetrated by the Taliban group
showing the different attack types.
"""
group = 'Taliban'
fig = plt.figure(figsize=(15,15))
ax = fig.add_subplot(111)
cmap = plt.get_cmap('inferno')
plt.title('Attack types perpetrated by the Taliban group\n')
# create the map
map = Basemap(projection='cyl',lat_0=0, lon_0=0)
map.drawmapboundary()
map.fillcontinents(color='lightgray', zorder=0)
# create the plot structure
temp_markers, _MAX = get_group_markers(attacks, group)
xs, ys = map(temp_markers['Longitude'], temp_markers['Latitude'])
scat = map.scatter(xs, ys, s=temp_markers['Size'], c=temp_markers['Color'], cmap=cmap, marker='o',
alpha=0.5, zorder=10)
axins = zoomed_inset_axes(ax, 9, loc=2)
    axins.set_xlim(55, 75)
    axins.set_ylim(25, 40)
plt.xticks(visible=False)
plt.yticks(visible=False)
map2 = Basemap(llcrnrlon=55,llcrnrlat=25,urcrnrlon=75,urcrnrlat=40, ax=axins)
map2.drawmapboundary()
map2.fillcontinents(color='lightgray', zorder=0)
map2.drawcoastlines()
map2.drawcountries()
temp_markers, label_color_dict_attack_type = get_group_attack_types_markers(attacks, group)
map2.scatter(xs, ys, s=temp_markers['Size']/5., c=temp_markers['Color'], alpha=0.5)
mark_inset(ax, axins, loc1=2, loc2=4, fc="none", ec="0.5")
handles = create_handles(label_color_dict_attack_type, ax)
labels = [h.get_label() for h in handles]
ax.legend(loc='upper left', bbox_to_anchor=(1, 1), handles=handles, labels=labels)
plt.savefig('taliban_zoom_attack_types.pdf', bbox_inches='tight')
plt.show()
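# Usage sketch (illustrative; not part of the original module). The helpers
# above expect 'attacks' to be a pandas DataFrame with at least the columns
# 'Year', 'Longitude', 'Latitude', 'Killed', 'Group' and 'Attack_type':
#   world_view(attacks)
#   zoom_taliban_intensity(attacks)
#   zoom_taliban_attack_types(attacks)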
|
__author__ = "Ricardo Ribeiro"
__credits__ = ["Ricardo Ribeiro"]
__license__ = "MIT"
__version__ = "0.0"
__maintainer__ = "Ricardo Ribeiro"
__email__ = "ricardojvr@gmail.com"
__status__ = "Development"
import time
from datetime import datetime, timedelta
def timeit(method):
def timed(*args, **kw):
ts = time.time()
result = method(*args, **kw)
te = time.time()
time_elapsed = datetime(1,1,1) + timedelta(seconds=(te-ts) )
print("%s: %d:%d:%d:%d;%d" % (method.__name__, time_elapsed.day-1, time_elapsed.hour, time_elapsed.minute, time_elapsed.second, time_elapsed.microsecond))
return result
return timed
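# Minimal usage sketch (illustrative; not part of the original module):
# decorating a callable prints "<name>: days:hours:minutes:seconds;microseconds"
# when it is invoked.
@timeit
def _example_workload():
    time.sleep(0.1)
if __name__ == '__main__':
    _example_workload()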
|
from app.app_and_db import app
from flask import Blueprint, jsonify, render_template
import datetime
import random
import requests
dashboard = Blueprint('dashboard', __name__)
cumtd_endpoint = 'https://developer.cumtd.com/api/{0}/{1}/{2}'
cumtd_endpoint = cumtd_endpoint.format('v2.2', 'json', 'GetDeparturesByStop')
wunderground_endpoint = 'http://api.wunderground.com/api/{0}/hourly/q/{1}/{2}.json'
wunderground_endpoint = wunderground_endpoint.format(app.config['WUNDERGROUND_API_KEY'], 'IL', 'Champaign')
@dashboard.route('/')
def index():
time=datetime.datetime.now().time().strftime('%I:%M').lstrip('0')
return render_template('pages/dashboard.html', image_number=random.randrange(1, 9), time=time)
@dashboard.route('/bus')
def bus_schedule():
params = {'key' : app.config['CUMTD_API_KEY'],
'stop_id' : 'GRN4TH',
'count' : '5'}
response = requests.get(cumtd_endpoint, params=params)
json = response.json()
departures = []
    for departure in json['departures']:
if departure['trip']['direction'] == 'East':
departures.append(departure)
return jsonify(departures=departures)
@dashboard.route('/weather')
def weather():
response = requests.get(wunderground_endpoint)
json = response.json()
return jsonify(json)
app.register_blueprint(dashboard, url_prefix='/dashboard')
|
from __future__ import print_function
import sys
sys.path.append('..') # help python find cyton.py relative to scripts folder
from openbci import cyton as bci
import logging
import time
def printData(sample):
# os.system('clear')
print("----------------")
print("%f" % (sample.id))
print(sample.channel_data)
print(sample.aux_data)
print("----------------")
if __name__ == '__main__':
# port = '/dev/tty.OpenBCI-DN008VTF'
port = '/dev/tty.usbserial-DB00JAM0'
# port = '/dev/tty.OpenBCI-DN0096XA'
baud = 115200
logging.basicConfig(filename="test.log", format='%(asctime)s - %(levelname)s : %(message)s', level=logging.DEBUG)
logging.info('---------LOG START-------------')
board = bci.OpenBCICyton(port=port, scaled_output=False, log=True)
print("Board Instantiated")
board.ser.write('v')
time.sleep(10)
board.start_streaming(printData)
board.print_bytes_in()
|
__author__ = 'mengpeng'
import os
from unittest import TestCase
from pycrawler.scraper import DefaultScraper
from pycrawler.handler import Handler
from pycrawler.utils.tools import gethash
from test_scraper import SpiderTest
class TestTempHandler(TestCase):
def test_setargs(self):
h = Handler.get('TempHandler')(SpiderTest('testspider'))
self.assertEqual('./tmp/testspider/', h.args['path'])
args = {'path': './newpath/'}
h.setargs(args)
self.assertEqual('./newpath/testspider/', h.args['path'])
def test_parse(self):
h = Handler.get('TempHandler')(SpiderTest('testspider'))
        h.parse('content', 'testurl1')
self.assertTrue(os.path.exists(h._tmpfilename('testurl1')))
def test__tmpfilename(self):
h = Handler.get('TempHandler')(SpiderTest('testspider'))
self.assertEqual('./tmp/testspider/' + str(gethash('sample')) + '.html', h._tmpfilename('sample'))
self.assertTrue(os.path.exists('./tmp/'))
|