code
stringlengths 1
199k
|
|---|
import sys
import json
import urllib2
def intersect(*args):
    """Return the intersection of the given lists, as a list."""
    if not args:
        return []
    common = set(args[0])
    for other in args[1:]:
        common &= set(other)
    return list(common)
def union(*args):
    """Return the union of the given lists, as a list."""
    merged = set()
    for seq in args:
        merged.update(seq)
    return list(merged)
def normalize_player_name(name):
    """Normalize a Steam persona name for fuzzy matching.

    Lower-cases the name and maps common "leet speak" digit/symbol
    substitutions back to the letters they imitate, so that e.g.
    "H4x0r" and "haxor" compare equal.

    :param name: the raw persona name (text string)
    :return: the normalized name (text string)
    """
    # Leet-speak substitutions commonly seen in player names.
    normalize_map = {
        "0": "o",
        "1": "i",
        "2": "z",
        "3": "e",
        "4": "a",
        "@": "a",
        "5": "s",
        "6": "a",
        "7": "z",
        "8": "b",
        "9": "a",
    }
    # BUG FIX: the original encoded the name to UTF-8 bytes before the
    # per-character mapping. On Python 3 iterating bytes yields ints,
    # so the bare `except: pass` silently dropped every character and
    # the function always returned an empty string. Work on the text
    # string directly instead. (Non-ASCII characters are now kept
    # as-is rather than dropped.)
    lowered = name.lower()
    return "".join(normalize_map.get(c, c) for c in lowered)
class SteamAPI(object):
    """Minimal wrapper for the Steam Web API.

    NOTE: Python 2 only (urllib2, print statements).
    """
    def __init__(self, key):
        # Web API key appended to every request URL.
        self.key = key
    def get_steam_url(self, interface, method, version, **kwargs):
        """Build a request URL; extra keyword args become query parameters."""
        args = "&".join(["{}={}".format(key, value) for key, value in kwargs.items()])
        return "http://api.steampowered.com/{}/{}/{}/?key={}&{}"\
            .format(interface, method, version, self.key, args)
    def get_owned_games_list(self, steamid):
        """Return the list of game names owned by `steamid` ([] on error)."""
        data = self.get_owned_games(steamid)
        try:
            return [d["name"] for d in data["response"]["games"]]
        except KeyError:
            return []
    def get_owned_games(self, steamid):
        """Raw GetOwnedGames response (includes app info and free games)."""
        return self.call("IPlayerService", "GetOwnedGames", "v0001",\
            steamid=steamid, include_appinfo=1, include_played_free_games=1)
    def get_user_stats_for_game(self, gameid, steamid):
        """Raw GetUserStatsForGame response for one game and one player."""
        return self.call("ISteamUserStats", "GetUserStatsForGame",\
            "v0002", steamid=steamid, appid=gameid)
    def get_schema_for_game(self, appid):
        """Raw GetSchemaForGame response for `appid`."""
        return self.call("ISteamUserStats", "GetSchemaForGame", "v2", appid=appid)
    def get_friend_map(self, steamid):
        """Return a dict mapping persona name -> steamid for all friends."""
        friend_list = self.get_friend_list(steamid)
        friend_ids = [friend["steamid"] for friend in friend_list["friendslist"]["friends"]]
        summaries = self.get_player_summaries(friend_ids)
        return {friend["personaname"]:friend["steamid"] for friend in summaries["response"]["players"]}
    def get_friend_list(self, steamid):
        """Raw GetFriendList response (relationship 'friend' only)."""
        return self.call("ISteamUser", "GetFriendList", "v0001", steamid=steamid, relationship="friend")
    def get_player_summaries(self, steamids):
        """Raw GetPlayerSummaries response for a list of steamids."""
        return self.call("ISteamUser", "GetPlayerSummaries", "v0002", steamids=",".join(steamids))
    def call(self, interface, method, version, **kwargs):
        """Perform the HTTP GET and decode the JSON response.

        Exits the whole process on an HTTP error.
        """
        url = self.get_steam_url(interface, method, version, **kwargs)
        try:
            return json.loads(urllib2.urlopen(url).read())
        except urllib2.HTTPError, e:
            print "Error reading from", url
            print e.msg
            sys.exit(1)
def main():
    """Command-line entry point.

    Usage: script <your_steam_id> <friend_1_name> [<friend_2_name> ...]
    Prints the games that the named friends own in common.
    """
    if len(sys.argv) < 2:
        print "Usage: {} <your_steam_id> <friend_1_name> [<friend_2_name> ...]".format(sys.argv[0])
        sys.exit(1)
    steamid = sys.argv[1]
    friends = sys.argv[2:]
    # Load steam api key from a local file
    steamapi_key = None
    try:
        with open("steamapi.key", "r") as f:
            steamapi_key = f.read()
    except IOError, e:
        print e
        print "Did you create the file containing your steam api key?"
        sys.exit(1)
    # Load default steamid ("self" reads it from the steam.id file)
    if steamid == "self":
        try:
            with open("steam.id", "r") as f:
                steamid = f.read()
        except IOError, e:
            print e
            print "Did you create the file containing your steam id?"
            sys.exit(1)
    # Create steam api wrapper
    api = SteamAPI(steamapi_key)
    # Get friend list
    friend_map = api.get_friend_map(steamid)
    # Get ids for matching friends (substring match on normalized names)
    steamids = {}
    for requested_friend in friends:
        candidates = {}
        for friend, id in friend_map.items():
            if normalize_player_name(friend).count(normalize_player_name(requested_friend)) > 0:
                candidates[friend] = id
        if len(candidates) > 1:
            print "Ambiguous friend {}, possible candidates: {}".format(requested_friend,\
                ", ".join([normalize_player_name(player) for player in candidates]))
            sys.exit(1)
        if len(candidates) == 0:
            print "Unknown friend {}".format(requested_friend)
            sys.exit(1)
        friend, id = candidates.popitem()
        steamids[friend] = id
    # Get list of games lists (one per friend with a non-empty library)
    games_list = []
    for friend, id in steamids.items():
        owned_games = api.get_owned_games_list(id)
        if len(owned_games) == 0:
            print "No games found for", friend, "(ignoring...)"
        else:
            games_list.append(owned_games)
    # Find common games
    print "Common games for {}".format(", ".join(steamids.keys()))
    print "\n".join(sorted(intersect(*games_list)))

if __name__ == "__main__":
    main()
|
from django.db import models
class Plan(models.Model):
    """Placeholder Django model; no fields defined yet."""
    pass
|
"""
:Resource:
==========
:
This is the managed resource between processes.
Resources such as queues, locks and data are housed
here to allow for synchronization to occur.
:
:copyright: (c) 9/30/2015 by gammaRay.
:license: BSD, see LICENSE for more details.
Author: gammaRay
Version: :1.0:
Date: 9/30/2015
"""
"""
=============================================
Imports
=============================================
"""
import datetime
from uuid import uuid4
"""
=============================================
Constants
=============================================
"""
__author__ = "gammaRay"
__version__ = "1.0"
__date__ = "9/28/2015"
RESOURCE_DEFAULT = 0
RESOURCE_TEST = 1
"""
=============================================
Source
=============================================
"""
# Known resource type names; extended at runtime via add_type().
RESOURCE_TYPES = ["DEFAULT", "TEST"]

def add_type(type):
    """Register an additional resource type name to monitor."""
    RESOURCE_TYPES.append(type)
class ManagedResource(object):
    """Wrapper coupling a managed object with identifying metadata.

    Combines a resource (queue, lock, data, ...) with its name, tag,
    a unique tracking id, and the publish-queue name derived from
    name and tag (i.e. IP monitoring task, ARP monitoring task).
    """

    # Class-level defaults for all bookkeeping attributes.
    name = None        # name of the resource
    tag = None         # tag for the resource
    uuid = None        # unique id used for tracking
    resource = None    # the managed object itself
    time = None        # timestamp of the most recent setObj() call
    __queue = None     # "<name>.<tag>" queue the resource publishes to

    def __init__(self, name=None, tag=None):
        """Create the wrapper.

        :param name: Name of the plugin
        :param tag: Tag for the resource
        """
        self.name = name
        self.tag = tag
        self.uuid = str(uuid4())
        self.__queue = "{plugin}.{app}".format(plugin=name, app=tag)

    def setObj(self, obj):
        """Store `obj` as the managed resource and stamp the set time.

        :param obj: The object to manage.
        """
        self.resource = obj
        self.time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")

    def getObj(self):
        """Return the currently managed object."""
        return self.resource

    # Private property alias over the getter/setter pair.
    __obj = property(getObj, setObj)
|
import sys

# Report the interpreter version up front; the script below uses
# Python 3's input()/print() semantics.
print(("Running on python version %s" % sys.version ))
def get_first_name(full_name):
    """Return the text before the first space in `full_name`."""
    first, _sep, _rest = full_name.partition(" ")
    return first
# Fallback identity used when the user supplies an unusable name.
fallback_name = {
    "first_name": "UserFirstName",
    "last_name": "UserLastName"
}

raw_name = input("Please enter your name: ")
first_name = get_first_name(raw_name)
if not first_name:
    # BUG FIX: the fallback is a dict; passing it to get_first_name()
    # raised AttributeError (dict has no .split). Use the stored first
    # name directly.
    first_name = fallback_name["first_name"]
print(("Hi, %s!" % first_name ))
|
"""
Copyright (C) 2015 Stuart W.D Grieve 2015
Developer can be contacted by s.grieve _at_ ed.ac.uk
This program is free software;
you can redistribute it and/or modify it under the terms of the
GNU General Public License as published by the Free Software Foundation;
either version 2 of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY;
without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the
GNU General Public License along with this program;
if not, write to:
Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301
USA
Script to generate Figure 7 from Grieve et al. (2015)
Input data is generated using LH_Driver.cpp
Parameters to be modified are highlighted by comments
@author: SWDG
"""
def mm_to_inch(mm):
    """Convert a length in millimetres to inches."""
    inches_per_mm = 0.0393700787
    return mm * inches_per_mm
import matplotlib.pyplot as plt
from matplotlib import rcParams
import MuddPyStatsTools as mpy
import numpy as np
import string

# Global matplotlib styling for the figure.
rcParams['font.family'] = 'sans-serif'
rcParams['font.sans-serif'] = ['arial']
rcParams['font.size'] = 10
rcParams['xtick.direction'] = 'out'
rcParams['ytick.direction'] = 'out'

path = '' #path to the folder contaning the hilltopdata files
Filenames = ['C:/Users/Stuart/Dropbox/final_data_for_plotting/nc/NC_PaperData.txt','C:/Users/Stuart/Dropbox/final_data_for_plotting/or/OR_PaperData.txt','C:/Users/Stuart/Dropbox/final_data_for_plotting/gm/GM_PaperData.txt','C:/Users/Stuart/Dropbox/final_data_for_plotting/cr/CR2_PaperData.txt'] #names of the hilltopdata files
figpath = 'C:/Users/Stuart/Desktop/FR/final_figures/' #path to save the final figures

# Per-site axis limits, tick spacings and histogram upper filters;
# list order matches Filenames/locations.
xmaxes = [250,200,500,1800]
ymaxes = [13,140,12,180]
xsteps = [100,100,100,300]
ysteps = [3,40,3,40]
plot_filters = [350.,400.,550,2000.]
locations = ['Coweeta','Oregon Coast Range','Gabilan Mesa','Sierra Nevada']
fig_labels = list(string.ascii_lowercase)[:4] #generate subplot labels
nbins = 20

fig = plt.figure()
for subplot_count, (filename,location,xmax,ymax,xstep,ystep,labels,plot_filter) in enumerate(zip(Filenames,locations,xmaxes,ymaxes,xsteps,ysteps,fig_labels,plot_filters)):
    #load the paperdata file to get the LH data
    with open(path+filename,'r') as f:
        f.readline()  # skip the header line
        data = f.readlines()
    SA = []
    SA_Plot = []
    for d in data:
        split = d.split()
        # column 9 holds the slope-area hillslope length
        # (assumed from the indexing -- confirm against LH_Driver.cpp output)
        sa_lh = float(split[9])
        if (sa_lh > 2.0):
            SA.append(sa_lh)
            # plotted subset is additionally capped at plot_filter
            if (sa_lh < plot_filter):
                SA_Plot.append(sa_lh)
    #get the median absolute devaition
    MAD = mpy.calculate_MedianAbsoluteDeviation(SA)
    #set up the 4 subplots
    ax = plt.subplot(2,2,subplot_count + 1)
    #Add a title with the location name
    ax.text(.5,.9,location, horizontalalignment='center', transform=ax.transAxes,fontsize=12)
    #plot the histogram and get the patches so we can colour them
    n,bins,patches = plt.hist(SA_Plot,bins=nbins,color='k',linewidth=0)
    #get the median -/+ median devaition
    MinMAD = np.median(SA)-MAD
    MaxMAD = np.median(SA)+MAD
    #color the bins that fall within +/- MAD of the median
    #http://stackoverflow.com/questions/6352740/matplotlib-label-each-bin
    for patch, rightside, leftside in zip(patches, bins[1:], bins[:-1]):
        if rightside < MinMAD:
            patch.set_alpha(0.4)
        elif leftside > MaxMAD:
            patch.set_alpha(0.4)
    #Insert dashed red line at median
    plt.vlines(np.median(SA),0,ymax,label='Median', color='r',linewidth=1,linestyle='dashed')
    #set x axis limits
    plt.xlim(0,xmax)
    plt.ylim(0,ymax)
    #format the ticks to only appear on the bottom and left axes
    plt.tick_params(axis='x', which='both', top='off',length=2)
    plt.tick_params(axis='y', which='both', right='off',length=2)
    #configure tick spacing based on the defined spacings given
    ax.xaxis.set_ticks(np.arange(0,xmax+1,xstep))
    ax.yaxis.set_ticks(np.arange(0,ymax+1,ystep))
    #annotate the plot with the median and MAD and the subplot label
    plt.annotate('Median = '+str(int(round(np.median(SA),0)))+' m\nMAD = '+str(int(round(MAD,0)))+' m', xy=(0.6, 0.7), xycoords='axes fraction', fontsize=10, horizontalalignment='left', verticalalignment='top')
    plt.annotate(labels, xy=(0.95, 0.95), xycoords='axes fraction', fontsize=10, horizontalalignment='left', verticalalignment='top')

#final layout tweaks, shared axis labels, sizing and output
plt.subplots_adjust(hspace = 0.25)
fig.text(0.5, 0.02, 'Hillslope length from slope-area plots (m)', ha='center', va='center', size=12)
fig.text(0.06, 0.5, 'Count', ha='center', va='center', rotation='vertical', size=12)
fig.set_size_inches(mm_to_inch(190), mm_to_inch(115))
plt.savefig(figpath+'Figure_7.png', dpi = 500) #change to *.tif for submission
|
import os
import sys
import time
import logging
import errno
import xml.dom.minidom as DOM
try:
import hashlib
except ImportError:
import md5 as hashlib
try:
import xml.etree.ElementTree as ET
except ImportError:
import elementtree.ElementTree as ET
from icbuild.utils import fileutils
def _parse_isotime(string):
if string[-1] != 'Z':
return time.mktime(time.strptime(string, '%Y-%m-%dT%H:%M:%S'))
tm = time.strptime(string, '%Y-%m-%dT%H:%M:%SZ')
return time.mktime(tm[:8] + (0,)) - time.timezone
def _format_isotime(tm):
return time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime(tm))
class PackageEntry:
    """One record of an installed package: version, metadata and the
    manifest of files it installed.

    The entry is persisted under `dirname` as 'info/<package>' (an XML
    snippet) plus 'manifests/<package>' (one installed file per line).
    """

    def __init__(self, package, version, metadata, dirname):
        """
        :param package: package name (string)
        :param version: package version (string)
        :param metadata: mapping of string keys to values, e.g.
            'installed-date' (epoch seconds) and 'configure-hash'
        :param dirname: directory holding the packagedb files
        """
        self.package = package
        self.version = version
        self.metadata = metadata
        self.dirname = dirname

    # Lazily-loaded cache of the manifest file contents.
    _manifest = None

    def get_manifest(self):
        """Return the manifest as a list of lines, or None if absent."""
        if self._manifest:
            return self._manifest
        manifest_path = os.path.join(self.dirname, 'manifests', self.package)
        if not os.path.exists(manifest_path):
            return None
        # BUG FIX: the original used the Python-2-only builtin file()
        # (a NameError on Python 3) and never closed the handle.
        with open(manifest_path) as f:
            self._manifest = [line.strip() for line in f]
        return self._manifest

    def set_manifest(self, value):
        """Set the manifest, dropping entries with embedded newlines."""
        if value is None:
            self._manifest = value
            return
        # BUG FIX: the filter previously tested `'\n' in value` (the
        # whole list) instead of each entry, so it never removed the
        # offending entries it then warned about.
        self._manifest = [x.strip() for x in value if '\n' not in x]
        if len(self._manifest) != len(value):
            logging.error('package %s has files with embedded new lines' % self.package)

    manifest = property(get_manifest, set_manifest)

    def write(self):
        """Persist the info XML and the manifest under self.dirname."""
        # write info file
        fileutils.mkdir_with_parents(os.path.join(self.dirname, 'info'))
        writer = fileutils.SafeWriter(os.path.join(self.dirname, 'info', self.package))
        ET.ElementTree(self.to_xml()).write(writer.fp)
        writer.fp.write('\n')
        writer.commit()
        # write manifest
        fileutils.mkdir_with_parents(os.path.join(self.dirname, 'manifests'))
        writer = fileutils.SafeWriter(os.path.join(self.dirname, 'manifests', self.package))
        writer.fp.write('\n'.join(self.manifest) + '\n')
        writer.commit()

    def remove(self):
        """Delete this entry's info and manifest files (if present)."""
        # remove info file
        fileutils.ensure_unlinked(os.path.join(self.dirname, 'info', self.package))
        # remove manifest
        fileutils.ensure_unlinked(os.path.join(self.dirname, 'manifests', self.package))

    def to_xml(self):
        """Serialize this entry to an <entry> XML element."""
        entry_node = ET.Element('entry', {'package': self.package,
                                          'version': self.version})
        if 'installed-date' in self.metadata:
            entry_node.attrib['installed'] = _format_isotime(self.metadata['installed-date'])
        if 'configure-hash' in self.metadata:
            entry_node.attrib['configure-hash'] = \
                self.metadata['configure-hash']
        return entry_node

    @classmethod
    def from_xml(cls, node, dirname):
        """Build a PackageEntry from an <entry> XML element."""
        package = node.attrib['package']
        version = node.attrib['version']
        metadata = {}
        # BUG FIX: use .get() so entries lacking an 'installed'
        # attribute no longer raise KeyError.
        installed_string = node.attrib.get('installed')
        if installed_string:
            metadata['installed-date'] = _parse_isotime(installed_string)
        configure_hash = node.attrib.get('configure-hash')
        if configure_hash:
            metadata['configure-hash'] = configure_hash
        dbentry = cls(package, version, metadata, dirname)
        return dbentry

    @classmethod
    def open(cls, dirname, package):
        """Load the entry for `package` from disk, or return None."""
        try:
            # use a context manager so the handle is always closed
            with open(os.path.join(dirname, 'info', package)) as info:
                doc = ET.parse(info)
            node = doc.getroot()
            if node.tag == 'entry':
                return PackageEntry.from_xml(node, dirname)
        except EnvironmentError as e:
            if e.errno != errno.ENOENT:
                raise
        # That didn't work: try the old packagedb.xml file instead. We
        # use the manifest file to check if the package 'really exists'
        # because otherwise we may see the old packagedb.xml entry for
        # an uninstalled package (since we no longer update that file)
        #
        # please delete this code in 2016
        try:
            if os.path.exists(os.path.join(dirname, 'manifests', package)):
                with open(os.path.join(dirname, 'packagedb.xml')) as info:
                    doc = ET.parse(info)
                root = doc.getroot()
                if root.tag == 'packagedb':
                    for node in root:
                        if node.tag == 'entry' and node.attrib['package'] == package:
                            return PackageEntry.from_xml(node, dirname)
        except EnvironmentError as e:
            if e.errno != errno.ENOENT:
                raise
        # it seems not to exist...
        return None
class PackageDB:
    """Install cache: a directory of per-package PackageEntry records."""

    def __init__(self, dbfile, config):
        """
        :param dbfile: path whose containing directory holds the db files
        :param config: configuration object (used to filter uninstall paths)
        """
        self.dirname = os.path.dirname(dbfile)
        self.config = config

    def get(self, package):
        '''Return entry if package is installed, otherwise return None.'''
        return PackageEntry.open(self.dirname, package)

    def add(self, package, version, contents, configure_cmd = None):
        '''Add a module to the install cache.'''
        entry = self.get(package)
        # Preserve existing metadata (e.g. an old configure hash) when
        # re-installing a package.
        metadata = entry.metadata if entry else {}
        metadata['installed-date'] = time.time() # now
        if configure_cmd:
            # BUG FIX: hashlib digests require bytes on Python 3;
            # encode the command line before hashing.
            metadata['configure-hash'] = hashlib.md5(
                configure_cmd.encode('utf-8')).hexdigest()
        pkg = PackageEntry(package, version, metadata, self.dirname)
        pkg.manifest = contents
        pkg.write()

    def check(self, package, version=None):
        '''Check whether a particular module is installed.'''
        entry = self.get(package)
        if entry is None:
            return False
        if version is not None and entry.version != version:
            return False
        return True

    def installdate(self, package, version=None):
        '''Get the install date for a particular module (or None).'''
        entry = self.get(package)
        if entry is None:
            return None
        if version and (entry.version != version):
            return None
        return entry.metadata['installed-date']

    def uninstall(self, package_name):
        '''Remove a module from the install cache.

        Raises KeyError if the package is not installed.
        '''
        entry = self.get(package_name)
        if entry is None:
            # Name the missing package in the exception.
            raise KeyError(package_name)
        if entry.manifest is None:
            logging.error("no manifest for '%s', can't uninstall. Try building again, then uninstalling." % (package_name,))
            return
        # Skip files that aren't in the prefix; otherwise we
        # may try to remove the user's ~ or something
        # (presumably we'd fail, but better not to try)
        to_delete = fileutils.filter_files_by_prefix(self.config, entry.manifest)
        # Don't warn on non-empty directories; we want to allow multiple
        # modules to share the same directory. We could improve this by
        # reference-counting directories.
        for (path, was_deleted, error_string) in fileutils.remove_files_and_dirs(to_delete, allow_nonempty_dirs=True):
            if was_deleted:
                logging.info("Deleted: %(file)r" % {'file': path})
            elif error_string is None:
                pass
            else:
                # logging.warn is a deprecated alias for warning()
                logging.warning("Failed to delete %(file)r: %(msg)s" % {'file': path,
                                                                        'msg': error_string})
        entry.remove()
|
""" Tags and set of it.
Used by optimization to keep track of the current state of optimization, these
tags trigger the execution of optimization steps, which in turn may emit these
tags to execute other steps.
"""
# The closed set of tags optimization steps may emit or test for.
allowed_tags = (
    "new_code",        # new statements: a new module, or an inlined exec
    "new_import",      # added new import
    "new_statements",  # new statements added, removed
    "new_expression",  # new expression added
    "var_usage",       # TODO: a bit unclear what this is, potentially a changed variable
    "read_only_mvar",  # detected module variable to be read only
    "new_builtin_ref", # new built-in reference detected
    "new_builtin",     # new built-in call detected
    "new_raise",       # new raise statement detected
    "new_constant",    # new constant introduced
)

class TagSet(set):
    """A set of optimization tags, restricted to `allowed_tags`."""

    def onSignal(self, signal):
        """Record a signal: one tag, or a whitespace-separated tag string."""
        tags = signal.split() if type(signal) is str else signal
        for tag in tags:
            self.add(tag)

    def check(self, tags):
        """Return True when any tag in the given string is currently set."""
        for tag in tags.split():
            # Fail loudly on unknown tag names.
            assert tag in allowed_tags, tag
            if tag in self:
                return True
        return False

    def add(self, tag):
        """Add a single tag, validating it against allowed_tags."""
        assert tag in allowed_tags, tag
        super(TagSet, self).add(tag)
|
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Rename the 'memo' fields to 'description' on several models and
    add an optional API token to UserProfile."""

    dependencies = [
        ('asset', '0002_auto_20160501_0012'),
    ]

    operations = [
        migrations.RenameField(
            model_name='contract',
            old_name='memo',
            new_name='description',
        ),
        migrations.RenameField(
            model_name='disk',
            old_name='memo',
            new_name='description',
        ),
        migrations.RenameField(
            model_name='eventlog',
            old_name='memo',
            new_name='description',
        ),
        migrations.RenameField(
            model_name='manufacturers',
            old_name='memo',
            new_name='description',
        ),
        migrations.AddField(
            model_name='userprofile',
            name='token',
            field=models.CharField(blank=True, default=None, max_length=128, null=True, verbose_name='token'),
        ),
        migrations.AlterField(
            model_name='asset',
            name='manufacturers',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='asset.Manufacturers', verbose_name='\u5236\u9020\u5546'),
        ),
    ]
|
""" stores snapshots of uncommitted changes
Configs::
"""
from . import commands

# Re-export the command table so the host (hg extension loader) can
# discover this package's commands.
cmdtable = commands.cmdtable
|
"""
Plotting facility for astronomy.
"""
import matplotlib as mpl
from matplotlib import pyplot as plt
from .. import SlipyError
from ..Framework.Options import Options, OptionsError
from . import Fits, Spectrum
plt.ion()
class PlotError(SlipyError):
    """
    Exception raised for errors specific to the Plot module.
    """
    pass
class SPlot:
    """
    SPlot( spectrum, **kwargs )
    Spectrum Plot - Plot the data in `spectrum`.
    Keyword options: 'marker' (plot format string), 'label' (legend
    label for the data) and 'usetex' (render text with LaTeX).
    All axis/label/title/legend/text calls are recorded so the figure
    can be rebuilt from scratch by draw().
    """
    def __init__(self, spectrum, **kwargs):
        """
        Assign `options` in `kwargs` and initialize the figure.
        Raises PlotError if `spectrum` is not a Spectrum or an option
        is not recognized.
        """
        try:
            # available options
            self.options = Options( kwargs,
                {
                    'marker': 'b-'          , # marker for plot
                    'label' : 'unspecified' , # label for data
                    'usetex': False           # pdflatex setting
                })
            # assign options
            self.usetex = self.options('usetex')
            # recorded plot state, replayed by __build() on each redraw
            self.ylimits = []
            self.gridv = None
            self.yargs = []
            self.xargs = []
            self.largs = []
            self.targs = []
            self.xkwargs = {}
            self.ykwargs = {}
            self.tkwargs = {}
            self.lkwargs = {}
            self.txargs = []
            self.txkwargs = []
            if type(spectrum) is not Spectrum.Spectrum:
                raise PlotError('Splot expects type `Spectrum`!')
            # data in `list` allows for overplotting
            self.data = [ spectrum.data ]
            self.wave = [ spectrum.wave ]
            # default axis labels are the data units, when present
            if spectrum.data.unit:
                self.yargs = [str(spectrum.data.unit)]
            if spectrum.wave.unit:
                self.xargs = [str(spectrum.wave.unit)]
            # `name` always retains `label`
            self.name = self.options('label')
            # `label` and `marker`s same as data
            self.label = [ self.options('label') ]
            self.marker = [ self.options('marker') ]
            # set x limits to the data
            self.xlimits = [ spectrum.wave.min().value,
                spectrum.wave.max().value ]
        except OptionsError as err:
            print(' --> OptionsError:', err.msg)
            raise PlotError('Failed to construct Splot.')
        # initialize the figure
        self.fig = plt.figure("Spectral-Plot (SLiPy)")
        self.ax = self.fig.add_subplot(111)
        self.draw()

    def xlim(self, xmin, xmax ):
        """
        Handle to pyplot.xlim; limits are remembered for redraws.
        """
        self.xlimits = [ xmin, xmax ]
        self.ax.set_xlim(xmin, xmax)

    def ylim(self, ymin, ymax ):
        """
        Handle to pyplot.ylim; limits are remembered for redraws.
        """
        self.ylimits = [ ymin, ymax ]
        self.ax.set_ylim(ymin, ymax)

    def xlabel(self, *args, **kwargs ):
        """
        x axis label (arguments stored and replayed on redraw).
        """
        self.xargs = args
        self.xkwargs = kwargs
        self.ax.set_xlabel( *args, **kwargs )

    def ylabel(self, *args, **kwargs ):
        """
        y axis label (arguments stored and replayed on redraw).
        """
        self.yargs = args
        self.ykwargs = kwargs
        self.ax.set_ylabel( *args, **kwargs )

    def title(self, *args, **kwargs ):
        """
        title for plot (arguments stored and replayed on redraw).
        """
        self.targs = args
        self.tkwargs = kwargs
        self.ax.set_title( *args, **kwargs )

    def legend(self, *args, **kwargs):
        """
        legend for plot (arguments stored and replayed on redraw).
        """
        self.largs = args
        self.lkwargs = kwargs
        plt.legend( *args, **kwargs )

    def text(self, *args, **kwargs):
        """
        display text over plot (arguments appended and replayed on redraw).
        """
        self.txargs.append( args )
        self.txkwargs.append( kwargs )
        self.ax.text( *args, **kwargs )

    def txtclear(self):
        """
        Clear all `text` from figure.
        """
        self.txargs = []
        self.txkwargs = []
        self.draw()

    def markers(self, *args):
        """
        Reassign the values for the `marker`s in the figure. The number
        of arguments must equal the number of spectra in the figure. This
        starts out as one, but will increase for ever SPlot.overlay().
        """
        if len(args) != len(self.data):
            raise PlotError('{} arguments were given but there are {} '
                'spectra plotted in this figure!'.format(len(args), len(self.data)))
        for a, mark in enumerate(args):
            if type(mark) is not str:
                raise PlotError('Arguments given to SPlot.markers() must be '
                    '{} but argument #{} was {}'.format(type(''), a+1, type(mark)))
        self.marker = list(args)

    def __build(self, picker = False):
        """
        Make the plot: replay the data and all recorded state onto
        the (cleared) axes. With `picker` True, only the primary
        spectrum is plotted and made selectable.
        """
        if picker:
            self.restore()
            self.ax.plot(self.wave[0], self.data[0], self.marker[0],
                label = self.label[0], picker = True)
        else:
            for x, y, m, l in zip(self.wave, self.data, self.marker, self.label):
                self.ax.plot(x, y, m, label=l)
        if self.xargs or self.xkwargs:
            self.xlabel( *self.xargs, **self.xkwargs )
        if self.yargs or self.ykwargs:
            self.ylabel( *self.yargs, **self.ykwargs )
        if self.targs or self.tkwargs:
            self.title( *self.targs, **self.tkwargs )
        if self.largs or self.lkwargs:
            self.legend( *self.largs, **self.lkwargs )
        if self.txargs or self.txkwargs:
            for args, kwargs in zip(self.txargs, self.txkwargs):
                self.ax.text( *args, **kwargs )
        if self.xlimits:
            self.xlim( *self.xlimits )
        if self.ylimits:
            self.ylim( *self.ylimits )
        if self.gridv:
            self.grid(self.gridv)
        if self.usetex:
            plt.rc('text', usetex=True)
            plt.rc('font', family='serif')

    def refresh(self):
        """
        pyplot.draw()
        """
        plt.draw()

    def draw(self, picker = False):
        """
        Re-build the plot from the recorded state.
        """
        self.ax.clear()
        self.__build(picker = picker)
        plt.draw()

    def close(self):
        """
        Close the plot.
        """
        plt.close("Spectral-Plot (SLiPy)")

    def grid(self, value):
        """
        Show grid on plot (value remembered for redraws).
        """
        self.gridv = value
        plt.grid(value)

    def save(self, filename):
        """
        Save plot to `filename`. Must have extension for formatting.
        """
        if type(filename) is not str:
            raise PlotError('`filename` should be of type str.')
        if len(filename.split('.')) < 2:
            raise PlotError('`filename` needs an extension.')
        plt.savefig(filename, format=filename.split('.')[-1])

    def xoffset(self, value):
        """
        Toggle the offset for the x axis
        """
        plt.gca().get_xaxis().get_major_formatter().set_useOffset(value)

    def yoffset(self, value):
        """
        Toggle the offset for the y axis
        """
        plt.gca().get_yaxis().get_major_formatter().set_useOffset(value)

    def tight_layout(self):
        """
        pyplot.tight_layout()
        """
        plt.tight_layout()

    def overlay(self, *splots ):
        """
        Overlay (add) spectra to this plot from other `splots`.
        """
        for a, plot in enumerate(splots):
            # check data type
            if type(plot) is not SPlot:
                raise PlotError('Splot.overlay expects '
                    'type Splot! (from argument {})'.format(a))
            # add data
            self.data += plot.data
            self.wave += plot.wave
            self.marker += plot.marker
            self.label += plot.label

    def restore(self):
        """
        Restore self.data and self.wave from possible `overlay`s,
        keeping only the primary spectrum.
        """
        self.data = [ self.data[0] ]
        self.wave = [ self.wave[0] ]
        self.marker = [ self.marker[0] ]
        self.label = [ self.label[0] ]
def desired( plot ):
    """
    Helper function for Iterate. Prompts user to keep `plot`;
    returns True or False. Raises KeyboardInterrupt when the user
    answers `x` (exit early).
    """
    # draw the plot
    plot.draw()
    # prompt the user for input (ANSI escapes keep the prompt in place)
    prompt = input('\r\033[K keep -> `{}` (y/[n]/x)?: '
        .format(plot.name)).strip()
    # insure valid response
    while True:
        if prompt not in ['y','n','','x']:
            # invalid input, prompt again
            print('\r\033[K `{}` was not a recognized response.'.format(prompt))
            prompt = input('\033[2A\r\033[K keep -> `{}` (y/[n]/x)?: '
                .format(plot.name)).strip()
        else:
            # clear the error message
            print('\r\033[K\033[1A')
            break
    if prompt in ['n', '']:
        # default answer is "no"
        return False
    elif prompt in ['y']:
        return True
    else:
        # the user input `x`
        raise KeyboardInterrupt('\r\033[K User exitted early, saving results.')
def Iterate( *plots, **kwargs ):
    """
    Iterate( *plots, **kwargs ):
    Iterate thru `plots` to inspect data, the user marks `plots` of
    interest. The function returns a list of the marked `names`
    (keep='name', the default) or the plot objects themselves
    (keep='plot').
    """
    # BUG FIX: initialized up front so the KeyboardInterrupt handler can
    # always return partial results (previously this raised
    # UnboundLocalError when interrupted before the loop started).
    keepers = []
    try:
        options = Options( kwargs,
            {
                'keep' : 'name' # alternatively, `plot`
            })
        keep = options('keep')
        if keep not in ['name', 'plot']:
            raise PlotError('Iterate expects either `name` or `plot` for '
                'keyword argument `keep`.')
        # check input arguments
        for plot in plots:
            if not hasattr(plot, 'draw'):
                raise PlotError('Iterate expects objects to '
                    'have a `draw` method.')
            if not hasattr(plot, 'name'):
                raise PlotError('Iterate expects objects to '
                    'have a `name` method.')
        # clear some space
        print('\n')
        for a, plot in enumerate(plots):
            # BUG FIX: show a 1-based progress counter (was 0-based).
            print('\033[2A\r\033[K Showing plot {} of {} ... '
                .format(a + 1, len(plots)) )
            if desired( plot ):
                if keep == 'name':
                    keepers.append( plot.name )
                elif keep == 'plot':
                    keepers.append( plot )
        return keepers
    except OptionsError as err:
        print(' --> OptionsError:', err.msg)
        raise PlotError('Failed to initialize Iterate.')
    except KeyboardInterrupt as x:
        # user exited early via `x`: return what was marked so far
        print(x)
        return keepers
|
import subprocess
from bejond.basic.const import HEXO_POST_HEAD
from bejond.basic.output.write_file import write_head_up_to_post
with open('test.txt', 'a') as file:
    # NOTE(review): 'str' and 'file' shadow builtins; consider renaming.
    str = HEXO_POST_HEAD.format(a='2020', b='2020-01-02')
    file.write(str)

# Lesson learned: .format() must be called on the specific string to be
# formatted; do not rely on "+" concatenation — only the part after the
# "+" would be formatted. (translated from the original Chinese note)
write_head_up_to_post()
|
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial migration: per-user Consents model keyed one-to-one on
    the auth user, with terms/marketing/partner consent flags."""

    initial = True

    dependencies = [
        ('auth', '0008_alter_user_username_max_length'),
    ]

    operations = [
        migrations.CreateModel(
            name='Consents',
            fields=[
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL, verbose_name='user')),
                ('terms_accepted', models.BooleanField(default=False, verbose_name='terms accepted')),
                ('marketing_consent', models.BooleanField(default=False, verbose_name='first-party marketing consent')),
                ('partner_consent', models.BooleanField(default=False, verbose_name='third-party marketing consent')),
            ],
        ),
    ]
|
from __future__ import absolute_import
from pychess.compat import PY3
from pychess.Utils.const import *
from pychess.Utils.repr import reprColor
from .ldata import *
from .attack import isAttacked
from .bitboard import *
from .PolyglotHash import *
STRICT_FEN = False
class LBoard:
    """Low-level chess board used by the engine layer."""
    # Initial king squares (white, black).
    ini_kings = (E1, E8)
    # Initial rook squares ((white A/H side), (black A/H side)).
    ini_rooks = ((A1, H1), (A8, H8))
    # Final positions of castled kings and rooks
    fin_kings = ((C1,G1),(C8,G8))
    fin_rooks = ((D1,F1),(D8,F8))
    # Per-color counts of pieces held in hand (used by drop variants).
    holding = ({PAWN:0, KNIGHT:0, BISHOP:0, ROOK:0, QUEEN:0, KING:0},
        {PAWN:0, KNIGHT:0, BISHOP:0, ROOK:0, QUEEN:0, KING:0})
    def __init__ (self, variant=NORMALCHESS):
        self.variant = variant
        self.nags = []
        # children can contain comments and variations
        # variations are lists of lboard objects
        self.children = []
        # the next and prev lboard objects in the variation list
        self.next = None
        self.prev = None
        # The high level owner Board (with Piece objects) in gamemodel
        self.pieceBoard = None
        # This will be True except in the so called null_board;
        # null_board acts as parent of the variation
        # when we add a variation to last played board from hint panel
        self.fen_was_applied = False
    @property
    def lastMove (self):
        # Most recent move applied, or None before any FEN/move exists.
        return self.hist_move[-1] if self.fen_was_applied and len(self.hist_move) > 0 else None
    def repetitionCount (self, drawThreshold=3):
        """Count occurrences of the current position in recent history.

        Scans hist_hash backwards over the reversible plies (bounded by
        the fifty-move counter), stepping 2 plies so only positions with
        the same side to move are compared; stops early once
        `drawThreshold` repetitions are found.
        """
        rc = 1
        for ply in range(4, 1+min(len(self.hist_hash), self.fifty), 2):
            if self.hist_hash[-ply] == self.hash:
                rc += 1
                if rc >= drawThreshold: break
        return rc
    def iniAtomic(self):
        # Extra history needed by Atomic chess: exploded squares per move.
        self.hist_exploding_around = []
    def iniHouse(self):
        """Initialize state specific to drop variants."""
        # Marks which board squares currently hold promoted pieces.
        self.promoted = [0]*64
        self.capture_promoting = False
        self.hist_capture_promoting = []
        # Per-color counts of pieces held in hand.
        self.holding = ({PAWN:0, KNIGHT:0, BISHOP:0, ROOK:0, QUEEN:0, KING:0},
            {PAWN:0, KNIGHT:0, BISHOP:0, ROOK:0, QUEEN:0, KING:0})
    def iniCambodian(self):
        """Initialize Cambodian-chess specifics (special first-move rights)."""
        self.ini_kings = (D1, E8)
        self.ini_queens = (E1, D8)
        # Whether each side's king/queen still has its first move available.
        self.is_first_move = {KING: [True, True], QUEEN: [True, True]}
        self.hist_is_first_move = []
    def applyFen (self, fenstr):
        """ Applies the fenstring to the board.
            If the string is not properly
            written a SyntaxError will be raised, having its message ending in
            Pos(%d) specifying the string index of the problem.
            if an error is found, no changes will be made to the board. """
        assert not self.fen_was_applied, "The applyFen() method can be used on new LBoard objects only!"
        # Set board to empty on Black's turn (which Polyglot-hashes to 0)
        self.blocker = 0
        self.friends = [0]*2
        self.kings = [-1]*2
        self.boards = [[0]*7 for i in range(2)]
        self.enpassant = None # cord which can be captured by enpassant or None
        self.color = BLACK
        self.castling = 0 # The castling availability in the position
        self.hasCastled = [False, False]
        self.fifty = 0 # A ply counter for the fifty moves rule
        self.plyCount = 0
        self.checked = None
        self.opchecked = None
        self.arBoard = [0]*64
        self.hash = 0
        self.pawnhash = 0
        # Data from the position's history:
        self.hist_move = [] # The move that was applied to get the position
        self.hist_tpiece = [] # The piece the move captured, == EMPTY for normal moves
        self.hist_enpassant = []
        self.hist_castling = []
        self.hist_hash = []
        self.hist_fifty = []
        self.hist_checked = []
        self.hist_opchecked = []
        # piece counts
        self.pieceCount = [[0]*7, [0]*7]
        # initial cords of rooks and kings for castling in Chess960
        if self.variant == FISCHERRANDOMCHESS:
            self.ini_kings = [None, None]
            self.ini_rooks = ([None, None], [None, None])
        elif self.variant in (WILDCASTLECHESS, WILDCASTLESHUFFLECHESS):
            self.ini_kings = [None, None]
            self.fin_kings = ([None, None], [None, None])
            self.fin_rooks = ([None, None], [None, None])
        elif self.variant in DROP_VARIANTS:
            self.iniHouse()
        elif self.variant == ATOMICCHESS:
            self.iniAtomic()
        elif self.variant == CAMBODIANCHESS:
            self.iniCambodian()
        # Get information
        # Missing trailing fields get these defaults (short FENs are allowed
        # unless STRICT_FEN is set).
        parts = fenstr.split()
        castChr = "-"
        epChr = "-"
        fiftyChr = "0"
        moveNoChr = "1"
        if STRICT_FEN and len(parts) != 6:
            raise SyntaxError(_("FEN needs 6 data fields. \n\n%s") % fenstr)
        elif len(parts) < 2:
            raise SyntaxError(_("FEN needs at least 2 data fields in fenstr. \n\n%s") % fenstr)
        elif len(parts) >= 6:
            pieceChrs, colChr, castChr, epChr, fiftyChr, moveNoChr = parts[:6]
        elif len(parts) == 5:
            pieceChrs, colChr, castChr, epChr, fiftyChr = parts
        elif len(parts) == 4:
            if parts[2].isdigit() and parts[3].isdigit():
                # xboard FEN usage for asian variants
                pieceChrs, colChr, fiftyChr, moveNoChr = parts
            else:
                pieceChrs, colChr, castChr, epChr = parts
        elif len(parts) == 3:
            pieceChrs, colChr, castChr = parts
        else:
            pieceChrs, colChr = parts
        # Try to validate some information
        # TODO: This should be expanded and perhaps moved
        slashes = pieceChrs.count("/")
        if slashes < 7:
            raise SyntaxError(_("Needs 7 slashes in piece placement field. \n\n%s") % fenstr)
        if not colChr.lower() in ("w", "b"):
            raise SyntaxError(_("Active color field must be one of w or b. \n\n%s") % fenstr)
        if castChr != "-":
            for Chr in castChr:
                valid_chars = "ABCDEFGHKQ" if self.variant==FISCHERRANDOMCHESS else "KQ"
                if Chr.upper() not in valid_chars:
                    if self.variant == CAMBODIANCHESS:
                        pass
                        # sjaakii uses DEde in cambodian starting fen to indicate
                        # that queens and kings are virgins (not moved yet)
                    else:
                        raise SyntaxError(_("Castling availability field is not legal. \n\n%s") % fenstr)
        if epChr != "-" and not epChr in cordDic:
            raise SyntaxError(_("En passant cord is not legal. \n\n%s") % fenstr)
        # Put the next one into comment, because we use
        # "setboard 8/8/8/8/8/8/8/8 w - - 0 1" FEN to stop CECPEngine analyzers
        #if (not 'k' in pieceChrs) or (not 'K' in pieceChrs):
        #    if self.variant not in (ATOMICCHESS, SUICIDECHESS):
        #        raise SyntaxError, "FEN needs at least 'k' and 'K' in piece placement field."
        # Parse piece placement field
        promoted = False
        # if there is a holding within [] we change it to BFEN style first
        if pieceChrs.endswith("]"):
            pieceChrs = pieceChrs[:-1].replace("[", "/")
        for r, rank in enumerate(pieceChrs.split("/")):
            # Ranks come in FEN order (8th rank first), so row r maps to
            # cords starting at (7-r)*8.
            cord = (7-r)*8
            for char in rank:
                if r > 7:
                    # After the 8.rank BFEN can contain holdings (captured pieces)
                    # "~" after a piece letter denotes promoted piece
                    if r == 8 and self.variant in DROP_VARIANTS:
                        color = char.islower() and BLACK or WHITE
                        piece = chrU2Sign[char.upper()]
                        self.holding[color][piece] += 1
                        continue
                    else:
                        break
                if char.isdigit():
                    cord += int(char)
                elif char == "~":
                    promoted = True
                else:
                    color = char.islower() and BLACK or WHITE
                    piece = chrU2Sign[char.upper()]
                    self._addPiece(cord, piece, color)
                    self.pieceCount[color][piece] += 1
                    if self.variant in DROP_VARIANTS and promoted:
                        self.promoted[cord] = 1
                        promoted = False
                    if self.variant == CAMBODIANCHESS:
                        if piece == KING and self.kings[color] != self.ini_kings[color]:
                            self.is_first_move[KING][color] = False
                        if piece == QUEEN and cord != self.ini_queens[color]:
                            self.is_first_move[QUEEN][color] = False
                    cord += 1
            if self.variant == FISCHERRANDOMCHESS:
                # Save ranks to find outermost rooks
                # if KkQq was used in castling rights
                if r == 0:
                    rank8 = rank
                elif r == 7:
                    rank1 = rank
        # Parse active color field
        if colChr.lower() == "w":
            self.setColor (WHITE)
        else: self.setColor (BLACK)
        # Parse castling availability
        castling = 0
        for char in castChr:
            if self.variant == FISCHERRANDOMCHESS:
                # X-FEN style: a file letter identifies the castling rook;
                # its side (queen/king) follows from being left or right of
                # the king.  Lower-case is Black (+56 shifts to the 8th rank).
                if char in reprFile:
                    if char < reprCord[self.kings[BLACK]][0]:
                        castling |= B_OOO
                        self.ini_rooks[1][0] = reprFile.index(char) + 56
                    else:
                        castling |= B_OO
                        self.ini_rooks[1][1] = reprFile.index(char) + 56
                elif char in [c.upper() for c in reprFile]:
                    if char < reprCord[self.kings[WHITE]][0].upper():
                        castling |= W_OOO
                        self.ini_rooks[0][0] = reprFile.index(char.lower())
                    else:
                        castling |= W_OO
                        self.ini_rooks[0][1] = reprFile.index(char.lower())
                elif char == "K":
                    # KkQq in an FRC FEN: take the outermost rook on the rank.
                    castling |= W_OO
                    self.ini_rooks[0][1] = rank1.rfind('R')
                elif char == "Q":
                    castling |= W_OOO
                    self.ini_rooks[0][0] = rank1.find('R')
                elif char == "k":
                    castling |= B_OO
                    self.ini_rooks[1][1] = rank8.rfind('r') + 56
                elif char == "q":
                    castling |= B_OOO
                    self.ini_rooks[1][0] = rank8.find('r') + 56
            else:
                if char == "K":
                    castling |= W_OO
                elif char == "Q":
                    castling |= W_OOO
                elif char == "k":
                    castling |= B_OO
                elif char == "q":
                    castling |= B_OOO
        if self.variant in (WILDCASTLECHESS, WILDCASTLESHUFFLECHESS, FISCHERRANDOMCHESS):
            self.ini_kings[WHITE] = self.kings[WHITE]
            self.ini_kings[BLACK] = self.kings[BLACK]
        if self.variant in (WILDCASTLECHESS, WILDCASTLESHUFFLECHESS):
            # Castling destinations depend on which side(s) start with the
            # king on the d-file.
            if self.ini_kings[WHITE] == D1 and self.ini_kings[BLACK] == D8:
                self.fin_kings = ([B1,F1],[B8,F8])
                self.fin_rooks = ([C1,E1],[C8,E8])
            elif self.ini_kings[WHITE] == D1:
                self.fin_kings = ([B1,F1],[C8,G8])
                self.fin_rooks = ([C1,E1],[D8,F8])
            elif self.ini_kings[BLACK] == D8:
                self.fin_kings = ([C1,G1],[B8,F8])
                self.fin_rooks = ([D1,F1],[C8,E8])
            else:
                self.fin_kings = ([C1,G1],[C8,G8])
                self.fin_rooks = ([D1,F1],[D8,F8])
        self.setCastling(castling)
        # Parse en passant target square
        if epChr == "-":
            self.setEnpassant (None)
        else:
            self.setEnpassant(cordDic[epChr])
        # Parse halfmove clock field
        if fiftyChr.isdigit():
            self.fifty = int(fiftyChr)
        else:
            self.fifty = 0
        # Parse fullmove number
        if moveNoChr.isdigit():
            movenumber = max(int(moveNoChr),1)*2 -2
            if self.color == BLACK:
                movenumber += 1
            self.plyCount = movenumber
        else:
            self.plyCount = 1
        self.fen_was_applied = True
def isChecked (self):
if self.variant == SUICIDECHESS:
return False
elif self.variant == ATOMICCHESS:
if not self.boards[self.color][KING]:
return False
elif self.variant == SITTUYINCHESS and self.plyCount < 16:
return False
if self.checked == None:
kingcord = self.kings[self.color]
self.checked = isAttacked (self, kingcord, 1-self.color, ischecked=True)
return self.checked
def opIsChecked (self):
if self.variant == SUICIDECHESS:
return False
elif self.variant == ATOMICCHESS:
if not self.boards[1-self.color][KING]:
return False
elif self.variant == SITTUYINCHESS and self.plyCount < 16:
return False
if self.opchecked == None:
kingcord = self.kings[1-self.color]
self.opchecked = isAttacked (self, kingcord, self.color, ischecked=True)
return self.opchecked
def willLeaveInCheck (self, move):
if self.variant == SUICIDECHESS:
return False
board_clone = self.clone()
board_clone.applyMove(move)
return board_clone.opIsChecked()
    def _addPiece (self, cord, piece, color):
        """Place piece of color on cord, updating bitboards, Zobrist
        hashes and the mailbox array.  Low-level: no legality checks."""
        _setBit = setBit  # local alias: this is a hot path
        self.boards[color][piece] = _setBit(self.boards[color][piece], cord)
        self.friends[color] = _setBit(self.friends[color], cord)
        self.blocker = _setBit(self.blocker, cord)
        if piece == PAWN:
            # Pawn structure keeps a separate hash.
            self.pawnhash ^= pieceHashes[color][PAWN][cord]
        elif piece == KING:
            # Keep the cached king square in sync.
            self.kings[color] = cord
        self.hash ^= pieceHashes[color][piece][cord]
        self.arBoard[cord] = piece
    def _removePiece (self, cord, piece, color):
        """Remove piece of color from cord, updating bitboards, Zobrist
        hashes and the mailbox array.  Low-level: no legality checks.
        Note: the cached king square in self.kings is NOT cleared here."""
        _clearBit = clearBit  # local alias: this is a hot path
        self.boards[color][piece] = _clearBit(self.boards[color][piece], cord)
        self.friends[color] = _clearBit(self.friends[color], cord)
        self.blocker = _clearBit(self.blocker, cord)
        if piece == PAWN:
            # Pawn structure keeps a separate hash.
            self.pawnhash ^= pieceHashes[color][PAWN][cord]
        self.hash ^= pieceHashes[color][piece][cord]
        self.arBoard[cord] = EMPTY
def setColor (self, color):
if color == self.color: return
self.color = color
self.hash ^= colorHash
def setCastling (self, castling):
if self.castling == castling: return
if castling & W_OO != self.castling & W_OO:
self.hash ^= W_OOHash
if castling & W_OOO != self.castling & W_OOO:
self.hash ^= W_OOOHash
if castling & B_OO != self.castling & B_OO:
self.hash ^= B_OOHash
if castling & B_OOO != self.castling & B_OOO:
self.hash ^= B_OOOHash
self.castling = castling
def setEnpassant (self, epcord):
# Strip the square if there's no adjacent enemy pawn to make the capture
if epcord != None:
sideToMove = (epcord >> 3 == 2 and BLACK or WHITE)
fwdPawns = self.boards[sideToMove][PAWN]
if sideToMove == WHITE:
fwdPawns >>= 8
else:
fwdPawns <<= 8
pawnTargets = (fwdPawns & ~fileBits[0]) << 1;
pawnTargets |= (fwdPawns & ~fileBits[7]) >> 1;
if not pawnTargets & bitPosArray[epcord]:
epcord = None
if self.enpassant == epcord: return
if self.enpassant != None:
self.hash ^= epHashes[self.enpassant & 7]
if epcord != None:
self.hash ^= epHashes[epcord & 7]
self.enpassant = epcord
    #@profile
    def applyMove (self, move):
        """Apply a packed move (flag << 12 | fcord << 6 | tcord) to the
        board, pushing everything needed for popMove() onto the history
        lists.  For DROP moves, fcord encodes the dropped piece kind.
        Returns the move for NULL_MOVE, otherwise None.
        """
        flag = move >> 12
        fcord = (move >> 6) & 63
        tcord = move & 63
        fpiece = fcord if flag==DROP else self.arBoard[fcord]
        tpiece = self.arBoard[tcord]
        color = self.color
        opcolor = 1-self.color
        castling = self.castling
        # Push undo information for popMove().
        self.hist_move.append(move)
        self.hist_enpassant.append(self.enpassant)
        self.hist_castling.append(self.castling)
        self.hist_hash.append(self.hash)
        self.hist_fifty.append(self.fifty)
        self.hist_checked.append(self.checked)
        self.hist_opchecked.append(self.opchecked)
        if self.variant in DROP_VARIANTS:
            self.hist_capture_promoting.append(self.capture_promoting)
        if self.variant == CAMBODIANCHESS:
            self.hist_is_first_move.append({KING: self.is_first_move[KING][:], \
                                            QUEEN: self.is_first_move[QUEEN][:]})
        # Invalidate the check caches; isChecked()/opIsChecked() recompute.
        self.opchecked = None
        self.checked = None
        if flag == NULL_MOVE:
            self.setColor(opcolor)
            self.plyCount += 1
            return move
        if self.variant == CAMBODIANCHESS:
            if fpiece == KING and self.is_first_move[KING][color]:
                self.is_first_move[KING][color] = False
            elif fpiece == QUEEN and self.is_first_move[QUEEN][color]:
                self.is_first_move[QUEEN][color] = False
        # Castling moves can be represented strangely, so normalize them.
        if flag in (KING_CASTLE, QUEEN_CASTLE):
            side = flag - QUEEN_CASTLE
            fpiece = KING
            tpiece = EMPTY # In FRC, there may be a rook there, but the king doesn't capture it.
            fcord = self.ini_kings[color]
            if FILE(fcord) == 3 and self.variant in (WILDCASTLECHESS, WILDCASTLESHUFFLECHESS):
                # King starting on the d-file mirrors the castling sides.
                side = 0 if side == 1 else 1
            tcord = self.fin_kings[color][side]
            rookf = self.ini_rooks[color][side]
            rookt = self.fin_rooks[color][side]
        # Capture (sittuyin in place promotion is not capture move!)
        if tpiece != EMPTY and fcord != tcord:
            self._removePiece(tcord, tpiece, opcolor)
            self.pieceCount[opcolor][tpiece] -= 1
            if self.variant in DROP_VARIANTS:
                # Captured promoted pieces go back into the hand as pawns.
                if self.promoted[tcord]:
                    if self.variant == CRAZYHOUSECHESS:
                        self.holding[color][PAWN] += 1
                    self.capture_promoting = True
                else:
                    if self.variant == CRAZYHOUSECHESS:
                        self.holding[color][tpiece] += 1
                    self.capture_promoting = False
            elif self.variant == ATOMICCHESS:
                # A capture explodes every non-pawn neighbour of the target.
                from pychess.Variants.atomic import piecesAround
                apieces = [(fcord, fpiece, color),]
                for acord, apiece, acolor in piecesAround(self, tcord):
                    if apiece != PAWN and acord != fcord:
                        self._removePiece(acord, apiece, acolor)
                        self.pieceCount[acolor][apiece] -= 1
                        apieces.append((acord, apiece, acolor))
                        if apiece == ROOK and acord != fcord:
                            # An exploded rook also loses its castling right.
                            if acord == self.ini_rooks[opcolor][0]:
                                castling &= ~CAS_FLAGS[opcolor][0]
                            elif acord == self.ini_rooks[opcolor][1]:
                                castling &= ~CAS_FLAGS[opcolor][1]
                self.hist_exploding_around.append(apieces)
        self.hist_tpiece.append(tpiece)
        # Remove moving piece(s), then add them at their destination.
        if flag == DROP:
            if self.variant in DROP_VARIANTS:
                assert self.holding[color][fpiece] > 0
                self.holding[color][fpiece] -= 1
                self.pieceCount[color][fpiece] += 1
        else:
            self._removePiece(fcord, fpiece, color)
        if flag in (KING_CASTLE, QUEEN_CASTLE):
            self._removePiece (rookf, ROOK, color)
            self._addPiece (rookt, ROOK, color)
            self.hasCastled[color] = True
        if flag == ENPASSANT:
            # The captured pawn sits one rank behind the target square.
            takenPawnC = tcord + (color == WHITE and -8 or 8)
            self._removePiece (takenPawnC, PAWN, opcolor)
            self.pieceCount[opcolor][PAWN] -= 1
            if self.variant == CRAZYHOUSECHESS:
                self.holding[color][PAWN] += 1
            elif self.variant == ATOMICCHESS:
                from pychess.Variants.atomic import piecesAround
                apieces = [(fcord, fpiece, color),]
                for acord, apiece, acolor in piecesAround(self, tcord):
                    if apiece != PAWN and acord != fcord:
                        self._removePiece(acord, apiece, acolor)
                        self.pieceCount[acolor][apiece] -= 1
                        apieces.append((acord, apiece, acolor))
                self.hist_exploding_around.append(apieces)
        elif flag in PROMOTIONS:
            # Pretend the pawn changes into a piece before reaching its destination.
            fpiece = flag - 2
            self.pieceCount[color][fpiece] += 1
            self.pieceCount[color][PAWN] -=1
        if self.variant in DROP_VARIANTS:
            if tpiece == EMPTY:
                self.capture_promoting = False
            # Keep the promoted[] markers following the moving piece.
            if flag in PROMOTIONS:
                self.promoted[tcord] = 1
            elif flag != DROP:
                if self.promoted[fcord]:
                    self.promoted[fcord] = 0
                    self.promoted[tcord] = 1
                elif tpiece != EMPTY:
                    self.promoted[tcord] = 0
        if self.variant == ATOMICCHESS and (tpiece != EMPTY or flag == ENPASSANT):
            # In atomic chess the capturing piece explodes too.
            self.pieceCount[color][fpiece] -= 1
        else:
            self._addPiece(tcord, fpiece, color)
        if fpiece == PAWN and abs(fcord-tcord) == 16:
            # Double pawn push: en-passant square is the skipped square.
            self.setEnpassant ((fcord + tcord) // 2)
        else: self.setEnpassant (None)
        if tpiece == EMPTY and fpiece != PAWN:
            self.fifty += 1
        else:
            self.fifty = 0
        # Clear castle flags
        king = self.ini_kings[color]
        wildcastle = FILE(king) == 3 and self.variant in (WILDCASTLECHESS, WILDCASTLESHUFFLECHESS)
        if fpiece == KING:
            castling &= ~CAS_FLAGS[color][0]
            castling &= ~CAS_FLAGS[color][1]
        elif fpiece == ROOK:
            if fcord == self.ini_rooks[color][0]:
                side = 1 if wildcastle else 0
                castling &= ~CAS_FLAGS[color][side]
            elif fcord == self.ini_rooks[color][1]:
                side = 0 if wildcastle else 1
                castling &= ~CAS_FLAGS[color][side]
        if tpiece == ROOK:
            # Capturing a rook on its initial square kills that right too.
            if tcord == self.ini_rooks[opcolor][0]:
                side = 1 if wildcastle else 0
                castling &= ~CAS_FLAGS[opcolor][side]
            elif tcord == self.ini_rooks[opcolor][1]:
                side = 0 if wildcastle else 1
                castling &= ~CAS_FLAGS[opcolor][side]
        self.setCastling(castling)
        self.setColor(opcolor)
        self.plyCount += 1
    def popMove (self):
        """Undo the last applied move, restoring all board state from the
        history lists pushed by applyMove()."""
        # Note that we remove the last made move, which was not made by boards
        # current color, but by its opponent
        color = 1 - self.color
        opcolor = self.color
        move = self.hist_move.pop()
        cpiece = self.hist_tpiece.pop()
        flag = move >> 12
        if flag == NULL_MOVE:
            self.setColor(color)
            return
        fcord = (move >> 6) & 63
        tcord = move & 63
        tpiece = self.arBoard[tcord]
        # Castling moves can be represented strangely, so normalize them.
        if flag in (KING_CASTLE, QUEEN_CASTLE):
            side = flag - QUEEN_CASTLE
            tpiece = KING
            fcord = self.ini_kings[color]
            if FILE(fcord) == 3 and self.variant in (WILDCASTLECHESS, WILDCASTLESHUFFLECHESS):
                # King starting on the d-file mirrors the castling sides.
                side = 0 if side == 1 else 1
            tcord = self.fin_kings[color][side]
            rookf = self.ini_rooks[color][side]
            rookt = self.fin_rooks[color][side]
            self._removePiece (tcord, tpiece, color)
            self._removePiece (rookt, ROOK, color)
            self._addPiece (rookf, ROOK, color)
            self.hasCastled[color] = False
        else:
            self._removePiece (tcord, tpiece, color)
        # Put back captured piece
        if cpiece != EMPTY and fcord != tcord:
            self._addPiece (tcord, cpiece, opcolor)
            self.pieceCount[opcolor][cpiece] += 1
            if self.variant == CRAZYHOUSECHESS:
                # Undo the holding credit applyMove() granted for the capture.
                if self.capture_promoting:
                    assert self.holding[color][PAWN] > 0
                    self.holding[color][PAWN] -= 1
                else:
                    assert self.holding[color][cpiece] > 0
                    self.holding[color][cpiece] -= 1
            elif self.variant == ATOMICCHESS:
                # Re-materialise everything that exploded with this capture.
                apieces = self.hist_exploding_around.pop()
                for acord, apiece, acolor in apieces:
                    self._addPiece (acord, apiece, acolor)
                    self.pieceCount[acolor][apiece] += 1
        # Put back piece captured by enpassant
        if flag == ENPASSANT:
            epcord = color == WHITE and tcord - 8 or tcord + 8
            self._addPiece (epcord, PAWN, opcolor)
            self.pieceCount[opcolor][PAWN] += 1
            if self.variant == CRAZYHOUSECHESS:
                assert self.holding[color][PAWN] > 0
                self.holding[color][PAWN] -= 1
            elif self.variant == ATOMICCHESS:
                apieces = self.hist_exploding_around.pop()
                for acord, apiece, acolor in apieces:
                    self._addPiece (acord, apiece, acolor)
                    self.pieceCount[acolor][apiece] += 1
        # Un-promote pawn
        if flag in PROMOTIONS:
            tpiece = PAWN
            self.pieceCount[color][flag-2] -= 1
            self.pieceCount[color][PAWN] +=1
        # Put back moved piece
        if flag == DROP:
            self.holding[color][tpiece] += 1
            self.pieceCount[color][tpiece] -= 1
        else:
            # In atomic chess the capturing piece exploded and was never
            # placed on tcord, so it must not be restored to fcord... it was
            # restored above with the explosion list instead.
            if not (self.variant == ATOMICCHESS and (cpiece != EMPTY or flag == ENPASSANT)):
                self._addPiece (fcord, tpiece, color)
        if self.variant in DROP_VARIANTS:
            # Move the promoted[] markers back with the piece.
            if flag != DROP:
                if self.promoted[tcord] and (not flag in PROMOTIONS):
                    self.promoted[fcord] = 1
                if self.capture_promoting:
                    self.promoted[tcord] = 1
                else:
                    self.promoted[tcord] = 0
            self.capture_promoting = self.hist_capture_promoting.pop()
        if self.variant == CAMBODIANCHESS:
            self.is_first_move = self.hist_is_first_move.pop()
        self.setColor(color)
        # Restore the remaining scalar state from the history stacks.
        self.checked = self.hist_checked.pop()
        self.opchecked = self.hist_opchecked.pop()
        self.enpassant = self.hist_enpassant.pop()
        self.castling = self.hist_castling.pop()
        self.hash = self.hist_hash.pop()
        self.fifty = self.hist_fifty.pop()
        self.plyCount -= 1
    def __hash__ (self):
        # Reuse the incrementally maintained Zobrist key.
        return self.hash
def __eq__ (self, other):
return isinstance(other, LBoard) and \
self.fen_was_applied and other.fen_was_applied and \
self.hash == other.hash and self.plyCount == other.plyCount
def reprCastling (self):
if not self.castling:
return "-"
else:
strs = []
if self.variant == FISCHERRANDOMCHESS:
if self.castling & W_OO:
strs.append(reprCord[self.ini_rooks[0][1]][0].upper())
if self.castling & W_OOO:
strs.append(reprCord[self.ini_rooks[0][0]][0].upper())
if self.castling & B_OO:
strs.append(reprCord[self.ini_rooks[1][1]][0])
if self.castling & B_OOO:
strs.append(reprCord[self.ini_rooks[1][0]][0])
else:
if self.castling & W_OO:
strs.append("K")
if self.castling & W_OOO:
strs.append("Q")
if self.castling & B_OO:
strs.append("k")
if self.castling & B_OOO:
strs.append("q")
return "".join(strs)
    def __repr__ (self):
        # Debug view: side to move, castling and en-passant fields, then
        # the board drawn with figurine (FAN) characters, "." for empty,
        # each line prefixed with "# ".  Drop variants also show holdings.
        if not hasattr(self, "color"):
            # applyFen() was never called; there is nothing to draw.
            return("LBoard without applied FEN")
        b = "#" + reprColor[self.color] + " "
        b += self.reprCastling() + " "
        b += self.enpassant != None and reprCord[self.enpassant] or "-"
        b += "\n# "
        # Slice the mailbox into ranks and print the 8th rank first.
        rows = [self.arBoard[i:i+8] for i in range(0,64,8)][::-1]
        for r, row in enumerate(rows):
            for i, piece in enumerate(row):
                if piece != EMPTY:
                    if bitPosArray[(7-r)*8+i] & self.friends[WHITE]:
                        assert self.boards[WHITE][piece], "self.boards doesn't match self.arBoard !!!"
                        sign = FAN_PIECES[WHITE][piece]
                    else:
                        assert self.boards[BLACK][piece], "self.boards doesn't match self.arBoard !!!"
                        sign = FAN_PIECES[BLACK][piece]
                    b += sign
                else: b += "."
                b += " "
            b += "\n# "
        if self.variant in DROP_VARIANTS:
            for color in (BLACK, WHITE):
                holding = self.holding[color]
                b += "\n# [%s]" % "".join([FAN_PIECES[color][piece]*holding[piece] \
                                           for piece in holding if holding[piece]>0])
        # Python 2 __repr__ must return bytes; Python 3 wants str.
        return b if PY3 else b.encode('utf8')
def asFen (self, enable_bfen=True):
fenstr = []
rows = [self.arBoard[i:i+8] for i in range(0,64,8)][::-1]
for r, row in enumerate(rows):
empty = 0
for i, piece in enumerate(row):
if piece != EMPTY:
if empty > 0:
fenstr.append(str(empty))
empty = 0
if self.variant in (CAMBODIANCHESS, MAKRUKCHESS):
sign = reprSignMakruk[piece]
elif self.variant == SITTUYINCHESS:
sign = reprSignSittuyin[piece]
else:
sign = reprSign[piece]
if bitPosArray[(7-r)*8+i] & self.friends[WHITE]:
sign = sign.upper()
else: sign = sign.lower()
fenstr.append(sign)
if self.variant in (BUGHOUSECHESS, CRAZYHOUSECHESS):
if self.promoted[r*8+i]:
fenstr.append("~")
else:
empty += 1
if empty > 0:
fenstr.append(str(empty))
if r != 7:
fenstr.append("/")
if self.variant in DROP_VARIANTS:
holding_pieces = []
for color in (BLACK, WHITE):
holding = self.holding[color]
for piece in holding:
if holding[piece] > 0:
if self.variant == SITTUYINCHESS:
sign = reprSignSittuyin[piece]
else:
sign = reprSign[piece]
sign = sign.upper() if color == WHITE else sign.lower()
holding_pieces.append(sign*holding[piece])
if holding_pieces:
if enable_bfen:
fenstr.append("/")
fenstr += holding_pieces
else:
fenstr.append("[")
fenstr += holding_pieces
fenstr.append("]")
fenstr.append(" ")
fenstr.append(self.color == WHITE and "w" or "b")
fenstr.append(" ")
if self.variant == CAMBODIANCHESS:
cast = ""
if self.is_first_move[KING][WHITE]:
cast += "D"
if self.is_first_move[QUEEN][WHITE]:
cast += "E"
if self.is_first_move[KING][BLACK]:
cast += "d"
if self.is_first_move[QUEEN][BLACK]:
cast += "e"
if not cast:
cast = "-"
fenstr.append(cast)
else:
fenstr.append(self.reprCastling())
fenstr.append(" ")
if not self.enpassant:
fenstr.append("-")
else:
fenstr.append(reprCord[self.enpassant])
fenstr.append(" ")
fenstr.append(str(self.fifty))
fenstr.append(" ")
fullmove = (self.plyCount)//2 + 1
fenstr.append(str(fullmove))
return "".join(fenstr)
    def clone (self):
        """Return an independent copy of this board for search/probing.

        Position, counters and history lists are copied one level deep;
        the high-level pieceBoard, nags and the variation links
        (children/next/prev) are deliberately left at their fresh-LBoard
        defaults.
        """
        copy = LBoard(self.variant)
        copy.blocker = self.blocker
        copy.friends = self.friends[:]
        copy.kings = self.kings[:]
        copy.boards = [self.boards[WHITE][:], self.boards[BLACK][:]]
        copy.arBoard = self.arBoard[:]
        copy.pieceCount = [self.pieceCount[WHITE][:], self.pieceCount[BLACK][:]]
        copy.color = self.color
        copy.plyCount = self.plyCount
        copy.hasCastled = self.hasCastled[:]
        copy.enpassant = self.enpassant
        copy.castling = self.castling
        copy.hash = self.hash
        copy.pawnhash = self.pawnhash
        copy.fifty = self.fifty
        copy.checked = self.checked
        copy.opchecked = self.opchecked
        # History stacks: shallow copies are enough for scalar entries.
        copy.hist_move = self.hist_move[:]
        copy.hist_tpiece = self.hist_tpiece[:]
        copy.hist_enpassant = self.hist_enpassant[:]
        copy.hist_castling = self.hist_castling[:]
        copy.hist_hash = self.hist_hash[:]
        copy.hist_fifty = self.hist_fifty[:]
        copy.hist_checked = self.hist_checked[:]
        copy.hist_opchecked = self.hist_opchecked[:]
        # Variant-specific state (mirrors the elif chain in applyFen()).
        if self.variant == FISCHERRANDOMCHESS:
            copy.ini_kings = self.ini_kings[:]
            copy.ini_rooks = (self.ini_rooks[0][:], self.ini_rooks[1][:])
        elif self.variant in (WILDCASTLECHESS, WILDCASTLESHUFFLECHESS):
            copy.ini_kings = self.ini_kings[:]
            copy.fin_kings = (self.fin_kings[0][:], self.fin_kings[1][:])
            copy.fin_rooks = (self.fin_rooks[0][:], self.fin_rooks[1][:])
        elif self.variant in DROP_VARIANTS:
            copy.promoted = self.promoted[:]
            copy.holding = (self.holding[0].copy(), self.holding[1].copy())
            copy.capture_promoting = self.capture_promoting
            copy.hist_capture_promoting = self.hist_capture_promoting[:]
        elif self.variant == ATOMICCHESS:
            copy.hist_exploding_around = [a[:] for a in self.hist_exploding_around]
        elif self.variant == CAMBODIANCHESS:
            copy.ini_kings = self.ini_kings
            copy.ini_queens = self.ini_queens
            copy.is_first_move = {KING: self.is_first_move[KING][:], \
                                  QUEEN: self.is_first_move[QUEEN][:]}
            # NOTE(review): the dict entries inside hist_is_first_move are
            # shared with the original -- confirm this cannot be mutated.
            copy.hist_is_first_move = self.hist_is_first_move[:]
        copy.fen_was_applied = self.fen_was_applied
        return copy
|
import werkzeug
from odoo import http
from odoo.http import request
class LinkTracker(http.Controller):
    """Public controller resolving short-link codes."""

    @http.route('/r/<string:code>', type='http', auth='none', website=True)
    def full_url_redirect(self, code, **post):
        """Record a click on the short link `code` (with the visitor's IP
        and GeoIP country, when available) and permanently redirect (301)
        to the link's target URL (empty location for unknown codes)."""
        geoip = request.session.geoip
        country_code = (geoip.get('country_code') or False) if geoip else False
        request.env['link.tracker.click'].sudo().add_click(
            code,
            ip=request.httprequest.remote_addr,
            country_code=country_code,
        )
        target = request.env['link.tracker'].get_url_from_code(code)
        return werkzeug.utils.redirect(target or '', 301)
|
import sys
import linecache
sys.path.append('../heuristics/')
from utils import *
from heu import *
from checker import *
from const import *
from collections import OrderedDict
from operator import *
from heu import *
def initTmpStructures(lMachines, lItems, cTime):
    """Build the temporary bin structures used by the flexible-fit solver.

    One bin is created per machine with non-zero CPU usage (keeping its
    tasks, CPU capacity and summed usage), then one single-task bin per
    item in lItems (assumed capacity 1.0).

    Returns (b2tasks, m2b, b2capa, b2usage, b2remDur, firstBinToAllocate):
      b2tasks  -- bin id -> set of tasks
      m2b      -- machine -> bin id (machine-backed bins only)
      b2capa   -- bin id -> CPU capacity
      b2usage  -- bin id -> summed CPU requirement
      b2remDur -- bin id -> longest remaining task duration, ordered
                  longest-first
      firstBinToAllocate -- first bin id NOT backed by a machine

    NOTE(review): cTime is currently unused here.
    """
    b2tasks = {}
    m2b = {}
    b2capa = {}
    b2usage = {}
    b2remDur = OrderedDict([])
    b = 0
    # Build bin struct from machines and items
    mOcc = [m for m in lMachines if m.usages[CONST_LCPU] != 0.0]
    for i, m in enumerate(mOcc):
        m2b[m] = b
        b2capa[b] = m.capacities[CONST_LCPU]
        b2usage[b] = sum([t.reqs[CONST_LCPU] for t in m.tasks])
        b2tasks[b] = set(m.tasks)
        # + [0.0] guards max() against a machine with no tasks.
        b2remDur[b] = max([t.remdur for t in m.tasks] + [0.0])
        b += 1
    firstBinToAllocate = b
    for j in lItems:
        b2tasks[b] = set([j])
        b2remDur[b] = j.remdur
        b2usage[b] = j.reqs[CONST_LCPU]
        b2capa[b] = 1.0
        b += 1
    # Re-order bins by remaining duration, longest first.
    b2remDur = OrderedDict(sorted(b2remDur.items(), key=itemgetter(1), reverse = True))
    return b2tasks, m2b, b2capa, b2usage, b2remDur,firstBinToAllocate
def allcateBins(h,lMachines,cTime,m2b,b2tasks, droppedBins):
    """Materialise the (merged) bins onto machines via h.assign().

    Machine-backed bins get any tasks merged into them assigned to their
    own machine; every surviving other bin (not swallowed by a merge in
    droppedBins and not mapped in m2b) is placed whole onto a currently
    idle machine.
    """
    # Machines with no CPU usage are free to host the leftover bins.
    mLibre = [m for m in lMachines if m.usages[CONST_LCPU] == 0.0]
    for m in m2b:
        b = m2b[m]
        for t in b2tasks[b]:
            # Only assign tasks the machine does not already run.
            if t not in m.tasks:
                h.assign(m, t, cTime)
    for b in b2tasks:
        if b not in droppedBins and b not in m2b.values():
            #print m2b.values()
            # NOTE(review): assumes enough idle machines remain; raises
            # IndexError otherwise -- confirm that is intended.
            m = mLibre[0]
            for t in b2tasks[b]:
                #print "assigningB {0} to {1}".format(t, m)
                #assert(t is not None)
                h.assign(m, t, cTime)
            mLibre.remove(m)
''' FLEXIBLE FIT METHODS '''
def solve(h, lMachines, lItems, cTime):
    """Flexible-fit solver: greedily merge bins, then allocate them.

    Builds the temporary bin structures, repeatedly merges the best pair
    of bins (folding the shrinking bin's tasks and usage into the
    receiver and keeping the longest remaining duration) until no
    mergeable pair remains, then materialises the result via
    allcateBins().
    """
    b2tasks, m2b, b2capa, b2usage, b2remDur, firstBinToAllocate = \
        initTmpStructures(lMachines, lItems, cTime)
    # create a function initializing And updating b2Weigth
    # create a function updating binsTovisit
    # Materialise the key view as a list: it is indexed and .remove()d
    # below, which dict views do not support on Python 3 (on Python 2
    # .keys() already returned a list, so behavior is unchanged).
    binsToVisit = list(b2remDur.keys())
    mergedBins = []
    bs, br = getBestMergeableBinsAndUpdate(binsToVisit, b2capa, b2usage, b2remDur, firstBinToAllocate)
    while bs != -1 and br != -1:
        # Fold the shrinking bin bs into the receiver br.
        b2tasks[br] = b2tasks[br].union(b2tasks[bs])
        b2usage[br] = b2usage[br] + b2usage[bs]
        b2remDur[br] = max(b2remDur[br], b2remDur[bs])
        mergedBins.append(bs)
        if bs in binsToVisit:
            binsToVisit.remove(bs)
        bs, br = getBestMergeableBinsAndUpdate(binsToVisit, b2capa, b2usage, b2remDur, firstBinToAllocate)
    # call the update function
    allcateBins(h, lMachines, cTime, m2b, b2tasks, mergedBins)
def getBestMergeableBinsAndUpdate(bToVisit,b2capa,b2usage,b2weight,firstBinToAllocate):
unMergeableBins =[]
maxW = 0
maxBs = -1
maxBr = 1
for i in range(len(bToVisit)-1):
bi = bToVisit[i]
if bi in unMergeableBins :
continue
biIsMergeable = False
for j in range(i+1,len(bToVisit)):
bj = bToVisit[j]
if bj in unMergeableBins :
continue
if bi == bj:
continue
if bj < firstBinToAllocate and bi < firstBinToAllocate:
continue
if bi < firstBinToAllocate:
br = bi
bs = bj
else:
br = bj
bs = bi
if b2usage[br] + b2usage[bs] > b2capa[br]:
continue
biIsMergeable = True
wbsbr =b2weight[bs]+ b2weight[br]
if(maxW<wbsbr):
maxW =wbsbr
maxBr = br
maxBs = bs
if not biIsMergeable:
unMergeableBins.append(bi)
for b in unMergeableBins:
bToVisit.remove(b);
return maxBs, maxBr
|
"""
pyReefCore Model main entry file.
"""
import time
import numpy as np
from pyReefCore import (preProc, xmlParser, enviForce, coralGLV, coreData, modelPlot)
import cProfile
import os
import pstats
import StringIO
class Model(object):
    """State object for the pyReef model.

    Drives the simulation: environmental forcing (tectonics, sea level,
    sediment input, flow, temperature, pH, nutrients), the Generalized
    Lotka-Volterra coral population solver and the synthetic core record.
    NOTE: this module is Python 2 only (print statements, StringIO).
    """
    def __init__(self):
        """
        Constructor.
        """
        # Simulation state
        self.dt = 0.
        self.tNow = 0.
        self.tDisp = 0.
        self.waveID = 0
        self.outputStep = 0
        self.applyDisp = False
        self.simStarted = False
        self.dispRate = None
        # MPI parallelism is currently disabled; the model runs serially.
        #self._rank = mpi.COMM_WORLD.rank
        #self._size = mpi.COMM_WORLD.size
        #self._comm = mpi.COMM_WORLD
        # Initialise pre-processing functions
        self.enviforcing = preProc.preProc()
    def load_xml(self, filename, verbose=False):
        """
        Load an XML configuration file.

        Initialises the environmental forcing, the core data container
        and the plotting helpers from the parsed input, and seeds numpy's
        global random number generator.
        """
        # Only the first node should create a unique output dir
        #self.input = xmlParser.xmlParser(filename, makeUniqueOutputDir=(self._rank == 0))
        self.input = xmlParser.xmlParser(filename)
        self.tNow = self.input.tStart
        # tCoral advances in tCarb steps; tLayer marks the next layer change.
        self.tCoral = self.tNow
        self.tLayer = self.tNow + self.input.laytime
        # Seed the random number generator consistently on all nodes
        seed = None
        #if self._rank == 0:
        # limit to max uint32
        seed = np.random.mtrand.RandomState().tomaxint() % 0xFFFFFFFF
        #seed = self._comm.bcast(seed, root=0)
        np.random.seed(seed)
        self.iter = 0
        self.layID = 0
        # Initialise environmental forcing conditions
        self.force = enviForce.enviForce(input=self.input)
        # Initialise core data
        self.core = coreData.coreData(input=self.input)
        # Environmental forces functions
        self.core.seatime = self.force.seatime
        self.core.sedtime = self.force.sedtime
        self.core.flowtime = self.force.flowtime
        self.core.seaFunc = self.force.seaFunc
        self.core.sedFunc = self.force.sedFunc
        self.core.flowFunc = self.force.flowFunc
        # NOTE(review): the x/y pairs below are crossed (sedfctx gets
        # plotsedy, ...) -- confirm this is intended by the plotting code.
        self.core.sedfctx = self.force.plotsedy
        self.core.sedfcty = self.force.plotsedx
        self.core.flowfctx = self.force.plotflowy
        self.core.flowfcty = self.force.plotflowx
        # Initialise plotting functions
        self.plot = modelPlot.modelPlot(input=self.input)
        return
    def run_to_time(self, tEnd, showtime=10, profile=False, verbose=False):
        """
        Run the simulation to a specified point in time (tEnd).
        If profile is True, dump cProfile output to /tmp.
        """
        timeVerbose = self.tNow+showtime
        if profile:
            # NOTE(review): the profiler is enabled but never disabled or
            # written out in this method (pid is unused) -- the stats dump
            # appears to be missing.
            pid = os.getpid()
            pr = cProfile.Profile()
            pr.enable()
        #if self._rank == 0:
        print 'tNow = %s [yr]' %self.tNow
        timetec = self.input.tStart
        # Clamp the requested end time to the configured simulation end.
        if tEnd > self.input.tEnd:
            tEnd = self.input.tEnd
            print 'Requested end time is longer than the one defined in your XmL input file'
            print 'Your simulation will run for %s years.'%(tEnd)
        if self.tNow == self.input.tStart:
            # Initialise Generalized Lotka-Volterra equation
            self.coral = coralGLV.coralGLV(input=self.input)
        # Perform main simulation loop
        # NOTE: number of iteration for the ODE during a given time step, could be user defined...
        N = 100
        # Define environmental factors
        # Each factor limits species growth in [0, 1], one value per species.
        dfac = np.ones(self.input.speciesNb,dtype=float)
        sfac = np.ones(self.input.speciesNb,dtype=float)
        ffac = np.ones(self.input.speciesNb,dtype=float)
        tfac = np.ones(self.input.speciesNb,dtype=float)
        nfac = np.ones(self.input.speciesNb,dtype=float)
        pfac = np.ones(self.input.speciesNb,dtype=float)
        while self.tNow < tEnd:
            # Initial coral population
            if self.tNow == self.input.tStart:
                self.coral.population[:,self.iter] = self.input.speciesPopulation
            # Get tectonic
            if self.input.tecOn:
                tmp = self.core.topH
                self.core.topH, dfac = self.force.getTec(self.tNow, timetec, tmp)
                timetec = self.tNow
                if self.tNow == self.input.tStart:
                    self.core.tecrate[self.layID] = self.force.tecrate
                else:
                    self.core.tecrate[self.layID+1] = self.force.tecrate
            else:
                self.force.tecrate = 0.
                self.core.tecrate[self.layID+1] = 0.
            # Get sea-level
            if self.input.seaOn:
                tmp = self.core.topH
                # NOTE(review): dfac computed by getTec above is overwritten
                # here when both tecOn and seaOn are set -- confirm intended.
                self.core.topH, dfac = self.force.getSea(self.tNow, tmp)
                if self.tNow == self.input.tStart:
                    self.core.sealevel[self.layID] = self.force.sealevel
                else:
                    self.core.sealevel[self.layID+1] = self.force.sealevel
            else:
                self.force.sealevel = 0.
                if self.tNow == self.input.tStart:
                    self.core.sealevel[self.layID] = self.force.sealevel
                else:
                    self.core.sealevel[self.layID+1] = self.force.sealevel
            self.coral.mbsl[self.iter] = self.force.sealevel
            # Store accommodation space through time
            self.coral.accspace[self.iter] = self.core.topH #max(self.core.topH,0.)
            # Get sediment input
            if self.input.sedOn:
                sedh, sfac = self.force.getSed(self.tNow, self.core.topH)
                self.core.sedinput[self.layID] = self.force.sedlevel
            else:
                sedh = 0.
            # Get flow velocity
            if self.input.flowOn:
                ffac = self.force.getFlow(self.tNow, self.core.topH)
                self.core.waterflow[self.layID] = self.force.flowlevel
            # Get temperature control
            if self.input.tempOn:
                tfac = self.force.getTemp(self.tNow)
                self.core.temperature[self.layID] = self.force.templevel
            # Get pH control
            if self.input.pHOn:
                pfac = self.force.getPh(self.tNow)
                self.core.pH[self.layID] = self.force.pHlevel
            # Get nutrients control
            if self.input.nutrientOn:
                nfac = self.force.getNu(self.tNow)
                self.core.nutrient[self.layID] = self.force.nulevel
            # Limit species activity from environmental forces
            # The effective factor is the element-wise minimum of them all.
            tmp = np.minimum(dfac, sfac)
            tmp2 = np.minimum(tfac, tmp)
            tmp3 = np.minimum(pfac, tmp2)
            tmp4 = np.minimum(nfac, tmp3)
            fac = np.minimum(ffac, tmp4)
            self.coral.epsilon = self.input.malthusParam * fac
            # Initialise RKF conditions
            self.odeRKF = self.coral.solverGLV()
            self.odeRKF.set_initial_condition(self.coral.population[:,self.iter])
            # Define coral evolution time interval and time stepping
            self.tCoral += self.input.tCarb
            tODE = np.linspace(self.tNow, self.tCoral, N+1)
            self.dt = tODE[1]-tODE[0]
            # Solve the Generalized Lotka-Volterra equation
            coral,t = self.odeRKF.solve(tODE)
            population = coral.T
            # Cap the final population at the configured maximum.
            tmppop = np.copy(population[:,-1])
            tmppop[tmppop>self.input.maxpop] = self.input.maxpop
            population[:,-1] = tmppop
            # Update coral population
            self.iter += 1
            # Species with zero growth factor die out; species back above
            # the optimal-factor threshold are re-seeded at 1.
            ids = np.where(self.coral.epsilon==0.)[0]
            population[ids,-1] = 0.
            ids = np.where(np.logical_and(fac>=self.input.facOpt,population[:,-1]==0.))[0]
            population[ids,-1] = 1.
            self.coral.population[:self.input.speciesNb,self.iter] = population[:,-1]
            # In case there is no accommodation space
            if self.core.topH <= 0.:
                population[ids,-1] = 0.
                self.coral.population[:self.input.speciesNb,self.iter] = 0.
                # Subaerial exposure: karst erosion, limited by the exposed height.
                ero = -self.input.karstRate*self.input.tCarb
                if self.core.topH > ero:
                    ero = self.core.topH
            else:
                ero = 0.
            # Compute carbonate production and update coral core characteristics
            self.core.coralProduction(self.layID, self.coral.population[:,self.iter],
                                      self.coral.epsilon, sedh, ero, verbose)
            # Update time step
            self.tNow = self.tCoral
            # Update stratigraphic layer ID
            if self.tLayer <= self.tNow :
                self.tLayer += self.input.laytime
                self.layID += 1
            #if self._rank == 0 and self.tNow>=timeVerbose:
            if self.tNow>=timeVerbose:
                timeVerbose = self.tNow+showtime
                print 'tNow = %s [yr]' %self.tNow
        # Update plotting parameters
        self.plot.pop = self.coral.population
        self.plot.timeCarb = self.coral.iterationTime
        self.plot.mbsl = self.coral.mbsl
        self.plot.depth = self.core.thickness
        self.plot.sedH = self.core.coralH
        self.plot.karstero = self.core.karstero
        self.plot.timeLay = self.core.layTime
        self.plot.surf = self.core.topH
        self.plot.sealevel = self.core.sealevel
        self.plot.tecinput = self.core.tecrate
        self.plot.sedinput = self.core.sedinput
        self.plot.waterflow = self.core.waterflow
        self.plot.pH = self.core.pH
        self.plot.temperature = self.core.temperature
        self.plot.nutrient = self.core.nutrient
        self.plot.accspace = self.coral.accspace
        return
    def ncpus(self):
        """
        Return the number of CPUs used to generate the results.
        """
        # MPI is disabled (see __init__), so this is always 1.
        return 1
|
from django.conf import settings
from django.contrib.aderit.access_account.views import \
(SignupView as AccessAccountSignupView)
from django.contrib.aderit.generic_utils.views import \
(GenericUtilView, GenericProtectedView)
from django.utils.translation import ugettext_lazy as _
from django.utils.log import getLogger
from django.template.response import TemplateResponse
from django.views.generic.edit import FormView
from django.views.generic.base import TemplateView
from django.http import HttpResponseRedirect, HttpResponse, urlparse
from django.core.urlresolvers import reverse
from account.models import Account
logger = getLogger('account.views')
class SignupView(AccessAccountSignupView):
    """Signup view that can pre-fill 'reported_by' from the query string."""

    def get_initial(self):
        """Seed the signup form with ?reported_by=<value> when present."""
        reporter = self.request.GET.get('reported_by', "")
        return {'reported_by': reporter} if reporter else {}
|
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.http import Http404
from .utils import knuth_decode
class KnuthIdMixin(object):
    """
    Detail-view mixin that resolves the object from a Knuth-obfuscated
    id found in the URL kwargs instead of a plain primary key.
    """
    knuth_id_url_kwarg = 'oid'

    def get_object(self, queryset=None):
        """Decode the 'oid' kwarg and return the matching object or 404."""
        qs = self.get_queryset() if queryset is None else queryset
        encoded = self.kwargs.get(self.knuth_id_url_kwarg, None)
        if encoded is None:
            raise AttributeError("Generic detail view %s must be called with "
                                 "an object oid."
                                 % self.__class__.__name__)
        qs = qs.filter(pk=knuth_decode(encoded))
        try:
            return qs.get()
        except qs.model.DoesNotExist:
            raise Http404(_("No %(verbose_name)s found matching the query") %
                          {'verbose_name': qs.model._meta.verbose_name})
|
class Singleton:
    '''
    Class decorator that turns the wrapped class into a singleton: the
    first call constructs the instance, every later call returns that
    same instance (later constructor arguments are ignored).
    '''
    def __init__(self, cls):
        self.cls = cls
        self.instance = None

    def __call__(self, *args, **kwargs):
        instance = self.instance
        if instance is None:
            instance = self.instance = self.cls(*args, **kwargs)
        return instance
class RunControlBunch(dict):
    '''
    Classic Python "Bunch": a dict whose entries double as attributes.
    Accessing a missing attribute lazily creates a nested
    RunControlBunch, so dotted parameter paths can be assigned without
    declaring intermediate levels first.
    '''
    def __init__(self, **kwargs):
        dict.__init__(self, kwargs)
        # Make the attribute namespace and the dict storage one object.
        self.__dict__ = self

    def __getattr__(self, key):
        # Only invoked for *missing* attributes: auto-vivify a sub-bunch.
        bunch = RunControlBunch()
        self.__dict__[key] = bunch
        return bunch

    def flat_str(self, parent=None):
        '''
        Render all parameters, one per line, as:
            key = value
            key.subkey = value
            ...
        '''
        lines = []
        for key, value in sorted(self.__dict__.items()):
            full_key = key if parent is None else parent + '.' + key
            if isinstance(value, RunControlBunch):
                lines.append(value.flat_str(full_key))
            elif isinstance(value, str):
                lines.append(full_key + " = '" + str(value) + "'")
            else:
                lines.append(full_key + ' = ' + str(value))
        return '\n'.join(lines)
@Singleton
class RunControl(RunControlBunch):
    '''
    A generic singleton bunch holding global run control parameters.
    '''
    def __init__(self, **kwargs):
        RunControlBunch.__init__(self, **kwargs)

    def __str__(self):
        return self.flat_str()

    def lock(self):
        '''
        Prevents any new parameters from being created while still
        allowing already-defined parameters to be changed.  Note this
        swaps __setattr__ on RunControlBunch itself.
        '''
        def locked_setattr(bunch, key, value):
            if key not in bunch.__dict__:
                raise KeyError(
                    'Unknown key. Run control parameters have been locked.')
            bunch.__dict__[key] = value
        RunControlBunch.__setattr__ = locked_setattr

    def unlock(self):
        '''
        Allows creation of new parameters again.
        '''
        RunControlBunch.__setattr__ = dict.__setattr__
|
import asyncio
import re
from datetime import datetime as dt
import discord
from discord.ext import commands, vbu
class MeowChat(vbu.Cog):
    """
    Cog enforcing "meow chat": in opted-in channels every message must
    contain at least one cat-like keyword or it is deleted.
    """

    # Substrings whose presence makes a message acceptable.
    VALID_KEYWORDS = (
        "mew",
        "meow",
        "nya",
        "uwu",
        "owo",
        "x3",
        ":3",
        ";3",
        "rawr",
        "purr",
        "murr",
        "nuzzle",
    )

    # Matches custom (optionally animated) Discord emoji tags so their
    # names can't accidentally satisfy the keyword check.
    EMOJI_REGEX = re.compile(r"<a?:.+?:\d+?>")

    def __init__(self, bot: vbu.Bot):
        super().__init__(bot)
        # Channels where meow chat is currently enabled.
        self.meow_chats = set()
        # channel id -> (expiry datetime or None, scheduled disable task)
        self.meow_disable_tasks = {}

    @vbu.Cog.listener()
    async def on_message(self, message: discord.Message):
        """Vet every new message."""
        await self.check_message(message)

    @vbu.Cog.listener()
    async def on_message_edit(self, before: discord.Message, after: discord.Message):
        """Vet edits too, so a meow can't be edited away."""
        await self.check_message(after)

    async def check_message(self, message: discord.Message):
        """
        Handles deleting any messages that aren't meow-friendly.
        """
        if message.channel not in self.meow_chats:
            return
        if message.author.bot:
            return
        # Bot owners are exempt.
        if message.author.id in self.bot.owner_ids:
            return
        # Strip custom emoji before looking for keywords.
        content = self.EMOJI_REGEX.sub("", message.content.lower())
        if any([i in content for i in self.VALID_KEYWORDS]):
            return
        try:
            await message.delete()
            expiry_time, _ = self.meow_disable_tasks.get(message.channel.id, (None, None))
            # Moderators get a hint about the disable command.
            if message.channel.permissions_for(message.author).manage_messages:
                text = f"{message.author.mention}, your message needs to have a 'meow' in it (to disable, run the `meow off` command)."
            else:
                text = f"{message.author.mention}, your message needs to have a 'meow' in it :<"
            if expiry_time:
                text = text.replace("in it", f"in it until meow chat expires {discord.utils.format_dt(expiry_time, 'R')}")
            # The warning self-destructs after 3 seconds.
            await message.channel.send(text, delete_after=3)
        except discord.HTTPException:
            # Message already gone or missing permissions; best-effort only.
            pass

    @commands.group()
    @commands.has_permissions(manage_messages=True)
    @commands.bot_has_permissions(send_messages=True, manage_messages=True)
    async def meow(self, ctx: vbu.Context):
        """
        The parent group for the meow chat commands.
        """
        if ctx.invoked_subcommand is None:
            return await ctx.send_help(ctx.command)

    @meow.command(name="enable", aliases=["start", "on"])
    @commands.has_permissions(manage_messages=True)
    @commands.bot_has_permissions(send_messages=True, manage_messages=True)
    async def meow_enable(self, ctx: vbu.Context, duration: vbu.TimeValue = None):
        """
        Turn on meow chat for this channel.
        """
        self.meow_chats.add(ctx.channel)
        if duration:
            await ctx.send(f"Meow chat has been enabled in {ctx.channel.mention} for {duration.clean_full} owo")
        else:
            await ctx.send(f"Meow chat has been enabled in {ctx.channel.mention} owo")
        # See if we want to disable meow chat after a while
        if duration:
            async def waiter():
                # Sleep for the requested duration, then best-effort disable.
                await asyncio.sleep(duration.delta.total_seconds())
                try:
                    self.meow_chats.remove(ctx.channel)
                    await ctx.send("Turned off meow chat as scheduled :<")
                except KeyError:
                    # Already disabled manually in the meantime.
                    pass
            # Replace any previously scheduled disable task for this channel.
            _, current_task = self.meow_disable_tasks.get(ctx.channel.id, (None, None))
            if current_task:
                current_task.cancel()
            self.meow_disable_tasks[ctx.channel.id] = (discord.utils.utcnow() + duration.delta, self.bot.loop.create_task(waiter()))

    @meow.command(name="disable", aliases=["stop", "off"])
    @commands.has_permissions(manage_messages=True)
    @commands.bot_has_permissions(send_messages=True, manage_messages=True)
    async def meow_disable(self, ctx: vbu.Context):
        """
        Turn off meow chat for this channel.
        """
        try:
            self.meow_chats.remove(ctx.channel)
        except KeyError:
            return await ctx.send("Meow chat is already disabled in this channel.")
        await ctx.send(f"Meow chat has been disabled in {ctx.channel.mention} :<")
        # See if there's a running task to keep it alive
        _, current_task = self.meow_disable_tasks.pop(ctx.channel.id, (None, None))
        if current_task:
            current_task.cancel()
def setup(bot: vbu.Bot):
    """Entry point used by the bot to load the MeowChat cog."""
    bot.add_cog(MeowChat(bot))
|
{
'name': "Customer Invoice Line Refund",
'summary': """
""",
'author': "Pambudi Satria",
'website': "https://github.com/pambudisatria",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Accounting & Finance',
'version': '8.0.0.1.0',
# any module necessary for this one to work correctly
'depends': [
'account',
],
# always loaded
'data': [
'views/account_invoice_view.xml',
],
# only loaded in demonstration mode
'demo': [
],
'qweb': [],
'installable': True,
'auto_install': False,
'pre_init_hook': 'pre_init_hook',
}
|
'''
features.py contains a decorator to register features to a global dict, for
use in other modules. It also contains the standard features, though others can
be added
'''
from functools import reduce
from function_registry import FunctionRegistry
all_features = FunctionRegistry()
def register_feature(name=None):
    '''
    Decorator factory that registers a feature in the global table.
    If no name is given, the decorated function's __name__ is used.
    '''
    def decorator(func):
        '''
        Store the function in the feature table and return it unchanged.
        '''
        key = func.__name__ if name is None else name
        all_features[key] = func
        return func
    return decorator
@all_features.register
def intensity(digit):
    '''
    Overall intensity of a digit: the sum of (pixel + 1) over all pixels.
    '''
    total = 0
    for pixel in digit:
        total += pixel + 1
    return total
@all_features.register
def symmetry(digit):
    '''
    Symmetry of a digit: the negated sum of absolute differences between
    each row and its mirror image.
    '''
    total = 0
    for row in digit.rows:
        for left_px, right_px in zip(row, reversed(row)):
            total += abs(left_px - right_px)
    return -total
def minmax(data):
    '''
    Return a (minimum, maximum) tuple for the given iterable.

    Works for any iterable, including one-shot generators.  Raises
    TypeError on an empty iterable (reduce with no initial value).
    '''
    def check_minmax(current, check):
        '''
        Reduction step combining two (min, max) pairs.
        '''
        return min(current[0], check[0]), max(current[1], check[1])
    # Pair each element with itself via a generator expression.  The old
    # code used zip(data, data), which on a one-shot iterator consumed
    # elements in *pairs* (min over even-indexed, max over odd-indexed,
    # dropping the last element of an odd-length stream) -- and the
    # callers here pass generators, so results were wrong.
    return reduce(check_minmax, ((x, x) for x in data))
@all_features.register
def horizontal_sweep(digit):
    '''
    Difference between the leftmost and rightmost row center, where a
    center is the weighted mean pixel position of a row.
    '''
    def row_center(row):
        '''
        Weighted mean position of one row (0 when the weighted total is 0).
        '''
        weight = sum(p + 1 for p in row)
        weighted = sum(idx * (p + 1) for idx, p in enumerate(row))
        return 0 if weighted == 0 else weighted / weight
    # Pass a lazy generator, mirroring the original centers() generator.
    low, high = minmax(row_center(row) for row in digit.rows)
    return high - low
@all_features.register
def vertical_uniformity(digit):
    '''
    32 minus the total of each column's value range (max - min).
    '''
    total = 0
    for column in digit.columns:
        low, high = minmax(column)
        total += high - low
    return 32 - total
|
import logging
from scap.Model import Model
logger = logging.getLogger(__name__)
class RegistryBehaviors(Model):
    """
    Model describing the attributes accepted on a registry 'behaviors'
    element: recursion depth/direction and the Windows registry view.
    """
    # Declarative attribute map consumed by the scap Model base class.
    MODEL_MAP = {
        'attributes': {
            # -1 means unlimited recursion depth.
            'max_depth': {'type': 'Integer', 'default': -1},
            'recurse_direction': {'enum': ['none', 'up', 'down'], 'default': 'none'},
            'windows_view': {'enum': ['32_bit', '64_bit'], 'default': '64_bit'},
        }
    }
|
import os
try:
    # Python 2 module name.
    from ConfigParser import ConfigParser as ConfigParser
except ImportError:
    # Python 3 renamed the module.  Only ImportError should trigger the
    # fallback -- the previous bare `except:` also swallowed SystemExit
    # and KeyboardInterrupt.
    from configparser import ConfigParser as ConfigParser
def goglib_tags_get(game_name, tags_file):
    """
    Return the list of tags recorded for game_name in tags_file.

    The file is an ini-style config whose 'goglib tags' section maps game
    names to comma-separated tag lists.  An empty list is returned when
    the section or the game entry is missing.
    """
    parser = ConfigParser()
    parser.read(tags_file)
    if 'goglib tags' not in parser.sections():
        return []
    if game_name not in parser.options('goglib tags'):
        return []
    return parser.get('goglib tags', game_name).split(',')
if __name__ == "__main__":
    # CLI usage: <script> <game_name> <tags_file>
    # NOTE(review): the returned tag list is computed but never printed,
    # so running this as a script produces no output -- presumably a
    # print() is missing; confirm intended behaviour before changing.
    import sys
    goglib_tags_get(sys.argv[1], sys.argv[2])
|
import math
import random
# 128 pre-generated ~5-digit primes; candidate values for
# PerlinNoise.prime_1, indexed by the PRIME_1 run parameter.
# NOTE(review): 36097 appears twice in this list -- confirm intended.
PRIME_INDEX_1 = [
    49193,
    11887,
    23819,
    93983,
    28283,
    87179,
    74933,
    82561,
    29741,
    98453,
    72719,
    48193,
    66883,
    95071,
    12841,
    89603,
    49261,
    52529,
    57697,
    70321,
    54617,
    49363,
    41233,
    39883,
    35393,
    33149,
    37493,
    42989,
    58073,
    62507,
    99829,
    41999,
    44087,
    31907,
    10627,
    29231,
    57559,
    36809,
    17123,
    50593,
    38449,
    71317,
    38149,
    60637,
    10607,
    48677,
    23189,
    83701,
    78853,
    35617,
    28477,
    86117,
    46901,
    80819,
    89491,
    36097,
    54881,
    94781,
    20707,
    20011,
    69457,
    14593,
    49253,
    35257,
    14753,
    44851,
    10289,
    36097,
    62017,
    82723,
    10037,
    77551,
    89513,
    70429,
    30269,
    30703,
    77711,
    69313,
    20021,
    31657,
    33851,
    27749,
    85667,
    42793,
    47599,
    92789,
    97771,
    84551,
    61637,
    68659,
    93263,
    62201,
    42131,
    78823,
    17747,
    31183,
    89611,
    91009,
    76673,
    47051,
    94099,
    96757,
    90977,
    65141,
    46051,
    11093,
    19073,
    44633,
    27967,
    25171,
    41221,
    96223,
    88997,
    74941,
    49559,
    11909,
    11593,
    97369,
    55733,
    35869,
    55849,
    87931,
    81131,
    99023,
    76561,
    78977,
    48857,
    74717,
]
# 128 pre-generated 7-digit primes; candidate values for
# PerlinNoise.prime_2, indexed by the PRIME_2 run parameter.
PRIME_INDEX_2 = [
    7360349,
    1287757,
    3764759,
    5276833,
    2735671,
    7351777,
    7705903,
    2034551,
    2427493,
    3883639,
    4260859,
    6814097,
    3226933,
    2648249,
    4458793,
    8015303,
    2323733,
    7991233,
    5560879,
    9826913,
    3634811,
    3746299,
    1051543,
    2954789,
    7874983,
    9380681,
    4577789,
    4306829,
    6714599,
    8395733,
    2718493,
    1429867,
    5675147,
    6104573,
    3118727,
    2657243,
    9750043,
    1853377,
    9441353,
    7247969,
    7669553,
    5334157,
    9376649,
    9518137,
    9368297,
    3912679,
    3230237,
    7291939,
    1361677,
    1034167,
    4998089,
    1178239,
    5160677,
    6130199,
    8056553,
    8527361,
    4261093,
    8640553,
    5553391,
    6024797,
    7275019,
    7245019,
    7661483,
    5120033,
    4388117,
    5941147,
    7682189,
    9303467,
    7165777,
    1509163,
    5223929,
    9696487,
    8012383,
    6254273,
    1400731,
    9958177,
    7733573,
    1498993,
    1553467,
    4791257,
    4524521,
    7048633,
    3630821,
    7931179,
    2341457,
    6432269,
    9597703,
    4338011,
    6665059,
    7911653,
    8384317,
    2230531,
    7904621,
    1633559,
    9096533,
    6873301,
    2717821,
    5897977,
    3608543,
    2248243,
    3174599,
    8634233,
    4028963,
    6435001,
    6611399,
    3250469,
    4046353,
    1429943,
    8552111,
    1970261,
    1045043,
    9552523,
    6993799,
    6141467,
    5723479,
    9578867,
    9233299,
    7224641,
    3165023,
    4583899,
    3905861,
    1633993,
    5013137,
    5092613,
    2197163,
    7732213,
    6559019,
    2538499,
]
# 128 pre-generated 10-digit primes; candidate values for
# PerlinNoise.prime_3, indexed by the PRIME_3 run parameter.
PRIME_INDEX_3 = [
    1368916889,
    3054015583,
    6066123341,
    8673964289,
    9002483141,
    7187080993,
    5319345529,
    6961795349,
    1653814157,
    3416288497,
    6454122317,
    2480898239,
    3878100221,
    5956454227,
    9767569853,
    5981528503,
    4962084931,
    4489312199,
    3013312061,
    9818685161,
    4061204663,
    1816202221,
    7567463471,
    9839749459,
    3993070667,
    5872839331,
    9256050443,
    4854483611,
    4876755749,
    3823459247,
    6244209637,
    4199084081,
    6053970359,
    1093521049,
    7385602219,
    7289318273,
    9333908789,
    9701161343,
    8139801689,
    5013046681,
    4094649187,
    2372669671,
    9010267157,
    4298511787,
    7575340187,
    9252205969,
    5923706413,
    7112626819,
    6531270523,
    8379490583,
    4521945149,
    6804302789,
    6984132251,
    9173492033,
    1657527653,
    1532523367,
    3132088123,
    5910371431,
    7551540169,
    1643193353,
    6127000571,
    2637510193,
    7904761379,
    2954227033,
    7344843263,
    8077648457,
    9397237879,
    6775740173,
    1950824101,
    1152859999,
    2990299673,
    8197021109,
    2184824123,
    4309539167,
    1742841137,
    9113517421,
    4752058561,
    5594292329,
    9565022153,
    8519292151,
    6553311239,
    5204301593,
    8405487593,
    1987918357,
    3175759277,
    5659428917,
    6611421781,
    8765753053,
    3781235599,
    5651365571,
    8399394649,
    3867050417,
    3258145379,
    9836441977,
    2499690049,
    2742615479,
    7720787857,
    6135275183,
    9580731373,
    1860360379,
    2879750459,
    4302251633,
    8019104719,
    3889658671,
    7242891343,
    2516043583,
    8081336113,
    7718332591,
    4940550151,
    2216825899,
    7387662781,
    5562762407,
    2486416781,
    9111045257,
    1197705721,
    6649659239,
    6110149477,
    4548595937,
    3169540631,
    8993669479,
    6444114251,
    3098519969,
    1609592407,
    5803463207,
    8385117647,
    3056488453,
    1046337653,
    8165632597,
]
class PerlinNoise(object):
    """
    One-dimensional Perlin-style value noise.

    A deterministic integer hash seeded by three primes is smoothed,
    cosine-interpolated, and summed over several octaves with
    geometrically decreasing amplitude.
    """

    def __init__(
        self,
        persistence=0.5,
        number_of_octaves=4,
        prime_1=15731,
        prime_2=789221,
        prime_3=1376312589,
    ):
        # persistence scales each successive octave's amplitude;
        # the three primes seed the integer hash in noise().
        self.persistence = persistence
        self.number_of_octaves = number_of_octaves
        self.prime_1 = prime_1
        self.prime_2 = prime_2
        self.prime_3 = prime_3

    def cosineInterpolation(self, a, b, x):
        """Cosine-ease between a (at x=0) and b (at x=1)."""
        ft = x * 3.1415927
        f = (1.0 - math.cos(ft)) * 0.5
        return a * (1 - f) + b * f

    def noise(self, x):
        """Deterministic pseudo-random value in (-1, 1] for integer x."""
        # NOTE(review): the classic reference hash is
        # x*(x*x*prime_1 + prime_2) + prime_3; here prime_1 and prime_2
        # are *added* to x*x instead.  Preserved as-is so existing output
        # files stay reproducible -- confirm against the intended source.
        x = (x << 13) ^ x
        return (
            1.0
            - ((x * (x * x + self.prime_1 + self.prime_2) + self.prime_3) & 0x7FFFFFFF)
            / 1073741824.0
        )

    def smoothedNoise(self, x):
        """Average x's noise with its two integer neighbours (1/2, 1/4, 1/4)."""
        return self.noise(x) / 2.0 + self.noise(x - 1) / 4.0 + self.noise(x + 1) / 4.0

    def interpolatedNoise(self, x):
        """Cosine-interpolated smoothed noise at fractional position x."""
        int_x = int(x)
        frac_x = x - int_x
        v1 = self.smoothedNoise(int_x)
        v2 = self.smoothedNoise(int_x + 1)
        return self.cosineInterpolation(v1, v2, frac_x)

    def perlinNoise1D(self, x):
        """Sum the octaves of interpolated noise at position x."""
        total = 0.0
        # NOTE(review): range(n - 1) evaluates one octave fewer than
        # number_of_octaves suggests; preserved to keep outputs stable.
        for octave in range(self.number_of_octaves - 1):
            frequency = math.pow(2, octave)
            amplitude = math.pow(self.persistence, octave)
            total += self.interpolatedNoise(x * frequency) * amplitude
        return total

    @staticmethod
    def isPrime(num):
        """Trial-division primality test.

        Bug fix: the previous version reported every num < 2 (0, 1 and
        negatives) as prime, and math.sqrt raised on negative input.
        """
        if num < 2:
            return False
        for j in range(2, int(math.sqrt(num) + 1)):
            if (num % j) == 0:
                return False
        return True

    @staticmethod
    def createPrime(digits=10):
        """Return a random prime with the requested number of digits."""
        low = int("1" + "0" * (digits - 1))
        high = int("9" * digits)
        if low == 1:
            low = 2  # single-digit case: 1 is not prime
        while True:
            num = random.randint(low, high)
            if PerlinNoise.isPrime(num):
                return num
def createObservationFile(report_step, observation, count, std=0.2):
    """
    Write 'perlin_obs_<report_step>.txt' containing count lines of
    "<noise> <std>" pairs, sampling the observation at x = index / 8.0.
    """
    path = "perlin_obs_%d.txt" % report_step
    with open(path, "w") as out:
        for index in range(count):
            sample = observation.perlinNoise1D(index / 8.0)
            out.write("%f %f\n" % (sample, std))
def readParameters(filename):
    """
    Parse "KEY: value" lines from filename into a dict of floats.

    Robustness fixes: blank/whitespace-only lines are skipped (they used
    to raise ValueError on unpacking), and keys are stripped of
    surrounding whitespace so " KEY : 1" is read as "KEY".
    """
    params = {}
    with open(filename, "r") as f:
        for line in f:
            if not line.strip():
                continue  # tolerate blank lines (e.g. trailing newline)
            key, value = line.split(":", 1)
            params[key.strip()] = float(value)
    return params
if __name__ == "__main__":
    # Number of samples written per report step.
    count = 100
    # primes = []
    # for p in range(128):
    #     primes.append(str(PerlinNoise.createPrime(7)))
    #
    # print(",".join(primes))
    # One fixed reference ("observation") noise generator per report step.
    observations = {
        1: PerlinNoise(prime_1=15731, prime_2=789221, prime_3=1376312589),
        2: PerlinNoise(prime_1=8831, prime_2=1300237, prime_3=32416187567),
        3: PerlinNoise(prime_1=10657, prime_2=105767, prime_3=2902956923),
    }
    for report_step in observations:
        observation = observations[report_step]
        # createObservationFile(report_step, observation, count)
        # Run parameters are re-read for every report step; the PRIME_*
        # entries are float indices into the PRIME_INDEX_* tables.
        params = readParameters("perlin_params.txt")
        scale = params["SCALE"]
        offset = params["OFFSET"]
        octaves = int(round(params["OCTAVES"]))
        persistence = params["PERSISTENCE"]
        p1_index = int(round(params["PRIME_1"]))
        p2_index = int(round(params["PRIME_2"]))
        p3_index = int(round(params["PRIME_3"]))
        with open("perlin_%d.txt" % report_step, "w") as f:
            P1 = PRIME_INDEX_1[p1_index]
            P2 = PRIME_INDEX_2[p2_index]
            P3 = PRIME_INDEX_3[p3_index]
            # P1 = PerlinNoise.createPrime()
            # P2 = PerlinNoise.createPrime()
            # P3 = PerlinNoise.createPrime()
            report_step_noise = PerlinNoise(
                persistence=persistence,
                number_of_octaves=octaves,
                prime_1=P1,
                prime_2=P2,
                prime_3=P3,
            )
            # Each output line is reference noise plus offset plus scaled
            # parameterized noise, sampled at x = i / 8.0.
            for i in range(count):
                x = i / 8.0
                obs = observation.perlinNoise1D(x)
                noise = report_step_noise.perlinNoise1D(x)
                f.write("%f\n" % (obs + offset + noise * scale))
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the optional free-text 'dataID' field to UserProfile."""
    dependencies = [
        ('database', '0005_field_changes'),
    ]
    operations = [
        migrations.AddField(
            model_name='userprofile',
            name='dataID',
            # blank=True makes the field optional in forms/admin.
            field=models.CharField(blank=True, max_length=200),
        ),
    ]
|
from Content import *
from Display import *
from KeyProcessor import *
class TextEntry( DisplayGenerator ):
    """
    Display generator implementing remote-control text entry.

    Characters can be entered two ways:
    - arrow up/down cycles the character at the cursor position
    - digit keys cycle phone-style through their letters (e.g. pressing
      '2' three times leaves a 'C' showing)

    Pressing right locks in the current character; pressing right on a
    fresh position (or pressing OK) validates the text and hands it to
    accept().
    """

    kBlock = '_'                # Character to show at the next position

    #
    # Characters to cycle through for a given digit (eg. pressing '2' three
    # times will leave a 'C' showing)
    #
    kDigits = [ '',
                '1234567890',
                'ABC2',
                'DEF3',
                'GHI4',
                'JKL5',
                'MNO6',
                'PQRS7',
                'TUV8',
                'WXYZ9' ]

    def __init__( self, client, prevLevel, title, minCount = 3,
                  acceptor = None ):
        """
        Initialize the text entry screen.

        client, prevLevel -- passed through to DisplayGenerator
        title             -- text shown on the first display line
        minCount          -- minimum accepted text length
        acceptor          -- optional callable given the final text
        """
        DisplayGenerator.__init__( self, client, prevLevel )
        self.title = title
        self.minCount = minCount
        self.acceptor = acceptor
        self.reset()

    #
    # Override of Display method. Enable all arrow keys
    #
    def fillKeyMap( self ):
        """Install arrow-key and OK-key handlers on top of the defaults."""
        DisplayGenerator.fillKeyMap( self )
        self.addKeyMapEntry( kArrowUp, ( kModFirst, kModRepeat ), self.up )
        self.addKeyMapEntry( kArrowDown, ( kModFirst, kModRepeat ), self.down )
        self.addKeyMapEntry( kArrowRight, ( kModFirst, ), self.right )
        self.addKeyMapEntry( kOK, (kModFirst, ), self.validateAndAccept )

    #
    # Reset the search parameters to an initial state
    #
    def reset( self ):
        """Clear the entered text and all digit-cycling state."""
        self.text = self.kBlock
        self.lastDigit = None
        self.digitIndex = 0
        self.stack = None

    #
    # Generate the search screen
    #
    def generate( self ):
        """Return the two-line screen content: title and current text."""
        return Content( [ self.title, self.text ] )

    def updateLastCharacter( self, value ):
        """Replace the character at the cursor (last) position."""
        self.text = self.text[ : -1 ] + value

    #
    # Show the next character in the current position.
    #
    def up( self ):
        """Cycle the cursor character forward (A..Z then 0..9, wrapping)."""
        value = self.text[ -1 ]
        if value == 'Z':
            value = '0'
        elif value == '9' or value == self.kBlock:
            value = 'A'
        else:
            value = chr( ord( value ) + 1 )
        self.updateLastCharacter( value )
        return self

    #
    # Show the previous character in the current position.
    #
    def down( self ):
        """Cycle the cursor character backward (9..0 then Z..A, wrapping)."""
        value = self.text[ -1 ]
        if value == 'A':
            value = '9'
        elif value == '0' or value == self.kBlock:
            value = 'Z'
        else:
            value = chr( ord( value ) - 1 )
        self.updateLastCharacter( value )
        return self

    #
    # Process digit keys in a way similar to some text message input systems,
    # where pressing the same digit key cycles through the letters associated
    # with the key.
    #
    def digit( self, digit ):
        """Handle a digit key press, cycling through its kDigits letters."""
        #
        # Treat zero as a reset.
        #
        if digit == 0:
            self.reset()
            return self
        values = self.kDigits[ digit ]
        if digit == self.lastDigit:
            # Same key pressed again: advance (and wrap) within its letters.
            index = self.digitIndex + 1
            if index == len( values ):
                index = 0
        else:
            index = 0
        value = values[ index ]
        self.digitIndex = index
        self.lastDigit = digit
        self.updateLastCharacter( value )
        return self

    #
    # Add a character to the text. If done twice in a row, invoke submit()
    #
    def right( self ):
        """Lock in the current character, or submit if already locked in."""
        #
        # If the last character is not a 'new' character, then save the current
        # state digit processing state and add a 'new' character
        #
        if self.terminateText():
            return self
        return self.validateAndAccept()

    def terminateText( self ):
        """
        If the cursor holds a real character, push the digit-cycling state
        onto the undo stack, append a fresh kBlock cursor, and return True.
        Return False when the cursor is already fresh.
        """
        if self.text[ -1 ] != self.kBlock:
            self.stack = ( self.lastDigit, self.digitIndex, self.stack )
            self.text += self.kBlock
            self.lastDigit = None
            self.digitIndex = 0
            return True
        return False

    def validateAndAccept( self ):
        """Finish entry: validate the text and pass it to accept()."""
        self.terminateText()
        #
        # Strip off the 'new' character and hand the text to the previous
        # level's 'accept' method.
        #
        text = self.text[ : -1 ]
        if not self.validate( text ):
            return self
        return self.accept( text )

    def validate( self, text ):
        """Accept only texts of at least minCount characters."""
        return len( text ) >= self.minCount

    def accept( self, text ):
        """Hand the finished text to the acceptor, if one was given."""
        if self.acceptor:
            return self.acceptor( text )
        # Bug fix: this was the Python-2-only statement form
        # `raise NotImplementedError, 'accept'` (a SyntaxError under
        # Python 3); the call form is equivalent and valid in both.
        raise NotImplementedError( 'accept' )

    #
    # Override of DisplayGenerator method. Erase characters from the search
    # display. If none left, show the previous level.
    #
    def left( self ):
        """Delete the last character, restoring the saved digit state."""
        if len( self.text ) == 1:
            self.text = self.kBlock
            return DisplayGenerator.left( self )
        self.text = self.text[ : -1 ]
        self.lastDigit, self.digitIndex, self.stack = self.stack
        return self
|
"""Handle scheduling of polling jobs."""
import logging
import datetime
import time
from operator import itemgetter
from collections import defaultdict
from random import randint
from math import ceil
from twisted.python.failure import Failure
from twisted.internet import task, reactor
from twisted.internet.defer import Deferred
from twisted.internet.task import LoopingCall
from twisted.python.log import err
from django.utils.six import iteritems
from nav import ipdevpoll
from nav.ipdevpoll import db
from nav.ipdevpoll.snmp import SnmpError, AgentProxy
from nav.metrics.carbon import send_metrics
from nav.metrics.templates import metric_prefix_for_ipdevpoll_job
from nav.tableformat import SimpleTableFormatter
from nav.ipdevpoll.utils import log_unhandled_failure
from . import shadows, config, signals
from .dataloader import NetboxLoader
from .jobs import JobHandler, AbortedJobError, SuggestedReschedule
_logger = logging.getLogger(__name__)
class NetboxJobScheduler(object):
    """Netbox job schedule handler.

    An instance of this class takes care of scheduling, running and
    rescheduling of a single JobHandler for a single netbox.
    """

    # Class-level (shared) bookkeeping across all instances:
    # job name -> number of currently running jobs of that name
    job_counters = {}
    # job name -> list of schedulers waiting on that job's intensity limit
    job_queues = {}
    # schedulers waiting on the global intensity limit
    global_job_queue = []
    global_intensity = config.ipdevpoll_conf.getint('ipdevpoll', 'max_concurrent_jobs')
    _logger = ipdevpoll.ContextLogger()

    def __init__(self, job, netbox, pool):
        self.job = job
        self.netbox = netbox
        self.pool = pool
        self._log_context = dict(job=job.name, sysname=netbox.sysname)
        self._logger.debug(
            "initializing %r job scheduling for %s", job.name, netbox.sysname
        )
        self.cancelled = False
        self._deferred = Deferred()
        self._next_call = None
        self._last_job_started_at = 0
        self.running = False
        self._start_time = None
        self._current_job = None
        # Indirection point so tests can substitute the reactor call.
        self.callLater = reactor.callLater

    def get_current_runtime(self):
        """Returns time elapsed since the start of the job as a timedelta."""
        return datetime.datetime.now() - self._start_time

    def start(self):
        """Start polling schedule."""
        self._next_call = self.callLater(0, self.run_job)
        return self._deferred

    def cancel(self):
        """Cancel scheduling of this job for this box.

        Future runs will not be scheduled after this.

        """
        if self.cancelled:
            self._logger.debug(
                "cancel: Job %r already cancelled for %s",
                self.job.name,
                self.netbox.sysname,
            )
            return
        if self._next_call.active():
            self._next_call.cancel()
            self._logger.debug(
                "cancel: Job %r cancelled for %s", self.job.name, self.netbox.sysname
            )
        else:
            self._logger.debug(
                "cancel: Job %r cancelled for %s, " "though no next run was scheduled",
                self.job.name,
                self.netbox.sysname,
            )
        self.cancelled = True
        self.cancel_running_job()
        # Fires the deferred returned by start() to signal completion.
        self._deferred.callback(self)

    def cancel_running_job(self):
        """Ask the worker pool to cancel the currently executing job, if any."""
        if self._current_job:
            self._logger.debug('Cancelling running job')
            self.pool.cancel(self._current_job)

    def run_job(self, dummy=None):
        """Run the job now, or queue it if an intensity limit is reached."""
        if self.is_running():
            self._logger.info(
                "Previous %r job is still running for %s, " "not running again now.",
                self.job.name,
                self.netbox.sysname,
            )
            return
        if self.is_job_limit_reached():
            self._logger.debug(
                "intensity limit reached for %s - waiting to " "run for %s",
                self.job.name,
                self.netbox.sysname,
            )
            self.queue_myself(self.get_job_queue())
            return
        if self.is_global_limit_reached():
            self._logger.debug(
                "global intensity limit reached - waiting to " "run for %s",
                self.netbox.sysname,
            )
            self.queue_myself(self.global_job_queue)
            return
        # We're ok to start a polling run.
        try:
            self._start_time = datetime.datetime.now()
            deferred = self.pool.execute_job(
                self.job.name,
                self.netbox.id,
                plugins=self.job.plugins,
                interval=self.job.interval,
            )
            self._current_job = deferred
        except Exception:
            self._log_unhandled_error(Failure())
            self.reschedule(60)
            return
        self.count_job()
        self._last_job_started_at = time.time()
        deferred.addErrback(self._adjust_intensity_on_snmperror)
        deferred.addCallbacks(self._reschedule_on_success, self._reschedule_on_failure)
        deferred.addErrback(self._log_unhandled_error)
        deferred.addCallback(self._unregister_handler)

    def is_running(self):
        """Returns True while a job run for this netbox is in progress."""
        return self.running

    @classmethod
    def _adjust_intensity_on_snmperror(cls, failure):
        """Lower the global intensity limit when jobs abort on SNMP errors."""
        if failure.check(AbortedJobError) and isinstance(
            failure.value.cause, SnmpError
        ):
            open_sessions = AgentProxy.count_open_sessions()
            # Back off to 90% of the currently open session count.
            new_limit = int(ceil(open_sessions * 0.90))
            if new_limit < cls.global_intensity:
                cls._logger.warning("Setting global intensity limit to %d", new_limit)
                cls.global_intensity = new_limit
        return failure

    def _update_counters(self, success):
        """Increment the success/failure Carbon counter for this job.

        NOTE(review): _COUNTERS is not defined in this chunk -- presumably
        a module-level counter registry defined elsewhere in this file;
        verify before refactoring.
        """
        prefix = metric_prefix_for_ipdevpoll_job(self.netbox.sysname, self.job.name)
        counter_path = prefix + (".success-count" if success else ".failure-count")
        _COUNTERS.increment(counter_path)
        _COUNTERS.start()

    def _reschedule_on_success(self, result):
        """Reschedules the next normal run of this job."""
        delay = max(0, self.job.interval - self.get_runtime())
        self.reschedule(delay)
        if result:
            self._log_finished_job(True)
        else:
            self._logger.debug("job did nothing")
        # NOTE(review): a did-nothing run passes None here, which counts
        # as a *failure* in _update_counters -- confirm this is intended.
        self._update_counters(True if result else None)
        return result

    def _reschedule_on_failure(self, failure):
        """Examines the job failure and reschedules the job if needed."""
        if failure.check(SuggestedReschedule):
            delay = int(failure.value.delay)
        else:
            # within 5-10 minutes, but no longer than set interval
            delay = min(self.job.interval, randint(5 * 60, 10 * 60))
        self.reschedule(delay)
        self._log_finished_job(False)
        self._update_counters(False)
        # Swallow only AbortedJobError; anything else propagates.
        failure.trap(AbortedJobError)

    def _log_finished_job(self, success=True):
        """Log completion status, runtime, and time of the next run."""
        status = "completed" if success else "failed"
        runtime = datetime.timedelta(seconds=self.get_runtime())
        next_time = self.get_time_to_next_run()
        if next_time is not None:
            if next_time <= 0:
                delta = "right now"
            else:
                delta = "in %s" % datetime.timedelta(seconds=next_time)
            self._logger.info(
                "%s for %s %s in %s. next run %s.",
                self.job.name,
                self.netbox.sysname,
                status,
                runtime,
                delta,
            )
        else:
            self._logger.info("%s in %s. no next run scheduled", status, runtime)

    def get_runtime(self):
        """Returns the number of seconds passed since the start of last job"""
        return time.time() - self._last_job_started_at

    def get_time_to_next_run(self):
        """Returns the number of seconds until the next job starts"""
        if self._next_call.active():
            return self._next_call.getTime() - time.time()

    def reschedule(self, delay):
        """Reschedules the next run of of this job"""
        if self.cancelled:
            self._logger.debug("ignoring request to reschedule cancelled job")
            return
        next_time = datetime.datetime.now() + datetime.timedelta(seconds=delay)
        self._logger.debug(
            "Next %r job for %s will be in %d seconds (%s)",
            self.job.name,
            self.netbox.sysname,
            delay,
            next_time,
        )
        if self._next_call.active():
            self._next_call.reset(delay)
        else:
            self._next_call = self.callLater(delay, self.run_job)

    def _log_unhandled_error(self, failure):
        """Log any failure except deliberate DB connection resets."""
        if not failure.check(db.ResetDBConnectionError):
            log_unhandled_failure(
                self._logger, failure, "Unhandled exception raised by JobHandler"
            )

    def _unregister_handler(self, result):
        """Remove a JobHandler from internal data structures."""
        if self.running:
            self.uncount_job()
            # Give waiting schedulers a chance to run now that a slot freed.
            self.unqueue_next_job()
            self.unqueue_next_global_job()
        return result

    def count_job(self):
        """Mark this job as running and bump its shared per-name counter."""
        current_count = self.__class__.job_counters.get(self.job.name, 0)
        current_count += 1
        self.__class__.job_counters[self.job.name] = current_count
        self.running = True

    def uncount_job(self):
        """Mark this job as finished and decrement its shared counter."""
        current_count = self.__class__.job_counters.get(self.job.name, 0)
        current_count -= 1
        # Clamp at zero to guard against double uncounting.
        self.__class__.job_counters[self.job.name] = max(current_count, 0)
        self.running = False
        self._current_job = None

    def get_job_count(self):
        """Return the number of currently running jobs with this name."""
        return self.__class__.job_counters.get(self.job.name, 0)

    def is_job_limit_reached(self):
        "Returns True if the number of jobs >= the job intensity limit"
        return self.job.intensity > 0 and self.get_job_count() >= self.job.intensity

    @classmethod
    def is_global_limit_reached(cls):
        "Returns True if the global number of jobs >= global intensity limit"
        return cls.get_global_job_count() >= cls.global_intensity

    @classmethod
    def get_global_job_count(cls):
        """Return the total number of running jobs across all job names."""
        if cls.job_counters:
            return sum(cls.job_counters.values())
        else:
            return 0

    def queue_myself(self, queue):
        """Append this scheduler to the given wait queue."""
        queue.append(self)

    def unqueue_next_job(self):
        "Unqueues the next waiting job"
        queue = self.get_job_queue()
        if queue and not self.is_job_limit_reached():
            handler = queue.pop(0)
            return handler.start()

    @classmethod
    def unqueue_next_global_job(cls):
        "Unqueues the next job waiting because of the global intensity setting"
        if not cls.is_global_limit_reached():
            # Pick the first queued handler whose own job limit allows it.
            for index, handler in enumerate(cls.global_job_queue):
                if not handler.is_job_limit_reached():
                    del cls.global_job_queue[index]
                    return handler.start()

    def get_job_queue(self):
        """Return (creating if needed) the shared wait queue for this job name."""
        if self.job.name not in self.job_queues:
            self.job_queues[self.job.name] = []
        return self.job_queues[self.job.name]
class JobScheduler(object):
    """Schedules runs of a single job type across all netboxes.

    One instance is created per job descriptor.  Each instance keeps its
    list of netboxes up to date via a periodic reload loop and maintains
    one NetboxJobScheduler per active netbox.
    """

    # Every JobScheduler instance ever created (one per job descriptor).
    active_schedulers = set()
    # Class-wide LoopingCall that periodically logs the running jobs.
    job_logging_loop = None
    netbox_reload_interval = 2 * 60.0  # seconds
    # Per-instance LoopingCall reloading the netbox list (created lazily).
    netbox_reload_loop = None
    _logger = ipdevpoll.ContextLogger()

    def __init__(self, job, pool):
        """Initializes a job schedule from the job descriptor."""
        self._log_context = dict(job=job.name)
        self.job = job
        self.pool = pool
        self.netboxes = NetboxLoader()
        # Maps netbox id -> NetboxJobScheduler for currently scheduled boxes.
        self.active_netboxes = {}

        self.active_schedulers.add(self)

    @classmethod
    def initialize_from_config_and_run(cls, pool, onlyjob=None):
        """Creates and runs a scheduler for each configured job.

        :param onlyjob: If given, only the job with this name is scheduled.
        """
        descriptors = config.get_jobs()
        schedulers = [
            JobScheduler(d, pool)
            for d in descriptors
            if not onlyjob or (d.name == onlyjob)
        ]
        for scheduler in schedulers:
            scheduler.run()

    def run(self):
        """Initiate scheduling of this job."""
        signals.netbox_type_changed.connect(self.on_netbox_type_changed)
        self._setup_active_job_logging()
        self._start_netbox_reload_loop()

    def _start_netbox_reload_loop(self):
        # (Re)start the periodic netbox reload loop, creating the
        # LoopingCall on first use and stopping a running instance first.
        if not self.netbox_reload_loop:
            self.netbox_reload_loop = task.LoopingCall(self._reload_netboxes)
        if self.netbox_reload_loop.running:
            self.netbox_reload_loop.stop()

        def die_on_unhandled_failure(failure):
            # An unhandled error in the reload loop is considered fatal:
            # log it and shut down the whole process.
            err(failure, "Unhandled failure in data reload loop, stopping ipdevpoll")
            if reactor.running:
                reactor.callLater(0, reactor.stop)

        deferred = self.netbox_reload_loop.start(
            interval=self.netbox_reload_interval, now=True
        )
        deferred.addErrback(die_on_unhandled_failure)

    def on_netbox_type_changed(self, netbox_id, new_type, **_kwargs):
        """Performs various cleanup and reload actions on a netbox type change
        signal.

        The netbox' data are cleaned up, and the next netbox data reload is
        scheduled to take place immediately.
        """
        sysname = (
            netbox_id in self.netboxes
            and self.netboxes[netbox_id].sysname
            or str(netbox_id)
        )
        self._logger.info("Cancelling all jobs for %s due to type change.", sysname)
        self.cancel_netbox_scheduler(netbox_id)
        df = db.run_in_thread(
            shadows.Netbox.cleanup_replaced_netbox, netbox_id, new_type
        )
        return df.addCallback(lambda x: self._start_netbox_reload_loop())

    def _setup_active_job_logging(self):
        # Start the class-wide job logging loop exactly once, regardless of
        # how many scheduler instances exist.
        if self.__class__.job_logging_loop is None:
            loop = task.LoopingCall(self.__class__.log_active_jobs)
            self.__class__.job_logging_loop = loop
            loop.start(interval=5 * 60.0, now=False)

    def _reload_netboxes(self):
        """Reload the set of netboxes to poll and update schedules."""
        deferred = self.netboxes.load_all()
        deferred.addCallbacks(
            self._process_reloaded_netboxes, self._handle_reload_failures
        )
        db.django_debug_cleanup()
        return deferred

    def _process_reloaded_netboxes(self, result):
        """Process the result of a netbox reload and update schedules."""
        (new_ids, removed_ids, changed_ids) = result

        # Deschedule removed and changed boxes
        for netbox_id in removed_ids.union(changed_ids):
            self.cancel_netbox_scheduler(netbox_id)

        # Schedule new and changed boxes
        def _lastupdated(netboxid):
            # Boxes never updated for this job sort first (datetime.min).
            return self.netboxes[netboxid].last_updated.get(
                self.job.name, datetime.datetime.min
            )

        new_and_changed = sorted(new_ids.union(changed_ids), key=_lastupdated)
        for netbox_id in new_and_changed:
            self.add_netbox_scheduler(netbox_id)

    def _handle_reload_failures(self, failure):
        # Only a reset DB connection is an expected failure here; anything
        # else propagates to the loop's errback.
        failure.trap(db.ResetDBConnectionError)
        self._logger.error(
            "Reloading the IP device list failed because the "
            "database connection was reset"
        )

    def add_netbox_scheduler(self, netbox_id):
        """Creates, registers and starts a job scheduler for one netbox."""
        netbox = self.netboxes[netbox_id]
        scheduler = NetboxJobScheduler(self.job, netbox, self.pool)
        self.active_netboxes[netbox_id] = scheduler
        return scheduler.start()

    def cancel_netbox_scheduler(self, netbox_id):
        """Cancels and deregisters the scheduler for one netbox, if any."""
        if netbox_id not in self.active_netboxes:
            return
        scheduler = self.active_netboxes[netbox_id]
        scheduler.cancel()
        del self.active_netboxes[netbox_id]

    @classmethod
    def reload(cls):
        """Reload netboxes for all jobs"""
        for scheduler in cls.active_schedulers:
            scheduler._reload_netboxes()

    @classmethod
    def log_active_jobs(cls, level=logging.DEBUG):
        """Debug logs a list of running job handlers.

        The handlers will be sorted by descending runtime.
        """
        jobs = [
            (
                netbox_scheduler.netbox.sysname,
                netbox_scheduler.job.name,
                netbox_scheduler.get_current_runtime(),
            )
            for scheduler in cls.active_schedulers
            for netbox_scheduler in scheduler.active_netboxes.values()
            if netbox_scheduler.is_running()
        ]
        jobs.sort(key=itemgetter(2), reverse=True)
        table_formatter = SimpleTableFormatter(jobs)

        _logger = logging.getLogger("%s.joblist" % __name__)
        if jobs:
            _logger.log(
                level, "currently active jobs (%d):\n%s", len(jobs), table_formatter
            )
        else:
            _logger.log(
                level,
                "no active jobs (%d JobHandlers)",
                JobHandler.get_instance_count(),
            )
class CounterFlusher(defaultdict):
    """
    A dictionary of counters that can be incremented and be flushed as
    Graphite metrics at specific intervals.
    """

    def __init__(self, interval=60):
        """
        Initialize a dictionary of counters.

        :param interval: How often (in seconds) to flush the counters to
                         a Carbon backend.
        """
        super(CounterFlusher, self).__init__(int)
        self.loop = LoopingCall(self.flush)
        self.interval = interval

    def start(self):
        """Starts the counter flushing task if it isn't running already"""
        if not self.loop.running:
            self.loop.start(self.interval, now=False)

    def increment(self, name):
        """Increments a named counter by one"""
        self[name] += 1

    def flush(self):
        """
        Flushes all the counters to the Carbon backend and resets them to zero
        """
        if not self:
            _logger.debug("no counters to flush yet")
            # Bug fix: previously fell through here, logging
            # "flushing 0 counters" and sending an empty metric list.
            return

        _logger.debug("flushing %d counters to graphite", len(self))
        metrics = []
        timestamp = time.time()
        for counter, count in iteritems(self):
            metrics.append((counter, (timestamp, count)))
            # Reset in place; changing an existing key's value is safe
            # while iterating.
            self[counter] = 0
        send_metrics(metrics)
# Module-global counter registry shared by this module's jobs.
_COUNTERS = CounterFlusher()
|
"""
A platform independent file lock that supports the with-statement.
"""
import time
import atexit
import os
import threading
# Optional dependencies: each platform provides at most one of the locking
# backends below; the matching FileLock implementation is selected at
# import time (see the if/elif/else chain further down).
try:
    import warnings
except ImportError:
    warnings = None

try:
    import msvcrt
except ImportError:
    msvcrt = None

try:
    import fcntl
except ImportError:
    fcntl = None

# Compatibility shim: TimeoutError is a builtin only on Python 3; fall
# back to OSError on Python 2.
try:
    TimeoutError
except NameError:
    TimeoutError = OSError

__all__ = ["Timeout", "FileLock"]

__version__ = "1.0.3"
class Timeout(TimeoutError):
    """
    Raised when the lock could not be acquired in *timeout* seconds.
    """

    def __init__(self, lock_file):
        # Path of the file lock that could not be acquired.
        self.lock_file = lock_file

    def __str__(self):
        return "The file lock '{}' could not be acquired.".format(self.lock_file)
class BaseFileLock(object):
    """
    Implements the base class of a file lock.

    The file lock counts how often you acquired the filelock and will
    release it only if *release* has been called as often as *acquire*.

    Usage:

    .. code-block:: python

        with BaseFileLock("afile"):
            pass

    or if you need to specify a timeout:

    .. code-block:: python

        with BaseFileLock("afile").acquire(5):
            pass

    The lock counter works like this:

    .. code-block:: python

        lock = BaseFileLock("afile")
        with lock:
            with lock:
                pass
            assert lock.is_locked()
    """

    def __init__(self, lock_file):
        # The path of the file used as the lock.
        self._lock_file = lock_file
        # The file descriptor of the open lock file; None while unlocked.
        self._lock_file_fd = None

        # We use this lock primarily for the lock counter.
        self._thread_lock = threading.Lock()
        # How many times acquire() has been called without a matching
        # release(); the OS lock is dropped only when this reaches zero.
        self._lock_counter = 0

        # Make sure the lock is released at interpreter exit.
        atexit.register(self.release)
        return None

    # Read-only access to the lock file path.
    lock_file = property(lambda self: self._lock_file)

    # Platform dependent locking
    # --------------------------------------------

    def _acquire(self):
        """
        Platform dependent. If the file lock could be
        acquired, self._lock_file_fd holds the file descriptor
        of the lock file.
        """
        raise NotImplementedError()

    def _release(self):
        """
        Releases the lock and sets self._lock_file_fd to None.
        """
        raise NotImplementedError()

    # Platform independent methods
    # --------------------------------------------

    def is_locked(self):
        """
        Returns true, if the object holds the file lock.
        """
        return self._lock_file_fd is not None

    def acquire(self, timeout=None, poll_intervall=0.05):
        """
        Tries every *poll_intervall* seconds to acquire the lock.

        If the lock could not be acquired after *timeout* seconds,
        a Timeout exception will be raised.

        If *timeout* is ``None``, there's no time limit.
        """
        # Increment the number right at the beginning.
        # We can still undo it, if something fails.
        with self._thread_lock:
            self._lock_counter += 1

        try:
            start_time = time.time()
            while True:
                with self._thread_lock:
                    if not self.is_locked():
                        self._acquire()

                if self.is_locked():
                    break
                elif (timeout is not None and
                        time.time() - start_time > timeout):
                    raise Timeout(self._lock_file)
                else:
                    time.sleep(poll_intervall)
        except:
            # Something did go wrong, so decrement the counter.
            # (Bare except is deliberate: the counter must be undone even
            # for KeyboardInterrupt/SystemExit; the exception is re-raised.)
            with self._thread_lock:
                self._lock_counter = max(0, self._lock_counter - 1)
            raise
        return self

    def release(self, force=False):
        """
        Releases the file lock.

        :arg bool force:
            If true, the lock counter is ignored and the lock is released in
            every case.
        """
        with self._thread_lock:
            if self.is_locked():
                self._lock_counter -= 1

                # Only drop the OS-level lock when the last nested
                # acquisition is released (or when forced).
                if self._lock_counter == 0 or force:
                    self._release()
                    self._lock_counter = 0
        return None

    def __enter__(self):
        self.acquire()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.release()
        return None

    def __del__(self):
        # Best-effort release when garbage collected.
        self.release()
        return None
# Platform specific FileLock implementation, selected at import time.
if msvcrt:
    # Windows: byte-range locking via msvcrt.
    class FileLock(BaseFileLock):

        def _acquire(self):
            open_mode = os.O_RDWR | os.O_CREAT  # | os.O_TRUNC
            try:
                fd = os.open(self._lock_file, open_mode)
            except OSError:
                # Could not even open the lock file; stay unlocked.
                pass
            else:
                try:
                    msvcrt.locking(fd, msvcrt.LK_NBLCK, 1)
                except OSError:
                    # Lock held by someone else; close the fd again.
                    os.close(fd)
                else:
                    self._lock_file_fd = fd
            return None

        def _release(self):
            msvcrt.locking(self._lock_file_fd, msvcrt.LK_UNLCK, 1)
            os.close(self._lock_file_fd)
            self._lock_file_fd = None

            # try:
            #     os.remove(self._lock_file)
            # # Probably another instance of the application
            # # that acquired the file lock.
            # except OSError:
            #     pass
            return None

elif fcntl:
    # POSIX: advisory locking via fcntl.flock.
    class FileLock(BaseFileLock):

        def _acquire(self):
            open_mode = os.O_RDWR | os.O_CREAT  # | os.O_TRUNC
            fd = os.open(self._lock_file, open_mode)
            try:
                fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
            except (IOError, OSError):
                # Lock held by someone else; close the fd again.
                os.close(fd)
            else:
                self._lock_file_fd = fd
            return None

        def _release(self):
            fcntl.flock(self._lock_file_fd, fcntl.LOCK_UN)
            os.close(self._lock_file_fd)
            self._lock_file_fd = None

            # try:
            #     os.remove(self._lock_file)
            # # Probably another instance of the application
            # # that acquired the file lock.
            # except OSError:
            #     pass
            # return None

else:
    # Fallback: a "soft" lock based on exclusive file creation only.
    class FileLock(BaseFileLock):

        def _acquire(self):
            open_mode = os.O_WRONLY | os.O_CREAT | os.O_EXCL | os.O_TRUNC
            try:
                fd = os.open(self._lock_file, open_mode)
            except (IOError, OSError):
                # The lock file already exists: someone else holds the lock.
                pass
            else:
                self._lock_file_fd = fd
            return None

        def _release(self):
            os.close(self._lock_file_fd)
            self._lock_file_fd = None

            # try:
            #     os.remove(self._lock_file)
            # # The file is already deleted and that's what we want.
            # except OSError:
            #     pass
            return None

    if warnings is not None:
        warnings.warn("only soft file lock is available")
|
import xbmcaddon
import os
# Schedule the addon's maintenance script to run via Kodi's builtin
# AlarmClock (0-minute delay, i.e. effectively immediately, silent=True).
try:
    addonPath = xbmcaddon.Addon(id = 'plugin.video.hubmaintenance').getAddonInfo('path')
    name = 'Maintenance'
    script = os.path.join(addonPath, 'maintenance.py')
    version = 1
    args = str(version)
    cmd = 'AlarmClock(%s,RunScript(%s,%s),%d,True)' % (name.encode('utf-8', 'replace'), script.encode('utf-8', 'replace'), args.encode('utf-8', 'replace'), 0)
    # NOTE(review): 'xbmc' is used here but only 'xbmcaddon' is imported
    # above -- verify 'import xbmc' exists elsewhere, otherwise this raises
    # NameError, which the broad except below silently swallows.
    xbmc.executebuiltin(cmd)
except Exception:
    # Best-effort: never let maintenance scheduling break addon startup.
    pass
|
import os
import ray
import ray.tune as tune
import torch
from nupic.research.frameworks.dynamic_sparse.common.loggers import DEFAULT_LOGGERS
from nupic.research.frameworks.dynamic_sparse.common.utils import (
Trainable,
download_dataset,
)
# Experiment configuration passed (via Ray Tune) to each Trainable trial.
# Every tune.grid_search entry expands into one trial per listed value.
exp_config = dict(
    device="cuda",  # overridden to "cpu" below when CUDA is unavailable
    # dataset related
    dataset_name="CIFAR10",
    input_size=(3, 32, 32),
    num_classes=10,
    stats_mean=(0.4914, 0.4822, 0.4465),
    stats_std=(0.2023, 0.1994, 0.2010),
    data_dir="~/nta/datasets",
    augment_images=tune.grid_search([True, False]),
    # model related
    model=tune.grid_search(["DynamicRep", "SparseModel", "BaseModel"]),
    # model="SparseModel",
    network="Wide_ResNet",
    dropout_rate=0,
    depth=28,
    widen_factor=2,
    # optimizer related
    optim_alg="SGD",
    momentum=0.9,
    learning_rate=0.1,
    weight_decay=5e-4,
    lr_scheduler="MultiStepLR",
    lr_milestones=[60, 120, 160],  # 2e-2, 4e-3, 8-e4
    lr_gamma=0.20,
    # sparse related
    on_perc=0.2,
    zeta=0.2,
    start_sparse=1,
    end_sparse=None,
    # debugging
    debug_weights=True,
    debug_sparse=True,
)
# Ray Tune run configuration (experiment name, resources, stop criterion).
tune_config = dict(
    name="wideresnet-test",
    num_samples=1,
    local_dir=os.path.expanduser("~/nta/results"),
    checkpoint_freq=0,
    checkpoint_at_end=False,
    stop={"training_iteration": 200},  # 300 in cifar
    resources_per_trial={"cpu": 1, "gpu": 1},
    loggers=DEFAULT_LOGGERS,
    verbose=1,
    config=exp_config,
)

# Fall back to CPU-only execution when no CUDA device is available.
if not torch.cuda.is_available():
    exp_config["device"] = "cpu"
    tune_config["resources_per_trial"] = {"cpu": 1}

# Pre-download the dataset before workers start, then launch the sweep.
download_dataset(exp_config)
ray.init()
tune.run(Trainable, **tune_config)
|
"""Tests for utility functions used in CSV imports.
Copyright (C) 2014 A. Samuel Pottinger ("Sam Pottinger", gleap.org)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import unittest
import unittest.mock  # "import unittest" alone does not import the mock submodule

import prog_code.util.constants as constants
import prog_code.util.legacy_csv_import_util as legacy_csv_import_util
import prog_code.util.math_util as math_util
class FakePercentileTable:
    """Minimal stand-in for a percentile table, exposing only .details."""

    def __init__(self, details):
        self.details = details
class LegacyUploadParserAutomatonTests(unittest.TestCase):
def setUp(self):
self.__test_automaton = legacy_csv_import_util.UploadParserAutomaton({
constants.MALE: FakePercentileTable([-1]),
constants.FEMALE: FakePercentileTable([-2]),
constants.OTHER_GENDER: FakePercentileTable([-3])
})
def test_enter_error_state(self):
self.__test_automaton.enter_error_state('test error')
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
self.assertEqual(
self.__test_automaton.get_error(),
'test error'
)
def test_sanity_check_pass(self):
self.assertTrue(self.__test_automaton.sanity_check(['', 'test'],
'test', 1))
self.assertNotEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
self.assertNotEqual(
self.__test_automaton.get_error(),
'Expected \"test\" on row 1 but found \"test\".'
)
def test_sanity_check_fail(self):
self.assertFalse(self.__test_automaton.sanity_check(['', 'test'],
'other', 1))
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
self.assertEqual(
self.__test_automaton.get_error(),
'Expected \"other\" on row 1 but found \"test\".'
)
def test_safe_parse_float_pass(self):
self.assertTrue(self.__test_automaton.safe_parse_float('5.5', 1))
def test_safe_parse_float_fail(self):
self.assertFalse(self.__test_automaton.safe_parse_float('a', 1), 1)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
self.assertEqual(
self.__test_automaton.get_error(),
'Was expecting a number but found \"a\" on row 1.'
)
def test_safe_parse_int_pass(self):
self.assertTrue(self.__test_automaton.safe_parse_int('5', 1))
def test_safe_parse_int_fail(self):
self.assertFalse(self.__test_automaton.safe_parse_int('a', 1), 1)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
self.assertEqual(
self.__test_automaton.get_error(),
'Was expecting an integer but found \"a\" on row 1.'
)
def test_safe_parse_date_pass(self):
self.assertEqual(
self.__test_automaton.safe_parse_date('01/2/2013', 1),
'2013/1/2'
)
self.assertEqual(
self.__test_automaton.safe_parse_date('10/2/2013', 1),
'2013/10/2'
)
def test_safe_parse_date_fail(self):
self.assertFalse(self.__test_automaton.safe_parse_date('a', 1), 1)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
self.assertEqual(
self.__test_automaton.get_error(),
'Was expecting a date but found \"a\" on row 1.'
)
def test_safe_parse_date_malformed(self):
self.assertFalse(
self.__test_automaton.safe_parse_date('a/11/2012', 1),
1
)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
self.assertEqual(
self.__test_automaton.get_error(),
'Found a date (a/11/2012) but was expecting form MM/DD/YYYY on '\
'row 1.'
)
def test_safe_parse_date_invalid(self):
self.assertFalse(
self.__test_automaton.safe_parse_date('2012/11/10', 1),
1
)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
self.assertEqual(
self.__test_automaton.get_error(),
'Found a date (2012/11/10) but was expecting form MM/DD/YYYY on '\
'row 1.'
)
def test_step(self):
self.assertEqual(len(self.__test_automaton.get_prototypes()), 0)
self.__test_automaton.step(
['', 'Child\'s ID (from database)', '123', '456', '789'],
1
)
self.assertEqual(len(self.__test_automaton.get_prototypes()), 3)
def test_parse_child_db_id(self):
self.__test_automaton.set_state(legacy_csv_import_util.STATE_PARSE_CHILD_DB_ID)
self.__test_automaton.parse_child_db_id(
['', 'Child\'s ID (from database)', '123', '456', '789'],
1
)
prototypes = self.__test_automaton.get_prototypes()
self.assertEqual(len(prototypes), 3)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_PARSE_CHILD_STUDY_ID
)
self.assertEqual(prototypes[0]['child_id'], 123)
self.assertEqual(prototypes[1]['child_id'], 456)
self.assertEqual(prototypes[2]['child_id'], 789)
def test_parse_child_db_id_invalid(self):
self.__test_automaton.set_state(legacy_csv_import_util.STATE_PARSE_CHILD_DB_ID)
self.__test_automaton.parse_child_db_id(
['', 'Child\'s ID (from database)', '123a', '456', '789'],
1
)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
def test_parse_child_db_id_wrong_label(self):
self.__test_automaton.set_state(legacy_csv_import_util.STATE_PARSE_CHILD_DB_ID)
self.__test_automaton.parse_child_db_id(
['', 'Child\'s ID (from daabase)', '123', '456', '789'],
1
)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
def test_parse_child_study_id(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(
legacy_csv_import_util.STATE_PARSE_CHILD_STUDY_ID
)
self.__test_automaton.parse_child_study_id(
['', 'Name / Number', '02', '46', '80'],
1
)
prototypes = self.__test_automaton.get_prototypes()
self.assertEqual(len(prototypes), 3)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_PARSE_STUDY_AND_SOURCE
)
self.assertEqual(prototypes[0]['study_id'], '02')
self.assertEqual(prototypes[1]['study_id'], '46')
self.assertEqual(prototypes[2]['study_id'], '80')
def test_parse_child_study_id_wrong_label(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(
legacy_csv_import_util.STATE_PARSE_CHILD_STUDY_ID
)
self.__test_automaton.parse_child_study_id(
['', 'Name Number', '02', '46', '80'],
1
)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
def test_parse_study_and_source(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(
legacy_csv_import_util.STATE_PARSE_STUDY_AND_SOURCE
)
self.__test_automaton.parse_study_and_source(
['', 'Study / Source', 'Study1', 'Study2', 'Study3'],
1
)
prototypes = self.__test_automaton.get_prototypes()
self.assertEqual(len(prototypes), 3)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_PARSE_GENDER
)
self.assertEqual(prototypes[0]['study'], 'Study1')
self.assertEqual(prototypes[1]['study'], 'Study2')
self.assertEqual(prototypes[2]['study'], 'Study3')
def test_parse_study_and_source_wrong_label(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(
legacy_csv_import_util.STATE_PARSE_STUDY_AND_SOURCE
)
self.__test_automaton.parse_study_and_source(
['', 'Study Source', 'Study1', 'Study2', 'Study3'],
1
)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
def test_parse_gender_pass(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(
legacy_csv_import_util.STATE_PARSE_GENDER
)
self.__test_automaton.parse_gender(
['', 'Gender', 'M', 'F', 'O'],
1
)
prototypes = self.__test_automaton.get_prototypes()
self.assertEqual(len(prototypes), 3)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_PARSE_AGE
)
self.assertEqual(prototypes[0]['gender'], constants.MALE)
self.assertEqual(prototypes[1]['gender'], constants.FEMALE)
self.assertEqual(prototypes[2]['gender'], constants.OTHER_GENDER)
def test_parse_gender_invalid(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(
legacy_csv_import_util.STATE_PARSE_GENDER
)
self.__test_automaton.parse_gender(
['', 'Gender', '5', 'F', 'O'],
1
)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
def test_parse_gender_wrong_label(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(
legacy_csv_import_util.STATE_PARSE_GENDER
)
self.__test_automaton.parse_gender(
['', 'Genders', 'M', 'F', 'O'],
1
)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
def test_parse_age(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(legacy_csv_import_util.STATE_PARSE_AGE)
self.__test_automaton.parse_age(
['', 'Age (months)', '123.1', '456.2', '789.3'],
1
)
prototypes = self.__test_automaton.get_prototypes()
self.assertEqual(len(prototypes), 3)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_PARSE_DATE_OF_BIRTH
)
self.assertEqual(prototypes[0]['age'], 123.1)
self.assertEqual(prototypes[1]['age'], 456.2)
self.assertEqual(prototypes[2]['age'], 789.3)
def test_parse_age_invalid(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(legacy_csv_import_util.STATE_PARSE_AGE)
self.__test_automaton.parse_age(
['', 'Age (months)', 'a 123.1', '456.2', '789.3'],
1
)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
def test_parse_age_wrong_label(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(legacy_csv_import_util.STATE_PARSE_AGE)
self.__test_automaton.parse_age(
['', 'Age', '123.1', '456.2', '789.3'],
1
)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
def test_parse_date_of_birth(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(
legacy_csv_import_util.STATE_PARSE_DATE_OF_BIRTH
)
self.__test_automaton.parse_date_of_birth(
['', 'Date of Birth', '01/02/2013', '02/03/2013', '04/05/2013'],
1
)
prototypes = self.__test_automaton.get_prototypes()
self.assertEqual(len(prototypes), 3)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_PARSE_DATE_OF_SESSION
)
self.assertEqual(prototypes[0]['birthday'], '2013/1/2')
self.assertEqual(prototypes[1]['birthday'], '2013/2/3')
self.assertEqual(prototypes[2]['birthday'], '2013/4/5')
def test_parse_date_of_birth_invalid(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(
legacy_csv_import_util.STATE_PARSE_DATE_OF_BIRTH
)
self.__test_automaton.parse_date_of_birth(
['', 'Date of Birth', '05/01/02', '2013/02/03', '2013/04/05'],
1
)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
def test_parse_date_of_birth_wrong_label(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(
legacy_csv_import_util.STATE_PARSE_DATE_OF_BIRTH
)
self.__test_automaton.parse_date_of_birth(
['', 'Dae of Birth', '2013/01/02', '2013/02/03', '2013/04/05'],
1
)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
def test_parse_date_of_session(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(
legacy_csv_import_util.STATE_PARSE_DATE_OF_SESSION
)
self.__test_automaton.parse_date_of_session(
['', 'Date of Session', '01/02/2013', '02/03/2013', '04/05/2013'],
1
)
prototypes = self.__test_automaton.get_prototypes()
self.assertEqual(len(prototypes), 3)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_PARSE_SESSION_NUM
)
self.assertEqual(prototypes[0]['session_date'], '2013/1/2')
self.assertEqual(prototypes[1]['session_date'], '2013/2/3')
self.assertEqual(prototypes[2]['session_date'], '2013/4/5')
def test_parse_date_of_session_wrong_label(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(
legacy_csv_import_util.STATE_PARSE_DATE_OF_SESSION
)
self.__test_automaton.parse_date_of_session(
['', 'Date of Sessin', '2013/01/02', '2013/02/03', '2013/04/05'],
1
)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
def test_parse_date_of_session_invalid(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(
legacy_csv_import_util.STATE_PARSE_DATE_OF_SESSION
)
self.__test_automaton.parse_date_of_session(
['', 'Date of Session', '2013/01/02a', '2013/02/03', '2013/04/05'],
1
)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
def test_parse_session_num(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(legacy_csv_import_util.STATE_PARSE_SESSION_NUM)
self.__test_automaton.parse_session_num(
['', 'Session #', '123', '456', '789'],
1
)
prototypes = self.__test_automaton.get_prototypes()
self.assertEqual(len(prototypes), 3)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_PARSE_TOTAL_SESSION_NUM
)
self.assertEqual(prototypes[0]['session_num'], 123)
self.assertEqual(prototypes[1]['session_num'], 456)
self.assertEqual(prototypes[2]['session_num'], 789)
def test_parse_total_session_num(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(
legacy_csv_import_util.STATE_PARSE_TOTAL_SESSION_NUM)
self.__test_automaton.parse_total_session_num(
['', 'Total # of Sessions', '123', '456', '789'],
1
)
prototypes = self.__test_automaton.get_prototypes()
self.assertEqual(len(prototypes), 3)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_PARSE_WORDS_SPOKEN
)
self.assertEqual(prototypes[0]['total_num_sessions'], 123)
self.assertEqual(prototypes[1]['total_num_sessions'], 456)
self.assertEqual(prototypes[2]['total_num_sessions'], 789)
def test_parse_words_spoken(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(
legacy_csv_import_util.STATE_PARSE_WORDS_SPOKEN)
self.__test_automaton.parse_words_spoken(
['', 'Words Spoken', '123', '456', '789'],
1
)
prototypes = self.__test_automaton.get_prototypes()
self.assertEqual(len(prototypes), 3)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_PARSE_ITEMS_EXCLUDED
)
self.assertEqual(prototypes[0]['words_spoken'], 123)
self.assertEqual(prototypes[1]['words_spoken'], 456)
self.assertEqual(prototypes[2]['words_spoken'], 789)
def test_parse_items_excluded(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(
legacy_csv_import_util.STATE_PARSE_ITEMS_EXCLUDED)
self.__test_automaton.parse_items_excluded(
['', 'Items Excluded', '123', '456', '789'],
1
)
prototypes = self.__test_automaton.get_prototypes()
self.assertEqual(len(prototypes), 3)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_PARSE_PERCENTILE
)
self.assertEqual(prototypes[0]['items_excluded'], 123)
self.assertEqual(prototypes[1]['items_excluded'], 456)
self.assertEqual(prototypes[2]['items_excluded'], 789)
def test_parse_percentile(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(legacy_csv_import_util.STATE_PARSE_PERCENTILE)
self.__test_automaton.parse_percentile(
['', 'Percentile', '80.1', '90.2', 'calculate'],
1
)
prototypes = self.__test_automaton.get_prototypes()
self.assertEqual(len(prototypes), 3)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_PARSE_EXTRA_CATEGORIES
)
needing_precentile = self.__test_automaton.get_list_needing_precentile()
self.assertEqual(needing_precentile, [2])
self.assertEqual(prototypes[0]['percentile'], 80.1)
self.assertEqual(prototypes[1]['percentile'], 90.2)
self.assertEqual(prototypes[2]['percentile'], -1)
def test_parse_non_standard_percentile(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(legacy_csv_import_util.STATE_PARSE_PERCENTILE)
self.__test_automaton.parse_percentile(
['', 'Percentile', '80.1', '90.2', ''],
1
)
prototypes = self.__test_automaton.get_prototypes()
self.assertEqual(len(prototypes), 3)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
def test_parse_percentile_invalid(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(legacy_csv_import_util.STATE_PARSE_PERCENTILE)
self.__test_automaton.parse_percentile(
['', 'Percentile', '100.1', '90.2', '95.3'],
1
)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
def test_parse_extra_categories(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(
legacy_csv_import_util.STATE_PARSE_EXTRA_CATEGORIES)
self.__test_automaton.parse_extra_categories(
['', 'Extra Categories?', 'Y', 'N', 'Y'],
1
)
prototypes = self.__test_automaton.get_prototypes()
self.assertEqual(len(prototypes), 3)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_PARSE_SECTION_WORD_HEADING
)
self.assertEqual(prototypes[0]['extra_categories'],
constants.EXPLICIT_TRUE)
self.assertEqual(prototypes[1]['extra_categories'],
constants.EXPLICIT_FALSE)
self.assertEqual(prototypes[2]['extra_categories'],
constants.EXPLICIT_TRUE)
def test_parse_extra_categories_invalid(self):
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.set_state(
legacy_csv_import_util.STATE_PARSE_EXTRA_CATEGORIES)
self.__test_automaton.parse_extra_categories(
['', 'Extra Categories?', '1', 'N', 'Y'],
1
)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
def test_parse_section_word_heading(self):
self.__test_automaton.set_state(
legacy_csv_import_util.STATE_PARSE_SECTION_WORD_HEADING)
self.__test_automaton.set_prototypes([{}, {}, {}])
self.__test_automaton.parse_section_word_heading(
['Section', 'Word'],
1
)
prototypes = self.__test_automaton.get_prototypes()
self.assertEqual(len(prototypes[0]['words']), 0)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_PARSE_WORDS
)
def test_parse_words(self):
self.__test_automaton.set_state(legacy_csv_import_util.STATE_PARSE_WORDS)
self.__test_automaton.set_prototypes([
{'words': []}, {'words': []}, {'words': []}
])
self.__test_automaton.parse_words(['1', 'test', 'y', 'n', 'na'], 1)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_PARSE_WORDS
)
prototypes = self.__test_automaton.get_prototypes()
self.assertEqual(len(prototypes[0]['words']), 1)
self.assertEqual(prototypes[0]['words'][0]['word'], 'test')
self.assertEqual(prototypes[0]['words'][0]['val'],
constants.EXPLICIT_TRUE)
self.assertEqual(prototypes[1]['words'][0]['word'], 'test')
self.assertEqual(prototypes[1]['words'][0]['val'],
constants.EXPLICIT_FALSE)
self.assertEqual(prototypes[2]['words'][0]['word'], 'test')
self.assertEqual(prototypes[2]['words'][0]['val'],
constants.NO_DATA)
self.__test_automaton.parse_words(['1', 'again', 'n', '2', 'n'], 2)
self.assertEqual(prototypes[0]['words'][1]['word'], 'again')
self.assertEqual(prototypes[0]['words'][1]['val'],
constants.EXPLICIT_FALSE)
self.assertEqual(prototypes[1]['words'][1]['word'], 'again')
self.assertEqual(prototypes[1]['words'][1]['val'],
2)
self.assertEqual(prototypes[2]['words'][1]['word'], 'again')
self.assertEqual(prototypes[2]['words'][1]['val'],
constants.EXPLICIT_FALSE)
self.__test_automaton.parse_words(['1', 'again', 'n', '', 'n'], 3)
self.assertEqual(
self.__test_automaton.get_state(),
legacy_csv_import_util.STATE_FOUND_ERROR
)
def test_finish_without_error(self):
with unittest.mock.patch('prog_code.util.legacy_csv_import_util.math_util') as mock:
mock.find_percentile = unittest.mock.MagicMock(
return_value = 50
)
self.__test_automaton.set_prototypes([
{'words': []}, {'words': []}, {'words': []}
])
self.__test_automaton.parse_percentile(
['', 'Percentile', '80.1', '90.2', 'calculate'],
1
)
self.__test_automaton.parse_age(
['', 'Age (months)', '123.1', '456.2', '789.3'],
1
)
self.__test_automaton.parse_gender(
['', 'Gender', 'M', 'F', 'O'],
1
)
self.__test_automaton.parse_words(['1', 'test', '1', '0', 'na'], 1)
self.__test_automaton.parse_words(['1', 'test', '1', '0', '0'], 2)
self.__test_automaton.parse_words(['1', 'test', '1', '0', '1'], 3)
self.__test_automaton.parse_words(['1', 'test', '1', '0', '1'], 4)
target_prototype = self.__test_automaton.get_prototypes()[2]
self.assertEqual(target_prototype['percentile'], -1)
self.__test_automaton.finish()
mock.find_percentile.assert_called_with([-3], 2, 789.3, 3)
target_prototype = self.__test_automaton.get_prototypes()[2]
self.assertEqual(target_prototype['percentile'], 50)
def test_finish_with_error(self):
    """finish() must be a safe no-op when the automaton is already in the
    error state (it must not raise)."""
    self.__test_automaton.set_state(legacy_csv_import_util.STATE_FOUND_ERROR)
    self.__test_automaton.finish()
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
from keystoneauth1 import exceptions
from keystoneauth1 import identity
from keystoneauth1 import loading
def _add_common_identity_options(options):
    """Append the user-identification options shared by the V3 loaders."""
    common = [
        loading.Opt('user-id', help='User ID'),
        loading.Opt('username',
                    help='Username',
                    deprecated=[loading.Opt('user-name')]),
        loading.Opt('user-domain-id', help="User's domain id"),
        loading.Opt('user-domain-name', help="User's domain name"),
    ]
    options.extend(common)
def _assert_identity_options(options):
if (options.get('username') and
not (options.get('user_domain_name') or
options.get('user_domain_id'))):
m = "You have provided a username. In the V3 identity API a " \
"username is only unique within a domain so you must " \
"also provide either a user_domain_id or user_domain_name."
raise exceptions.OptionError(m)
class Password(loading.BaseV3Loader):
    """Loader for V3 username/password authentication."""

    @property
    def plugin_class(self):
        return identity.V3Password

    def get_options(self):
        options = super(Password, self).get_options()
        _add_common_identity_options(options)
        password_opt = loading.Opt('password',
                                   secret=True,
                                   prompt='Password: ',
                                   help="User's password")
        options.append(password_opt)
        return options

    def load_from_options(self, **kwargs):
        # A bare username is ambiguous in V3; enforce a user domain.
        _assert_identity_options(kwargs)
        return super(Password, self).load_from_options(**kwargs)
class Token(loading.BaseV3Loader):
    """Loader for V3 token authentication."""

    @property
    def plugin_class(self):
        return identity.V3Token

    def get_options(self):
        options = super(Token, self).get_options()
        options.append(
            loading.Opt('token',
                        secret=True,
                        help='Token to authenticate with'))
        return options
class _OpenIDConnectBase(loading.BaseFederationLoader):
    """Shared options and validation for the OpenID Connect loaders."""

    def get_options(self):
        options = super(_OpenIDConnectBase, self).get_options()
        oidc_options = [
            loading.Opt('client-id', help='OAuth 2.0 Client ID'),
            loading.Opt('client-secret', secret=True,
                        help='OAuth 2.0 Client Secret'),
            loading.Opt('openid-scope', default="openid profile",
                        dest="scope",
                        help='OpenID Connect scope that is requested from '
                             'authorization server. Note that the OpenID '
                             'Connect specification states that "openid" '
                             'must be always specified.'),
            loading.Opt('access-token-endpoint',
                        help='OpenID Connect Provider Token Endpoint. Note '
                             'that if a discovery document is being passed this '
                             'option will override the endpoint provided by the '
                             'server in the discovery document.'),
            loading.Opt('discovery-endpoint',
                        help='OpenID Connect Discovery Document URL. '
                             'The discovery document will be used to obtain the '
                             'values of the access token endpoint and the '
                             'authentication endpoint. This URL should look like '
                             'https://idp.example.org/.well-known/'
                             'openid-configuration'),
            loading.Opt('access-token-type',
                        help='OAuth 2.0 Authorization Server Introspection '
                             'token type, it is used to decide which type '
                             'of token will be used when processing token '
                             'introspection. Valid values are: '
                             '"access_token" or "id_token"'),
        ]
        options.extend(oidc_options)
        return options

    def load_from_options(self, **kwargs):
        # At least one of the two endpoint options must be present.
        has_endpoint = (kwargs.get('access_token_endpoint') or
                        kwargs.get('discovery_endpoint'))
        if not has_endpoint:
            m = ("You have to specify either an 'access-token-endpoint' or "
                 "a 'discovery-endpoint'.")
            raise exceptions.OptionError(m)
        return super(_OpenIDConnectBase, self).load_from_options(**kwargs)
class OpenIDConnectClientCredentials(_OpenIDConnectBase):
    """Loader for the OpenID Connect client-credentials grant."""

    @property
    def plugin_class(self):
        return identity.V3OidcClientCredentials

    def get_options(self):
        # No options beyond the common OpenID Connect set.
        return super(OpenIDConnectClientCredentials, self).get_options()
class OpenIDConnectPassword(_OpenIDConnectBase):
    """Loader for the OpenID Connect resource-owner password grant."""

    @property
    def plugin_class(self):
        return identity.V3OidcPassword

    def get_options(self):
        options = super(OpenIDConnectPassword, self).get_options()
        credential_opts = [
            loading.Opt('username', help='Username', required=True),
            loading.Opt('password', secret=True,
                        help='Password', required=True),
        ]
        options.extend(credential_opts)
        return options
class OpenIDConnectAuthorizationCode(_OpenIDConnectBase):
    """Loader for the OpenID Connect authorization-code grant."""

    @property
    def plugin_class(self):
        return identity.V3OidcAuthorizationCode

    def get_options(self):
        options = super(OpenIDConnectAuthorizationCode, self).get_options()
        code_opts = [
            loading.Opt('redirect-uri', help='OpenID Connect Redirect URL'),
            loading.Opt('code', secret=True, required=True,
                        deprecated=[loading.Opt('authorization-code')],
                        help='OAuth 2.0 Authorization Code'),
        ]
        options.extend(code_opts)
        return options
class OpenIDConnectAccessToken(loading.BaseFederationLoader):
    """Loader that authenticates with an existing OAuth 2.0 access token."""

    @property
    def plugin_class(self):
        return identity.V3OidcAccessToken

    def get_options(self):
        options = super(OpenIDConnectAccessToken, self).get_options()
        options.append(
            loading.Opt('access-token', secret=True, required=True,
                        help='OAuth 2.0 Access Token'))
        return options
class TOTP(loading.BaseV3Loader):
    """Loader for V3 time-based one-time-password authentication."""

    @property
    def plugin_class(self):
        return identity.V3TOTP

    def get_options(self):
        options = super(TOTP, self).get_options()
        _add_common_identity_options(options)
        options.append(
            loading.Opt('passcode', secret=True, help="User's TOTP passcode"))
        return options

    def load_from_options(self, **kwargs):
        # A bare username is ambiguous in V3; enforce a user domain.
        _assert_identity_options(kwargs)
        return super(TOTP, self).load_from_options(**kwargs)
class TokenlessAuth(loading.BaseLoader):
    """Loader for V3 tokenless authentication scoped by domain/project."""

    @property
    def plugin_class(self):
        return identity.V3TokenlessAuth

    def get_options(self):
        options = super(TokenlessAuth, self).get_options()
        scope_opts = [
            loading.Opt('auth-url', required=True,
                        help='Authentication URL'),
            loading.Opt('domain-id', help='Domain ID to scope to'),
            loading.Opt('domain-name', help='Domain name to scope to'),
            loading.Opt('project-id', help='Project ID to scope to'),
            loading.Opt('project-name', help='Project name to scope to'),
            loading.Opt('project-domain-id',
                        help='Domain ID containing project'),
            loading.Opt('project-domain-name',
                        help='Domain name containing project'),
        ]
        options.extend(scope_opts)
        return options

    def load_from_options(self, **kwargs):
        # Some scope target is required, and a project name alone is
        # ambiguous without its owning domain.
        no_scope = not (kwargs.get('domain_id') or
                        kwargs.get('domain_name') or
                        kwargs.get('project_id') or
                        kwargs.get('project_name'))
        ambiguous_project = (kwargs.get('project_name') and
                             not (kwargs.get('project_domain_name') or
                                  kwargs.get('project_domain_id')))
        if no_scope or ambiguous_project:
            m = ('You need to provide either a domain_name, domain_id, '
                 'project_id or project_name. '
                 'If you have provided a project_name, in the V3 identity '
                 'API a project_name is only unique within a domain so '
                 'you must also provide either a project_domain_id or '
                 'project_domain_name.')
            raise exceptions.OptionError(m)
        return super(TokenlessAuth, self).load_from_options(**kwargs)
class ApplicationCredential(loading.BaseV3Loader):
    """Loader for V3 application-credential authentication."""

    @property
    def plugin_class(self):
        return identity.V3ApplicationCredential

    def get_options(self):
        """Return the base V3 options plus the application-credential ones."""
        options = super(ApplicationCredential, self).get_options()
        _add_common_identity_options(options)
        # NOTE: previously three separate extend() calls, two of which were
        # followed by stray trailing commas that built and discarded useless
        # one-element tuples; a single extend() is equivalent and cleaner.
        options.extend([
            loading.Opt('application_credential_secret', secret=True,
                        required=True,
                        help="Application credential auth secret"),
            loading.Opt('application_credential_id',
                        help='Application credential ID'),
            loading.Opt('application_credential_name',
                        help='Application credential name'),
        ])
        return options

    def load_from_options(self, **kwargs):
        """Validate identity options plus the application-credential fields.

        Raises exceptions.OptionError when neither an application credential
        ID nor a name is supplied, or when the secret is missing.
        """
        _assert_identity_options(kwargs)
        if (not kwargs.get('application_credential_id') and
                not kwargs.get('application_credential_name')):
            m = ('You must provide either an application credential ID or an '
                 'application credential name and user.')
            raise exceptions.OptionError(m)
        if not kwargs.get('application_credential_secret'):
            m = ('You must provide an auth secret.')
            raise exceptions.OptionError(m)
        return super(ApplicationCredential, self).load_from_options(**kwargs)
|
def cmd_test(raw_in):
    """Reply to the test command with a random liveness message."""
    replies = ['||', 'Passed!', 'I\'m alive!', '👍🏻', '👌🏻']
    send_msg(raw_in, random.choice(replies))
commands = [['test', cmd_test, False, 'raw', 'Check bot\'s activity.']]
|
import math
from PyQt5.QtCore import (pyqtSignal, QBasicTimer, QObject, QPoint, QPointF,
QRect, QSize, QStandardPaths, Qt, QUrl)
from PyQt5.QtGui import (QColor, QDesktopServices, QImage, QPainter,
QPainterPath, QPixmap, QRadialGradient)
from PyQt5.QtWidgets import QAction, QApplication, QMainWindow, QWidget
from PyQt5.QtNetwork import (QNetworkAccessManager, QNetworkDiskCache,
QNetworkRequest)
HOLD_TIME = 701       # ms to press-and-hold before the magnifier activates
MAX_MAGNIFIER = 229   # maximum magnifier diameter, in pixels
TDIM = 256            # OpenStreetMap tile dimension (tiles are 256x256 px)
class Point(QPoint):
    """A hashable QPoint, usable as a dict key."""

    def __init__(self, *args):
        # QPoint's constructor already accepts zero arguments, so a single
        # delegating call covers both the empty and non-empty cases.
        super(Point, self).__init__(*args)

    def __hash__(self):
        return self.x() * 17 ^ self.y()

    def __repr__(self):
        return "Point(%s, %s)" % (self.x(), self.y())
def tileForCoordinate(lat, lng, zoom):
    """Map a latitude/longitude pair to fractional tile coordinates at the
    given zoom level (standard Web-Mercator tiling)."""
    scale = float(1 << zoom)
    x_norm = float(lng + 180.0) / 360.0
    lat_rad = lat * math.pi / 180.0
    y_norm = (1.0 - math.log(math.tan(lat_rad) +
              1.0 / math.cos(lat_rad)) / math.pi) / 2.0
    return QPointF(x_norm * scale, y_norm * scale)
def longitudeFromTile(tx, zoom):
    """Return the longitude, in degrees, of tile column ``tx`` at ``zoom``.

    Inverse of the x component of tileForCoordinate().  The original code
    stored the result in a local misleadingly named ``lat``; renamed for
    clarity (behavior unchanged).
    """
    zn = float(1 << zoom)
    lng = tx / zn * 360.0 - 180.0
    return lng
def latitudeFromTile(ty, zoom):
    """Return the latitude, in degrees, of tile row ``ty`` at ``zoom``.

    Inverse Web-Mercator transform.  The original code stored the result in
    a local misleadingly named ``lng``; renamed for clarity (behavior
    unchanged).
    """
    zn = float(1 << zoom)
    n = math.pi - 2 * math.pi * ty / zn
    lat = 180.0 / math.pi * math.atan(0.5 * (math.exp(n) - math.exp(-n)))
    return lat
class SlippyMap(QObject):
    """Downloads and renders OpenStreetMap tiles around a centre coordinate.

    Emits ``updated(QRect)`` whenever part of the map needs repainting.
    """

    updated = pyqtSignal(QRect)

    def __init__(self, parent=None):
        super(SlippyMap, self).__init__(parent)
        self._offset = QPoint()
        self._tilesRect = QRect()
        self._tilePixmaps = {}  # Point(x, y) to QPixmap mapping
        self._manager = QNetworkAccessManager()
        self._url = QUrl()
        # public vars
        self.width = 400
        self.height = 300
        self.zoom = 15
        self.latitude = 59.9138204
        self.longitude = 10.7387413
        # Placeholder drawn while a tile has not been downloaded yet.
        self._emptyTile = QPixmap(TDIM, TDIM)
        self._emptyTile.fill(Qt.lightGray)
        cache = QNetworkDiskCache()
        cache.setCacheDirectory(
            QStandardPaths.writableLocation(QStandardPaths.CacheLocation))
        self._manager.setCache(cache)
        self._manager.finished.connect(self.handleNetworkData)

    def invalidate(self):
        """Recompute the visible tile rect/offset and start downloads."""
        if self.width <= 0 or self.height <= 0:
            return
        ct = tileForCoordinate(self.latitude, self.longitude, self.zoom)
        tx = ct.x()
        ty = ct.y()
        # top-left corner of the center tile
        xp = int(self.width / 2 - (tx - math.floor(tx)) * TDIM)
        yp = int(self.height / 2 - (ty - math.floor(ty)) * TDIM)
        # first tile vertical and horizontal
        # BUG FIX: use floor division; under Python 3 the original '/'
        # produced floats, so xs/ys became floats and the QPoint/QRect
        # constructors below would raise TypeError.  '//' matches the
        # Python 2 integer-division behavior this code was written for.
        xa = (xp + TDIM - 1) // TDIM
        ya = (yp + TDIM - 1) // TDIM
        xs = int(tx) - xa
        ys = int(ty) - ya
        # offset for top-left tile
        self._offset = QPoint(xp - xa * TDIM, yp - ya * TDIM)
        # last tile vertical and horizontal (same floor-division fix)
        xe = int(tx) + (self.width - xp - 1) // TDIM
        ye = int(ty) + (self.height - yp - 1) // TDIM
        # build a rect
        self._tilesRect = QRect(xs, ys, xe - xs + 1, ye - ys + 1)
        if self._url.isEmpty():
            self.download()
        self.updated.emit(QRect(0, 0, self.width, self.height))

    def render(self, p, rect):
        """Paint every cached tile intersecting ``rect`` with painter ``p``."""
        for x in range(self._tilesRect.width()):
            for y in range(self._tilesRect.height()):
                tp = Point(x + self._tilesRect.left(), y + self._tilesRect.top())
                box = self.tileRect(tp)
                if rect.intersects(box):
                    p.drawPixmap(box, self._tilePixmaps.get(tp, self._emptyTile))

    def pan(self, delta):
        """Shift the map centre by ``delta`` pixels and refresh."""
        dx = QPointF(delta) / float(TDIM)
        center = tileForCoordinate(self.latitude, self.longitude, self.zoom) - dx
        self.latitude = latitudeFromTile(center.y(), self.zoom)
        self.longitude = longitudeFromTile(center.x(), self.zoom)
        self.invalidate()

    # slots
    def handleNetworkData(self, reply):
        """Store a downloaded tile, purge distant ones, fetch the next."""
        img = QImage()
        tp = Point(reply.request().attribute(QNetworkRequest.User))
        url = reply.url()
        if not reply.error():
            if img.load(reply, None):
                self._tilePixmaps[tp] = QPixmap.fromImage(img)
        reply.deleteLater()
        self.updated.emit(self.tileRect(tp))
        # purge unused tiles outside a small margin around the view
        bound = self._tilesRect.adjusted(-2, -2, 2, 2)
        for tp in list(self._tilePixmaps.keys()):
            if not bound.contains(tp):
                del self._tilePixmaps[tp]
        self.download()

    def download(self):
        """Request the next missing tile, or clear the URL when done."""
        grab = None
        for x in range(self._tilesRect.width()):
            for y in range(self._tilesRect.height()):
                tp = Point(self._tilesRect.topLeft() + QPoint(x, y))
                if tp not in self._tilePixmaps:
                    grab = QPoint(tp)
                    break
        if grab is None:
            self._url = QUrl()
            return
        path = 'http://tile.openstreetmap.org/%d/%d/%d.png' % (self.zoom, grab.x(), grab.y())
        self._url = QUrl(path)
        request = QNetworkRequest()
        request.setUrl(self._url)
        request.setRawHeader(b'User-Agent', b'Nokia (PyQt) Graphics Dojo 1.0')
        request.setAttribute(QNetworkRequest.User, grab)
        self._manager.get(request)

    def tileRect(self, tp):
        """Return the widget-space rect covered by tile ``tp``."""
        t = tp - self._tilesRect.topLeft()
        x = t.x() * TDIM + self._offset.x()
        y = t.y() * TDIM + self._offset.y()
        return QRect(x, y, TDIM, TDIM)
class LightMaps(QWidget):
    """Map widget with a press-and-hold magnifier and a night mode."""

    def __init__(self, parent=None):
        super(LightMaps, self).__init__(parent)
        self.pressed = False
        self.snapped = False
        self.zoomed = False
        self.invert = False
        self._normalMap = SlippyMap(self)
        self._largeMap = SlippyMap(self)   # double-resolution map for the magnifier
        self.pressPos = QPoint()
        self.dragPos = QPoint()
        self.tapTimer = QBasicTimer()
        self.zoomPixmap = QPixmap()
        self.maskPixmap = QPixmap()
        self._normalMap.updated.connect(self.updateMap)
        self._largeMap.updated.connect(self.update)

    def setCenter(self, lat, lng):
        """Centre the map on the given coordinate and refresh both maps."""
        self._normalMap.latitude = lat
        self._normalMap.longitude = lng
        self._normalMap.invalidate()
        self._largeMap.invalidate()

    # slots
    def toggleNightMode(self):
        """Flip colour inversion and repaint."""
        self.invert = not self.invert
        self.update()

    def updateMap(self, r):
        self.update(r)

    def activateZoom(self):
        """Show the magnifier: sync the double-resolution map and repaint."""
        self.zoomed = True
        self.tapTimer.stop()
        self._largeMap.zoom = self._normalMap.zoom + 1
        self._largeMap.width = self._normalMap.width * 2
        self._largeMap.height = self._normalMap.height * 2
        self._largeMap.latitude = self._normalMap.latitude
        self._largeMap.longitude = self._normalMap.longitude
        self._largeMap.invalidate()
        self.update()

    def resizeEvent(self, event):
        self._normalMap.width = self.width()
        self._normalMap.height = self.height()
        self._normalMap.invalidate()
        self._largeMap.width = self._normalMap.width * 2
        self._largeMap.height = self._normalMap.height * 2
        self._largeMap.invalidate()

    def paintEvent(self, event):
        """Draw the base map, then the circular magnifier, then night mode."""
        p = QPainter()
        p.begin(self)
        self._normalMap.render(p, event.rect())
        p.setPen(Qt.black)
        p.drawText(self.rect(), Qt.AlignBottom | Qt.TextWordWrap,
                   "Map data CCBYSA 2009 OpenStreetMap.org contributors")
        p.end()
        if self.zoomed:
            dim = min(self.width(), self.height())
            # BUG FIX: use integer division -- QSize/QPoint require int
            # arguments under Python 3; the original '/' produced floats.
            magnifierSize = min(MAX_MAGNIFIER, dim * 2 // 3)
            radius = magnifierSize // 2
            ring = radius - 15
            box = QSize(magnifierSize, magnifierSize)
            # reupdate our mask
            if self.maskPixmap.size() != box:
                self.maskPixmap = QPixmap(box)
                self.maskPixmap.fill(Qt.transparent)
                g = QRadialGradient()
                g.setCenter(radius, radius)
                g.setFocalPoint(radius, radius)
                g.setRadius(radius)
                g.setColorAt(1.0, QColor(255, 255, 255, 0))
                g.setColorAt(0.5, QColor(128, 128, 128, 255))
                mask = QPainter(self.maskPixmap)
                mask.setRenderHint(QPainter.Antialiasing)
                mask.setCompositionMode(QPainter.CompositionMode_Source)
                mask.setBrush(g)
                mask.setPen(Qt.NoPen)
                mask.drawRect(self.maskPixmap.rect())
                mask.setBrush(QColor(Qt.transparent))
                mask.drawEllipse(g.center(), ring, ring)
                mask.end()
            center = self.dragPos - QPoint(0, radius)
            center += QPoint(0, radius // 2)
            corner = center - QPoint(radius, radius)
            xy = center * 2 - QPoint(radius, radius)
            # only set the dimension to the magnified portion
            if self.zoomPixmap.size() != box:
                self.zoomPixmap = QPixmap(box)
                self.zoomPixmap.fill(Qt.lightGray)
            # Render the magnified map into the pixmap every paint.
            # (The original wrapped this in a redundant 'if True:'.)
            p = QPainter(self.zoomPixmap)
            p.translate(-xy)
            self._largeMap.render(p, QRect(xy, box))
            p.end()
            clipPath = QPainterPath()
            clipPath.addEllipse(QPointF(center), ring, ring)
            p = QPainter(self)
            p.setRenderHint(QPainter.Antialiasing)
            p.setClipPath(clipPath)
            p.drawPixmap(corner, self.zoomPixmap)
            p.setClipping(False)
            p.drawPixmap(corner, self.maskPixmap)
            p.setPen(Qt.gray)
            p.drawPath(clipPath)
        if self.invert:
            # Difference against white inverts every pixel (night mode).
            p = QPainter(self)
            p.setCompositionMode(QPainter.CompositionMode_Difference)
            p.fillRect(event.rect(), Qt.white)
            p.end()

    def timerEvent(self, event):
        # The hold timer fired: the press lasted long enough to zoom.
        if not self.zoomed:
            self.activateZoom()
        self.update()

    def mousePressEvent(self, event):
        if event.buttons() != Qt.LeftButton:
            return
        self.pressed = self.snapped = True
        self.pressPos = self.dragPos = event.pos()
        self.tapTimer.stop()
        self.tapTimer.start(HOLD_TIME, self)

    def mouseMoveEvent(self, event):
        if not event.buttons():
            return
        if not self.zoomed:
            if not self.pressed or not self.snapped:
                # Free drag: pan the map by the mouse delta.
                delta = event.pos() - self.pressPos
                self.pressPos = event.pos()
                self._normalMap.pan(delta)
                return
            else:
                # Still "snapped" to the press point; a move beyond the
                # threshold cancels the press-and-hold zoom.
                threshold = 10
                delta = event.pos() - self.pressPos
                if self.snapped:
                    self.snapped &= delta.x() < threshold
                    self.snapped &= delta.y() < threshold
                    self.snapped &= delta.x() > -threshold
                    self.snapped &= delta.y() > -threshold
                if not self.snapped:
                    self.tapTimer.stop()
        else:
            # While zoomed, dragging moves the magnifier.
            self.dragPos = event.pos()
            self.update()

    def mouseReleaseEvent(self, event):
        self.zoomed = False
        self.update()

    def keyPressEvent(self, event):
        """Arrow keys pan the map (or move the magnifier); Z toggles zoom."""
        if not self.zoomed:
            if event.key() == Qt.Key_Left:
                self._normalMap.pan(QPoint(20, 0))
            if event.key() == Qt.Key_Right:
                self._normalMap.pan(QPoint(-20, 0))
            if event.key() == Qt.Key_Up:
                self._normalMap.pan(QPoint(0, 20))
            if event.key() == Qt.Key_Down:
                self._normalMap.pan(QPoint(0, -20))
            if event.key() == Qt.Key_Z or event.key() == Qt.Key_Select:
                # BUG FIX: integer division -- QPoint needs ints on Python 3.
                self.dragPos = QPoint(self.width() // 2, self.height() // 2)
                self.activateZoom()
        else:
            if event.key() == Qt.Key_Z or event.key() == Qt.Key_Select:
                self.zoomed = False
                self.update()
            delta = QPoint(0, 0)
            if event.key() == Qt.Key_Left:
                delta = QPoint(-15, 0)
            if event.key() == Qt.Key_Right:
                delta = QPoint(15, 0)
            if event.key() == Qt.Key_Up:
                delta = QPoint(0, -15)
            if event.key() == Qt.Key_Down:
                delta = QPoint(0, 15)
            if delta != QPoint(0, 0):
                self.dragPos += delta
                self.update()
class MapZoom(QMainWindow):
    """Main window: a LightMaps widget plus an Options menu with preset
    locations, night mode and an about-OSM link."""

    def __init__(self):
        super(MapZoom, self).__init__(None)
        self.map_ = LightMaps(self)
        self.setCentralWidget(self.map_)
        self.map_.setFocus()
        # Menu actions: three preset city locations, a night-mode toggle
        # and an "about OpenStreetMap" link.
        self.osloAction = QAction("&Oslo", self)
        self.berlinAction = QAction("&Berlin", self)
        self.jakartaAction = QAction("&Jakarta", self)
        self.nightModeAction = QAction("Night Mode", self)
        self.nightModeAction.setCheckable(True)
        self.nightModeAction.setChecked(False)
        self.osmAction = QAction("About OpenStreetMap", self)
        self.osloAction.triggered.connect(self.chooseOslo)
        self.berlinAction.triggered.connect(self.chooseBerlin)
        self.jakartaAction.triggered.connect(self.chooseJakarta)
        self.nightModeAction.triggered.connect(self.map_.toggleNightMode)
        self.osmAction.triggered.connect(self.aboutOsm)
        menu = self.menuBar().addMenu("&Options")
        menu.addAction(self.osloAction)
        menu.addAction(self.berlinAction)
        menu.addAction(self.jakartaAction)
        menu.addSeparator()
        menu.addAction(self.nightModeAction)
        menu.addAction(self.osmAction)

    # slots
    def chooseOslo(self):
        """Centre the map on Oslo."""
        self.map_.setCenter(59.9138204, 10.7387413)

    def chooseBerlin(self):
        """Centre the map on Berlin."""
        self.map_.setCenter(52.52958999943302, 13.383053541183472)

    def chooseJakarta(self):
        """Centre the map on Jakarta."""
        self.map_.setCenter(-6.211544, 106.845172)

    def aboutOsm(self):
        """Open the OpenStreetMap homepage in the default browser."""
        QDesktopServices.openUrl(QUrl('http://www.openstreetmap.org'))
# Standard Qt bootstrap: create the application, show the main window and
# hand control to the event loop.
if __name__ == '__main__':
    import sys
    app = QApplication(sys.argv)
    app.setApplicationName('LightMaps')
    w = MapZoom()
    w.setWindowTitle("OpenStreetMap")
    w.resize(600, 450)
    w.show()
    sys.exit(app.exec_())
|
# Notebook-exported script: compare dynamical vs flux-weighted RVs in PHOEBE.
get_ipython().system('pip install -I "phoebe>=2.1,<2.2"')
get_ipython().run_line_magic('matplotlib', 'inline')
import phoebe
from phoebe import u # units
import numpy as np
import matplotlib.pyplot as plt
logger = phoebe.logger()
# Build a default binary and tweak its parameters.
b = phoebe.default_binary()
b['q'] = 0.7
b['requiv@primary'] = 1.0
b['requiv@secondary'] = 0.5
b['teff@secondary@component'] = 5000
b['syncpar@primary@component'] = 2
# Two RV datasets over the same times, computed with different methods below.
b.add_dataset('rv', times=np.linspace(0,2,201), dataset='dynamicalrvs')
b.add_dataset('rv', times=np.linspace(0,2,201), dataset='numericalrvs')
times = b.get_value('times@primary@numericalrvs@dataset')
times = times[times<0.1]
# BUG FIX: Python 2 'print x' statements replaced with the function form,
# which is valid in both Python 2 and Python 3.
print(times)
b.add_dataset('mesh', dataset='mesh01', times=times, columns=['vws', 'rvs*'])
b.set_value_all('rv_method@dynamicalrvs@compute', 'dynamical')
b.set_value_all('rv_method@numericalrvs@compute', 'flux-weighted')
print(b['rv_method'])
b.run_compute(irrad_method='none')
# Plot both RV models, then mesh snapshots coloured by RV and by vw.
afig, mplfig = b['dynamicalrvs@model'].plot(c={'primary': 'b', 'secondary': 'r'}, show=True)
afig, mplfig = b['numericalrvs@model'].plot(c={'primary': 'b', 'secondary': 'r'}, show=True)
afig, mplfig = b['mesh@model'].plot(time=0.03, fc='rvs@numericalrvs', ec="None", show=True)
afig, mplfig = b['mesh01@model'].plot(time=0.09, fc='vws', ec="None", show=True)
|
import sys
import unittest
import libsbml
class TestRateRule(unittest.TestCase):
    """Tests for libsbml.RateRule (port of the generated libsbml suite).

    Modernized: ``self.assert_`` (deprecated alias, removed in Python 3.12)
    replaced with ``assertTrue``; ``== None`` replaced with ``is None``;
    dead no-op ``if ...: pass`` branches removed.
    """
    # Legacy artifact of the generated suite: declares a module-level RR
    # global.  Kept so module-level state stays identical.
    global RR
    RR = None

    def setUp(self):
        """Create a fresh Level-1 Version-2 RateRule for each test."""
        self.RR = libsbml.RateRule(1,2)

    def tearDown(self):
        """Drop the RateRule reference so the SWIG wrapper can free it."""
        _dummyList = [ self.RR ]; _dummyList[:] = []; del _dummyList

    def test_RateRule_create(self):
        """A freshly created RateRule has empty/default attributes."""
        self.assertTrue( self.RR.getTypeCode() == libsbml.SBML_RATE_RULE )
        self.assertTrue( self.RR.getMetaId() == "" )
        self.assertTrue( self.RR.getNotes() is None )
        self.assertTrue( self.RR.getAnnotation() is None )
        self.assertTrue( self.RR.getFormula() == "" )
        self.assertTrue( self.RR.getMath() is None )
        self.assertTrue( self.RR.getVariable() == "" )
        self.assertTrue( self.RR.getType() == libsbml.RULE_TYPE_RATE )

    def test_RateRule_createWithNS(self):
        """Creating with SBMLNamespaces carries level/version/namespaces."""
        xmlns = libsbml.XMLNamespaces()
        xmlns.add( "http://www.sbml.org", "testsbml")
        sbmlns = libsbml.SBMLNamespaces(2,1)
        sbmlns.addNamespaces(xmlns)
        rule = libsbml.RateRule(sbmlns)
        self.assertTrue( rule.getTypeCode() == libsbml.SBML_RATE_RULE )
        self.assertTrue( rule.getMetaId() == "" )
        self.assertTrue( rule.getNotes() is None )
        self.assertTrue( rule.getAnnotation() is None )
        self.assertTrue( rule.getLevel() == 2 )
        self.assertTrue( rule.getVersion() == 1 )
        self.assertTrue( rule.getNamespaces() is not None )
        self.assertTrue( rule.getNamespaces().getLength() == 2 )
        _dummyList = [ rule ]; _dummyList[:] = []; del _dummyList

    def test_RateRule_free_NULL(self):
        """Releasing a null reference must be harmless."""
        _dummyList = [ None ]; _dummyList[:] = []; del _dummyList

    def test_RateRule_setVariable(self):
        """setVariable/isSetVariable round-trip, including clearing."""
        variable = "x"
        self.RR.setVariable(variable)
        self.assertTrue(( variable == self.RR.getVariable() ))
        self.assertEqual( True, self.RR.isSetVariable() )
        # Re-setting the variable to its own current value must be a no-op.
        self.RR.setVariable(self.RR.getVariable())
        self.assertTrue(( variable == self.RR.getVariable() ))
        # An empty string unsets the variable.
        self.RR.setVariable("")
        self.assertEqual( False, self.RR.isSetVariable() )
def suite():
    """Build the unittest suite for this module.

    ``unittest.makeSuite`` was deprecated and removed in Python 3.13;
    ``TestLoader.loadTestsFromTestCase`` is the supported equivalent.
    """
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(TestRateRule)
# Run the suite directly, exiting 0 on success and 1 on failure (the exit
# code is consumed by the libsbml test harness).
if __name__ == "__main__":
    if unittest.TextTestRunner(verbosity=1).run(suite()).wasSuccessful() :
        sys.exit(0)
    else:
        sys.exit(1)
|
import logging
import datetime
import os
import shutil
from mimetypes import guess_type
import re
import zipfile
from paste.fileapp import FileApp
from pylons import config
from pylons import request, response, session, app_globals, tmpl_context as c
from pylons import url
from pylons.controllers.util import abort, forward, redirect
from pylons.decorators import validate
from pylons.decorators.rest import restrict
import webhelpers.paginate as paginate
from formencode.schema import Schema
from formencode.validators import Invalid, FancyValidator
from formencode.validators import Int, DateConverter, UnicodeString, OneOf, Regex
from formencode import variabledecode
from formencode import htmlfill
from formencode.foreach import ForEach
from formencode.api import NoDefault
from sqlalchemy.sql import or_
from sqlalchemy import desc
from onlinelinguisticdatabase.lib.base import BaseController, render
import onlinelinguisticdatabase.model as model
import onlinelinguisticdatabase.model.meta as meta
import onlinelinguisticdatabase.lib.helpers as h
log = logging.getLogger(__name__)
class NewFileForm(Schema):
    """NewFileForm is a Schema for validating the data entered at the Add File page."""
    allow_extra_fields = True
    filter_extra_fields = True
    # Dates entered as MM/DD/YYYY; the DM subclass flips to DD/MM/YYYY.
    dateElicited = DateConverter(month_style='mm/dd/yyyy')
    description = UnicodeString()
    # speaker/elicitor arrive as stringified database IDs (see
    # getFileAttributes, which int()s them).
    speaker = UnicodeString()
    elicitor = UnicodeString()
    utteranceType = UnicodeString()
class NewFileFormDM(NewFileForm):
    """NewFileForm variant for users whose date preference is DD/MM/YYYY."""
    dateElicited = DateConverter(month_style='dd/mm/yyyy')
class UpdateFileForm(NewFileForm):
    """NewFileForm plus the ID of the file being updated."""
    ID = UnicodeString()
class UpdateFileFormDM(UpdateFileForm):
    """UpdateFileForm variant for users whose date preference is DD/MM/YYYY."""
    dateElicited = DateConverter(month_style='dd/mm/yyyy')
class RestrictorStruct(Schema):
    """Sub-schema for one generic search restrictor row."""
    location = UnicodeString()
    containsNot = UnicodeString()
    # CONSISTENCY FIX: 'allAnyOf' was assigned the UnicodeString *class*
    # rather than an instance, unlike every other field in this module.
    # FormEncode accepts validator classes, so behavior is unchanged, but
    # the instantiated form is the intended, consistent spelling.
    allAnyOf = UnicodeString()
    options = ForEach(UnicodeString())
class DateRestrictorStruct(Schema):
    """Sub-schema for one date-based search restrictor (field, relation,
    MM/DD/YYYY date)."""
    location = UnicodeString()
    relation = UnicodeString()
    date = DateConverter(month_style='mm/dd/yyyy')
class IntegerRestrictorStruct(Schema):
    """Sub-schema for one numeric search restrictor (field, relation,
    value, unit)."""
    allow_extra_fields = True
    filter_extra_fields = True
    location = UnicodeString()
    relation = UnicodeString()
    # NOTE(review): despite the name, the regex also admits decimals
    # (e.g. '1.5') -- confirm whether that is intended.
    integer = Regex(r'^ *[0-9]+(\.[0-9]+)? *$')
    unit = UnicodeString()
class SearchFileForm(Schema):
    """SearchFile is a Schema for validating the search terms entered at the Search Files page."""
    allow_extra_fields = True
    filter_extra_fields = True
    # NestedVariables unflattens dotted/indexed form names so the ForEach
    # restrictor lists below receive nested structures.
    pre_validators = [variabledecode.NestedVariables()]
    searchTerm1 = UnicodeString()
    searchType1 = UnicodeString()
    searchLocation1 = UnicodeString()
    searchTerm2 = UnicodeString()
    searchType2 = UnicodeString()
    searchLocation2 = UnicodeString()
    andOrNot = UnicodeString()
    restrictors = ForEach(RestrictorStruct())
    dateRestrictors = ForEach(DateRestrictorStruct())
    integerRestrictors = ForEach(IntegerRestrictorStruct())
    orderByColumn = UnicodeString()
    orderByDirection = UnicodeString()
class AssociateFileFormForm(Schema):
    """Schema for associating a File with one or more Forms: a required,
    comma-separated list of positive integer Form IDs."""
    allow_extra_fields = True
    filter_extra_fields = True
    formID = Regex(r'^ *[1-9]+[0-9]* *( *, *[1-9]+[0-9]* *)*$', not_empty=True)
def renderAddFile(values=None, errors=None, addUpdate='add'):
    """Render the Add File or Update File page, optionally prefilled with
    ``values`` and annotated with validation ``errors``.  Called by the add
    and update actions, and by create/save on invalid input.
    """
    if addUpdate == 'add':
        formTemplate = '/derived/file/addForm.html'
        pageTemplate = '/derived/file/add.html'
        heading = u'Add a File'
    else:
        formTemplate = '/derived/file/updateForm.html'
        pageTemplate = '/derived/file/update.html'
        heading = u'Updating File %s' % c.file.id
    form = render(formTemplate)
    c.heading = heading
    c.filledForm = htmlfill.render(form, defaults=values, errors=errors)
    return render(pageTemplate)
def getFileAttributes(file, result, fileSize, fileName, createOrSave):
    """Given a (SQLAlchemy) File object, a result dictionary populated by
    user-entered data and a fileSize calculated in the create action, this
    function populates the appropriate attributes with the appropriate values.
    Function called by both create and save actions.

    :param file: the model.File instance to populate (mutated in place).
    :param result: validated form data (NewFileForm/UpdateFileForm output).
    :param fileSize: upload size in bytes (only used when creating).
    :param fileName: sanitized upload name (only used when creating).
    :param createOrSave: 'create' or 'save'; upload-derived fields and the
        enterer are only set on 'create'.
    :returns: the same File instance.
    """
    # User-entered Data
    file.description = h.NFD(result['description'])
    # Recording-only Data: speaker/elicitor come in as string IDs and are
    # resolved to their ORM objects (or cleared when empty).
    if result['speaker']:
        file.speaker = meta.Session.query(
            model.Speaker).get(int(result['speaker']))
    else:
        file.speaker = None
    if result['elicitor']:
        file.elicitor = meta.Session.query(
            model.User).get(int(result['elicitor']))
    else:
        file.elicitor = None
    file.dateElicited = result['dateElicited']
    file.utteranceType = result['utteranceType']
    if createOrSave == 'create':
        # Data extracted from uploaded file
        fileData = request.POST['fileData']
        file.MIMEtype = guess_type(fileData.filename)[0]
        file.size = fileSize
        file.name = h.NFD(fileName).replace("'", "").replace('"', '')
        file.pathName = os.path.join(
            config['app_conf']['permanent_store'], fileName)
        # Add the Enterer as the current user
        file.enterer = meta.Session.query(model.User).get(
            int(session['user_id']))
    # OLD-generated Data: stamp entry time on create, modification time always.
    now = datetime.datetime.utcnow()
    if createOrSave == 'create':
        file.datetimeEntered = now
    file.datetimeModified = now
    return file
class FileController(BaseController):
"""File Controller contains actions about OLD Files. Authorization and
authentication are implemented by the helper decorators authenticate and
authorize which can be found in lib/auth.py.
"""
@h.authenticate
def retrieve(self, path):
    """retrieve action is referenced by the <a>, <img>, <audio>, <video>,
    <embed>, etc. tags.

    Serves ``path`` out of permanent_store via paste's FileApp.

    NOTE(review): ``path`` comes from the URL and is joined directly onto
    permanent_store; a '..' component could escape the directory. Confirm
    the routing layer sanitizes it, or normalize/validate here.
    """
    path = os.path.join(config['app_conf']['permanent_store'], path)
    app = FileApp(path)
    return forward(app)
@h.authenticate
def retrieve_temp(self, path):
    """retrieve_temp action is referenced by the <a> button rendered in
    /derived/file/export.html.

    Serves ``path`` out of temporary_store via paste's FileApp.

    NOTE(review): same potential '..' path-traversal concern as retrieve --
    confirm the routing layer sanitizes ``path``.
    """
    path = os.path.join(config['app_conf']['temporary_store'], path)
    app = FileApp(path)
    return forward(app)
@h.authenticate
def browse(self):
    """Paginated listing of every File, ordered by name."""
    files_query = meta.Session.query(model.File).order_by(model.File.name)
    current_page = int(request.params.get('page', 1))
    c.paginator = paginate.Page(
        files_query,
        page=current_page,
        items_per_page=app_globals.file_items_per_page
    )
    c.browsing = True
    return render('/derived/file/results.html')
@h.authenticate
def view(self, id):
    """View a BLD File. Requires a File ID as input.

    404s when the id is missing, non-numeric, or matches no File.
    """
    if id is None:
        abort(404)
    file_q = meta.Session.query(model.File)
    try:
        c.file = file_q.get(int(id))
    except ValueError:
        # Non-numeric id in the URL.
        abort(404)
    if c.file is None:
        abort(404)
    return render('/derived/file/view.html')
@h.authenticate
@h.authorize(['administrator', 'contributor'])
def add(self):
    """Display HTML form for adding a new BLD File. HTML form calls create
    action.  Restricted to administrators and contributors.
    """
    return renderAddFile()
@h.authenticate
def search(self, values=None, errors=None):
    """Display the Search Files HTML form (the form posts to the query
    action).

    ``values``/``errors`` are supplied when re-rendering after a failed
    validation in the query action.
    """
    # if no user-entered defaults are set, make description the default for
    # searchLocation2
    if not values:
        values = {'searchLocation2': u'description'}
    # BUG FIX: this was an unconditional assignment, which clobbered a
    # user-chosen orderByColumn whenever the form was re-rendered with
    # errors; setdefault only fills it in when absent.
    values.setdefault('orderByColumn', 'id')
    # By default, the additional search restrictors are hidden
    c.viewRestrictors = False
    # Get today in MM/DD/YYYY format
    c.today = datetime.date.today().strftime('%m/%d/%Y')
    html = render('/derived/file/search.html')
    return htmlfill.render(html, defaults=values, errors=errors)
@h.authenticate
@h.authorize(['administrator', 'contributor'])
@restrict('POST')
def create(self):
    """Enter BLD File data into the database. This is the action referenced
    by the HTML form rendered by the add action.

    Validates the POSTed form (date format depends on the user's settings),
    checks that an upload of an allowed MIME type is present, saves the
    upload under permanent_store with a unique name, builds a model.File
    row, commits it and redirects to the view action.  On validation
    failure the add form is re-rendered with the errors.
    """
    # Pick the schema matching the user's preferred date format.
    dateFormat = session.get('userSettings').get('dateFormat')
    if dateFormat == 'DD/MM/YYYY':
        schema = NewFileFormDM()
    else:
        schema = NewFileForm()
    values = dict(request.params)
    try:
        result = schema.to_python(dict(request.params), c)
    except Invalid, e:
        # Validation failed: re-render the add form with the errors.
        return renderAddFile(
            values=values,
            errors=variabledecode.variable_encode(
                e.unpack_errors() or {},
                add_repetitions=False
            )
        )
    else:
        # Make sure that the file type is allowed for upload
        # and return the form if this is not the case
        if request.POST['fileData'] == '':
            return renderAddFile(
                values=values,
                errors={'fileData': 'please enter a file to upload'}
            )
        fileData = request.POST['fileData']
        fileType = guess_type(fileData.filename)[0]
        if fileType not in app_globals.allowedFileTypes:
            return renderAddFile(
                values=values,
                errors={'fileData': 'that file type is not allowed'}
            )
        # All is good: save the file to permanent_store (see development.ini)
        # after stripping path separators, quotes and spaces from the name.
        fileName = fileData.filename.replace(os.sep, '_').replace(
            "'", "").replace('"', '').replace(' ', '_')
        filePathName = os.path.join(
            config['app_conf']['permanent_store'],
            fileName
        )
        # If the file already exists in permanent_store add a number to the end
        # until we have a unique file name
        while os.path.exists(filePathName):
            patt = re.compile('[0-9]+')
            # If the stem already ends in '_<n>', increment n; otherwise
            # append '_1'.
            if patt.match(os.path.splitext(fileName)[0].split('_')[-1]):
                fileName = '_'.join(os.path.splitext(fileName)[0].split('_')[:-1]) + '_' + str(int(os.path.splitext(fileName)[0].split('_')[-1]) + 1) + os.path.splitext(fileName)[1]
            else:
                fileName = os.path.splitext(fileName)[0] + '_' + str(1) + os.path.splitext(fileName)[1]
            filePathName = os.path.join(
                config['app_conf']['permanent_store'],
                fileName
            )
        # Create the permanent file, copy the file data to it and close
        permanent_file = open(
            filePathName,
            'wb'
        )
        shutil.copyfileobj(fileData.file, permanent_file)
        fileData.file.close()
        # Get the size of the newly uploaded file before closing it
        fileSize = os.path.getsize(permanent_file.name)
        permanent_file.close()
        # Create a new File SQLAlchemy Object and populate its attributes with the results
        file = model.File()
        file = getFileAttributes(file, result, fileSize, fileName, 'create')
        # Enter the data
        meta.Session.add(file)
        meta.Session.commit()
        # Issue an HTTP redirect
        redirect(url(controller='file', action='view', id=file.id))
@h.authenticate
@restrict('POST')
def query(self):
    """Query action validates the search input values;
    if valid, query stores the search input values in the session and redirects to results;
    if invalid, query redirect to search action (though I don't think it's possible to enter an invalid query...).
    Query is the action referenced by the HTML form rendered by the search action."""
    schema = SearchFileForm()
    values = dict(request.params)
    try:
        result = schema.to_python(dict(request.params), c)
    except Invalid, e:
        # Invalid search: re-render the search form with the errors.
        return self.search(
            values=values,
            errors=variabledecode.variable_encode(
                e.unpack_errors() or {},
                add_repetitions=False
            )
        )
    else:
        # result is a Python dict nested structure representing the user's query
        # we put result into session['fileSearchValues'] so that the results action
        # can use it to build the SQLAlchemy query
        session['fileSearchValues'] = result
        session.save()
        # Issue an HTTP redirect
        response.status_int = 302
        response.headers['location'] = url(controller='file', action='results')
        return "Moved temporarily"
@h.authenticate
def results(self):
"""Results action uses the filterSearchQuery helper function to build
a query based on the values entered by the user in the search file."""
if 'fileSearchValues' in session:
result = session['fileSearchValues']
file_q = meta.Session.query(model.File)
file_q = h.filterSearchQuery(result, file_q, 'File')
else:
file_q = meta.Session.query(model.File)
c.paginator = paginate.Page(
file_q,
page=int(request.params.get('page', 1)),
items_per_page = app_globals.file_items_per_page
)
return render('/derived/file/results.html')
@h.authenticate
@h.authorize(['administrator', 'contributor'])
def update(self, id=None):
"""Displays an HTML form for updating a BLD File. The HTML form calls
the save action.
"""
if id is None:
abort(404)
file_q = meta.Session.query(model.File)
file = file_q.filter_by(id=id).first()
if file is None:
abort(404)
c.file = file
values = {
'ID': file.id,
'description': file.description,
'elicitor': file.elicitor_id,
'speaker': file.speaker_id,
'utteranceType': file.utteranceType
}
if file.dateElicited:
dateFormat = session.get('userSettings').get('dateFormat')
if dateFormat == 'DD/MM/YYYY':
values['dateElicited'] = file.dateElicited.strftime('%d/%m/%Y')
else:
values['dateElicited'] = file.dateElicited.strftime('%m/%d/%Y')
return renderAddFile(values, None, 'update')
    @h.authenticate
    @h.authorize(['administrator', 'contributor'])
    @restrict('POST')
    def save(self):
        """Update an existing BLD File.  This is the action referenced by the
        HTML form rendered by the update action.
        """
        # Pick the validation schema that matches the user's preferred
        # date format.
        dateFormat = session.get('userSettings').get('dateFormat')
        if dateFormat == 'DD/MM/YYYY':
            schema = UpdateFileFormDM()
        else:
            schema = UpdateFileForm()
        values = dict(request.params)
        try:
            result = schema.to_python(dict(request.params), c)
        except Invalid, e:
            # Validation failed: re-render the update form for the same File
            # with the error messages filled in.
            id = int(values['ID'])
            file_q = meta.Session.query(model.File)
            file = file_q.filter_by(id=id).first()
            c.file = file
            return renderAddFile(
                values=values,
                errors=variabledecode.variable_encode(
                    e.unpack_errors() or {},
                    add_repetitions=False
                ),
                addUpdate='update'
            )
        else:
            # Get the File object with ID from hidden field in update.html
            file_q = meta.Session.query(model.File)
            file = file_q.filter_by(id=result['ID']).first()
            # Populate the File's attributes with the data from the user-entered
            # result dict (size and name are preserved, not user-editable here)
            file = getFileAttributes(file, result, file.size, file.name, 'save')
            # Commit the update
            meta.Session.commit()
            # Issue an HTTP redirect
            response.status_int = 302
            response.headers['location'] = url(controller='file', action='view', id=file.id)
            return "Moved temporarily"
@h.authenticate
@h.authorize(['administrator', 'contributor'])
def delete(self, id):
"""Delete the BLD file with ID=id.
"""
if id is None:
abort(404)
file_q = meta.Session.query(model.File)
file = file_q.get(int(id))
if file is None:
abort(404)
# Delete File info in database
meta.Session.delete(file)
meta.Session.commit()
# Delete file's media in files folder
filePathName = os.path.join(
config['app_conf']['permanent_store'],
file.name
)
try:
os.remove(filePathName)
except OSError:
pass
# Create the flash message
session['flash'] = "File %s has been deleted" % id
session.save()
redirect(url(controller='file', action='results'))
@h.authenticate
@h.authorize(['administrator', 'contributor'])
def associate(self, id):
"""Display the page for associating a BLD File with id=id to a BLD Form.
The HTML form in the rendered page references the link action.
"""
if id is None:
abort(404)
c.file = meta.Session.query(model.File).get(int(id))
if c.file is None:
abort(404)
c.associateForm = render('/derived/file/associateForm.html')
return render('/derived/file/associate.html')
    @h.authenticate
    @h.authorize(['administrator', 'contributor'])
    @restrict('POST')
    def link(self, id):
        """Associate BLD File with id=id to a BLD Form.  The ID of the Form is
        passed via a POST form.  This "ID" may in fact be a comma-separated list
        of Form IDs.
        """
        schema=AssociateFileFormForm()
        values = dict(request.params)
        try:
            result = schema.to_python(dict(request.params), c)
        except Invalid, e:
            # Validation failed: re-render the associate page with the error
            # messages filled into the embedded form.
            c.file = meta.Session.query(model.File).filter_by(id=id).first()
            associateForm = render('/derived/file/associateForm.html')
            errors = variabledecode.variable_encode(
                e.unpack_errors() or {},
                add_repetitions=False
            )
            c.associateForm = htmlfill.render(associateForm, defaults=values,
                                              errors=errors)
            return render('/derived/file/associate.html')
        else:
            # Get the File
            if id is None:
                abort(404)
            file = meta.Session.query(model.File).get(int(id))
            if file is None:
                abort(404)
            # Get the Form(s): keep only comma-separated tokens that are
            # digits (possibly space-padded).
            formID = result['formID']
            patt = re.compile('^[0-9 ]+$')
            formIDs = [int(ID.strip().replace(' ', '')) for ID in formID.split(',')
                       if patt.match(ID)]
            forms = meta.Session.query(model.Form).filter(
                model.Form.id.in_(formIDs)).all()
            if forms:
                for form in forms:
                    if form in file.forms:
                        # Already linked: report, do not duplicate.
                        msg = '<p>Form %s is already associated ' % form.id + \
                            'to File %s.</p>' % file.id
                        h.appendMsgToFlash(h.literal(msg))
                    else:
                        if h.userIsAuthorizedToAccessForm(session['user'], form):
                            file.forms.append(form)
                            msg = '<p>Form %d successfully ' % form.id + \
                                'associated to File %d.' % file.id
                            h.appendMsgToFlash(h.literal(msg))
                        else:
                            msg = '<p>Sorry, you are not authorized to ' + \
                                'access form %d.</p>' % form.id
                            h.appendMsgToFlash(h.literal(msg))
                meta.Session.commit()
                session.save()
            else:
                msg = u'<p>Sorry, no Forms have any of the following ' + \
                    u'IDs: %s.</p>' % formID
                session['flash'] = h.literal(msg)
                session.save()
            return redirect(url(controller='file', action='view', id=file.id))
@h.authenticate
@h.authorize(['administrator', 'contributor'])
def disassociate(self, id, otherID):
"""Disassociate BLD File id from BLD Form otherID."""
if id is None or otherID is None:
abort(404)
file = meta.Session.query(model.File).get(int(id))
form = meta.Session.query(model.Form).get(int(otherID))
if form is None:
if file is None:
abort(404)
else:
session['flash'] = 'There is no Form with ID %s' % otherID
if form in file.forms:
file.forms.remove(form)
meta.Session.commit()
session['flash'] = 'Form %s disassociated' % otherID
else:
session['flash'] = 'File %s was never associated to Form %s' % (id, otherID)
session.save()
redirect(url(controller='file', action='view', id=id))
    @h.authenticate
    def export(self, id=None):
        """Export the BLD Files matching the search criteria
        as a .zip archive, exposing its name and size to the export page."""
        # Get the Files that match the search and get their full path names
        try:
            result = session['fileSearchValues']
            file_q = meta.Session.query(model.File)
            file_q = h.filterSearchQuery(result, file_q, 'File')
            files = file_q.all()
        except KeyError:
            # No stored search criteria: export every File.
            files = meta.Session.query(model.File).all()
        fileNames = [os.path.join(config['app_conf']['permanent_store'], file.name) for file in files]
        # Create the .zip file and write the Files to it
        # Python 2.5 was raising a UnicodeDecodeError when ZipFile.write was
        # passed unicode arguments, so I've str()-ed them
        zfileName = str('%s_%s_file_export.zip' % \
            (session['user_firstName'].lower(), session['user_lastName'].lower()))
        zfileName = str(
            os.path.join(config['app_conf']['temporary_store'], zfileName))
        zout = zipfile.ZipFile(zfileName, "w")
        for fileName in fileNames:
            # Archive entries are flattened to their base names.
            zout.write(str(fileName), os.path.basename(str(fileName)))
        zout.close()
        # Expose archive name/size and per-file sizes to the template.
        c.zfileName = os.path.basename(zfileName)
        c.zfileSize = os.path.getsize(zfileName)
        c.fileNames = [(file.name, os.path.getsize(os.path.join(config['app_conf']['permanent_store'], file.name))) for file in files]
        return render('/derived/file/export.html')
|
"""
tmdbsimple
~~~~~~~~~~
*tmdbsimple* is a wrapper, written in Python, for The Movie Database (TMDb)
API v3. By calling the functions available in *tmdbsimple* you can simplify
your code and easily access a vast amount of movie, tv, and cast data. To find
out more about The Movie Database API, check out the overview page
http://www.themoviedb.org/documentation/api and documentation page
https://developers.themoviedb.org/3/getting-started
https://www.themoviedb.org/documentation/api/status-codes
:copyright: (c) 2013-2020 by Celia Oakley.
:license: GPLv3, see LICENSE for more details
"""
# Package metadata, also available at run time (e.g. tmdbsimple.__version__).
__title__ = 'tmdbsimple'
__version__ = '2.8.0'
__author__ = 'Celia Oakley'
__copyright__ = 'Copyright (c) 2013-2020 Celia Oakley'
__license__ = 'GPLv3'
import os
import requests
from .account import Account, Authentication, GuestSessions, Lists
from .base import APIKeyError
from .changes import Changes
from .configuration import Configuration, Certifications
from .discover import Discover
from .find import Find, Trending
from .genres import Genres
from .movies import Movies, Collections, Companies, Keywords, Reviews
from .people import People, Credits
from .search import Search
from .tv import TV, TV_Seasons, TV_Episodes, TV_Episode_Groups, TV_Changes, Networks
# Public API of the package: the names exported by `from tmdbsimple import *`.
__all__ = ['Account', 'Authentication', 'GuestSessions', 'Lists',
           'APIKeyError',
           'Changes',
           'Configuration', 'Certifications',
           'Discover',
           'Find', 'Trending',
           'Genres',
           'Movies', 'Collections', 'Companies', 'Keywords', 'Reviews',
           # A missing comma after 'Credits' previously fused it with
           # 'Search' into the bogus entry 'CreditsSearch', so neither
           # Credits nor Search was exported correctly.
           'People', 'Credits',
           'Search',
           'TV', 'TV_Seasons', 'TV_Episodes', 'TV_Episode_Groups', 'TV_Changes', 'Networks'
           ]
# Default API key, read once at import time from the environment.
API_KEY = os.environ.get('TMDB_API_KEY', None)
# TMDb API version targeted by this wrapper.
API_VERSION = '3'
# Optional requests session; None by default.  NOTE(review): presumably a
# shared requests.Session used by the request helpers when set — confirm
# against the base module.
REQUESTS_SESSION = None
|
"""Test of neurosynth module."""
import pytest
from .. import neurosynth
@pytest.fixture
def neurosynth_database():
    """Fixture providing a handle to the neurosynth database."""
    return neurosynth.NeurosynthDatabase()
@pytest.fixture
def neurosynth_database_frame(neurosynth_database):
    """Fixture providing the neurosynth data as a dataframe."""
    return neurosynth_database.database()
def test_neurosynth_database_name(neurosynth_database):
    """Test that the neurosynth database object reports the expected name."""
    assert neurosynth_database.name == 'Neurosynth'
def test_neurosynth_database_description(neurosynth_database):
    """Test description of neurosynth database object."""
    # Sanity check only: the exact wording may change, but a real
    # description should be longer than a stub.
    assert len(neurosynth_database.description) > 20
def test_pmid(neurosynth_database_frame):
    """Test whether a specific PMID is in the data."""
    # NOTE(review): `id` is presumably a pandas column; wrapping it in set()
    # makes the membership test run over its values rather than the frame's
    # index labels — confirm against the dataframe layout.
    assert 23400116 in set(neurosynth_database_frame.id)
|
""" Legend encapsulates a graphical plot legend drawing tool
The DIRAC Graphs package is derived from the GraphTool plotting package of the
CMS/Phedex Project by ... <to be added>
"""
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
__RCSID__ = "$Id$"
from matplotlib.patches import Rectangle
from matplotlib.text import Text
from DIRAC.Core.Utilities.Graphs.GraphUtilities import *
from DIRAC.Core.Utilities.Graphs.Palette import Palette
from DIRAC.Core.Utilities.Graphs.GraphData import GraphData
from matplotlib.figure import Figure
from matplotlib.backends.backend_agg import FigureCanvasAgg
import six
class Legend(object):
    """Graphical legend drawing tool for DIRAC Graphs plots.

    A Legend owns a sequence of (label, value) pairs and lays them out as
    colored key boxes with text in a dedicated matplotlib Axes, honouring
    the 'legend_*' entries of the preferences dictionary.
    """

    def __init__(self, data=None, axes=None, *aw, **kw):
        """Initialize the legend.

        :param data: dict mapping labels to value dicts, a GraphData
                     instance, or an already-built sequence of
                     (label, value) pairs
        :param axes: matplotlib Axes the legend will be drawn into
        Remaining arguments are folded into the preferences dict.
        """
        self.text_size = 0
        self.column_width = 0
        self.labels = {}
        if isinstance(data, dict):
            for label, ddict in data.items():
                # Summarize each label by the largest of its truthy values.
                # self.labels[label] = pretty_float(max([ float(x) for x in ddict.values() if x ]) )
                self.labels[label] = "%.1f" % max([float(x) for x in ddict.values() if x])
        elif isinstance(data, GraphData):
            self.labels = data.getLabels()
        else:
            self.labels = data
        # self.labels.reverse()
        self.ax = axes
        self.canvas = None
        if self.ax:
            self.canvas = self.ax.figure.canvas
            # The legend paints its own decorations; hide the axes frame.
            self.ax.set_axis_off()
        self.prefs = evalPrefs(*aw, **kw)
        self.palette = Palette()
        # NOTE(review): self.labels[0][0] assumes a sequence of pairs; the
        # dict branch above does not support this indexing — presumably the
        # dict form never reaches this point with labels set.  Confirm.
        if self.labels and self.labels[0][0] != 'NoLabels':
            percent_flag = self.prefs.get('legend_unit', '')
            if percent_flag == "%":
                # Convert absolute values into percentages of the total.
                sum_value = sum(data.label_values)
                if sum_value > 0.:
                    self.labels = [(l, v / sum_value * 100.) for l, v in self.labels]
        self.__get_column_width()

    def dumpPrefs(self):
        """Print the preference dictionary, one aligned key/value per line."""
        for key in self.prefs:
            print(key.rjust(20), ':', str(self.prefs[key]).ljust(40))

    def setLabels(self, labels):
        """Replace the legend's (label, value) pairs."""
        self.labels = labels

    def setAxes(self, axes):
        """Attach the legend to a matplotlib Axes and hide its frame."""
        self.ax = axes
        self.canvas = self.ax.figure.canvas
        self.ax.set_axis_off()

    def getLegendSize(self):
        """Return (width, height, max_height) of the legend in pixels.

        height is clipped to what fits the configured rows; max_height is
        the height that would be needed to show every label.
        """
        self.__get_column_width()
        legend_position = self.prefs['legend_position']
        legend_width = float(self.prefs['legend_width'])
        legend_height = float(self.prefs['legend_height'])
        legend_padding = float(self.prefs['legend_padding'])
        legend_text_size = self.prefs.get('legend_text_size', self.prefs['text_size'])
        legend_text_padding = self.prefs.get('legend_text_padding', self.prefs['text_padding'])
        # Initialized so an unrecognized legend_position cannot trigger a
        # NameError at the return statement below.
        legend_max_height = 0
        if legend_position in ['right', 'left']:
            # One column in case of vertical legend
            legend_width = self.column_width + legend_padding
            nLabels = len(self.labels)
            legend_max_height = nLabels * (legend_text_size + legend_text_padding)
        elif legend_position == 'bottom':
            # At least one column, even when a single column is wider than
            # the legend box (int(...) would otherwise be 0 and the ceiling
            # division below would divide by zero).
            nColumns = max(1, min(self.prefs['legend_max_columns'], int(legend_width / self.column_width)))
            nLabels = len(self.labels)
            maxRows = self.prefs['legend_max_rows']
            nRows_ax = int(legend_height / 1.6 / self.prefs['text_size'])
            # Integer ceiling division.  This module activates true division
            # via __future__, so '/' here would yield a float and corrupt
            # the row count.
            nRows_label = nLabels // nColumns + (nLabels % nColumns != 0)
            nRows = int(max(1, min(min(nRows_label, maxRows), nRows_ax)))
            text_padding = self.prefs['text_padding']
            text_padding = pixelToPoint(text_padding, self.prefs['dpi'])
            legend_height = int(min(legend_height, (nRows * (self.text_size + text_padding) + text_padding)))
            legend_max_height = int(nLabels * (self.text_size + text_padding))
        return legend_width, legend_height, legend_max_height

    def __get_legend_text_size(self):
        """Return (text_size, text_padding) for legend text, falling back to
        the generic text preferences when no legend-specific ones are set."""
        text_size = self.prefs['text_size']
        text_padding = self.prefs['text_padding']
        legend_text_size = self.prefs.get('legend_text_size', text_size)
        legend_text_padding = self.prefs.get('legend_text_padding', text_padding)
        return legend_text_size, legend_text_padding

    def __get_column_width(self):
        """Compute self.column_width, the pixel width of one legend column.

        The width is measured by rendering the longest 'label value' string
        with the legend font on a scratch Agg canvas, then clipped so a
        column never exceeds the configured legend width.
        """
        max_length = 0
        max_column_text = ''
        flag = self.prefs.get('legend_numbers', True)
        unit = self.prefs.get('legend_unit', False)
        for label, num in self.labels:
            if not flag:
                num = None  # numbers suppressed in the legend
            if num is not None:
                column_length = len(str(label) + str(num)) + 1
            else:
                column_length = len(str(label)) + 1
            if column_length > max_length:
                max_length = column_length
                if flag:
                    if isinstance(num, six.integer_types):
                        numString = str(num)
                    else:
                        numString = "%.1f" % float(num)
                    max_column_text = '%s %s' % (str(label), numString)
                    if unit:
                        max_column_text += "%"
                else:
                    max_column_text = '%s ' % str(label)
        figure = Figure()
        canvas = FigureCanvasAgg(figure)
        dpi = self.prefs['dpi']
        figure.set_dpi(dpi)
        l_size, _ = self.__get_legend_text_size()
        self.text_size = pixelToPoint(l_size, dpi)
        text = Text(0., 0., text=max_column_text, size=self.text_size)
        text.set_figure(figure)
        bbox = text.get_window_extent(canvas.get_renderer())
        columnwidth = bbox.width + 6 * l_size
        # make sure the legend fits in the box
        self.column_width = columnwidth if columnwidth <= self.prefs['legend_width']\
            else self.prefs['legend_width'] - 6 * l_size

    def draw(self):
        """Draw the legend into its Axes.

        Entries are laid out column-major (row-by-row within a column);
        when the available rows and columns cannot hold every label, the
        last slot shows '... plus N more' instead.
        """
        dpi = self.prefs['dpi']
        ax_xsize = self.ax.get_window_extent().width
        ax_ysize = self.ax.get_window_extent().height
        nLabels = len(self.labels)
        # At least one column, even when a column is wider than the axes.
        nColumns = max(1, min(self.prefs['legend_max_columns'], int(ax_xsize / self.column_width)))
        maxRows = self.prefs['legend_max_rows']
        nRows_ax = int(ax_ysize / 1.6 / self.prefs['text_size'])
        # Floor division: with __future__ division active, '/' would
        # produce a float row count here (see getLegendSize).
        nRows_label = nLabels // nColumns + (nLabels % nColumns != 0)
        nRows = max(1, min(min(nRows_label, maxRows), nRows_ax))
        self.ax.set_xlim(0., float(ax_xsize))
        self.ax.set_ylim(-float(ax_ysize), 0.)
        legend_text_size, legend_text_padding = self.__get_legend_text_size()
        legend_text_size_point = pixelToPoint(legend_text_size, dpi)
        box_width = legend_text_size
        # Center the columns horizontally within the axes.
        legend_offset = (ax_xsize - nColumns * self.column_width) / 2
        nc = 0
        # self.labels.reverse()
        for label, num in self.labels:
            num_flag = self.prefs.get('legend_numbers', True)
            percent_flag = self.prefs.get('legend_unit', '')
            if num_flag:
                if percent_flag == "%":
                    num = "%.1f" % num + '%'
                else:
                    num = "%.1f" % num
            else:
                num = None
            color = self.palette.getColor(label)
            row = nc % nRows
            column = int(nc / nRows)
            if row == nRows - 1 and column == nColumns - 1 and nc != nLabels - 1:
                # Out of room: report how many entries were omitted and stop.
                last_text = '... plus %d more' % (nLabels - nc)
                self.ax.text(float(column * self.column_width) + legend_offset, -float(row * 1.6 * box_width),
                             last_text, horizontalalignment='left',
                             verticalalignment='top', size=legend_text_size_point)
                break
            else:
                self.ax.text(float(column * self.column_width) + 2. * box_width + legend_offset, -row * 1.6 * box_width,
                             str(label), horizontalalignment='left',
                             verticalalignment='top', size=legend_text_size_point)
                if num is not None:
                    self.ax.text(float((column + 1) * self.column_width) - 2 * box_width + legend_offset,
                                 -float(row * 1.6 * box_width),
                                 str(num), horizontalalignment='right',
                                 verticalalignment='top', size=legend_text_size_point)
                box = Rectangle((float(column * self.column_width) + legend_offset, -float(row * 1.6 * box_width) - box_width),
                                box_width, box_width)
                box.set_edgecolor('black')
                box.set_linewidth(pixelToPoint(0.5, dpi))
                box.set_facecolor(color)
                self.ax.add_patch(box)
            nc += 1
|
class Solution(object):
    def letterCombinations(self, digits):
        """Return every letter combination the digit string could represent
        on a phone keypad.

        :type digits: str
        :rtype: List[str]
        """
        if not digits:
            return []
        keypad = {
            "2": 'abc',
            "3": 'def',
            "4": 'ghi',
            "5": 'jkl',
            "6": 'mno',
            "7": 'pqrs',
            "8": 'tuv',
            "9": 'wxyz',
        }
        combos = ['']
        for digit in digits:
            # Letter-major expansion: the outer loop runs over the digit's
            # letters and the inner over the prefixes built so far, which
            # keeps the historical output ordering.
            combos = [prefix + letter
                      for letter in keypad.get(digit, [])
                      for prefix in combos]
        return combos
if __name__ == '__main__':
    # Demo run.  The parenthesized print form is valid on both Python 2
    # (single-expression print statement) and Python 3, unlike the bare
    # py2-only `print expr`.  "23" (a str, per the declared parameter type)
    # yields the same result the old list argument did.
    print(Solution().letterCombinations("23"))
|
from __future__ import unicode_literals
import io
import os
import mock
from six import text_type
from flask import current_app
from flask import make_response
from data.fixtures.test_data import TestFixture
from compair.tests.test_compair import ComPAIRAPITestCase
from compair.core import db
from compair.models import KalturaMedia
class FileRetrieveTests(ComPAIRAPITestCase):
base_url = '/app'
fixtures = None
    def setUp(self):
        """Build a course fixture (students, one assignment, answers, a
        draft student) and start tracking files created during the test."""
        super(FileRetrieveTests, self).setUp()
        self.fixtures = TestFixture().add_course(
            num_students=5, num_assignments=1, num_groups=0,
            num_answers=1, with_draft_student=True)
        # Attachment file names created by tests; removed in tearDown.
        self.files_to_cleanup = []
def tearDown(self):
folder = current_app.config['ATTACHMENT_UPLOAD_FOLDER']
for file_name in self.files_to_cleanup:
file_path = os.path.join(folder, file_name)
try:
if os.path.isfile(file_path):
os.remove(file_path)
except Exception as e:
print(e)
    def test_view_file(self):
        """Attachment retrieval requires login, 404s on unknown file names,
        and serves each supported type with the right mimetype and
        disposition (PDFs inline, everything else as a download)."""
        db_file = self.fixtures.add_file(self.fixtures.instructor)
        filename = db_file.name
        url = self.base_url + '/attachment/' + filename
        # test login required
        rv = self.client.get(url)
        self.assert401(rv)
        # TODO: no authorization control right now and needs to be added in the future
        # test unauthorized user
        # with self.login(self.fixtures.unauthorized_instructor.username):
        #     rv = self.client.get(url)
        #     self.assert403(rv)
        # valid instructor
        with self.login(self.fixtures.instructor.username):
            # invalid file name (db is not actually touched)
            rv = self.client.get(self.base_url + '/attachment/'+filename)
            self.assert404(rv)
            self.assertEqual('invalid file name', text_type(rv.get_data(as_text=True)))
            # Patch out the filesystem check and Flask's send_file so the
            # handler's call arguments can be inspected per extension.
            with mock.patch('compair.api.os.path.exists', return_value=True):
                with mock.patch('compair.api.send_file', return_value=make_response("OK")) as mock_send_file:
                    # test all attachment types
                    extensions = [
                        ('pdf', 'application/pdf'),
                        ('mp3', 'audio/mpeg'),
                        ('mp4', 'video/mp4'),
                        ('jpg', 'image/jpeg'),
                        ('jpeg', 'image/jpeg'),
                        ('png', 'image/png')
                    ]
                    for (extension, mimetype) in extensions:
                        db_file = self.fixtures.add_file(self.fixtures.instructor, name="file_name."+extension)
                        filename = db_file.name
                        url = self.base_url + '/attachment/' + filename
                        self.client.get(url)
                        # PDFs are shown inline; other types are downloads.
                        if extension == 'pdf':
                            mock_send_file.assert_called_once_with(
                                '{}/{}'.format(current_app.config['ATTACHMENT_UPLOAD_FOLDER'], filename),
                                attachment_filename=None,
                                as_attachment=False,
                                mimetype=mimetype
                            )
                        else:
                            mock_send_file.assert_called_once_with(
                                '{}/{}'.format(current_app.config['ATTACHMENT_UPLOAD_FOLDER'], filename),
                                attachment_filename=None,
                                as_attachment=True,
                                mimetype=mimetype
                            )
                        mock_send_file.reset_mock()
                        # test overriding attachment filename
                        override_name = "override."+db_file.extension
                        self.client.get(url+"?name="+override_name)
                        # The ?name override only applies to downloads.
                        if extension == 'pdf':
                            mock_send_file.assert_called_once_with(
                                '{}/{}'.format(current_app.config['ATTACHMENT_UPLOAD_FOLDER'], filename),
                                attachment_filename=None,
                                as_attachment=False,
                                mimetype=mimetype
                            )
                        else:
                            mock_send_file.assert_called_once_with(
                                '{}/{}'.format(current_app.config['ATTACHMENT_UPLOAD_FOLDER'], filename),
                                attachment_filename=override_name,
                                as_attachment=True,
                                mimetype=mimetype
                            )
                        mock_send_file.reset_mock()
    def test_create_attachment(self):
        """POST /api/attachment requires login, rejects missing files and
        disallowed extensions, and stores approved types (case-insensitively)
        under a generated '<id>.<ext>' name."""
        url = '/api/attachment'
        test_formats = [
            ('pdf', 'application/pdf'),
            ('mp3', 'audio/mpeg'),
            ('mp4', 'video/mp4'),
            ('webm', 'video/webm'),
            ('jpg', 'image/jpeg'),
            ('jpeg', 'image/jpeg')
        ]
        # test login required
        uploaded_file = io.BytesIO(b"this is a test")
        rv = self.client.post(url, data=dict(file=(uploaded_file, 'alias.pdf')))
        self.assert401(rv)
        uploaded_file.close()
        with self.login(self.fixtures.instructor.username):
            # test no file uploaded
            filename = 'alias.pdf'
            rv = self.client.post(url, data=dict())
            self.assert400(rv)
            self.assertEqual("File Not Uploaded", rv.json['title'])
            self.assertEqual("Sorry, no file was found to upload. Please try uploading again.", rv.json['message'])
            # test disallowed file type (.xyz is not an approved extension)
            filename = 'alias.xyz'
            uploaded_file = io.BytesIO(b"this is a test")
            rv = self.client.post(url, data=dict(file=(uploaded_file, filename)))
            self.assert400(rv)
            self.assertEqual("File Not Uploaded", rv.json['title'])
            self.assertEqual("Please try again with an approved file type, which includes: JPEG, JPG, MP3, MP4, PDF, PNG, WEBM.",
                rv.json['message'])
            for extension, mimetype in test_formats:
                filename = 'alias.'+extension
                uploaded_file = io.BytesIO(b"this is a test")
                rv = self.client.post(url, data=dict(file=(uploaded_file, filename)))
                self.assert200(rv)
                uploaded_file.close()
                actual_file = rv.json['file']
                self.files_to_cleanup.append(actual_file['name'])
                # Stored name is the generated id plus lower-cased extension.
                self.assertEqual(actual_file['id']+"."+extension.lower(), actual_file['name'])
                self.assertEqual(filename, actual_file['alias'])
                self.assertEqual(extension.lower(), actual_file['extension'])
                self.assertEqual(mimetype, actual_file['mimetype'])
                # test with uppercase extension
                filename = 'alias.'+extension.upper()
                uploaded_file = io.BytesIO(b"this is a test")
                rv = self.client.post(url, data=dict(file=(uploaded_file, filename)))
                self.assert200(rv)
                uploaded_file.close()
                actual_file = rv.json['file']
                self.files_to_cleanup.append(actual_file['name'])
                self.assertEqual(actual_file['id']+"."+extension.lower(), actual_file['name'])
                self.assertEqual(filename, actual_file['alias'])
                self.assertEqual(extension.lower(), actual_file['extension'])
                self.assertEqual(mimetype, actual_file['mimetype'])
    @mock.patch('compair.kaltura.kaltura_session.KalturaSession._api_start')
    @mock.patch('compair.kaltura.kaltura_session.KalturaSession._api_end')
    @mock.patch('compair.kaltura.upload_token.UploadToken._api_add')
    def test_get_kaltura(self, mocked_upload_token_add, mocked_kaltura_session_end, mocked_kaltura_session_start):
        """GET /api/attachment/kaltura requires login and the feature flag,
        returns a Kaltura upload URL built from a mocked session and upload
        token, records a KalturaMedia row, and — when the global-unique-
        identifier option is on — uses that identifier for the session user
        if the account has one."""
        url = '/api/attachment/kaltura'
        current_app.config['KALTURA_ENABLED'] = True
        current_app.config['KALTURA_SERVICE_URL'] = "https://www.kaltura.com"
        current_app.config['KALTURA_PARTNER_ID'] = 123
        current_app.config['KALTURA_USER_ID'] = "test@test.com"
        current_app.config['KALTURA_SECRET'] = "abc"
        current_app.config['KALTURA_PLAYER_ID'] = 456
        mocked_kaltura_session_start.return_value = "ks_mock"
        mocked_kaltura_session_end.return_value = {}
        mocked_upload_token_add.return_value = {
            "id": "mocked_upload_token_id"
        }
        expected_upload_url = "https://www.kaltura.com/api_v3/service/uploadtoken/action/upload?format=1&uploadTokenId=mocked_upload_token_id&ks=ks_mock"
        # test login required
        rv = self.client.get(url)
        self.assert401(rv)
        with self.login(self.fixtures.instructor.username):
            # test kaltura disabled
            current_app.config['KALTURA_ENABLED'] = False
            rv = self.client.get(url)
            self.assert400(rv)
            # test kaltura enabled
            current_app.config['KALTURA_ENABLED'] = True
            rv = self.client.get(url)
            self.assert200(rv)
            self.assertEqual(rv.json['upload_url'], expected_upload_url)
            # Two sessions expected: one plain, one restricted to the
            # upload-token edit privilege.
            self.assertEqual(mocked_kaltura_session_start.call_count, 2)
            mocked_kaltura_session_start.assert_any_call("test@test.com")
            mocked_kaltura_session_start.assert_any_call("test@test.com",
                privileges="edit:mocked_upload_token_id,urirestrict:/api_v3/service/uploadtoken/action/upload*")
            mocked_kaltura_session_start.reset_mock()
            mocked_kaltura_session_end.assert_called_once_with("ks_mock")
            mocked_kaltura_session_end.reset_mock()
            mocked_upload_token_add.assert_called_once_with("ks_mock")
            mocked_upload_token_add.reset_mock()
            # A pending KalturaMedia row should have been recorded with the
            # upload session data and no file/entry details yet.
            kaltura_media_items = KalturaMedia.query.all()
            self.assertEqual(len(kaltura_media_items), 1)
            self.assertEqual(kaltura_media_items[0].user_id, self.fixtures.instructor.id)
            self.assertEqual(kaltura_media_items[0].partner_id, 123)
            self.assertEqual(kaltura_media_items[0].player_id, 456)
            self.assertEqual(kaltura_media_items[0].upload_ks, "ks_mock")
            self.assertEqual(kaltura_media_items[0].upload_token_id, "mocked_upload_token_id")
            self.assertIsNone(kaltura_media_items[0].file_name)
            self.assertIsNone(kaltura_media_items[0].entry_id)
            self.assertIsNone(kaltura_media_items[0].download_url)
            # use global unique identifer (user has no global unique identifer)
            current_app.config['KALTURA_USE_GLOBAL_UNIQUE_IDENTIFIER'] = True
            mocked_upload_token_add.return_value = {
                "id": "mocked_upload_token_id2"
            }
            expected_upload_url = "https://www.kaltura.com/api_v3/service/uploadtoken/action/upload?format=1&uploadTokenId=mocked_upload_token_id2&ks=ks_mock"
            rv = self.client.get(url)
            self.assert200(rv)
            self.assertEqual(rv.json['upload_url'], expected_upload_url)
            self.assertEqual(mocked_kaltura_session_start.call_count, 2)
            # Falls back to the configured KALTURA_USER_ID.
            mocked_kaltura_session_start.assert_any_call("test@test.com")
            mocked_kaltura_session_start.assert_any_call("test@test.com",
                privileges="edit:mocked_upload_token_id2,urirestrict:/api_v3/service/uploadtoken/action/upload*")
            mocked_kaltura_session_start.reset_mock()
            mocked_kaltura_session_end.assert_called_once_with("ks_mock")
            mocked_kaltura_session_end.reset_mock()
            mocked_upload_token_add.assert_called_once_with("ks_mock")
            mocked_upload_token_add.reset_mock()
            kaltura_media_items = KalturaMedia.query.all()
            self.assertEqual(len(kaltura_media_items), 2)
            self.assertEqual(kaltura_media_items[1].user_id, self.fixtures.instructor.id)
            # use global unique identifer (user has global unique identifer)
            self.fixtures.instructor.global_unique_identifier = "1234567890@test.com"
            mocked_upload_token_add.return_value = {
                "id": "mocked_upload_token_id3"
            }
            expected_upload_url = "https://www.kaltura.com/api_v3/service/uploadtoken/action/upload?format=1&uploadTokenId=mocked_upload_token_id3&ks=ks_mock"
            rv = self.client.get(url)
            self.assert200(rv)
            self.assertEqual(rv.json['upload_url'], expected_upload_url)
            self.assertEqual(mocked_kaltura_session_start.call_count, 2)
            # Now the user's own identifier is used for both sessions.
            mocked_kaltura_session_start.assert_any_call("1234567890@test.com")
            mocked_kaltura_session_start.assert_any_call("1234567890@test.com",
                privileges="edit:mocked_upload_token_id3,urirestrict:/api_v3/service/uploadtoken/action/upload*")
            mocked_kaltura_session_start.reset_mock()
            mocked_kaltura_session_end.assert_called_once_with("ks_mock")
            mocked_kaltura_session_end.reset_mock()
            mocked_upload_token_add.assert_called_once_with("ks_mock")
            mocked_upload_token_add.reset_mock()
            kaltura_media_items = KalturaMedia.query.all()
            self.assertEqual(len(kaltura_media_items), 3)
            self.assertEqual(kaltura_media_items[2].user_id, self.fixtures.instructor.id)
            current_app.config['KALTURA_USE_GLOBAL_UNIQUE_IDENTIFIER'] = False
@mock.patch('compair.kaltura.kaltura_session.KalturaSession._api_start')
@mock.patch('compair.kaltura.kaltura_session.KalturaSession._api_end')
@mock.patch('compair.kaltura.upload_token.UploadToken._api_get')
@mock.patch('compair.kaltura.media.Media._api_add')
@mock.patch('compair.kaltura.media.Media._api_add_content')
def test_post_kaltura(self, mocked_kaltura_media_add_content, mocked_kaltura_media_add, mocked_upload_token_get,
mocked_kaltura_session_end, mocked_kaltura_session_start):
url = '/api/attachment/kaltura/mocked_upload_token_id'
invalid_url = '/api/attachment/kaltura/mocked_upload_token_id_invalid'
current_app.config['KALTURA_ENABLED'] = True
current_app.config['KALTURA_SERVICE_URL'] = "https://www.kaltura.com"
current_app.config['KALTURA_PARTNER_ID'] = 123
current_app.config['KALTURA_USER_ID'] = "test@test.com"
current_app.config['KALTURA_SECRET'] = "abc"
current_app.config['KALTURA_PLAYER_ID'] = 456
mocked_kaltura_session_start.return_value = "ks_mock"
mocked_kaltura_session_end.return_value = {}
mocked_upload_token_get.return_value = {
"id": "mocked_upload_token_id",
"fileName": "uploaded_audio_file.mp3"
}
mocked_kaltura_media_add.return_value = {
"id": "mock_entry_id"
}
mocked_kaltura_media_add_content.return_value = {
"id": "mock_entry_id",
"downloadUrl": "http://www.download/url.com"
}
kaltura_media = KalturaMedia(
user=self.fixtures.instructor,
service_url="https://www.kaltura.com",
partner_id=123,
player_id=456,
upload_ks="upload_ks_mock",
upload_token_id="mocked_upload_token_id"
)
db.session.add(kaltura_media)
kaltura_media2 = KalturaMedia(
user=self.fixtures.instructor,
service_url="https://www.kaltura.com",
partner_id=123,
player_id=456,
upload_ks="upload_ks_mock2",
upload_token_id="mocked_upload_token_id2"
)
db.session.add(kaltura_media)
kaltura_media3 = KalturaMedia(
user=self.fixtures.instructor,
service_url="https://www.kaltura.com",
partner_id=123,
player_id=456,
upload_ks="upload_ks_mock3",
upload_token_id="mocked_upload_token_id3"
)
db.session.add(kaltura_media)
invalid_kaltura_media = KalturaMedia(
user=self.fixtures.instructor,
service_url="https://www.kaltura.com",
partner_id=123,
player_id=456,
upload_ks="upload_ks_mock",
upload_token_id="mocked_upload_token_id_invalid",
entry_id="def"
)
db.session.add(invalid_kaltura_media)
db.session.commit()
# test login required
rv = self.client.post(url)
self.assert401(rv)
with self.login(self.fixtures.instructor.username):
# test kaltura disabled
current_app.config['KALTURA_ENABLED'] = False
rv = self.client.post(url)
self.assert400(rv)
# test invalid upload_token_id
current_app.config['KALTURA_ENABLED'] = True
rv = self.client.post(invalid_url)
self.assert400(rv)
# test valid
rv = self.client.post(url)
self.assert200(rv)
self.assertEqual(rv.json['file']['id'], kaltura_media.files.all()[0].uuid)
self.assertEqual(kaltura_media.file_name, "uploaded_audio_file.mp3")
self.assertEqual(kaltura_media.entry_id, "mock_entry_id")
self.assertEqual(kaltura_media.download_url, "http://www.download/url.com")
self.assertEqual(kaltura_media.service_url, "https://www.kaltura.com")
mocked_kaltura_session_start.assert_called_once_with("test@test.com")
mocked_kaltura_session_start.reset_mock()
self.assertEqual(mocked_kaltura_session_end.call_count, 2)
mocked_kaltura_session_end.assert_any_call("ks_mock")
mocked_kaltura_session_end.assert_any_call("upload_ks_mock")
mocked_kaltura_session_end.reset_mock()
mocked_upload_token_get.assert_called_once_with("ks_mock", "mocked_upload_token_id")
mocked_upload_token_get.reset_mock()
mocked_kaltura_media_add.assert_called_once_with("ks_mock", 5)
mocked_kaltura_media_add.reset_mock()
mocked_kaltura_media_add_content.assert_called_once_with("ks_mock", "mock_entry_id", "mocked_upload_token_id")
mocked_kaltura_media_add_content.reset_mock()
# test direct download from kaltura via /attachment
kaltura_attachment_url = self.base_url + '/attachment/' + rv.json['file']['name'] + '?name=uploaded_audio_file.mp3'
rv = self.client.get(kaltura_attachment_url)
self.assertTrue(rv.location.startswith(kaltura_media.download_url)) # redirecting to Kaltura
mocked_kaltura_session_start.assert_called_once_with("test@test.com", \
expiry=60, \
privileges='sview:'+kaltura_media.entry_id+',urirestrict:/url.com/*'
)
mocked_kaltura_session_start.reset_mock()
# use global unique identifer (user has no global unique identifer)
current_app.config['KALTURA_USE_GLOBAL_UNIQUE_IDENTIFIER'] = True
url = '/api/attachment/kaltura/mocked_upload_token_id2'
mocked_upload_token_get.return_value = {
"id": "mocked_upload_token_id2",
"fileName": "uploaded_audio_file2.mp3"
}
mocked_kaltura_media_add.return_value = {
"id": "mock_entry_id2"
}
mocked_kaltura_media_add_content.return_value = {
"id": "mock_entry_id2",
"downloadUrl": "www.download/url2.com"
}
rv = self.client.post(url)
self.assert200(rv)
self.assertEqual(rv.json['file']['id'], kaltura_media2.files.all()[0].uuid)
self.assertEqual(kaltura_media2.file_name, "uploaded_audio_file2.mp3")
self.assertEqual(kaltura_media2.entry_id, "mock_entry_id2")
self.assertEqual(kaltura_media2.download_url, "www.download/url2.com")
self.assertEqual(kaltura_media2.service_url, "https://www.kaltura.com")
mocked_kaltura_session_start.assert_called_once_with("test@test.com")
mocked_kaltura_session_start.reset_mock()
self.assertEqual(mocked_kaltura_session_end.call_count, 2)
mocked_kaltura_session_end.assert_any_call("ks_mock")
mocked_kaltura_session_end.assert_any_call("upload_ks_mock2")
mocked_kaltura_session_end.reset_mock()
mocked_upload_token_get.assert_called_once_with("ks_mock", "mocked_upload_token_id2")
mocked_upload_token_get.reset_mock()
mocked_kaltura_media_add.assert_called_once_with("ks_mock", 5)
mocked_kaltura_media_add.reset_mock()
mocked_kaltura_media_add_content.assert_called_once_with("ks_mock", "mock_entry_id2", "mocked_upload_token_id2")
mocked_kaltura_media_add_content.reset_mock()
# use global unique identifer (user has global unique identifer)
self.fixtures.instructor.global_unique_identifier = "1234567890@test.com"
url = '/api/attachment/kaltura/mocked_upload_token_id3'
mocked_upload_token_get.return_value = {
"id": "mocked_upload_token_id3",
"fileName": "uploaded_audio_file3.mp3"
}
mocked_kaltura_media_add.return_value = {
"id": "mock_entry_id3"
}
mocked_kaltura_media_add_content.return_value = {
"id": "mock_entry_id3",
"downloadUrl": "www.download/url3.com"
}
rv = self.client.post(url)
self.assert200(rv)
self.assertEqual(rv.json['file']['id'], kaltura_media3.files.all()[0].uuid)
self.assertEqual(kaltura_media3.file_name, "uploaded_audio_file3.mp3")
self.assertEqual(kaltura_media3.entry_id, "mock_entry_id3")
self.assertEqual(kaltura_media3.download_url, "www.download/url3.com")
self.assertEqual(kaltura_media3.service_url, "https://www.kaltura.com")
mocked_kaltura_session_start.assert_called_once_with("1234567890@test.com")
mocked_kaltura_session_start.reset_mock()
self.assertEqual(mocked_kaltura_session_end.call_count, 2)
mocked_kaltura_session_end.assert_any_call("ks_mock")
mocked_kaltura_session_end.assert_any_call("upload_ks_mock3")
mocked_kaltura_session_end.reset_mock()
mocked_upload_token_get.assert_called_once_with("ks_mock", "mocked_upload_token_id3")
mocked_upload_token_get.reset_mock()
mocked_kaltura_media_add.assert_called_once_with("ks_mock", 5)
mocked_kaltura_media_add.reset_mock()
mocked_kaltura_media_add_content.assert_called_once_with("ks_mock", "mock_entry_id3", "mocked_upload_token_id3")
mocked_kaltura_media_add_content.reset_mock()
current_app.config['KALTURA_USE_GLOBAL_UNIQUE_IDENTIFIER'] = False
|
from urllib.parse import urljoin
from django.conf import settings
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
from . import const
from notifications.notifications import UserMessage
from common.utils import get_logger
from .models import Ticket
logger = get_logger(__file__)
class BaseTicketMessage(UserMessage):
    """Base class for ticket notification messages.

    Subclasses set ``self.ticket`` before calling ``super().__init__(user)``
    and implement the ``content_title`` and ``subject`` properties.
    """
    # Attributes supplied by subclasses.  (Fixed: the original annotated
    # ``title`` with the *value* '' instead of a type.)
    title: str
    ticket: Ticket
    content_title: str

    @property
    def ticket_detail_url(self):
        """Absolute URL of this ticket's detail page on the site."""
        # (Removed an unused local that fetched self.ticket.type.)
        return urljoin(settings.SITE_URL,
                       const.TICKET_DETAIL_URL.format(id=str(self.ticket.id)))

    @property
    def content_title(self):
        """Heading shown at the top of the rendered message body."""
        raise NotImplementedError

    @property
    def subject(self):
        """Message subject line."""
        raise NotImplementedError

    def get_html_msg(self) -> dict:
        """Render the HTML notification.

        Returns a dict with ``subject`` and rendered ``message`` keys.
        """
        context = dict(
            title=self.content_title,
            ticket_detail_url=self.ticket_detail_url,
            # Preserve the ticket body's line breaks in the HTML output.
            body=self.ticket.body.replace('\n', '<br/>'),
        )
        message = render_to_string('tickets/_msg_ticket.html', context)
        return {
            'subject': self.subject,
            'message': message
        }

    @classmethod
    def gen_test_msg(cls):
        """Abstract hook; concrete subclasses build a real test instance."""
        return None
class TicketAppliedToAssignee(BaseTicketMessage):
    """Notification sent to an assignee when a new ticket is filed."""

    def __init__(self, user, ticket):
        self.ticket = ticket
        super().__init__(user)

    @property
    def content_title(self):
        # NOTE(review): the msgid reads "Your has a new ticket" -- looks like
        # a typo for "You have a new ticket", but it doubles as a translation
        # key, so fixing it requires updating the .po catalogs too.
        applicant = str(self.ticket.applicant_display)
        return _('Your has a new ticket, applicant - {}').format(applicant)

    @property
    def subject(self):
        return _('New Ticket - {} ({})').format(
            self.ticket.title, self.ticket.get_type_display())

    @classmethod
    def gen_test_msg(cls):
        """Build a throw-away instance from the first DB rows, for testing."""
        from .models import Ticket
        from users.models import User
        first_ticket = Ticket.objects.first()
        assignee = User.objects.first()
        return cls(assignee, first_ticket)
class TicketProcessedToApplicant(BaseTicketMessage):
    """Notification sent back to the applicant once their ticket is handled."""

    def __init__(self, user, ticket, processor):
        self.ticket = ticket
        self.processor = processor
        super().__init__(user)

    @property
    def content_title(self):
        return _('Your ticket has been processed, processor - {}').format(
            str(self.processor))

    @property
    def subject(self):
        # NOTE(review): "Ticket has processed" is likely meant to be
        # "Ticket has been processed"; the string is a translation key,
        # so fix it together with the .po catalogs.
        return _('Ticket has processed - {} ({})').format(
            self.ticket.title, self.ticket.get_type_display())

    @classmethod
    def gen_test_msg(cls):
        """Build a throw-away instance from existing DB rows, for testing."""
        from .models import Ticket
        from users.models import User
        first_ticket = Ticket.objects.first()
        applicant = User.objects.first()
        handler = User.objects.last()
        return cls(applicant, first_ticket, handler)
|
"""
Django settings for day12bbs project.
Generated by 'django-admin startproject' using Django 1.10.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os

# Absolute path of the project root (two levels up from this file).
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control -- rotate it and
# load it from the environment before any real deployment.
SECRET_KEY = 'pm&9tjq@pv46t*f87mh)v0tyjbx+7@^@po))x^93zl^h$3gft3'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

# Application definition (stock contrib apps only; no project apps yet).
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'day12bbs.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # Project-level template directory in addition to per-app templates.
        'DIRS': [os.path.join(BASE_DIR, 'templates')]
        ,
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'day12bbs.wsgi.application'

# Database: default SQLite file in the project root.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation (stock Django validators).
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization.
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, images).
STATIC_URL = '/static/'
|
" Ninja Web Util "
__version__ = ' 0.8 '
__license__ = ' GPL '
__author__ = ' juancarlospaco '
__email__ = ' juancarlospaco@ubuntu.com '
__url__ = ''
__date__ = ' 15/08/2013 '
__prj__ = ' webutil '
__docformat__ = 'html'
__source__ = ''
__full_licence__ = ''
from os import path
from sip import setapi
try:
from urllib.request import urlopen
except ImportError:
from urllib import urlopen # lint:ok
from PyQt4.QtGui import (QLabel, QCompleter, QDirModel, QPushButton, QWidget,
QFileDialog, QDockWidget, QVBoxLayout, QCursor, QLineEdit, QIcon, QGroupBox,
QCheckBox, QGraphicsDropShadowEffect, QGraphicsBlurEffect, QColor, QComboBox,
QApplication, QMessageBox, QScrollArea, QProgressBar)
from PyQt4.QtCore import Qt, QDir
try:
from PyKDE4.kdeui import KTextEdit as QPlainTextEdit
except ImportError:
from PyQt4.QtGui import QPlainTextEdit # lint:ok
from ninja_ide.gui.explorer.explorer_container import ExplorerContainer
from ninja_ide.core import plugin
from css_minifica import *
from html_minifica import *
from js_minifica import *
# sip.setapi() must actually be *called*; the original wrapped these calls
# in a generator expression that was never consumed, so they never ran.
# NOTE(review): setapi() only takes effect when invoked before the first
# PyQt4 import; since PyQt4 is already imported above, a late call may
# raise ValueError ("API already set"), which is tolerated here.
for _api_name in ("QDate", "QDateTime", "QString", "QTime", "QUrl",
                  "QTextStream", "QVariant"):
    try:
        setapi(_api_name, 2)
    except ValueError:
        pass
HELPMSG = '''
<h3>Ninja Web Util</h3>
This is an HTML5/CSS3/JS Optimizer Non-Obfuscating Compressor tool for Ninja.
<ul>
<li>Average compress better than YUI Compressor
<li>The only tool to remove optional HTML tags
<li>The only tool to compress HTML tags
<li>The only tool to compress Percentage/Pixel CSS values
<li>Does Not Obfuscate JS (its a feature or a bug, you decide)
</ul>
<br><br>
''' + ''.join((__doc__, __version__, __license__, 'by', __author__, __email__))
SAMPLE_TEXT = '''
/* -----------------------------------------------------------------------------
----------------------------------------------------------------------------- */
.chun.li {
color: rgb(255, 255, 255);
width: 100%;
height: 1000px;
font-weight: normal;
backgroud: url("example.com/img.gif");
color: #00ff00;
line-height: 0.5;
border: 0px solid yellow;
} ;;
empty.selector.will.be.removed {}
/*--------------------------------------------------------------------------- */
'''
class Main(plugin.Plugin):
    """Ninja-IDE plugin: HTML5/CSS3/JS optimizing, non-obfuscating compressor."""

    def initialize(self, *args, **kwargs):
        """Build the dock widget UI, wire all signals, register the tab."""
        ec = ExplorerContainer()
        super(Main, self).initialize(*args, **kwargs)
        self.editor_s = self.locator.get_service('editor')
        # directory auto completer for the local-file path field
        self.completer = QCompleter(self)
        self.dirs = QDirModel(self)
        self.dirs.setFilter(QDir.AllEntries | QDir.NoDotAndDotDot)
        self.completer.setModel(self.dirs)
        self.completer.setCaseSensitivity(Qt.CaseInsensitive)
        self.completer.setCompletionMode(QCompleter.PopupCompletion)
        # "Source" group: where the text to process comes from
        self.group0 = QGroupBox()
        self.group0.setTitle(' Source ')
        self.source = QComboBox()
        self.source.addItems(['Clipboard', 'Local File', 'Remote URL', 'Ninja'])
        self.source.currentIndexChanged.connect(self.on_source_changed)
        self.infile = QLineEdit(path.expanduser("~"))
        self.infile.setPlaceholderText(' /full/path/to/file.html ')
        self.infile.setCompleter(self.completer)
        self.open = QPushButton(QIcon.fromTheme("folder-open"), 'Open')
        self.open.setCursor(QCursor(Qt.PointingHandCursor))
        self.open.clicked.connect(lambda: self.infile.setText(str(
            QFileDialog.getOpenFileName(self.dock, "Open a File to read from",
            path.expanduser("~"), ';;'.join(['{}(*.{})'.format(e.upper(), e)
            for e in ['css', 'html', 'js', 'txt', '*']])))))
        self.inurl = QLineEdit('http://www.')
        self.inurl.setPlaceholderText('http://www.full/url/to/remote/file.html')
        self.output = QPlainTextEdit(SAMPLE_TEXT)
        vboxg0 = QVBoxLayout(self.group0)
        for each_widget in (self.source, self.infile, self.open, self.inurl,
            self.output, ):
            vboxg0.addWidget(each_widget)
        [a.hide() for a in iter((self.infile, self.open, self.inurl))]
        # "CSS3" group: one checkbox per minification pass
        self.group1 = QGroupBox()
        self.group1.setTitle(' CSS3 ')
        self.group1.setCheckable(True)
        self.group1.setGraphicsEffect(QGraphicsBlurEffect(self))
        self.group1.graphicsEffect().setEnabled(False)
        self.group1.toggled.connect(self.toggle_css_group)
        self.ckcss1 = QCheckBox('Remove unnecessary Comments')
        self.ckcss2 = QCheckBox('Remove unnecessary Whitespace characters')
        self.ckcss3 = QCheckBox('Remove unnecessary Semicolons')
        self.ckcss4 = QCheckBox('Remove unnecessary Empty rules')
        self.ckcss5 = QCheckBox('Condense and Convert Colors from RGB to HEX')
        self.ckcss6 = QCheckBox('Condense all Zero units')
        self.ckcss7 = QCheckBox('Condense Multidimensional Zero units')
        self.ckcss8 = QCheckBox('Condense Floating point numbers')
        self.ckcss9 = QCheckBox('Condense HEX Colors')
        self.ckcss10 = QCheckBox('Condense multiple adjacent Whitespace chars')
        self.ckcss11 = QCheckBox('Condense multiple adjacent semicolon chars')
        self.ckcss12 = QCheckBox('Wrap the lines of the to 80 character length')
        self.ckcss13 = QCheckBox('Condense Font Weight values')
        self.ckcss14 = QCheckBox('Condense the 17 Standard Named Colors values')
        self.ckcss15 = QCheckBox('Condense the 124 Extra Named Colors values')
        self.ckcss16 = QCheckBox('Condense all Percentages values when posible')
        self.ckcss17 = QCheckBox('Condense all Pixels values when posible')
        self.ckcss18 = QCheckBox('Remove unnecessary quotes from url()')
        self.ckcss19 = QCheckBox('Add standard Encoding Declaration if missing')
        vboxg1 = QVBoxLayout(self.group1)
        for each_widget in (self.ckcss1, self.ckcss2, self.ckcss3, self.ckcss4,
            self.ckcss5, self.ckcss6, self.ckcss7, self.ckcss8, self.ckcss9,
            self.ckcss10, self.ckcss11, self.ckcss12, self.ckcss13,
            self.ckcss14, self.ckcss15, self.ckcss16, self.ckcss17,
            self.ckcss18, self.ckcss19):
            vboxg1.addWidget(each_widget)
            each_widget.setToolTip(each_widget.text())
        # "HTML5" group
        self.group2 = QGroupBox()
        self.group2.setTitle(' HTML5 ')
        self.group2.setCheckable(True)
        self.group2.setGraphicsEffect(QGraphicsBlurEffect(self))
        self.group2.graphicsEffect().setEnabled(False)
        self.group2.toggled.connect(self.toggle_html_group)
        self.ckhtml0 = QCheckBox('Condense Style and Script HTML Tags')
        self.ckhtml1 = QCheckBox('Condense DOCTYPE to new HTML5 Tags')
        self.ckhtml2 = QCheckBox('Condense Href and Src to protocol agnostic')
        self.ckhtml4 = QCheckBox('Remove unnecessary Tags but keep HTML valid')
        self.help1 = QLabel('''<a href=
        "https://developers.google.com/speed/articles/optimizing-html">
        <small><center>Help about Unneeded Unnecessary HTML tags ?</a>''')
        self.help1.setTextInteractionFlags(Qt.LinksAccessibleByMouse)
        self.help1.setOpenExternalLinks(True)
        vboxg2 = QVBoxLayout(self.group2)
        for each_widget in (self.ckhtml0, self.ckhtml1, self.ckhtml2,
            self.ckhtml4, self.help1, ):
            vboxg2.addWidget(each_widget)
            each_widget.setToolTip(each_widget.text())
        # "Javascript" group
        self.group3 = QGroupBox()
        self.group3.setTitle(' Javascript ')
        self.ckjs0 = QCheckBox('Condense and Compress Javascript')
        self.ckjs1 = QCheckBox('Condense $(document).ready(function(){ });')
        vboxg2 = QVBoxLayout(self.group3)
        for each_widget in (self.ckjs0, self.ckjs1):
            vboxg2.addWidget(each_widget)
            each_widget.setToolTip(each_widget.text())
        # "General" group plus before/after character-count meters
        self.group4 = QGroupBox()
        self.group4.setTitle(' General ')
        self.chckbx1 = QCheckBox('Lower case ALL the text')
        self.chckbx2 = QCheckBox('Remove Spaces, Tabs, New Lines, Empty Lines')
        self.befor, self.after = QProgressBar(), QProgressBar()
        self.befor.setFormat("%v Chars")
        self.after.setFormat("%v Chars")
        vboxg4 = QVBoxLayout(self.group4)
        for each_widget in (self.chckbx1, self.chckbx2,
            QLabel('<b>Before:'), self.befor, QLabel('<b>After:'), self.after):
            vboxg4.addWidget(each_widget)
            each_widget.setToolTip(each_widget.text())
        # everything defaults to checked except ckjs0 (JS compression)
        [a.setChecked(True) for a in iter((self.ckcss1, self.ckcss2,
            self.ckcss3, self.ckcss4, self.ckcss5, self.ckcss6, self.ckcss7,
            self.ckcss8, self.ckcss9, self.ckcss10, self.ckcss11, self.ckcss12,
            self.ckcss13, self.ckcss14, self.ckcss15, self.ckcss16,
            self.ckcss17, self.ckcss18, self.ckcss19, self.ckjs1, self.ckhtml0,
            self.ckhtml1, self.ckhtml2, self.ckhtml4, self.chckbx1,
            self.chckbx2))]
        self.button = QPushButton(QIcon.fromTheme("face-cool"), 'Process Text')
        self.button.setCursor(QCursor(Qt.PointingHandCursor))
        self.button.setMinimumSize(100, 50)
        self.button.clicked.connect(self.run)

        def must_glow(widget_list):
            ' apply an glow effect to the widget '
            for glow, each_widget in enumerate(widget_list):
                try:
                    if each_widget.graphicsEffect() is None:
                        glow = QGraphicsDropShadowEffect(self)
                        glow.setOffset(0)
                        glow.setBlurRadius(99)
                        glow.setColor(QColor(99, 255, 255))
                        each_widget.setGraphicsEffect(glow)
                        glow.setEnabled(True)
                except:
                    pass
        must_glow((self.button, ))

        class TransientWidget(QWidget):
            ' persistant widget thingy '
            def __init__(self, widget_list):
                ' init sub class '
                super(TransientWidget, self).__init__()
                vbox = QVBoxLayout(self)
                for each_widget in widget_list:
                    vbox.addWidget(each_widget)

        tw = TransientWidget((QLabel('<b>HTML5/CSS3/JS Optimizer Compressor'),
            self.group0, self.group1, self.group2, self.group3, self.group4,
            self.button, ))
        self.scrollable = QScrollArea()
        self.scrollable.setWidgetResizable(True)
        self.scrollable.setWidget(tw)
        self.dock = QDockWidget()
        self.dock.setWindowTitle(__doc__)
        self.dock.setStyleSheet('QDockWidget::title{text-align: center;}')
        self.dock.setMinimumWidth(350)
        self.dock.setWidget(self.scrollable)
        ec.addTab(self.dock, "Web")
        QPushButton(QIcon.fromTheme("help-about"), 'About', self.dock
            ).clicked.connect(lambda: QMessageBox.information(self.dock, __doc__,
            HELPMSG))

    def run(self):
        """Read text from the chosen source, apply the enabled passes, show result."""
        if self.source.currentText() == 'Local File':
            with open(path.abspath(str(self.infile.text()).strip()), 'r') as f:
                txt = f.read()
        elif self.source.currentText() == 'Remote URL':
            txt = urlopen(str(self.inurl.text()).strip()).read()
        elif self.source.currentText() == 'Clipboard':
            # FIX: was "is not ''" (identity comparison); use != for equality.
            txt = str(self.output.toPlainText()) if str(self.output.toPlainText()) != '' else str(QApplication.clipboard().text())
        else:
            txt = self.editor_s.get_text()
        self.output.clear()
        self.befor.setMaximum(len(txt) + 10)
        self.after.setMaximum(len(txt) + 10)
        self.befor.setValue(len(txt))
        txt = txt.lower() if self.chckbx1.isChecked() is True else txt
        txt = condense_style(txt) if self.ckhtml0.isChecked() is True else txt
        txt = condense_script(txt) if self.ckhtml0.isChecked() is True else txt
        txt = condense_doctype(txt) if self.ckhtml1.isChecked() is True else txt
        # FIX: was "if self.ckhtml2 is True" -- a QCheckBox object is never
        # the singleton True, so this pass could never run.
        txt = condense_href_src(txt) if self.ckhtml2.isChecked() is True else txt
        txt = clean_unneeded_tags(txt) if self.ckhtml4.isChecked() is True else txt
        txt = condense_doc_ready(txt) if self.ckjs1.isChecked() is True else txt
        txt = jsmin(txt) if self.ckjs0.isChecked() is True else txt
        txt = remove_comments(txt) if self.ckcss1.isChecked() is True else txt
        txt = condense_whitespace(txt) if self.ckcss10.isChecked() is True else txt
        txt = remove_empty_rules(txt) if self.ckcss4.isChecked() is True else txt
        txt = remove_unnecessary_whitespace(txt) if self.ckcss2.isChecked() is True else txt
        txt = remove_unnecessary_semicolons(txt) if self.ckcss3.isChecked() is True else txt
        txt = condense_zero_units(txt) if self.ckcss6.isChecked() is True else txt
        txt = condense_multidimensional_zeros(txt) if self.ckcss7.isChecked() is True else txt
        txt = condense_floating_points(txt) if self.ckcss8.isChecked() is True else txt
        txt = normalize_rgb_colors_to_hex(txt) if self.ckcss5.isChecked() is True else txt
        txt = condense_hex_colors(txt) if self.ckcss9.isChecked() is True else txt
        txt = wrap_css_lines(txt, 80) if self.ckcss12.isChecked() is True else txt
        txt = condense_semicolons(txt) if self.ckcss11.isChecked() is True else txt
        txt = condense_font_weight(txt) if self.ckcss13.isChecked() is True else txt
        txt = condense_std_named_colors(txt) if self.ckcss14.isChecked() is True else txt
        # txt = condense_xtra_named_colors(txt) if self.ckcss14.isChecked() is True else txt # FIXME: should test ckcss15
        txt = condense_percentage_values(txt) if self.ckcss16.isChecked() is True else txt
        txt = condense_pixel_values(txt) if self.ckcss17.isChecked() is True else txt
        txt = remove_url_quotes(txt) if self.ckcss18.isChecked() is True else txt
        txt = add_encoding(txt) if self.ckcss19.isChecked() is True else txt
        txt = " ".join(txt.strip().split()) if self.chckbx2.isChecked() is True else txt
        self.after.setValue(len(txt))
        self.output.setPlainText(txt)
        self.output.show()
        self.output.setFocus()
        self.output.selectAll()

    def on_source_changed(self):
        """Show/hide the input widgets that match the selected source."""
        if self.source.currentText() == 'Local File':
            self.open.show()
            self.infile.show()
            self.inurl.hide()
            self.output.hide()
        elif self.source.currentText() == 'Remote URL':
            self.inurl.show()
            self.open.hide()
            self.infile.hide()
            self.output.hide()
        elif self.source.currentText() == 'Clipboard':
            self.output.show()
            self.open.hide()
            self.infile.hide()
            self.inurl.hide()
            # FIX: QPlainTextEdit has no setText(); setPlainText() exists on
            # both QPlainTextEdit and the optional KTextEdit replacement.
            self.output.setPlainText(QApplication.clipboard().text())
        else:
            self.output.show()
            self.open.hide()
            self.infile.hide()
            self.inurl.hide()
            self.output.setPlainText(self.editor_s.get_text())

    def toggle_css_group(self):
        """Toggle all CSS checkboxes; blur the group when it is disabled."""
        if self.group1.isChecked() is True:
            [a.setChecked(True) for a in iter((self.ckcss1, self.ckcss2,
                self.ckcss3, self.ckcss4, self.ckcss5, self.ckcss6, self.ckcss7,
                self.ckcss8, self.ckcss9, self.ckcss10, self.ckcss11,
                self.ckcss12, self.ckcss13, self.ckcss14, self.ckcss15,
                self.ckcss16, self.ckcss17, self.ckcss18, self.ckcss19))]
            self.group1.graphicsEffect().setEnabled(False)
        else:
            [a.setChecked(False) for a in iter((self.ckcss1, self.ckcss2,
                self.ckcss3, self.ckcss4, self.ckcss5, self.ckcss6, self.ckcss7,
                self.ckcss8, self.ckcss9, self.ckcss10, self.ckcss11,
                self.ckcss12, self.ckcss13, self.ckcss14, self.ckcss15,
                self.ckcss16, self.ckcss17, self.ckcss18, self.ckcss19))]
            self.group1.graphicsEffect().setEnabled(True)

    def toggle_html_group(self):
        """Toggle all HTML checkboxes; blur the group when it is disabled."""
        if self.group2.isChecked() is True:
            [a.setChecked(True) for a in iter((self.ckhtml0, self.ckhtml1,
                self.ckhtml2, self.ckhtml4))]
            self.group2.graphicsEffect().setEnabled(False)
        else:
            [a.setChecked(False) for a in iter((self.ckhtml0, self.ckhtml1,
                self.ckhtml2, self.ckhtml4))]
            self.group2.graphicsEffect().setEnabled(True)
# Running the module directly just prints its one-line description.
if __name__ == "__main__":
    print(__doc__)
|
import unittest
"""883. Projection Area of 3D Shapes
https://leetcode.com/problems/projection-area-of-3d-shapes/description/
On a `N * N` grid, we place some `1 * 1 * 1 `cubes that are axis-aligned with
the x, y, and z axes.
Each value `v = grid[i][j]` represents a tower of `v` cubes placed on top of
grid cell `(i, j)`.
Now we view the _projection_ of these cubes onto the xy, yz, and zx planes.
A projection is like a shadow, that maps our 3 dimensional figure to a 2
dimensional plane.
Here, we are viewing the "shadow" when looking at the cubes from the top, the
front, and the side.
Return the total area of all three projections.
**Example 1:**
**Input:** [[2]]
**Output:** 5
**Example 2:**
**Input:** [[1,2],[3,4]]
**Output:** 17
**Explanation:**
Here are the three projections ( "shadows") of the shape made with each axis-aligned plane.

**Example 3:**
**Input:** [[1,0],[0,2]]
**Output:** 8
**Example 4:**
**Input:** [[1,1,1],[1,0,1],[1,1,1]]
**Output:** 14
**Example 5:**
**Input:** [[2,2,2],[2,1,2],[2,2,2]]
**Output:** 21
**Note:**
* `1 <= grid.length = grid[0].length <= 50`
* `0 <= grid[i][j] <= 50`
Similar Questions:
"""
class Solution(object):
    """Total area of the top, front and side projections of a cube grid."""

    def projectionArea(self, grid):
        """
        :type grid: List[List[int]]
        :rtype: int
        """
        # Top view: one unit square per non-empty cell.
        top = sum(1 for row in grid for height in row if height > 0)
        # Front view (xz): the tallest tower in each row casts the shadow.
        front = sum(max(row) for row in grid)
        # Side view (yz): the tallest tower in each column.
        side = sum(max(col) for col in zip(*grid))
        return top + front + side
class T(unittest.TestCase):
    """Checks Solution.projectionArea against the five LeetCode examples."""

    def test(self):
        cases = (
            ([[2]], 5),
            ([[1, 2], [3, 4]], 17),
            ([[1, 0], [0, 2]], 8),
            ([[1, 1, 1], [1, 0, 1], [1, 1, 1]], 14),
            ([[2, 2, 2], [2, 1, 2], [2, 2, 2]], 21),
        )
        solver = Solution()
        for grid, expected in cases:
            self.assertEqual(solver.projectionArea(grid), expected)
# Run the unit tests when executed as a script.
if __name__ == "__main__":
    unittest.main()
|
# Module-level state: ``happy`` is toggled by smile(); ``food`` is the
# default argument fed to smile() via hap() (it matches no branch, so the
# call is effectively a no-op that returns 0).
happy=False
food='YUMMY'
def smile(kind):  # takes in a string
    """Draw an ASCII face for *kind* and rate the user's happiness.

    Returns an integer happiness factor (0-10).  Side effect: for the
    recognised kinds it also updates the module-level ``happy`` flag;
    unrecognised kinds draw nothing, leave ``happy`` alone, and score 0.
    """
    global happy
    # face drawn, factor returned, value stored into the global flag
    moods = {
        'happy': (':)', 10, True),
        'sad': (':(', 3, False),
        'crying': (': (', 0, False),
    }
    if kind in moods:
        face, factor, now_happy = moods[kind]
        print(face)
        happy = now_happy
        return factor
    return 0
def hap():
    """Convenience wrapper that calls smile() with the module-level food.

    With the default food value ('YUMMY') no branch in smile() matches,
    so this neither prints nor changes the global ``happy`` flag.  The
    return value is intentionally discarded.
    """
    smile(food)
# NOTE: this module is Python 2 -- it uses raw_input() and the print
# *statement* on the line below (print (str) % value parses as
# "print (str % value)" only under Python 2).
hap()
usr=raw_input('are you happy,sad or crying? ')
print ('Your happyness factor is %i') % smile(usr) #function call can be substituted by its returned integer
hap() #wont alter our answer to usr
j=smile('food') # you can also pass any string literal; 'food' matches no branch
|
from .colorentry import ColorPropertyEditor
from .tkvarentry import TkVarPropertyEditor
from .imageentry import ImagePropertyEditor
from .propertyeditor import EntryPropertyEditor, SpinboxPropertyEditor
from .propertyeditor import TextPropertyEditor, ChoicePropertyEditor
from .propertyeditor import CheckbuttonPropertyEditor
from .whentry import WHPropertyEditor
from .dynamicpropeditor import DynamicPropertyEditor
from .fontentry import FontPropertyEditor
from .stickyentry import StickyPropertyEditor
|
def main():
    """Program entry point.

    The original snippet was not valid Python ("main():" without *def*,
    a bare "none" name, and an unindented call); this is the minimal
    valid equivalent.  Currently a no-op placeholder returning None.
    """
    return None


if __name__ == "__main__":
    main()
|
import its.device
import its.caps
import its.objects
import its.image
import os.path
import pylab
import matplotlib
import matplotlib.pyplot
def main():
    """Verify that the DNG raw model parameters are correct.

    Captures raw shots across the sensor's sensitivity range (holding the
    sensitivity*exposure product constant) and checks that the per-channel
    noise model (android.sensor.noiseProfile) predicts the measured
    variance of a small center patch of each raw channel.
    (Python 2 module: uses the print statement in the pass/fail check.)
    """
    NAME = os.path.basename(__file__).split(".")[0]
    NUM_STEPS = 4
    # Pass if the difference between expected and computed variances is small,
    # defined as being within an absolute variance delta of 0.0005, or within
    # 20% of the expected variance, whichever is larger; this is to allow the
    # test to pass in the presence of some randomness (since this test is
    # measuring noise of a small patch) and some imperfect scene conditions
    # (since ITS doesn't require a perfectly uniformly lit scene).
    DIFF_THRESH = 0.0005
    FRAC_THRESH = 0.2
    with its.device.ItsSession() as cam:
        props = cam.get_camera_properties()
        its.caps.skip_unless(its.caps.raw(props) and
                             its.caps.raw16(props) and
                             its.caps.manual_sensor(props) and
                             its.caps.read_3a(props) and
                             its.caps.per_frame_control(props))
        white_level = float(props['android.sensor.info.whiteLevel'])
        cfa_idxs = its.image.get_canonical_cfa_order(props)
        # Expose for the scene with min sensitivity
        sens_min, sens_max = props['android.sensor.info.sensitivityRange']
        # NOTE: integer division -- sens_step must stay an int for range().
        sens_step = (sens_max - sens_min) / NUM_STEPS
        s_ae,e_ae,_,_,f_dist = cam.do_3a(get_results=True)
        # Keep sensitivity * exposure constant so scene brightness is fixed.
        s_e_prod = s_ae * e_ae
        sensitivities = range(sens_min, sens_max, sens_step)
        var_expected = [[],[],[],[]]
        var_measured = [[],[],[],[]]
        for sens in sensitivities:
            # Capture a raw frame with the desired sensitivity.
            exp = int(s_e_prod / float(sens))
            req = its.objects.manual_capture_request(sens, exp, f_dist)
            cap = cam.do_capture(req, cam.CAP_RAW)
            # Test each raw color channel (R, GR, GB, B):
            noise_profile = cap["metadata"]["android.sensor.noiseProfile"]
            assert((len(noise_profile)) == 4)
            for ch in range(4):
                # Get the noise model parameters for this channel of this shot.
                s,o = noise_profile[cfa_idxs[ch]]
                # Get a center tile of the raw channel, and compute the mean.
                # Use a very small patch to ensure gross uniformity (i.e. so
                # non-uniform lighting or vignetting doesn't affect the variance
                # calculation).
                plane = its.image.convert_capture_to_planes(cap, props)[ch]
                black_level = its.image.get_black_level(
                    ch, props, cap["metadata"])
                # Normalize to [0,1] relative to the black/white levels.
                plane = (plane * white_level - black_level) / (
                    white_level - black_level)
                tile = its.image.get_image_patch(plane, 0.49,0.49,0.02,0.02)
                mean = tile.mean()
                # Calculate the expected variance based on the model, and the
                # measured variance from the tile.
                var_measured[ch].append(
                    its.image.compute_image_variances(tile)[0])
                # Noise model: variance is linear in the mean (slope s, offset o).
                var_expected[ch].append(s * mean + o)
        for ch in range(4):
            pylab.plot(sensitivities, var_expected[ch], "rgkb"[ch],
                       label=["R","GR","GB","B"][ch]+" expected")
            pylab.plot(sensitivities, var_measured[ch], "rgkb"[ch]+"--",
                       label=["R", "GR", "GB", "B"][ch]+" measured")
        pylab.xlabel("Sensitivity")
        pylab.ylabel("Center patch variance")
        pylab.legend(loc=2)
        matplotlib.pyplot.savefig("%s_plot.png" % (NAME))
        # Pass/fail check.
        for ch in range(4):
            diffs = [var_measured[ch][i] - var_expected[ch][i]
                     for i in range(NUM_STEPS)]
            print "Diffs (%s):"%(["R","GR","GB","B"][ch]), diffs
            for i,diff in enumerate(diffs):
                thresh = max(DIFF_THRESH, FRAC_THRESH * var_expected[ch][i])
                assert(diff <= thresh)
if __name__ == '__main__':
    main()
|
import os
import stat
import ent
import grp
import pwd
import config
import signal
import subprocess
import time
import ldap
import pytest
import ds_openldap
import ldap_ent
import sssd_ldb
import sssd_id
from util import unindent
# Test LDAP suffix and the sssd domain name used throughout this module.
LDAP_BASE_DN = "dc=example,dc=com"
SSSD_DOMAIN = "LDAP"
# Supported LDAP schema flavors for load_data_to_ldap().
SCHEMA_RFC2307 = "rfc2307"
SCHEMA_RFC2307_BIS = "rfc2307bis"
# Timestamp-cache attributes compared between the sysdb and ts caches.
TS_ATTRLIST = ("dataExpireTimestamp", "originalModifyTimestamp")
@pytest.fixture(scope="module")
def ds_inst(request):
    """LDAP server instance fixture"""
    server = ds_openldap.DSOpenLDAP(
        config.PREFIX, 10389, LDAP_BASE_DN,
        "cn=admin", "Secret123")
    try:
        server.setup()
    except:  # noqa: E722 -- tear down on *any* setup failure, then re-raise
        server.teardown()
        raise
    # Only register the finalizer once setup has succeeded.
    request.addfinalizer(server.teardown)
    return server
@pytest.fixture(scope="module")
def ldap_conn(request, ds_inst):
    """LDAP server connection fixture"""
    conn = ds_inst.bind()
    # Stash the server instance so tests can reach its URL / base DN.
    conn.ds_inst = ds_inst
    request.addfinalizer(conn.unbind_s)
    return conn
def create_ldap_fixture(request, ldap_conn, ent_list):
    """Add LDAP entries and add teardown for removing them.

    Each *ent_list* element is indexed as (entry[0]=dn, entry[1]=attrs),
    matching the ldap_conn.add_s() signature.
    """
    for entry in ent_list:
        ldap_conn.add_s(entry[0], entry[1])
    def teardown():
        # Delete in the same order the entries were added.
        for entry in ent_list:
            try:
                ldap_conn.delete_s(entry[0])
            except ldap.NO_SUCH_OBJECT:
                # if the test already removed an object, it's fine
                # to not care in the teardown
                pass
    request.addfinalizer(teardown)
def create_conf_fixture(request, contents):
    """Generate sssd.conf and add teardown for removing it.

    Writes *contents* to config.CONF_PATH with a context manager so the
    handle is closed (and the data flushed) even if the write fails; the
    original open/close pair leaked the handle on error.  The file is
    restricted to owner read/write, as sssd requires.
    """
    with open(config.CONF_PATH, "w") as conf:
        conf.write(contents)
    os.chmod(config.CONF_PATH, stat.S_IRUSR | stat.S_IWUSR)
    request.addfinalizer(lambda: os.unlink(config.CONF_PATH))
def stop_sssd():
    """Stop the running sssd daemon and block until it has exited.

    Reads the pid file (now via a context manager so it is always
    closed), sends SIGTERM, then polls with SIGCONT once a second until
    os.kill() fails -- i.e. the process is gone.  The bare except was
    narrowed to OSError, which is what os.kill raises for a dead pid.
    """
    with open(config.PIDFILE_PATH, "r") as pid_file:
        pid = int(pid_file.read())
    os.kill(pid, signal.SIGTERM)
    while True:
        try:
            os.kill(pid, signal.SIGCONT)
        except OSError:
            break
        time.sleep(1)
def create_sssd_fixture(request):
    """Start sssd and add teardown for stopping it and removing state"""
    if subprocess.call(["sssd", "-D", "-f"]) != 0:
        raise Exception("sssd start failed")

    def teardown():
        try:
            stop_sssd()
        except:  # noqa: E722 -- best effort; still clean the caches below
            pass
        # Wipe both on-disk caches, DB first, then the fast memory cache.
        for cache_dir in (config.DB_PATH, config.MCACHE_PATH):
            for fname in os.listdir(cache_dir):
                os.unlink(cache_dir + "/" + fname)
    request.addfinalizer(teardown)
def load_data_to_ldap(request, ldap_conn, schema):
    """Populate the server with three users in one group, per *schema*."""
    ent_list = ldap_ent.List(ldap_conn.ds_inst.base_dn)
    members = ("user1", "user11", "user21")
    for name, uid in zip(members, (1001, 1011, 1021)):
        ent_list.add_user(name, uid, 2001)
    if schema == SCHEMA_RFC2307_BIS:
        ent_list.add_group_bis("group1", 2001, members)
    elif schema == SCHEMA_RFC2307:
        ent_list.add_group("group1", 2001, members)
    create_ldap_fixture(request, ldap_conn, ent_list)
def load_2307bis_data_to_ldap(request, ldap_conn):
    """Convenience wrapper: populate LDAP using the rfc2307bis schema."""
    return load_data_to_ldap(request, ldap_conn, SCHEMA_RFC2307_BIS)
def load_2307_data_to_ldap(request, ldap_conn):
    """Convenience wrapper: populate LDAP using the rfc2307 schema."""
    return load_data_to_ldap(request, ldap_conn, SCHEMA_RFC2307)
@pytest.fixture
def setup_rfc2307bis(request, ldap_conn):
    """Configure and start sssd against an rfc2307bis LDAP tree."""
    load_2307bis_data_to_ldap(request, ldap_conn)
    conf = unindent("""\
        [sssd]
        domains = LDAP
        services = nss
        [nss]
        memcache_timeout = 1
        [domain/LDAP]
        ldap_schema = rfc2307bis
        id_provider = ldap
        auth_provider = ldap
        sudo_provider = ldap
        ldap_group_object_class = groupOfNames
        ldap_uri = {ldap_conn.ds_inst.ldap_url}
        ldap_search_base = {ldap_conn.ds_inst.base_dn}
    """).format(ldap_conn=ldap_conn)
    create_conf_fixture(request, conf)
    create_sssd_fixture(request)
    return None
@pytest.fixture
def setup_rfc2307(request, ldap_conn):
    """Configure and start sssd against an rfc2307 LDAP tree."""
    load_2307_data_to_ldap(request, ldap_conn)
    conf = unindent("""\
        [sssd]
        domains = LDAP
        services = nss
        [nss]
        memcache_timeout = 1
        [domain/LDAP]
        ldap_schema = rfc2307
        id_provider = ldap
        auth_provider = ldap
        sudo_provider = ldap
        ldap_uri = {ldap_conn.ds_inst.ldap_url}
        ldap_search_base = {ldap_conn.ds_inst.base_dn}
    """).format(ldap_conn=ldap_conn)
    create_conf_fixture(request, conf)
    create_sssd_fixture(request)
    return None
@pytest.fixture
def ldb_examine(request):
    """Open an ldb connection for directly examining sssd's caches."""
    return sssd_ldb.SssdLdb('LDAP')
def invalidate_group(ldb_conn, name):
    """Invalidate the cached entry for the given group."""
    entry_type = sssd_ldb.TsCacheEntry.group
    ldb_conn.invalidate_entry(name, entry_type, SSSD_DOMAIN)
def invalidate_user(ldb_conn, name):
    """Invalidate the cached entry for the given user."""
    entry_type = sssd_ldb.TsCacheEntry.user
    ldb_conn.invalidate_entry(name, entry_type, SSSD_DOMAIN)
def get_attrs(ldb_conn, type, name, domain, attr_list):
    """Read attr_list for one entry from both of sssd's caches.

    Returns a (sysdb_attrs, ts_attrs) pair of attribute-name -> value
    dicts, one taken from the sysdb cache and one from the timestamp
    cache.
    """
    sysdb_attrs = {attr: ldb_conn.get_entry_attr(sssd_ldb.CacheType.sysdb,
                                                 type, name, domain, attr)
                   for attr in attr_list}
    ts_attrs = {attr: ldb_conn.get_entry_attr(sssd_ldb.CacheType.timestamps,
                                              type, name, domain, attr)
                for attr in attr_list}
    return (sysdb_attrs, ts_attrs)
def get_group_attrs(ldb_conn, name, domain, attr_list):
    """Fetch a group's attributes from both caches; see get_attrs()."""
    group_type = sssd_ldb.TsCacheEntry.group
    return get_attrs(ldb_conn, group_type, name, domain, attr_list)
def get_user_attrs(ldb_conn, name, domain, attr_list):
    """Fetch a user's attributes from both caches; see get_attrs()."""
    user_type = sssd_ldb.TsCacheEntry.user
    return get_attrs(ldb_conn, user_type, name, domain, attr_list)
def assert_same_attrval(adict1, adict2, attr_name):
    """Assert attr_name is present in adict1 and equal in both dicts."""
    value = adict1.get(attr_name)
    assert value is not None
    assert value == adict2.get(attr_name)
def assert_diff_attrval(adict1, adict2, attr_name):
    """Assert attr_name is present in adict1 and differs between the dicts."""
    value = adict1.get(attr_name)
    assert value is not None
    assert value != adict2.get(attr_name)
def prime_cache_group(ldb_conn, name, members):
    """Resolve a group through NSS to warm both caches, then invalidate it.

    Returns the (sysdb_attrs, ts_attrs) snapshot taken right after the
    lookup so the tests can compare later snapshots against it.
    """
    ent.assert_group_by_name(name, dict(mem=ent.contains_only(*members)))
    sysdb_attrs, ts_attrs = get_group_attrs(ldb_conn, name,
                                            SSSD_DOMAIN, TS_ATTRLIST)
    assert_same_attrval(sysdb_attrs, ts_attrs, "dataExpireTimestamp")
    assert_same_attrval(sysdb_attrs, ts_attrs, "originalModifyTimestamp")

    # just to force different stamps and make sure memcache is gone
    time.sleep(1)
    invalidate_group(ldb_conn, name)
    return sysdb_attrs, ts_attrs
def prime_cache_user(ldb_conn, name, primary_gid):
    """Resolve a user's initgroups to warm both caches, then invalidate it.

    Calling initgroups also stores the initgExpire timestamp attribute,
    which makes sure that sss_cache doesn't add it with a value of 1 and
    thereby trigger a sysdb update.
    Returns the (sysdb_attrs, ts_attrs) snapshot taken after the lookup.
    """
    res, _errno, _gids = sssd_id.call_sssd_initgroups(name, primary_gid)
    assert res == sssd_id.NssReturnCode.SUCCESS
    sysdb_attrs, ts_attrs = get_user_attrs(ldb_conn, name,
                                           SSSD_DOMAIN, TS_ATTRLIST)
    assert_same_attrval(sysdb_attrs, ts_attrs, "dataExpireTimestamp")
    assert_same_attrval(sysdb_attrs, ts_attrs, "originalModifyTimestamp")

    # just to force different stamps and make sure memcache is gone
    time.sleep(1)
    invalidate_user(ldb_conn, name)
    return sysdb_attrs, ts_attrs
def test_group_2307bis_update_same_modstamp(ldap_conn,
                                            ldb_examine,
                                            setup_rfc2307bis):
    """
    Test that a group update with the same modifyTimestamp does not trigger
    sysdb cache update
    """
    ldb_conn = ldb_examine
    old_sysdb_attrs, old_ts_attrs = prime_cache_group(
        ldb_conn, "group1",
        ("user1", "user11", "user21"))
    # Look the group up again; nothing changed on the LDAP side, so the
    # entry comes back with the same modifyTimestamp
    ent.assert_group_by_name(
        "group1",
        dict(mem=ent.contains_only("user1", "user11", "user21")))
    sysdb_attrs, ts_attrs = get_group_attrs(ldb_conn, "group1",
                                            SSSD_DOMAIN, TS_ATTRLIST)
    # The sysdb copy must not have been rewritten ...
    assert_same_attrval(sysdb_attrs, old_sysdb_attrs, "dataExpireTimestamp")
    assert_same_attrval(sysdb_attrs, old_sysdb_attrs,
                        "originalModifyTimestamp")
    # ... while the timestamp cache refreshed only the expiration stamp
    assert_diff_attrval(ts_attrs, old_ts_attrs, "dataExpireTimestamp")
    assert_same_attrval(ts_attrs, old_ts_attrs, "originalModifyTimestamp")
def test_group_2307bis_update_same_attrs(ldap_conn,
                                         ldb_examine,
                                         setup_rfc2307bis):
    """
    Test that a group update with a different modifyTimestamp but the same
    attrs does not trigger sysdb cache update
    """
    ldb_conn = ldb_examine
    old_sysdb_attrs, old_ts_attrs = prime_cache_group(
        ldb_conn, "group1",
        ("user1", "user11", "user21"))
    # modify an argument we don't save to the cache. This will bump the
    # modifyTimestamp attribute, but the attributes themselves will be the same
    # from sssd's point of view
    ldap_conn.modify_s("cn=group1,ou=Groups," + ldap_conn.ds_inst.base_dn,
                       [(ldap.MOD_ADD, "description", b"group one")])
    # wait for slapd to change its database
    time.sleep(1)
    ent.assert_group_by_name(
        "group1",
        dict(mem=ent.contains_only("user1", "user11", "user21")))
    sysdb_attrs, ts_attrs = get_group_attrs(ldb_conn, "group1",
                                            SSSD_DOMAIN, TS_ATTRLIST)
    # the sysdb copy must not have been rewritten ...
    assert_same_attrval(sysdb_attrs, old_sysdb_attrs, "dataExpireTimestamp")
    assert_same_attrval(sysdb_attrs, old_sysdb_attrs,
                        "originalModifyTimestamp")
    # ... but the timestamp cache picked up both the new expiration and
    # the new modifyTimestamp
    assert_diff_attrval(ts_attrs, old_ts_attrs, "dataExpireTimestamp")
    assert_diff_attrval(ts_attrs, old_ts_attrs, "originalModifyTimestamp")
def test_group_2307bis_update_diff_attrs(ldap_conn,
                                         ldb_examine,
                                         setup_rfc2307bis):
    """
    Test that a group update with different attribute triggers cache update
    """
    ldb_conn = ldb_examine
    old_sysdb_attrs, old_ts_attrs = prime_cache_group(
        ldb_conn, "group1",
        ("user1", "user11", "user21"))
    # removing a member changes the member list, an attribute sssd caches
    user_dn = "uid=user1,ou=Users," + ldap_conn.ds_inst.base_dn
    ldap_conn.modify_s("cn=group1,ou=Groups," + ldap_conn.ds_inst.base_dn,
                       [(ldap.MOD_DELETE, "member", user_dn.encode('utf-8'))])
    # wait for slapd to change its database
    time.sleep(1)
    ent.assert_group_by_name(
        "group1",
        dict(mem=ent.contains_only("user11", "user21")))
    sysdb_attrs, ts_attrs = get_group_attrs(ldb_conn, "group1",
                                            SSSD_DOMAIN, TS_ATTRLIST)
    # both caches must have been rewritten
    assert_diff_attrval(sysdb_attrs, old_sysdb_attrs, "dataExpireTimestamp")
    assert_diff_attrval(sysdb_attrs, old_sysdb_attrs,
                        "originalModifyTimestamp")
    assert_diff_attrval(ts_attrs, old_ts_attrs, "dataExpireTimestamp")
    assert_diff_attrval(ts_attrs, old_ts_attrs, "originalModifyTimestamp")
def test_group_2307bis_delete_group(ldap_conn,
                                    ldb_examine,
                                    setup_rfc2307bis):
    """
    Test that deleting a group removes it from both caches
    """
    ldb_conn = ldb_examine
    old_sysdb_attrs, old_ts_attrs = prime_cache_group(
        ldb_conn, "group1",
        ("user1", "user11", "user21"))
    # group_bis() is only used here to reconstruct the group's DN
    e = ldap_ent.group_bis(ldap_conn.ds_inst.base_dn, "group1", 2001)
    ldap_conn.delete_s(e[0])
    # wait for slapd to change its database
    time.sleep(1)
    with pytest.raises(KeyError):
        grp.getgrnam("group1")
    sysdb_attrs, ts_attrs = get_group_attrs(ldb_conn, "group1",
                                            SSSD_DOMAIN, TS_ATTRLIST)
    # the entry must be gone from the sysdb cache and the timestamp cache
    assert sysdb_attrs.get("dataExpireTimestamp") is None
    assert sysdb_attrs.get("originalModifyTimestamp") is None
    assert ts_attrs.get("dataExpireTimestamp") is None
    assert ts_attrs.get("originalModifyTimestamp") is None
def test_group_2307_update_same_modstamp(ldap_conn,
                                         ldb_examine,
                                         setup_rfc2307):
    """
    Test that a group update with the same modifyTimestamp does not trigger
    sysdb cache update
    """
    ldb_conn = ldb_examine
    old_sysdb_attrs, old_ts_attrs = prime_cache_group(
        ldb_conn, "group1",
        ("user1", "user11", "user21"))
    # Look the group up again; nothing changed on the LDAP side
    ent.assert_group_by_name(
        "group1",
        dict(mem=ent.contains_only("user1", "user11", "user21")))
    sysdb_attrs, ts_attrs = get_group_attrs(ldb_conn, "group1",
                                            SSSD_DOMAIN, TS_ATTRLIST)
    # sysdb untouched; only the timestamp cache expiration moved
    assert_same_attrval(sysdb_attrs, old_sysdb_attrs, "dataExpireTimestamp")
    assert_same_attrval(sysdb_attrs, old_sysdb_attrs,
                        "originalModifyTimestamp")
    assert_diff_attrval(ts_attrs, old_ts_attrs, "dataExpireTimestamp")
    assert_same_attrval(ts_attrs, old_ts_attrs, "originalModifyTimestamp")
def test_group_2307_update_same_attrs(ldap_conn,
                                      ldb_examine,
                                      setup_rfc2307):
    """
    Test that a group update with a different modifyTimestamp but the same
    attrs does not trigger sysdb cache update
    """
    ldb_conn = ldb_examine
    old_sysdb_attrs, old_ts_attrs = prime_cache_group(
        ldb_conn, "group1",
        ("user1", "user11", "user21"))
    # modify an argument we don't save to the cache. This will bump the
    # modifyTimestamp attribute, but the attributes themselves will be the same
    # from sssd's point of view
    ldap_conn.modify_s("cn=group1,ou=Groups," + ldap_conn.ds_inst.base_dn,
                       [(ldap.MOD_ADD, "description", b"group one")])
    # wait for slapd to change its database
    time.sleep(1)
    ent.assert_group_by_name(
        "group1",
        dict(mem=ent.contains_only("user1", "user11", "user21")))
    sysdb_attrs, ts_attrs = get_group_attrs(ldb_conn, "group1",
                                            SSSD_DOMAIN, TS_ATTRLIST)
    # the sysdb copy must not have been rewritten ...
    assert_same_attrval(sysdb_attrs, old_sysdb_attrs, "dataExpireTimestamp")
    assert_same_attrval(sysdb_attrs, old_sysdb_attrs,
                        "originalModifyTimestamp")
    # ... but the timestamp cache picked up the new stamps
    assert_diff_attrval(ts_attrs, old_ts_attrs, "dataExpireTimestamp")
    assert_diff_attrval(ts_attrs, old_ts_attrs, "originalModifyTimestamp")
def test_group_2307_update_diff_attrs(ldap_conn,
                                      ldb_examine,
                                      setup_rfc2307):
    """
    Test that a group update with different attribute triggers cache update
    """
    ldb_conn = ldb_examine
    old_sysdb_attrs, old_ts_attrs = prime_cache_group(
        ldb_conn, "group1",
        ("user1", "user11", "user21"))
    # removing a member changes the member list, an attribute sssd caches
    ldap_conn.modify_s("cn=group1,ou=Groups," + ldap_conn.ds_inst.base_dn,
                       [(ldap.MOD_DELETE, "memberUid", b"user1")])
    # wait for slapd to change its database
    time.sleep(1)
    ent.assert_group_by_name(
        "group1",
        dict(mem=ent.contains_only("user11", "user21")))
    sysdb_attrs, ts_attrs = get_group_attrs(ldb_conn, "group1",
                                            SSSD_DOMAIN, TS_ATTRLIST)
    # both caches must have been rewritten
    assert_diff_attrval(sysdb_attrs, old_sysdb_attrs, "dataExpireTimestamp")
    assert_diff_attrval(sysdb_attrs, old_sysdb_attrs,
                        "originalModifyTimestamp")
    assert_diff_attrval(ts_attrs, old_ts_attrs, "dataExpireTimestamp")
    assert_diff_attrval(ts_attrs, old_ts_attrs, "originalModifyTimestamp")
def test_group_2307_delete_group(ldap_conn,
                                 ldb_examine,
                                 setup_rfc2307):
    """
    Test that deleting a group removes it from both caches
    """
    ldb_conn = ldb_examine
    old_sysdb_attrs, old_ts_attrs = prime_cache_group(
        ldb_conn, "group1",
        ("user1", "user11", "user21"))
    # group_bis() is only used here to reconstruct the group's DN
    e = ldap_ent.group_bis(ldap_conn.ds_inst.base_dn, "group1", 2001)
    ldap_conn.delete_s(e[0])
    # wait for slapd to change its database
    time.sleep(1)
    with pytest.raises(KeyError):
        grp.getgrnam("group1")
    sysdb_attrs, ts_attrs = get_group_attrs(ldb_conn, "group1",
                                            SSSD_DOMAIN, TS_ATTRLIST)
    # the entry must be gone from the sysdb cache and the timestamp cache
    assert sysdb_attrs.get("dataExpireTimestamp") is None
    assert sysdb_attrs.get("originalModifyTimestamp") is None
    assert ts_attrs.get("dataExpireTimestamp") is None
    assert ts_attrs.get("originalModifyTimestamp") is None
def test_user_update_same_modstamp(ldap_conn,
                                   ldb_examine,
                                   setup_rfc2307bis):
    """
    Test that a user update with the same modifyTimestamp does not trigger
    sysdb cache update
    """
    ldb_conn = ldb_examine
    old_sysdb_attrs, old_ts_attrs = prime_cache_user(ldb_conn, "user1", 2001)
    # Look the user up again; nothing changed on the LDAP side
    ent.assert_passwd_by_name("user1", dict(name="user1"))
    sysdb_attrs, ts_attrs = get_user_attrs(ldb_conn, "user1",
                                           SSSD_DOMAIN, TS_ATTRLIST)
    # sysdb untouched; only the timestamp cache expiration moved
    assert_same_attrval(sysdb_attrs, old_sysdb_attrs, "dataExpireTimestamp")
    assert_same_attrval(sysdb_attrs, old_sysdb_attrs,
                        "originalModifyTimestamp")
    assert_diff_attrval(ts_attrs, old_ts_attrs, "dataExpireTimestamp")
    assert_same_attrval(ts_attrs, old_ts_attrs, "originalModifyTimestamp")
def test_user_update_same_attrs(ldap_conn,
                                ldb_examine,
                                setup_rfc2307bis):
    """
    Test that a user update with a different modifyTimestamp but the same
    attrs does not trigger sysdb cache update
    """
    ldb_conn = ldb_examine
    old_sysdb_attrs, old_ts_attrs = prime_cache_user(ldb_conn, "user1", 2001)
    # modify an argument we don't save to the cache. This will bump the
    # modifyTimestamp attribute, but the attributes themselves will be the same
    # from sssd's point of view
    ldap_conn.modify_s("uid=user1,ou=Users," + ldap_conn.ds_inst.base_dn,
                       [(ldap.MOD_ADD, "description", b"user one")])
    # wait for slapd to change its database
    time.sleep(1)
    ent.assert_passwd_by_name("user1", dict(name="user1"))
    sysdb_attrs, ts_attrs = get_user_attrs(ldb_conn, "user1",
                                           SSSD_DOMAIN, TS_ATTRLIST)
    # the sysdb copy must not have been rewritten ...
    assert_same_attrval(sysdb_attrs, old_sysdb_attrs, "dataExpireTimestamp")
    assert_same_attrval(sysdb_attrs, old_sysdb_attrs,
                        "originalModifyTimestamp")
    # ... but the timestamp cache picked up the new stamps
    assert_diff_attrval(ts_attrs, old_ts_attrs, "dataExpireTimestamp")
    assert_diff_attrval(ts_attrs, old_ts_attrs, "originalModifyTimestamp")
def test_user_update_diff_attrs(ldap_conn,
                                ldb_examine,
                                setup_rfc2307bis):
    """
    Test that a user update with a different attribute triggers cache update
    """
    ldb_conn = ldb_examine
    old_sysdb_attrs, old_ts_attrs = prime_cache_user(ldb_conn, "user1", 2001)
    # modify an attribute sssd does save to the cache (the login shell),
    # so both the cached attributes and the modifyTimestamp change
    ldap_conn.modify_s("uid=user1,ou=Users," + ldap_conn.ds_inst.base_dn,
                       [(ldap.MOD_REPLACE, "loginShell", b"/bin/zsh")])
    # wait for slapd to change its database
    time.sleep(1)
    ent.assert_passwd_by_name("user1", dict(name="user1"))
    sysdb_attrs, ts_attrs = get_user_attrs(ldb_conn, "user1",
                                           SSSD_DOMAIN, TS_ATTRLIST)
    # both caches must have been rewritten
    assert_diff_attrval(sysdb_attrs, old_sysdb_attrs, "dataExpireTimestamp")
    assert_diff_attrval(sysdb_attrs, old_sysdb_attrs,
                        "originalModifyTimestamp")
    assert_diff_attrval(ts_attrs, old_ts_attrs, "dataExpireTimestamp")
    assert_diff_attrval(ts_attrs, old_ts_attrs, "originalModifyTimestamp")
def test_user_2307bis_delete_user(ldap_conn,
                                  ldb_examine,
                                  setup_rfc2307bis):
    """
    Test that deleting a user removes it from both caches
    """
    ldb_conn = ldb_examine
    old_sysdb_attrs, old_ts_attrs = prime_cache_user(ldb_conn, "user1", 2001)
    # user() is only used here to reconstruct the user's DN
    e = ldap_ent.user(ldap_conn.ds_inst.base_dn, "user1", 1001, 2001)
    ldap_conn.delete_s(e[0])
    # wait for slapd to change its database
    time.sleep(1)
    with pytest.raises(KeyError):
        pwd.getpwnam("user1")
    sysdb_attrs, ts_attrs = get_user_attrs(ldb_conn, "user1",
                                           SSSD_DOMAIN, TS_ATTRLIST)
    # the entry must be gone from the sysdb cache and the timestamp cache
    assert sysdb_attrs.get("dataExpireTimestamp") is None
    assert sysdb_attrs.get("originalModifyTimestamp") is None
    assert ts_attrs.get("dataExpireTimestamp") is None
    assert ts_attrs.get("originalModifyTimestamp") is None
def test_sss_cache_invalidate_user(ldap_conn,
                                   ldb_examine,
                                   setup_rfc2307bis):
    """
    Test that sss_cache invalidates the user in both caches
    """
    ldb_conn = ldb_examine
    old_sysdb_attrs, old_ts_attrs = prime_cache_user(ldb_conn, "user1", 2001)
    subprocess.call(["sss_cache", "-u", "user1"])
    sysdb_attrs, ts_attrs = get_user_attrs(ldb_conn, "user1",
                                           SSSD_DOMAIN, TS_ATTRLIST)
    # invalidation marks the entry expired with dataExpireTimestamp == 1
    # in both caches
    assert sysdb_attrs.get("dataExpireTimestamp") == '1'
    assert ts_attrs.get("dataExpireTimestamp") == '1'
    time.sleep(1)
    # resolving the user again refreshes only the timestamp cache; the
    # sysdb copy keeps the sentinel value
    pwd.getpwnam("user1")
    sysdb_attrs, ts_attrs = get_user_attrs(ldb_conn, "user1",
                                           SSSD_DOMAIN, TS_ATTRLIST)
    assert sysdb_attrs.get("dataExpireTimestamp") == '1'
    assert_diff_attrval(ts_attrs, sysdb_attrs, "dataExpireTimestamp")
def test_sss_cache_invalidate_group(ldap_conn,
                                    ldb_examine,
                                    setup_rfc2307bis):
    """
    Test that sss_cache invalidates the group in both caches
    """
    ldb_conn = ldb_examine
    old_sysdb_attrs, old_ts_attrs = prime_cache_group(
        ldb_conn, "group1",
        ("user1", "user11", "user21"))
    subprocess.call(["sss_cache", "-g", "group1"])
    sysdb_attrs, ts_attrs = get_group_attrs(ldb_conn, "group1",
                                            SSSD_DOMAIN, TS_ATTRLIST)
    # invalidation marks the entry expired with dataExpireTimestamp == 1
    # in both caches
    assert sysdb_attrs.get("dataExpireTimestamp") == '1'
    assert ts_attrs.get("dataExpireTimestamp") == '1'
    time.sleep(1)
    # resolving the group again refreshes only the timestamp cache; the
    # sysdb copy keeps the sentinel value
    grp.getgrnam("group1")
    sysdb_attrs, ts_attrs = get_group_attrs(ldb_conn, "group1",
                                            SSSD_DOMAIN, TS_ATTRLIST)
    assert sysdb_attrs.get("dataExpireTimestamp") == '1'
    assert_diff_attrval(ts_attrs, sysdb_attrs, "dataExpireTimestamp")
|
import datetime
import hashlib
import hmac
import requests # pip install requests
import xmltodict
from bs4 import BeautifulSoup
try:
from urllib import quote, urlencode
except ImportError:
from urllib.parse import quote, urlencode
# Response groups requested from the AWIS "UrlInfo" action by default.
URLINFO_RESPONSE_GROUPS = ",".join(
    ["RelatedLinks", "Categories", "Rank", "ContactInfo", "RankByCountry",
     "UsageStats", "Speed", "Language", "OwnedDomains", "LinksInCount",
     "SiteData", "AdultContent"])
# Default response group for the "TrafficHistory" action.
TRAFFICINFO_RESPONSE_GROUPS = "History"
# Default response groups for category browsing.
CATEGORYBROWSE_RESPONSE_GROUPS = ",".join(["Categories", "RelatedCategories", "LanguageCategories", "LetterBars"])
# Default response group for the "SitesLinkingIn" action.
SITESLINKINGIN_RESPONSE_GROUP = "SitesLinkingIn"
def is_string(obj):
    """Return True when obj is a string, on both Python 2 and Python 3."""
    string_types = (str,)
    try:
        string_types = (basestring,)  # python 2
    except NameError:
        pass  # python 3: plain str already covers it
    return isinstance(obj, string_types)
class CallAwis(object):
    """Thin client for the Alexa Web Information Service (AWIS) REST API.

    Every request is signed with AWS Signature Version 4 and the XML
    response is returned as a BeautifulSoup document.
    """

    def __init__(self, access_id, secret_access_key):
        # AWS credential pair used to sign every request.
        self.access_id = access_id
        self.secret_access_key = secret_access_key

    def create_v4_signature(self, request_params):
        '''
        Create URI and signature headers based on AWS V4 signing process.
        Refer to https://docs.aws.amazon.com/AlexaWebInfoService/latest/ApiReferenceArticle.html for request params.
        :param request_params: dictionary of request parameters
        :return: URL and header to be passed to requests.get
        '''
        method = 'GET'
        service = 'awis'
        host = 'awis.us-west-1.amazonaws.com'
        region = 'us-west-1'
        # NOTE(review): the request is sent to this endpoint while the
        # signed "host" header above names awis.us-west-1.amazonaws.com;
        # confirm both hostnames reach the same service, otherwise the
        # signature will not validate.
        endpoint = 'https://awis.amazonaws.com/api'
        # V4 requires the query parameters sorted by key.
        request_parameters = urlencode([(key, request_params[key]) for key in sorted(request_params.keys())])
        # Key derivation functions. See:
        # http://docs.aws.amazon.com/general/latest/gr/signature-v4-examples.html#signature-v4-examples-python
        def sign(key, msg):
            return hmac.new(key, msg.encode('utf-8'), hashlib.sha256).digest()
        def getSignatureKey(key, dateStamp, regionName, serviceName):
            # Chain of HMACs: date -> region -> service -> "aws4_request".
            kDate = sign(('AWS4' + key).encode('utf-8'), dateStamp)
            kRegion = sign(kDate, regionName)
            kService = sign(kRegion, serviceName)
            kSigning = sign(kService, 'aws4_request')
            return kSigning
        # Create a date for headers and the credential string
        t = datetime.datetime.utcnow()
        amzdate = t.strftime('%Y%m%dT%H%M%SZ')
        datestamp = t.strftime('%Y%m%d') # Date w/o time, used in credential scope
        # Create canonical request
        canonical_uri = '/api'
        canonical_querystring = request_parameters
        canonical_headers = 'host:' + host + '\n' + 'x-amz-date:' + amzdate + '\n'
        signed_headers = 'host;x-amz-date'
        # GET has no body, so the payload hash is that of the empty string.
        payload_hash = hashlib.sha256(''.encode('utf8')).hexdigest()
        canonical_request = method + '\n' + canonical_uri + '\n' + canonical_querystring + '\n' + canonical_headers + '\n' + signed_headers + '\n' + payload_hash
        # Create string to sign
        algorithm = 'AWS4-HMAC-SHA256'
        credential_scope = datestamp + '/' + region + '/' + service + '/' + 'aws4_request'
        string_to_sign = algorithm + '\n' + amzdate + '\n' + credential_scope + '\n' + hashlib.sha256(canonical_request.encode('utf8')).hexdigest()
        # Calculate signature
        signing_key = getSignatureKey(self.secret_access_key, datestamp, region, service)
        # Sign the string_to_sign using the signing_key
        signature = hmac.new(signing_key, (string_to_sign).encode('utf-8'), hashlib.sha256).hexdigest()
        # Add signing information to the request
        authorization_header = algorithm + ' ' + 'Credential=' + self.access_id + '/' + credential_scope + ', ' + 'SignedHeaders=' + signed_headers + ', ' + 'Signature=' + signature
        headers = {'X-Amz-Date':amzdate, 'Authorization':authorization_header, 'Content-Type': 'application/xml', 'Accept': 'application/xml'}
        # Create request url
        request_url = endpoint + '?' + canonical_querystring
        return request_url, headers

    def urlinfo(self, domain, response_group = URLINFO_RESPONSE_GROUPS):
        '''
        Provide information about supplied domain as specified by the response group
        :param domain: Any valid URL
        :param response_group: Any valid urlinfo response group
        :return: Traffic and/or content data of the domain in XML format
        '''
        params = {
            'Action': "UrlInfo",
            'Url': domain,
            'ResponseGroup': response_group
        }
        url, headers = self.create_v4_signature(params)
        return self.return_output(url, headers)

    def traffichistory(self, domain, response_group=TRAFFICINFO_RESPONSE_GROUPS, myrange=31, start=20070801):
        '''
        Provide traffic history of supplied domain
        :param domain: Any valid URL
        :param response_group: Any valid traffic history response group
        :param myrange: number of days of history to return
        :param start: first day of the history window, as YYYYMMDD
        :return: Traffic and/or content data of the domain in XML format
        '''
        params = {
            'Action': "TrafficHistory",
            'Url': domain,
            'ResponseGroup': response_group,
            'Range': myrange,
            'Start': start,
        }
        url, headers = self.create_v4_signature(params)
        return self.return_output(url, headers)

    def siteslinkingin(self, domain, response_group=SITESLINKINGIN_RESPONSE_GROUP):
        '''
        List sites linking in to the supplied domain
        :param domain: Any valid URL
        :param response_group: Any valid SitesLinkingIn response group
        :return: XML data for the domain
        '''
        params = {
            'Action': "SitesLinkingIn",
            'Url': domain,
            'ResponseGroup': response_group,
        }
        url, headers = self.create_v4_signature(params)
        return self.return_output(url, headers)

    def cat_browse(self, domain, path, response_group=CATEGORYBROWSE_RESPONSE_GROUPS, descriptions='True'):
        '''
        Provide category browse information of specified domain
        :param domain: Any valid URL
        :param path: Valid category path
        :param response_group: Any valid traffic history response group
        :return: Traffic and/or content data of the domain in XML format
        '''
        # NOTE(review): neither `domain` nor `response_group` is used below
        # -- the request hard-codes ResponseGroup "Listings" and sends no
        # Url parameter.  Confirm whether that is intentional.
        params = {
            'Action': "CategoryListings",
            'ResponseGroup': 'Listings',
            'Path': quote(path),
            'Descriptions': descriptions
        }
        url, headers = self.create_v4_signature(params)
        return self.return_output(url, headers)

    def return_output(self, url, headers):
        '''
        Use Session() to keep connection open
        Retry request until successful (handles throttling)
        '''
        # NOTE(review): this retries forever with no backoff and swallows
        # every exception, so a persistent failure spins indefinitely.
        s = requests.Session()
        while True:
            try:
                r = s.get(url, headers = headers)
                if (r.status_code == requests.codes.ok):
                    break
            except Exception as e:
                #print('Error fetching ' + url + ': ' + str(e))
                continue
        soup = BeautifulSoup(r.text.encode('utf-8'), 'xml')
        return soup
def flatten_urlinfo(urlinfo, shorter_keys=True):
    """ Takes a urlinfo object and returns a flat dictionary.

    Args:
        urlinfo: XML document as returned by CallAwis.urlinfo()
            (anything whose str() form parses as XML will do).
        shorter_keys (bool): when True, nested list entries are keyed by
            their own tag name instead of the full dotted path.

    Returns:
        dict: dotted-path keys mapped to leaf values, plus an
        "OutputTimestamp" entry recording when the flattening ran.
    """
    def flatten(value, prefix=""):
        # Strings are leaves; test them first because they are iterable.
        if is_string(value):
            _result[prefix[1:]] = value  # [1:] drops the leading "."
            return
        try:
            len(value)
        except (AttributeError, TypeError): # a leaf
            _result[prefix[1:]] = value
            return
        try:
            items = value.items()
        except AttributeError: # an iterable, but not a dict
            last_prefix = prefix.split(".")[-1]
            if shorter_keys:
                prefix = "." + last_prefix
            if last_prefix == "Country":
                # key per-country entries by their "@Code" attribute
                for v in value:
                    country = v.pop("@Code")
                    flatten(v, ".".join([prefix, country]))
            elif last_prefix in ["RelatedLink", "CategoryData"]:
                # no natural key here; fall back to the list position
                for i, v in enumerate(value):
                    flatten(v, ".".join([prefix, str(i)]))
            elif value[0].get("TimeRange"):
                # usage/traffic entries are keyed by their time range,
                # optionally prefixed with the DataUrl they refer to
                for v in value:
                    time_range = ".".join(tuple(v.pop("TimeRange").items())[0])
                    # python 3 odict_items don't support indexing
                    if v.get("DataUrl"):
                        time_range = ".".join([v.pop("DataUrl"), time_range])
                    flatten(v, ".".join([prefix, time_range]))
            else:
                msg = prefix + " contains a list we don't know how to flatten."
                raise NotImplementedError(msg)
        else: # a dict, go one level deeper
            for k, v in items:
                flatten(v, ".".join([prefix, k]))
    # Results are accumulated here by the closure above.
    _result = {}
    info = xmltodict.parse(str(urlinfo))
    flatten(info["aws:UrlInfoResponse"]["Response"]["UrlInfoResult"]["Alexa"])
    _result["OutputTimestamp"] = datetime.datetime.utcnow().strftime('%Y%m%dT%H%M%SZ')
    return _result
|
import re
import collections
import sys
import os
import glob
def fail(args):
    """Print the given message and abort the program with exit status 1."""
    print(args)
    raise SystemExit(1)
def main():
    """Generate C++ enum definitions from the description files in gen/enum/.

    Each file in gen/enum/ describes one enum; every non-empty line has
    the form ``name [= value][, "description"]``.  Explicit values must
    not decrease; a missing value continues from the previous field + 1.
    The generated code is written to src/enums/enums.h and enums.cc.

    Returns 0 on success; malformed input aborts the process via fail().
    """
    files = glob.glob('gen/enum/*')
    # name [= value] [, "description"]
    pattern = re.compile(r'\s*(\w+)\s*(?:=\s*(\d+))?\s*(?:,\s*"([^"]*)")?\s*')
    tab = "    "
    output_dir = "src/enums/"
    do_not_modify = "/* THIS FILE IS AUTOGENERATED BY gen/gen_enums.py; DO NOT HAND-MODIFY */\n"

    print("Genning enums")

    header_file = os.path.join(output_dir, "enums.h")
    source_file = os.path.join(output_dir, "enums.cc")
    with open(header_file, "w") as header, open(source_file, "w") as source:
        # Fixed preambles for both generated files.
        header.write("#pragma once\n")
        header.write("\n")
        header.write("#include <libconfig.h>\n")
        header.write("#include \"../libconfig_shim.h\"\n")
        header.write("\n")
        header.write(do_not_modify)
        header.write("\n")
        header.write("\n")
        header.write("template<typename T>\n")
        header.write(tab + "T get_enum(const char *e);\n\n")
        source.write(do_not_modify)
        source.write("\n")
        source.write("#include \"enums.h\"\n")
        source.write("\n")
        source.write("#include <cassert>\n")
        source.write("#include <cstring>\n")
        source.write("\n")
        for fl in sorted(files):
            print("    %s" % fl)
            # The file name doubles as the enum's C++ name.
            enum_name = fl.split(os.sep)[-1]
            input_file = fl
            next_value = 0
            desc = ""
            fields = collections.OrderedDict()
            descs = collections.OrderedDict()
            # Parse the description file; close the handle promptly
            # instead of relying on garbage collection.
            with open(input_file, 'r') as enum_input:
                for line in enum_input:
                    line = line.strip()
                    if len(line) == 0:
                        continue
                    match = pattern.match(line)
                    if match is None:
                        fail("Syntax error: [" + line + "]")
                    field_name = match.group(1)
                    if field_name in fields:
                        fail("Double field name: " + field_name)
                    value_string = match.group(2)
                    desc_string = match.group(3)
                    if desc_string is not None:
                        desc = desc_string
                    else:
                        desc = enum_name
                    if value_string is not None:
                        value = int(value_string)
                        if value < next_value:
                            # BUG FIX: the ints were concatenated to the
                            # string directly, which raised TypeError and
                            # masked the real error message.
                            fail("Not a monotonic progression from " +
                                 str(next_value) + " to " + str(value) +
                                 " for enum field " + field_name)
                        next_value = value
                    fields[field_name] = next_value
                    descs[field_name] = desc
                    next_value += 1
            # Emit the enum declaration and the accessor prototypes.
            header.write("\n")
            header.write("// -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-\n")
            header.write("enum class " + enum_name + "\n")
            header.write("{\n")
            for field_name, field_value in fields.items():
                header.write(tab + "%s = %s,\n" % (field_name, str(field_value)))
            header.write(tab + "upper_bound = %d,\n" % (next_value,))
            header.write(tab + "invalid = -1,\n")
            header.write("};\n")
            header.write("\n")
            header.write("const char* get_enum_description(%s value);\n" % enum_name)
            header.write("\n")
            header.write("const char* get_enum_string(%s value);\n" % enum_name)
            header.write("\n")
            header.write("template<> %s get_enum<%s>(const char *e);\n" % (enum_name, enum_name))
            header.write("\n")
            header.write("%s config_setting_get_%s(const config_setting_t *setting);\n" % (enum_name, enum_name))
            header.write("\n")
            header.write("int config_setting_set_%s(config_setting_t *setting, %s value);\n" % (enum_name, enum_name))
            header.write("\n")
            header.write("int config_setting_lookup_%s(const config_setting_t *setting, const char *name, %s *value);\n" % (enum_name, enum_name))
            # Emit the accessor definitions.
            source.write("\n")
            source.write("// -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-\n")
            source.write("const char* get_enum_description(%s value) {\n" % enum_name)
            source.write(tab + "switch(value)\n")
            source.write(tab + "{\n")
            for field_name in fields.keys():
                source.write(tab + "case %s::%s:\n" % (enum_name, field_name))
                source.write(tab + tab + "return \"%s\";\n" % descs[field_name])
            source.write(tab + "default:\n")
            source.write(tab + tab + "assert(false);\n")
            source.write(tab + tab + "return nullptr;\n")
            source.write(tab + "}\n")
            source.write("}\n")
            source.write("\n")
            source.write("const char* get_enum_string(%s value) {\n" % enum_name)
            source.write(tab + "switch(value)\n")
            source.write(tab + "{\n")
            for field_name in fields.keys():
                source.write(tab + "case %s::%s:\n" % (enum_name, field_name))
                source.write(tab + tab + "return \"%s\";\n" % field_name)
            source.write(tab + "default:\n")
            source.write(tab + tab + "assert(false);\n")
            source.write(tab + tab + "return nullptr;\n")
            source.write(tab + "}\n")
            source.write("}\n")
            source.write("\n")
            source.write("template<> %s get_enum<%s>(const char *e) {\n" % (enum_name, enum_name))
            source.write(tab + "auto val{%s::invalid};\n" % enum_name)
            for field_name in fields.keys():
                source.write(tab + "if (!strcmp(e, \"%s\")) {\n" % field_name)
                source.write(tab + tab + "val = %s::%s;\n" % (enum_name, field_name))
                source.write(tab + "}\n")
            source.write(tab + "assert(val != %s::invalid);\n" % enum_name)
            source.write(tab + "return val;\n")
            source.write("}\n")
            source.write("\n")
            source.write("%s config_setting_get_%s(const config_setting_t *setting) {\n" % (enum_name, enum_name))
            source.write(tab + "const char *str = config_setting_get_string(setting);\n")
            source.write(tab + "return get_enum<%s>(str);\n" % enum_name)
            source.write("}\n")
            source.write("\n")
            source.write("int config_setting_set_%s(config_setting_t *setting, %s value) {\n" % (enum_name, enum_name))
            source.write(tab + "auto str = get_enum_string(value);\n")
            source.write(tab + "return (config_setting_set_string(setting, str));\n")
            source.write("}\n")
            source.write("\n")
            source.write("int config_setting_lookup_%s(const config_setting_t *setting, const char *name, %s *value) {\n" % (enum_name, enum_name))
            source.write(tab + "auto *member = config_setting_get_member(setting, name);\n")
            source.write(tab + "if(!member) {\n")
            source.write(tab + tab + "return CONFIG_FALSE;\n")
            source.write(tab + "}\n")
            source.write("\n")
            source.write(tab + "*value = (%s)config_setting_get_%s(member);\n" % (enum_name, enum_name))
            source.write(tab + "return CONFIG_TRUE;\n")
            source.write("}\n")
    return 0
# Allow running as a script; propagate main()'s return code to the shell.
if __name__ == '__main__':
    sys.exit(main())
|
"""A helper to keep the jvplot docstrings consistent."""
import argparse
import ast
import importlib
import os
import pkgutil
import re
# Command-line interface.  Note that parse_args() runs at import time,
# so this module is meant to be executed as a script, not imported.
_parser = argparse.ArgumentParser()
_parser.add_argument("-o", "--out", default="out",
                     help="directory to store output in")
_args = _parser.parse_args()
_DOCSTRING_RE = re.compile(r'(.*?\n *)""".*?"""\n', re.DOTALL)
DOC_DEFAULT = {
'x_lim': [
" x_lim (tuple): a pair of numbers, specifying the lower and upper",
" coordinate range for the horizontal axis."
],
'y_lim': [
" y_lim (tuple): a pair of numbers, specifying the lower and upper",
" coordinate range for the vertical axis."
],
'x_lab': [
" x_lab (str): the axis label for the horizontal axis."
],
'y_lab': [
" x_lab (str): the axis label for the vertical axis."
],
'aspect': [
" aspect (number): The aspect ratio of the axes; a value of 1",
" displays mathematical circles visually as circles, values >1",
" show circles as ellipses wider than high, and values <1 show",
" circles as ellipses higher than wide.",
]
}
def drop_prefix(text, prefix):
    """Return text with prefix removed, or text unchanged if absent."""
    has_prefix = text.startswith(prefix)
    return text[len(prefix):] if has_prefix else text
def stringify(node, lookup=None):
    """Turn the AST representation of dotted names (like a.b.c) into a
    string.

    Args:
        node: an ast.Name or ast.Attribute node.
        lookup (dict): optional mapping applied to the leftmost name,
            e.g. to translate a local import alias into the global
            module name.

    Returns:
        str: the dotted name, e.g. "a.b.c".

    Raises:
        TypeError: if node is not a Name/Attribute chain.
    """
    if lookup is None:
        lookup = {}
    parts = []
    # Attribute nodes nest left-to-right, so collect attrs innermost-first.
    while isinstance(node, ast.Attribute):
        parts.append(node.attr)
        node = node.value
    if isinstance(node, ast.Name):
        name = node.id
        if name in lookup:
            name = lookup[name]
        parts.append(name)
    else:
        # BUG FIX: the message used to render as "(not f<...>)" because
        # the f-string prefix had been typed inside the literal.
        raise TypeError(f"node must be Name or Attribute (not {type(node)})")
    return ".".join(reversed(parts))
def pretty(node, include_attributes=False, indent='  '):
    """Return a formatted dump of the tree in *node*. This is mainly
    useful for debugging purposes. The returned string will show the
    names and the values for fields. Attributes such as line numbers
    and column offsets are not dumped by default. If this is wanted,
    *include_attributes* can be set to True.
    """
    def _format(node, level=0):
        # AST nodes render on one line as Name(field=value, ...).
        if isinstance(node, ast.AST):
            fields = [(a, _format(b, level)) for a, b in ast.iter_fields(node)]
            if include_attributes and node._attributes: #pylint: disable=W0212
                fields.extend([(a, _format(getattr(node, a), level))
                               for a in node._attributes]) #pylint: disable=W0212
            return ''.join([
                node.__class__.__name__,
                '(',
                ', '.join(('%s=%s' % field for field in fields)),
                ')'])
        # Lists render one element per line, indented two levels deeper.
        if isinstance(node, list):
            lines = ['[']
            lines.extend((indent * (level + 2) + _format(x, level + 2) + ','
                          for x in node))
            if len(lines) > 1:
                lines.append(indent * (level + 1) + ']')
            else:
                # empty list: keep "[]" on a single line
                lines[-1] += ']'
            return '\n'.join(lines)
        # Anything else is a leaf; show its repr.
        return repr(node)
    if not isinstance(node, (ast.AST, list)):
        raise TypeError('expected AST, got %r' % node.__class__.__name__)
    return _format(node)
def get_source(module_name):
    """Return the source text of the named Python module."""
    module_spec = importlib.util.find_spec(module_name)
    return module_spec.loader.get_source(module_spec.name)
def load_ast(module_name):
    """Parse the named module's source and return its AST."""
    module_spec = importlib.util.find_spec(module_name)
    module_source = module_spec.loader.get_source(module_spec.name)
    return ast.parse(module_source, module_spec.origin)
def submodules(package_name):
    """Yield all sub-modules of a Python package.
    The function returns a generator of all sub-module names, as
    strings.  The package itself is yielded first.  Modules with names
    starting with "_" or "test_", or ending with "_test" are omitted
    from the result.
    Args:
        package_name (str): full name of the package to inspect.
    Yields:
        str: full module names, e.g. ``"pkg.sub"``.
    """
    try:
        spec = importlib.util.find_spec(package_name)
    except ImportError:
        return
    # bug fix: find_spec() returns None (rather than raising) for a missing
    # top-level module; the old code then crashed with AttributeError on
    # spec.submodule_search_locations.
    if spec is None:
        return
    yield package_name
    if not spec.submodule_search_locations:
        # a plain module, not a package: nothing more to enumerate
        return
    for info in pkgutil.iter_modules(spec.submodule_search_locations):
        name = info.name
        if name.startswith("_") or name.startswith("test_") or name.endswith("_test"):
            continue
        yield from submodules(package_name + "." + name)
class FindClasses(ast.NodeVisitor):
    """A helper to find all class definitions in an AST tree.
    An AST tree can be analyzed by calling the `.visit()` method.
    After the traversal, results can be found in the `res` dictionary
    supplied to the constructor.
    """
    def __init__(self, module_name, res):
        """Create a new FindClasses object.
        Args:
            module_name: The name of the module being parsed.
            res (dict): A dictionary into which results will be stored
                while traversing the tree.  Keys are full parent class
                names, values are lists of child class names.  Classes
                with no parent class are ignored.
        """
        super().__init__()
        self.module_name = module_name
        # maps local import aliases to full module names, filled in by
        # visit_Import/visit_ImportFrom and consumed by visit_ClassDef
        self.imports = {}
        self.res = res
    def visit_Import(self, node): #pylint: disable=C0111
        # record "import a.b as c" so that "c" resolves to "a.b"
        for alias in node.names:
            global_name = alias.name
            local_name = alias.asname
            if not local_name:
                local_name = global_name
            self.imports[local_name] = global_name
    def visit_ImportFrom(self, node): #pylint: disable=C0111
        # "from x import y [as z]": map the local name to "x.y".
        # NOTE(review): for a relative import (node.module is None) the
        # parent package of module_name is assumed; node.level > 1 is not
        # handled -- confirm no "from .. import" appears in the code base.
        base = node.module
        if not base:
            base = ".".join(self.module_name.split(".")[:-1])
        for alias in node.names:
            global_name = alias.name
            local_name = alias.asname
            if not local_name:
                local_name = global_name
            self.imports[local_name] = base + "." + global_name
    def visit_ClassDef(self, node): #pylint: disable=C0111
        # register this class under each of its (alias-resolved) bases
        name = self.module_name + "." + node.name
        for base in node.bases:
            base_name = stringify(base, lookup=self.imports)
            if base_name not in self.res:
                self.res[base_name] = [name]
            else:
                self.res[base_name].append(name)
def find_descendants(root_class, package_name="jvplot"):
    """Find all (direct or indirect) subclasses of `root_class` in the
    submodules of Python package `package_name`.
    The function returns a set of full class names which derive from
    `root_class` (the root class itself is included).
    """
    # collect the parent-class -> child-classes map over every submodule
    subclass_map = {}
    for module_name in submodules(package_name):
        FindClasses(module_name, subclass_map).visit(load_ast(module_name))
    # breadth-first walk of the subclass relation starting at root_class
    found = set()
    queue = {root_class}
    while queue:
        current = queue.pop()
        if current in found:
            continue
        found.add(current)
        queue.update(subclass_map.get(current, []))
    return found
class StyleInfo:
    """Store information about the different drawing classes in jvplot.
    Fields:
        targets: set of full class names to gather information about;
            assigned once in the constructor and never changed.
        methods: maps method names to 4-tuples of (0) the full name of
            the defining class, (1) the list of argument names, (2) a
            (line, column) pair where the method body starts and (3)
            the method's docstring.
        uses: maps method names to the jvplot style parameter names
            used inside that method.
        calls: maps method names to the drawing-class methods invoked
            by that method.
    """
    def __init__(self, targets):
        """Create a new StyleInfo object.
        Args:
            targets (set of strings): full names of the classes to
                gather information about.
        """
        self.targets = targets
        self.methods = {}
        self.uses = {}
        self.calls = {}
    def modules(self):
        """Return the set of module names in which methods were found."""
        return {".".join(class_name.split(".")[:-1])
                for class_name, _, _, _ in self.methods.values()}
class FindStyleUsage(ast.NodeVisitor):
    """A helper to find all uses of jvplot style parameters in a method.
    An AST tree can be analyzed by calling the `.visit()` method.
    After the traversal, results can be found in the `.uses` and
    `.calls` fields of the FindStyleUsage object.
    """
    def __init__(self):
        super().__init__()
        self.uses = set()   # style parameter names passed to self._get_param()
        self.calls = set()  # names of other methods invoked as self.<name>(...)
    def visit_Call(self, node): #pylint: disable=C0111
        func = node.func
        # only calls of the form "self.<something>(...)" are of interest
        if not (isinstance(func, ast.Attribute)
                and isinstance(func.value, ast.Name)
                and func.value.id == "self"):
            return
        if func.attr == "_get_param":
            param_name = node.args[0]
            # bug fix: ast.Str is deprecated since Python 3.8 and removed in
            # recent versions; string literals parse as ast.Constant nodes.
            assert (isinstance(param_name, ast.Constant)
                    and isinstance(param_name.value, str)), pretty(param_name)
            self.uses.add(param_name.value)
        else:
            self.calls.add(func.attr)
class FindMethods(ast.NodeVisitor):
    """A helper to find all methods in a class.
    This uses FindStyleUsage to find uses of jvplot style parameters
    inside the method definition.  Results are appended to the `info`
    object supplied to the constructor.
    """
    def __init__(self, info, class_name):
        """Create a new FindMethods object.
        Args:
            info (StyleInfo): A StyleInfo object to store the results
                in.
            class_name (string): The full name of the class being
                analyzed.
        """
        super().__init__()
        self.info = info
        self.class_name = class_name
    def visit_FunctionDef(self, node): #pylint: disable=C0111
        name = node.name
        # constructors are recorded under the full class name
        if name == "__init__":
            name = self.class_name
        # skip the public accessor and all remaining dunder methods
        if name == "get_param" or name.startswith("__"):
            return
        if name == "close":
            # Method is defined on different sub-classes. Since this
            # is the only overloaded name, it is easiest to skip this
            # method for now.
            return
        # methods are keyed globally by name, so overloads would collide
        assert name not in self.info.methods, f"{name} is overloaded"
        args = node.args
        arg_names = [arg.arg for arg in args.args]
        if args.vararg:
            arg_names.append('*' + args.vararg.arg)
        arg_names.extend(arg.arg for arg in args.kwonlyargs)
        # **kwargs is not supported by the docstring fixer
        assert node.args.kwarg is None
        if arg_names and arg_names[0] == 'self':
            arg_names = arg_names[1:]
        docstring = ast.get_docstring(node, clean=True)
        # record defining class, argument list, body position and docstring
        self.info.methods[name] = (self.class_name,
                                   arg_names,
                                   (node.lineno, node.col_offset),
                                   docstring)
        # collect style-parameter uses and sibling-method calls in the body
        usage = FindStyleUsage()
        usage.visit(node)
        self.info.uses[name] = usage.uses
        self.info.calls[name] = usage.calls
class FindClassMethods(ast.NodeVisitor):
    """Locate the jvplot drawing classes defined in a single module.
    For every class whose full name appears in ``info.targets``, a
    FindMethods visitor is run over the class body so that method and
    style-parameter information ends up in the ``info`` object.
    """
    def __init__(self, info, module_name):
        """Create a new FindClassMethods object.
        Args:
            info (StyleInfo): destination object for all collected
                results.
            module_name (string): full name of the module being
                analyzed.
        """
        super().__init__()
        self.info = info
        self.module_name = module_name
    def visit_ClassDef(self, node): #pylint: disable=C0111
        full_name = "{}.{}".format(self.module_name, node.name)
        if full_name not in self.info.targets:
            return
        FindMethods(self.info, full_name).visit(node)
def fix_args(old_args, real_args, module_file, lineno):
    """Rebuild the body of a docstring "Args:" section.
    Args:
        old_args: the raw lines of the existing Args section.
        real_args: argument names of the method, in signature order.
        module_file: file name, used only for diagnostic messages.
        lineno: line number, used only for diagnostic messages.
    Returns:
        list of strings: the new Args section body, one entry per real
        argument (in signature order), followed by any leftover
        documented-but-nonexistent arguments.
    """
    # Group the old lines by argument: a line starting in column 0 opens
    # a new argument entry, indented/empty lines continue the current one.
    arg_lines = {}
    cur_name = None
    cur_lines = []
    for l in old_args:
        l = l.rstrip()
        if not cur_lines and not l:
            # skip leading blank lines before the first argument
            continue
        if l.startswith(" ") or not l:
            cur_lines.append(l)
            continue
        if cur_lines:
            arg_lines[cur_name] = cur_lines
        # the argument name is the first word, with ":"/"(" stripped off
        cur_name = l.replace(":", " ").replace("(", " ").split()[0]
        cur_lines = [l]
    if cur_lines:
        arg_lines[cur_name] = cur_lines
    # Emit one entry per real argument, falling back to the entry from
    # the module-level DOC_DEFAULT table when the old docstring has no
    # (or only a placeholder) description.
    res = []
    used = set()
    for arg in real_args:
        used.add(arg)
        empty = [f"    {arg} ():"]
        default = DOC_DEFAULT.get(arg, empty)
        ll = arg_lines.get(arg)
        if not ll or ll == empty:
            ll = default
        res.extend(ll)
    # keep (but report) documented arguments that are not in the signature
    for arg, ll in arg_lines.items():
        if arg in used:
            continue
        print(f"{module_file}:{lineno}: non-existing argument {arg} in docstring")
        res.extend(ll)
    return res
def fix_docstring(orig, real_args, module_file, lineno):
    """Return the lines of a docstring with its Args section rebuilt.
    Args:
        orig: the original docstring text.
        real_args: argument names of the method, in signature order.
        module_file: file name, used only for diagnostic messages.
        lineno: line number, used only for diagnostic messages.
    Returns:
        list of strings: the lines of the fixed docstring.  An Args
        section is appended if the method has arguments but the
        original docstring documented none.
    """
    lines = orig.splitlines()
    out = []
    old_args = []       # raw lines of the original Args section
    in_doc = False      # currently inside the Args section
    has_args = False    # an Args section has already been emitted
    def write_args():
        # emit the rebuilt Args section into `out`
        nonlocal out
        out.append("Args:")
        out.extend(fix_args(old_args, real_args, module_file, lineno))
    for line in lines:
        line = line.rstrip()
        if in_doc:
            # indented or empty lines still belong to the Args section
            if not line or line[0].isspace():
                old_args.append(line)
                continue
            else:
                # first non-indented line ends the section
                write_args()
                out.append("")
                in_doc = False
                has_args = True
        elif line.startswith("Args:"):
            assert not has_args, "docstring cannot have two Args sections"
            in_doc = True
            continue
        out.append(line)
    # flush a trailing Args section, or synthesize one when the method
    # has arguments but the docstring never documented them
    if in_doc or (real_args and not has_args):
        if not in_doc:
            # strip trailing blank lines before appending the new section
            while out and out[-1] == "":
                out = out[:-1]
            out.append("")
        write_args()
    return out
def _main():
    """Rewrite the docstrings of all public jvplot drawing methods.
    Finds every descendant of jvplot.device.Device, collects its
    methods, fixes each method's docstring Args section and writes the
    modified module sources into the directory given by ``_args.out``
    (a module-level argparse result, defined elsewhere in this file).
    """
    targets = find_descendants("jvplot.device.Device")
    info = StyleInfo(targets)
    # analyze every module that defines one of the target classes
    modules = set()
    for target in targets:
        modules.add(".".join(target.split(".")[:-1]))
    for module in modules:
        tree = load_ast(module)
        FindClassMethods(info, module).visit(tree)
    try:
        os.makedirs(_args.out)
    except FileExistsError:
        pass
    for module in info.modules():
        print("processing", module, "...")
        module_file = drop_prefix(module, "jvplot.") + ".py"
        body = get_source(module)
        # `parts` collects the already-processed source; `body` shrinks as
        # text is consumed from the front, `pos` tracks its first line.
        parts = []
        pos = 1
        def skip_to_line(l, drop=False):
            # consume source text up to (but not including) line `l`;
            # with drop=True the consumed text is discarded instead of kept
            nonlocal body, pos
            m = re.match(r"(.*?\n){%d}" % (l-pos), body)
            b = m.end()
            if not drop:
                parts.append(body[:b])
            body = body[b:]
            pos = l
        for method, (class_name, args, (lineno, col), docstring) in info.methods.items():
            if not class_name.startswith(module + "."):
                continue
            if method.startswith("_"):
                continue
            # constructors are stored under the bare class name
            if method != class_name:
                full_method = class_name + "." + method
            else:
                full_method = class_name + ".__init__"
            if not docstring:
                print(f"{module_file}:{lineno}: missing docstring for {full_method}")
                continue
            print(".", full_method)
            # remove the original docstring (matched by the module-level
            # _DOCSTRING_RE against the method body)
            skip_to_line(lineno)
            m = _DOCSTRING_RE.match(body)
            skip_to_line(lineno + m.group(1).count('\n'))
            skip_to_line(lineno + m.group(0).count('\n'), drop=True)
            # add in the fixed docstring
            doc_lines = fix_docstring(docstring, args, module_file, lineno)
            pfx = " " * (col + 4)
            parts.append(pfx + '"""' + doc_lines[0] + "\n")
            parts.extend((pfx + l).rstrip() + "\n" for l in doc_lines[1:])
            parts.append("\n")
            parts.append(pfx + '"""\n')
        parts.append(body)
        out_name = os.path.join(_args.out, module_file)
        with open(out_name, "w") as fd:
            fd.write("".join(parts))
        print()
if __name__ == "__main__":
    _main()
|
__all__ = ['LibraryBase',
]
from gingerprawn.api.webop import automated as auto
class LibraryBase(object):
    """Common base class for library web-automation front-ends.
    Holds the automation bot, remembers the last operation performed
    (so illegal operation sequences can be rejected) and manages an
    optional persistent cache object.
    """
    def __init__(self, baseurl, cache=None):
        self._baseurl = baseurl
        self._bot = auto.Automator(baseurl)
        self._lastop = '__init__'
        # deliberately invoke the base-class implementation, bypassing
        # any override a subclass may provide
        LibraryBase.init_cache(self, cache)
    def __del__(self):
        # flush the cache before the object goes away
        self.sync_cache()
    def chk_last_op(self, precede=None, logout_permitted=False):
        """Validate the recorded last operation before a new one runs."""
        after_logout = self._lastop == 'logout' and not logout_permitted
        if after_logout:
            raise RuntimeError('operation not permitted after logout')
        if precede is None:
            return
        if self._lastop != precede:
            raise ValueError(
                "logic error! last op should be '%s', actually was '%s'" % (
                    precede, self._lastop, ))
    def init_cache(self, cfgobj):
        """Attach the cache object (may be None for no caching)."""
        self._cache = cfgobj
    def sync_cache(self):
        """Write the cache back to persistent storage, if one is set."""
        cache = self._cache
        if cache is None:
            # cache not set, silently ignore this fact
            return
        cache.writeback()
    # The operations below form the interface subclasses must implement.
    def set_user_info(self, *args, **kwargs):
        raise NotImplementedError
    def do_login(self, *args, **kwargs):
        raise NotImplementedError
    def do_logout(self, *args, **kwargs):
        raise NotImplementedError
    def get_basicinfo(self, *args, **kwargs):
        raise NotImplementedError
    def get_book_list(self, *args, **kwargs):
        raise NotImplementedError
    def renew_book(self, *args, **kwargs):
        raise NotImplementedError
|
import math
def sine(angle):
    """sine(x) -> value
    Return the sine of x in radians."""
    return math.sin(angle)
def cosine(angle):
    """cosine(x) -> value
    Return the cosine of x in radians."""
    return math.cos(angle)
def square_rt(num1):
    '''square_rt(x) -> value
    Return the square root of x (x must be non-negative).'''
    # docstring fix: it previously misspelled the function as "squre_rt"
    return math.sqrt(num1)
def power(num, pwr):
    """power(x, y) -> value
    Return the value of x raised to the power of y."""
    return num ** pwr
|
from libmain import *
def generateMultByValTruthTable(val):
    """Return the GF(2^8) multiplication-by-``val`` truth table.
    ``val`` is a hex string; entry i of the returned list is the
    Galois-field product of ``val`` and i, for every octet value i.
    """
    multiplier = hex2bin(val)
    return [galoisMultiplication(multiplier, int2bin(entry))
            for entry in range(2 ** octetSize)]
def mixColumns():
    """Return the AES MixColumns step as a list of boolean equations.
    Builds, for every output bit, a string equation over the input bits
    x_0..x_{blockSize-1} (GF(2) sums), using precomputed truth tables
    for multiplication by 02 and 03.
    """
    equa = []
    result = ['' for i in range(blockSize)]
    # truth tables for GF(2^8) multiplication by 02 and 03, turned into
    # ANF monome strings via the Moebius transform pipeline from libmain
    tt2 = generateMultByValTruthTable('02')
    tt3 = generateMultByValTruthTable('03')
    mt2 = generateMoebiusTransform(tt2)
    mt3 = generateMoebiusTransform(tt3)
    equations2 = generateEquaMonomes(mt2)
    equations3 = generateEquaMonomes(mt3)
    equaAES2 = generateEquaMonomesAES(equations2)
    equaAES3 = generateEquaMonomesAES(equations3)
    binMon2 = generateBinaryMonomes(equaAES2)
    binMon3 = generateBinaryMonomes(equaAES3)
    bits = generateAllBits()
    # Combine the per-byte contributions column by column (4 columns of
    # 32 bits), following the MixColumns matrix rows [2 3 1 1],
    # [1 2 3 1], [1 1 2 3], [3 1 1 2].
    for cpt in range(4):
        for i in range(octetSize):
            val = i + (cpt*32)
            result[val] = binMon2[val] + binMon3[val+8] + bits[val+16] + bits[val+24]
            result[val+8] = bits[val] + binMon2[val+8] + binMon3[val+16] + bits[val+24]
            result[val+16] = bits[val] + bits[val+8] + binMon2[val+16] + binMon3[val+24]
            result[val+24] = binMon3[val] + bits[val+8] + bits[val+16] + binMon2[val+24]
    # Render each output bit as "1+x_i+x_j+..." from the tab-separated
    # monome lines: field 0 is the constant flag, field 1 the bit mask.
    for i in range(blockSize):
        tmp = result[i].split('\n')
        tmp.pop()
        eq = ''
        for monome in tmp:
            t = monome.split('\t')
            if t[0] == '1':
                eq += '1+'
            for bit in range(blockSize):
                if t[1][bit] == '1':
                    eq += 'x_%s' % (bit)
                    eq += '+'
        equa.append(eq.rstrip('+'))
    return equa
def invMixColumns():
    """Return the AES InvMixColumns step as a list of boolean equations.
    Same construction as mixColumns(), but with the inverse-matrix
    multipliers 0e, 0b, 0d and 09.
    """
    equa = []
    result = ['' for i in range(blockSize)]
    # truth tables for GF(2^8) multiplication by 0b, 0d, 09 and 0e,
    # turned into ANF monome strings via the libmain pipeline
    tt0b = generateMultByValTruthTable('0b')
    tt0d = generateMultByValTruthTable('0d')
    tt09 = generateMultByValTruthTable('09')
    tt0e = generateMultByValTruthTable('0e')
    mt0b = generateMoebiusTransform(tt0b)
    mt0d = generateMoebiusTransform(tt0d)
    mt09 = generateMoebiusTransform(tt09)
    mt0e = generateMoebiusTransform(tt0e)
    equations0b = generateEquaMonomes(mt0b)
    equations0d = generateEquaMonomes(mt0d)
    equations09 = generateEquaMonomes(mt09)
    equations0e = generateEquaMonomes(mt0e)
    equaAES0b = generateEquaMonomesAES(equations0b)
    equaAES0d = generateEquaMonomesAES(equations0d)
    equaAES09 = generateEquaMonomesAES(equations09)
    equaAES0e = generateEquaMonomesAES(equations0e)
    binMon0b = generateBinaryMonomes(equaAES0b)
    binMon0d = generateBinaryMonomes(equaAES0d)
    binMon09 = generateBinaryMonomes(equaAES09)
    binMon0e = generateBinaryMonomes(equaAES0e)
    bits = generateAllBits()
    # Combine per-byte contributions column by column, following the
    # inverse matrix rows [e b d 9], [9 e b d], [d 9 e b], [b d 9 e].
    for cpt in range(4):
        for i in range(octetSize):
            val = i + (cpt*32)
            result[val] = binMon0e[val] + binMon0b[val+8] + binMon0d[val+16] + binMon09[val+24]
            result[val+8] = binMon09[val] + binMon0e[val+8] + binMon0b[val+16] + binMon0d[val+24]
            result[val+16] = binMon0d[val] + binMon09[val+8] + binMon0e[val+16] + binMon0b[val+24]
            result[val+24] = binMon0b[val] + binMon0d[val+8] + binMon09[val+16] + binMon0e[val+24]
    # Render each output bit as "1+x_i+x_j+..." (see mixColumns)
    for i in range(blockSize):
        tmp = result[i].split('\n')
        tmp.pop()
        eq = ''
        for monome in tmp:
            t = monome.split('\t')
            if t[0] == '1':
                eq += '1+'
            for bit in range(blockSize):
                if t[1][bit] == '1':
                    eq += 'x_%s' % (bit)
                    eq += '+'
        equa.append(eq.rstrip('+'))
    return equa
|
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: ios_logging
version_added: "2.4"
author: "Trishna Guha (@trishnaguha)"
short_description: Manage logging on network devices
description:
- This module provides declarative management of logging
on Cisco Ios devices.
notes:
- Tested against IOS 15.6
options:
dest:
description:
- Destination of the logs.
choices: ['on', 'host', 'console', 'monitor', 'buffered']
name:
description:
- If value of C(dest) is I(file) it indicates file-name,
for I(user) it indicates username and for I(host) indicates
the host name to be notified.
size:
description:
- Size of buffer. The acceptable value is in range from 4096 to
4294967295 bytes.
default: 4096
facility:
description:
- Set logging facility.
level:
description:
- Set logging severity levels.
aggregate:
description: List of logging definitions.
state:
description:
- State of the logging configuration.
default: present
choices: ['present', 'absent']
"""
EXAMPLES = """
- name: configure host logging
ios_logging:
dest: host
name: 172.16.0.1
state: present
- name: remove host logging configuration
ios_logging:
dest: host
name: 172.16.0.1
state: absent
- name: configure console logging level and facility
ios_logging:
dest: console
facility: local7
level: debugging
state: present
- name: enable logging to all
ios_logging:
dest : on
- name: configure buffer size
ios_logging:
dest: buffered
size: 5000
- name: Configure logging using aggregate
ios_logging:
aggregate:
- { dest: console, level: notifications }
- { dest: buffered, size: 9000 }
- name: remove logging using aggregate
ios_logging:
aggregate:
- { dest: console, level: notifications }
- { dest: buffered, size: 9000 }
state: absent
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always
type: list
sample:
- logging facility local7
- logging host 172.16.0.1
"""
import re
from copy import deepcopy
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network_common import remove_default_spec
from ansible.module_utils.ios import get_config, load_config
from ansible.module_utils.ios import ios_argument_spec, check_args
def validate_size(value, module):
    """Validate a logging buffer size.
    Args:
        value: buffer size in bytes, or a falsy value when unset.
        module: AnsibleModule, used to report a validation failure.
    Returns:
        ``value`` unchanged when it is set and within range, otherwise
        ``None`` (after fail_json for an out-of-range value).
    """
    if value:
        # IOS accepts buffer sizes from 4096 to 4294967295 bytes;
        # the redundant int() wrappers around the literals were dropped
        if not 4096 <= int(value) <= 4294967295:
            module.fail_json(msg='size must be between 4096 and 4294967295')
        else:
            return value
    return None
def map_obj_to_commands(updates, module):
    """Build the IOS commands that turn the current logging config
    (``have``) into the desired one (``want``).
    Args:
        updates: tuple ``(want, have)`` of desired and current config
            dicts.
        module: AnsibleModule, used only to fail on a missing dest.
    Returns:
        list: the CLI commands to send to the device.
    """
    commands = []
    want, have = updates
    for item in want:
        dest = item['dest']
        name = item['name']
        size = item['size']
        facility = item['facility']
        level = item['level']
        # drop 'state' so the remaining keys can be compared against `have`
        state = item.pop('state')
        if state == 'absent' and item in have:
            if dest == 'host':
                commands.append('no logging host {0}'.format(name))
            elif dest:
                commands.append('no logging {0}'.format(dest))
            else:
                module.fail_json(msg='dest must be among console, monitor, buffered, host, on')
            if facility:
                commands.append('no logging facility {0}'.format(facility))
        if state == 'present' and item not in have:
            if facility:
                commands.append('logging facility {0}'.format(facility))
            if dest == 'host':
                commands.append('logging host {0}'.format(name))
            elif dest == 'on':
                commands.append('logging on')
            elif dest == 'buffered' and size:
                commands.append('logging buffered {0}'.format(size))
            else:
                # console/monitor (and buffered without a size): the
                # severity level, when set, is appended to the command
                dest_cmd = 'logging {0}'.format(dest)
                if level:
                    dest_cmd += ' {0}'.format(level)
                commands.append(dest_cmd)
    return commands
def parse_facility(line, dest):
    """Extract the syslog facility from a "logging facility" line.
    Returns None when ``dest`` is not "facility" or no facility is
    present on the line.
    """
    if dest != 'facility':
        return None
    match = re.search(r'logging facility (\S+)', line, re.M)
    return match.group(1) if match else None
def parse_size(line, dest):
    """Extract the buffer size from a "logging buffered" line.
    Args:
        line: one line of device configuration.
        dest: the destination being parsed; only "buffered" is relevant.
    Returns:
        str: the size for a valid non-zero integer value, else None.
    """
    # The original body carried dead code: `isinstance(int_size, int)` was
    # always true once int() succeeded, so the `str(4096)` fallback branch
    # was unreachable; behavior is unchanged by removing it.
    if dest != 'buffered':
        return None
    match = re.search(r'logging buffered (\S+)', line, re.M)
    if not match:
        return None
    try:
        # reject non-numeric and zero sizes
        if int(match.group(1)):
            return match.group(1)
    except ValueError:
        pass
    return None
def parse_name(line, dest):
    """Extract the host name from a "logging host" line.
    Returns None when ``dest`` is not "host" or no host is present.
    """
    # bug fix: `name` used to be referenced while unbound (UnboundLocalError)
    # whenever dest was not 'host'; initialize it up front.
    name = None
    if dest == 'host':
        match = re.search(r'logging host (\S+)', line, re.M)
        if match:
            name = match.group(1)
    return name
def parse_level(line, dest):
    """Extract the severity level for destination ``dest`` from ``line``.
    Falls back to 'debugging' for host destinations, for unknown level
    names and for lines that carry no level at all.
    """
    known_levels = ('emergencies', 'alerts', 'critical', 'errors', 'warnings',
                    'notifications', 'informational', 'debugging')
    if dest == 'host':
        return 'debugging'
    match = re.search(r'logging {0} (\S+)'.format(dest), line, re.M)
    if match and match.group(1) in known_levels:
        return match.group(1)
    return 'debugging'
def map_config_to_obj(module):
    """Parse the device's running logging configuration into dicts.
    Args:
        module: AnsibleModule used to fetch the device configuration.
    Returns:
        list of dicts with keys dest/name/size/facility/level, one per
        recognized "logging <dest>" line.
    """
    obj = []
    dest_group = ('console', 'host', 'monitor', 'buffered', 'on', 'facility')
    # only the "logging" lines of the running config are requested
    data = get_config(module, flags=['| include logging'])
    for line in data.split('\n'):
        match = re.search(r'logging (\S+)', line, re.M)
        if match:
            # lines whose first word is not a known destination
            # (e.g. "logging 10.0.0.1") are silently skipped
            if match.group(1) in dest_group:
                dest = match.group(1)
                obj.append({
                    'dest': dest,
                    'name': parse_name(line, dest),
                    'size': parse_size(line, dest),
                    'facility': parse_facility(line, dest),
                    'level': parse_level(line, dest)
                })
    return obj
def map_params_to_obj(module, required_if=None):
    """Normalize the module parameters into a list of config dicts.
    Args:
        module: AnsibleModule carrying the user-supplied parameters.
        required_if: the required_if spec, re-checked per aggregate item.
    Returns:
        list of dicts with keys dest/name/size/facility/level/state.
    """
    obj = []
    aggregate = module.params.get('aggregate')
    if aggregate:
        for item in aggregate:
            # fill unset per-item keys from the top-level parameters
            for key in item:
                if item.get(key) is None:
                    item[key] = module.params[key]
            # NOTE(review): _check_required_if is a private AnsibleModule
            # API -- verify it still exists on the targeted Ansible version.
            module._check_required_if(required_if, item)
            d = item.copy()
            # 'name' is only meaningful for the host destination
            if d['dest'] != 'host':
                d['name'] = None
            # 'size' is only meaningful for the buffered destination;
            # validate it when given, default to 4096 otherwise
            if d['dest'] == 'buffered':
                if 'size' in d:
                    d['size'] = str(validate_size(d['size'], module))
                elif 'size' not in d:
                    d['size'] = str(4096)
                else:
                    pass
            if d['dest'] != 'buffered':
                d['size'] = None
            obj.append(d)
    else:
        # single (non-aggregate) parameter set
        if module.params['dest'] != 'host':
            module.params['name'] = None
        if module.params['dest'] == 'buffered':
            if not module.params['size']:
                module.params['size'] = str(4096)
        else:
            module.params['size'] = None
        if module.params['size'] is None:
            obj.append({
                'dest': module.params['dest'],
                'name': module.params['name'],
                'size': module.params['size'],
                'facility': module.params['facility'],
                'level': module.params['level'],
                'state': module.params['state']
            })
        else:
            obj.append({
                'dest': module.params['dest'],
                'name': module.params['name'],
                'size': str(validate_size(module.params['size'], module)),
                'facility': module.params['facility'],
                'level': module.params['level'],
                'state': module.params['state']
            })
    return obj
def main():
    """ main entry point for module execution
    Builds the argument spec, computes the difference between the
    desired and current logging configuration and pushes the resulting
    commands to the device (unless running in check mode).
    """
    element_spec = dict(
        dest=dict(type='str', choices=['on', 'host', 'console', 'monitor', 'buffered']),
        name=dict(type='str'),
        size=dict(type='int'),
        facility=dict(type='str'),
        level=dict(type='str', default='debugging'),
        state=dict(default='present', choices=['present', 'absent']),
    )
    aggregate_spec = deepcopy(element_spec)
    # remove default in aggregate spec, to handle common arguments
    remove_default_spec(aggregate_spec)
    argument_spec = dict(
        aggregate=dict(type='list', elements='dict', options=aggregate_spec),
    )
    argument_spec.update(element_spec)
    argument_spec.update(ios_argument_spec)
    # a host destination requires the host name to notify
    required_if = [('dest', 'host', ['name'])]
    module = AnsibleModule(argument_spec=argument_spec,
                           required_if=required_if,
                           supports_check_mode=True)
    warnings = list()
    check_args(module, warnings)
    result = {'changed': False}
    if warnings:
        result['warnings'] = warnings
    # diff desired state (want) against device state (have)
    want = map_params_to_obj(module, required_if=required_if)
    have = map_config_to_obj(module)
    commands = map_obj_to_commands((want, have), module)
    result['commands'] = commands
    if commands:
        # only touch the device outside of check mode
        if not module.check_mode:
            load_config(module, commands)
        result['changed'] = True
    module.exit_json(**result)
if __name__ == '__main__':
    main()
|
'''
Graphics tests
==============
Testing the simple vertex instructions
'''
import unittest
from kivy.tests.common import GraphicUnitTest
class VertexInstructionTest(GraphicUnitTest):
    """Render-and-compare tests for the simple vertex instructions."""
    def test_circle(self):
        """Render circles: basic, reduced segment count, and re-positioned."""
        from kivy.uix.widget import Widget
        from kivy.graphics import Ellipse, Color
        r = self.render
        # basic circle
        wid = Widget()
        with wid.canvas:
            Color(1, 1, 1)
            Ellipse(pos=(100, 100), size=(100, 100))
        r(wid)
        # reduced circle
        wid = Widget()
        with wid.canvas:
            Color(1, 1, 1)
            Ellipse(pos=(100, 100), size=(100, 100), segments=10)
        r(wid)
        # moving circle
        wid = Widget()
        with wid.canvas:
            Color(1, 1, 1)
            self.e = Ellipse(pos=(100, 100), size=(100, 100))
        # reposition after creation, before rendering
        self.e.pos = (10, 10)
        r(wid)
    def test_ellipse(self):
        """Render a non-circular ellipse."""
        from kivy.uix.widget import Widget
        from kivy.graphics import Ellipse, Color
        r = self.render
        # ellipse
        wid = Widget()
        with wid.canvas:
            Color(1, 1, 1)
            self.e = Ellipse(pos=(100, 100), size=(200, 100))
        r(wid)
    def test_point(self):
        """Render a single point and a batch of points."""
        from kivy.uix.widget import Widget
        from kivy.graphics import Point, Color
        r = self.render
        # 1 point
        wid = Widget()
        with wid.canvas:
            Color(1, 1, 1)
            Point(points=(10, 10))
        r(wid)
        # 25 points
        wid = Widget()
        with wid.canvas:
            Color(1, 1, 1)
            Point(points=[x * 5 for x in range(50)])
        r(wid)
    def test_point_add(self):
        """Render points added one at a time via add_point()."""
        from kivy.uix.widget import Widget
        from kivy.graphics import Point, Color
        r = self.render
        wid = Widget()
        with wid.canvas:
            Color(1, 1, 1)
            p = Point(pointsize=10)
        p.add_point(10, 10)
        p.add_point(90, 10)
        p.add_point(10, 90)
        p.add_point(50, 50)
        p.add_point(10, 50)
        p.add_point(50, 10)
        r(wid)
class FBOInstructionTestCase(unittest.TestCase):
    """Check that an offscreen Fbo can be drawn and its pixels read back."""
    def test_fbo_pixels(self):
        """Draw into a 512x512 Fbo and save the pixels as a PNG."""
        from kivy.graphics import Fbo, ClearColor, ClearBuffers, Ellipse
        fbo = Fbo(size=(512, 512))
        with fbo:
            ClearColor(0, 0, 0, 1)
            ClearBuffers()
            Ellipse(pos=(100, 100), size=(100, 100))
        fbo.draw()
        data = fbo.pixels
        import pygame
        # round-trip the raw RGBA bytes through pygame to write the image
        surface = pygame.image.fromstring(data, (512, 512), 'RGBA', True)
        pygame.image.save(surface, "results.png")
    def tearDown(self):
        # remove the image written by test_fbo_pixels, if any
        import os
        if os.path.exists('results.png'):
            os.unlink('results.png')
|
"""Tasks to check if the incoming record already exist."""
from __future__ import absolute_import, division, print_function
import datetime
from functools import wraps
from flask import current_app
from invenio_db import db
from invenio_workflows import workflow_object_class
from inspire_matcher.api import match
from inspire_utils.dedupers import dedupe_list
from inspirehep.utils.datefilter import date_older_than
from inspirehep.utils.record import get_arxiv_categories, get_arxiv_id
from ..utils import with_debug_logging
@with_debug_logging
def is_too_old(record, days_ago=5):
    """Return True if the record is more than days_ago days old.
    If the record is older then it's probably an update of an earlier
    record, and we don't want those.
    """
    date_format = "%Y-%m-%d"
    earliest_date = record.get('earliest_date', '') or record.get('preprint_date', '')
    if earliest_date:
        try:
            parsed_date = datetime.datetime.strptime(earliest_date, date_format)
        except ValueError as err:
            raise ValueError(
                (
                    'Unrecognized earliest_date format "%s", valid formats is '
                    '%s: %s'
                ) % (earliest_date, date_format, err)
            )
        is_recent = not date_older_than(
            parsed_date,
            datetime.datetime.utcnow(),
            days=days_ago,
        )
        if is_recent:
            return False
    # records without a usable date are treated as too old
    return True
@with_debug_logging
def article_exists(obj, eng):
    """Return ``True`` if the record is already present in the system.
    Uses the default configuration of the ``inspire-matcher`` to find
    duplicates of the current workflow object in the system.
    Also sets the ``record_matches`` property in ``extra_data`` to the list of
    control numbers that matched.
    Arguments:
        obj: a workflow object.
        eng: a workflow engine.
    Returns:
        bool: ``True`` if the workflow object has a duplicate in the system
        ``False`` otherwise.
    """
    matched = dedupe_list(match(obj.data))
    control_numbers = [hit['_source']['control_number'] for hit in matched]
    if not control_numbers:
        return False
    obj.extra_data['record_matches'] = control_numbers
    return True
@with_debug_logging
def is_being_harvested_on_legacy(record):
    """Return True if the record is being harvested on Legacy.
    A record that belongs to at least one of the CORE arXiv categories
    is already harvested there.
    """
    legacy_categories = set(current_app.config.get(
        'ARXIV_CATEGORIES_ALREADY_HARVESTED_ON_LEGACY', []))
    record_categories = set(get_arxiv_categories(record))
    return bool(record_categories & legacy_categories)
@with_debug_logging
def already_harvested(obj, eng):
    """Check if record is already harvested."""
    if not is_being_harvested_on_legacy(obj.data):
        return False
    obj.log.info((
        'Record with arXiv id {arxiv_id} is'
        ' already being harvested on Legacy.'
    ).format(arxiv_id=get_arxiv_id(obj.data)))
    return True
def previously_rejected(days_ago=None):
    """Check if record exist on INSPIRE or already rejected.
    Args:
        days_ago: age threshold in days; when None, the value of the
            INSPIRE_ACCEPTANCE_TIMEOUT config (default 5) is used at
            call time.
    Returns:
        A workflow task callable returning True for records considered
        previously rejected (i.e. too old).
    """
    # NOTE(review): @wraps is applied with the *outer* factory function,
    # so the inner task inherits the name "previously_rejected" -- looks
    # intentional for log readability, but confirm.
    @with_debug_logging
    @wraps(previously_rejected)
    def _previously_rejected(obj, eng):
        if days_ago is None:
            # config is read lazily so the app context decides the default
            _days_ago = current_app.config.get('INSPIRE_ACCEPTANCE_TIMEOUT', 5)
        else:
            _days_ago = days_ago
        if is_too_old(obj.data, days_ago=_days_ago):
            obj.log.info("Record is likely rejected previously.")
            return True
        return False
    return _previously_rejected
@with_debug_logging
def pending_in_holding_pen(obj, eng):
    """Return ``True`` if the record is already present in the Holding Pen.
    Uses a custom configuration of the ``inspire-matcher`` to find duplicates
    of the current workflow object in the Holding Pen.
    Also sets ``holdingpen_matches`` in ``extra_data`` to the list of ids that
    matched.
    Arguments:
        obj: a workflow object.
        eng: a workflow engine.
    Returns:
        bool: ``True`` if the workflow object has a duplicate in the Holding
        Pen, ``False`` otherwise.
    """
    # match on exact arXiv eprint or DOI values against the Holding Pen index
    config = {
        'algorithm': [
            {
                'queries': [
                    {
                        'path': 'arxiv_eprints.value',
                        'search_path': 'metadata.arxiv_eprints.value.raw',
                        'type': 'exact',
                    },
                    {
                        'path': 'dois.value',
                        'search_path': 'metadata.dois.value.raw',
                        'type': 'exact',
                    },
                ],
            },
        ],
        'doc_type': 'hep',
        'index': 'holdingpen-hep',
    }
    matches = dedupe_list(match(obj.data, config))
    # exclude the object itself from its own match list
    holdingpen_ids = [int(el['_id']) for el in matches if int(el['_id']) != obj.id]
    if holdingpen_ids:
        obj.extra_data['holdingpen_matches'] = holdingpen_ids
        return True
    return False
@with_debug_logging
def delete_self_and_stop_processing(obj, eng):
    """Delete the workflow object and skip further processing.
    Removes the object's database model and tells the engine to skip
    the current token so the workflow does not continue with it.
    """
    db.session.delete(obj.model)
    eng.skip_token()
@with_debug_logging
def stop_processing(obj, eng):
    """Stop processing for object and return as completed."""
    eng.stopProcessing()
@with_debug_logging
def update_existing_workflow_object(obj, eng):
    """Update the data of the old object with the new data.
    Walks the Holding Pen matches recorded in ``extra_data`` and merges
    this object's data into the first match acquired via the same
    method.  Raises when no suitable match is found.
    """
    holdingpen_ids = obj.extra_data.get('holdingpen_matches', [])
    for matched_id in holdingpen_ids:
        existing_obj = workflow_object_class.get(matched_id)
        if (
            obj.data.get('acquisition_source') and
            existing_obj.data.get('acquisition_source')
        ):
            if (
                obj.data['acquisition_source'].get('method') ==
                existing_obj.data['acquisition_source'].get('method')
            ):
                # Method is the same, update obj
                existing_obj.data.update(obj.data)
                existing_obj.save()
                break
    else:
        # for/else: reached only when no match above triggered `break`
        # (including the case of an empty holdingpen_ids list)
        msg = "Cannot update old object, non valid ids: %s"
        obj.log.error(msg, holdingpen_ids)
        raise Exception(msg % holdingpen_ids)
|
"""
Some helper classes
"""
class reading():
    """Simple wrapper for the data of a reading from the p1 interface"""
    def __init__(self):
        # timestamp of the reading; None until one has been parsed
        self.timestamp = None
        # tariff-1 / tariff-2 meter totals
        self.t1 = 0
        self.t2 = 0
        # current consumption
        self.consumption = 0
    def isComplete(self):
        """Return True once every field has been filled in.
        NOTE(review): a genuine zero value for t1/t2/consumption also
        counts as incomplete -- presumably such readings never occur in
        practice; confirm against the p1 data source.
        """
        # idiom fix: compare against None with "is not" rather than "!="
        return self.timestamp is not None \
               and self.t1 != 0 \
               and self.t2 != 0 \
               and self.consumption != 0
|
"""
@author: kevinhikali
@email: hmingwei@gmail.com
"""
# Fit a small dense network to noisy sin(x) samples, plot the fit and
# save the model, its diagram and the training curves.
# NOTE(review): this uses the Keras 1.x API (init=, nb_epoch=,
# keras.utils.visualize_util) -- it will not run on modern Keras.
from keras.models import Sequential
from keras.layers.core import Dense, Activation
from keras.utils.visualize_util import plot
from visualize_plots import figures
from keras.models import load_model
import matplotlib.pyplot as plt
import numpy as np
# training data: 1000 points of sin(x) on [-2pi, 2pi] plus uniform noise
x_train = np.linspace(-2*np.pi, 2*np.pi, 1000)
x_train = np.array(x_train).reshape((len(x_train), 1))
n = 0.1*np.random.rand(len(x_train), 1)
y_train = np.sin(x_train) + n
# 1 -> 100 -> 50 -> 1 dense network; tanh output bounds predictions to [-1, 1]
model = Sequential()
model.add(Dense(100, init = 'uniform', input_dim = 1))
model.add(Activation('relu'))
model.add(Dense(50))
model.add(Activation('relu'))
model.add(Dense(1))
model.add(Activation('tanh'))
model.compile(loss = 'mse',
    optimizer = 'rmsprop',
    metrics = ['accuracy'])
# train with a 20% validation split
hist = model.fit(x_train, y_train, batch_size = 10,
    nb_epoch = 40, shuffle = True,
    validation_split = 0.2)
score = model.evaluate(x_train, y_train, batch_size = 10)
# predict on a fresh grid over the same interval
x_test = np.linspace(-2*np.pi, 2*np.pi, 150)
y_test = model.predict(x_test, batch_size = 1)
# plot training data (blue) against predictions (red dots)
fig = plt.figure()
plot_train, = plt.plot(x_train, y_train, 'b')
plot_test, = plt.plot(x_test, y_test, 'ro')
plt.legend([plot_train, plot_test], ('train data', 'test data'), 'best', numpoints = 1)
plt.title('Regression Result')
plt.xlabel('x')
plt.ylabel('y')
plt.savefig('ml_regression_result.png')
plt.show()
# persist the model diagram, the weights and the training-history plots
plot(model, to_file = 'ml_regression_model.png')
model.save('ml_regression.h5')
figures(hist, 'ml_regression_epoch.png')
|
import os, sys, glob
from Bio import SeqIO, Entrez
import argparse
def main():
    # Python 2 script: read accession ids (one per line) from the file
    # given with -a, fetch each GenBank record from NCBI via Entrez and
    # write it out as both <id>.gb and <id>.fasta.
    opts=argparse.ArgumentParser(sys.argv[0],
        description="fetch genbank files given accessions",
        prefix_chars="-",
        add_help=True,
        epilog="Written by Chrispin Chaguza, MLW, 2014")
    opts.add_argument("-a",action="store",nargs=1,metavar="ACCESSION FILE",dest="ACCESSION",help="specify accessions file",required=True)
    options=opts.parse_args()
    input_file=options.ACCESSION[0]
    indata=""
    ncounter=0
    try:
        # one accession per line, whitespace stripped
        indata=[str(j).strip() for j in open(str(input_file),"rU")]
    except IOError as inError:
        print "Error: Failed to open input file "+str(input_file)+"..."
        print "\n\nException caught: \n",inError
        sys.exit()
    try:
        for i in list(indata):
            ncounter=ncounter+1
            print "Parsing...\t"+str(i)+"\t"+str(ncounter)+"/"+str(len(indata))
            seqfile=Entrez.efetch(email="xpnc@me.com",db="nucleotide",id=str(i),retmode="text",rettype="gb")
            gb_record=SeqIO.read(seqfile,"genbank")
            SeqIO.write(gb_record,str(i)+".gb","genbank")
            SeqIO.write(gb_record,str(i)+".fasta","fasta")
    except Exception as exError:
        # NOTE(review): exError is swallowed without being printed, so the
        # actual failure reason is lost -- consider including it in the
        # message.
        print "Error: Problem encountered while downloading files..."
        sys.exit()
if __name__ == "__main__":
    main()
|
import matplotlib
matplotlib.backend = 'Qt4Agg'
import matplotlib.pyplot as plt
import numpy as np
np.set_printoptions(precision=4)
import os
import sys
import threading
import time
from collections import deque
import agents
import goals
import q_networks
# Arm geometry (plot units).
ARM_LENGTH_1 = 12.0
ARM_LENGTH_2 = 18.0
# Joint rotation per simulation step, in radians (1 degree).
ANGULAR_ARM_VELOCITY = 1.0*np.pi/180.0
# Distance at which an episode counts as reaching the goal.
GOAL_THRESHOLD = 0.02
HEIGHT = 70
# Maximum steps per episode before restarting.
MAX_STEPS = 500
NUM_OF_ACTIONS = 4
NUM_OF_ACTORS = 1
# Plot grid layout (X columns, Y rows).
NUM_OF_PLOTS_X = 2
NUM_OF_PLOTS_Y = 1
# State vector length fed to the Q-network (agent + goal state).
NUM_OF_STATES = 6
WIDTH = 70
class Actor(threading.Thread):
    """Worker thread that repeatedly runs reaching episodes with a 2-link arm.

    Each episode samples a random scene, then greedily follows the online
    Q-network until the end effector reaches the goal or MAX_STEPS elapse.
    Depends on module-level globals created in ``__main__``: ``networks``,
    ``networks_lock``, ``plotting_lock`` and the axes array ``ax``.
    """
    def __init__(self, threadID, goal_threshold=GOAL_THRESHOLD, max_steps=MAX_STEPS):
        threading.Thread.__init__(self)
        self.agent = None  # place-holder for agent
        self.goal = None  # place-holder for goal
        self.GOAL_THRESHOLD = goal_threshold  # desired distance to goal; episode finishes early if achieved
        self.MAX_STEPS = max_steps  # maximal steps per episode
        self.THREAD_ID = threadID  # thread id (integer), used to pick a plot axis
        self.path = deque([], maxlen=500)  # recent end-effector positions, for plotting

    def get_state(self):
        """Return the combined state vector (agent state + goal state)."""
        return np.hstack((self.agent.get_state(), self.goal.get_state()))

    def episode_finished(self):
        """Return True when the end effector is within the goal threshold."""
        agent_pos = self.agent.get_position()
        goal_pos = self.goal.get_position()
        distance = np.linalg.norm(agent_pos[:2] - goal_pos[:2])
        # BUGFIX: compare against the per-instance threshold. The original
        # compared against the module-level GOAL_THRESHOLD constant, so the
        # goal_threshold constructor argument was silently ignored.
        return distance < self.GOAL_THRESHOLD

    def plot(self):
        """Redraw this actor's axis: goal, agent, limits, and the recent path."""
        # stepwise refreshing of plot
        ax[0,self.THREAD_ID].clear()
        # plotting of AGENT, GOAL and set AXIS LIMITS
        self.goal.plot(ax[0,self.THREAD_ID])
        self.agent.plot(ax[0,self.THREAD_ID])
        ax[0,self.THREAD_ID].set_xlim([-WIDTH/2, WIDTH/2])
        ax[0,self.THREAD_ID].set_ylim([-HEIGHT/2, HEIGHT/2])
        for point in self.path:
            ax[0,self.THREAD_ID].plot(point[0],point[1],'co')

    def run(self):
        """Run episodes forever: act greedily w.r.t. the shared online Q-network."""
        while True:
            # init new episode with a randomly chosen scene
            plotting_lock.acquire()
            scene_id = np.random.choice([0,1,2,3])
            self.agent = agents.Arm(scene_id, angular_velocity_1=ANGULAR_ARM_VELOCITY, angular_velocity_2=ANGULAR_ARM_VELOCITY, arm_length_1=ARM_LENGTH_1, arm_length_2=ARM_LENGTH_2)
            self.goal = goals.Goal_Arm(scene_id, ARM_LENGTH_1, ARM_LENGTH_2)
            plotting_lock.release()
            for step in range(self.MAX_STEPS):
                state = self.get_state()
                self.path.append(self.agent.get_end_effector_position())
                # Network access is shared between actors: serialize predictions.
                networks_lock.acquire()
                q = networks.online_net.predict(state.reshape(1,NUM_OF_STATES), batch_size=1)
                networks_lock.release()
                action = np.argmax(q)  # choose best action from Q(s,a)
                # take action, observe next state s'
                self.agent.set_action(action)
                self.agent.update()
                next_state = self.get_state()
                # check if agent at goal
                terminal = self.episode_finished()
                # plot the scene (GUI state is shared: serialize drawing)
                plotting_lock.acquire()
                self.plot()
                plotting_lock.release()
                if terminal:
                    break  # start new episode
if __name__ == "__main__":
    # create GLOBAL thread-locks shared by all Actor threads
    console_lock = threading.Lock()
    networks_lock = threading.Lock()
    plotting_lock = threading.Lock()
    # create GLOBAL Q-NETWORKS (shared, guarded by networks_lock)
    networks = q_networks.QNetworks(NUM_OF_ACTIONS, NUM_OF_STATES)
    # initialize GLOBAL plotting
    fig, ax = plt.subplots(NUM_OF_PLOTS_Y,NUM_OF_PLOTS_X)
    # Flatten the axes grid to one row so actors can index it by thread id.
    ax = ax.reshape(1, ax.shape[0]*ax.shape[1])
    plt.ion()
    # create threads
    threads = []
    threads.extend([Actor(i) for i in range(NUM_OF_ACTORS)])
    # set daemon, allowing Ctrl-C to terminate the process
    for i in range(len(threads)):
        threads[i].daemon = True
    # start new Threads
    [threads[i].start() for i in range(len(threads))]
    # show plot
    plt.show()
    # Main thread services the GUI event loop; drawing happens in actors
    # under plotting_lock, so flushing is guarded by the same lock.
    while True:
        plotting_lock.acquire()
        fig.canvas.flush_events()
        plotting_lock.release()
        time.sleep(0.1)
|
from tasty.types.driver import TestDriver
__params__ = {'la': 32, 'lb': 32}
driver = TestDriver()
def protocol(client, server, params):
    """TASTY protocol: multiply two client-supplied unsigned values in a
    garbled circuit and reveal the product on the server side.

    NOTE(review): ``Unsigned`` and ``Garbled`` are names injected by the
    TASTY runtime/compiler; they are intentionally not imported here.
    """
    la = params['la']
    lb = params['lb']
    # Client reads both inputs from the test driver.
    client.a = Unsigned(bitlen=la).input(src=driver, desc='a')
    client.b = Unsigned(bitlen=lb).input(src=driver, desc='b')
    # Convert plain values to garbled form and multiply inside the circuit.
    client.ga = Garbled(val=client.a)
    client.gb = Garbled(val=client.b)
    client.gc = client.ga * client.gb
    # Transfer the garbled product to the server, decode, and output.
    server.gc <<= client.gc
    server.c = Unsigned(val=server.gc)
    server.c.output(dest=driver, desc='c')
|
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext_lazy as _

from course.models import LearningObjectCategory, CourseModule
from exercise.exercisecollection_models import ExerciseCollection
from exercise.models import LearningObject, CourseChapter, \
    BaseExercise, LTIExercise, StaticExercise, ExerciseWithAttachment

from .course_forms import LearningObjectCategoryForm, CourseModuleForm
from .exercise_forms import CourseChapterForm, BaseExerciseForm, \
    LTIExerciseForm, ExerciseWithAttachmentForm, StaticExerciseForm, \
    ExerciseCollectionExerciseForm
class ModelManager(object):
    """Base helper for fetching, creating, and editing course-scoped models.

    Subclasses set ``object_class``, ``form_class``, ``name``, and (when the
    model is not directly linked to the instance) ``instance_field``.
    """

    object_class = None
    instance_field = "course_instance"
    form_class = None
    name = None

    def get_object(self, instance, object_id):
        """Return the object with the given id inside ``instance``, or 404."""
        lookup = {"id": object_id}
        lookup[self.instance_field] = instance
        return get_object_or_404(self.object_class, **lookup)

    def new_object(self, instance, parent_id, type):
        """Build (but do not save) a fresh object bound to the instance."""
        return self.object_class(course_instance=instance)

    def get_form_class(self, obj):
        """Return the form class used to edit ``obj``."""
        return self.form_class

    def can_delete(self, obj):
        """Deletable by default; subclasses may restrict this."""
        return True
class ExerciseContainerMixin(object):
    """Forbid deleting a container while learning objects still belong to it."""

    def can_delete(self, obj):
        remaining = obj.learning_objects.count()
        return remaining == 0
class CategoryManager(ExerciseContainerMixin, ModelManager):
    """Manager for LearningObjectCategory objects.

    Deletion is blocked while the category still contains learning objects
    (via ExerciseContainerMixin.can_delete).
    """
    object_class = LearningObjectCategory
    form_class = LearningObjectCategoryForm
    name = _("category")
class ModuleManager(ExerciseContainerMixin, ModelManager):
    """Manager for CourseModule objects; deletion blocked while non-empty."""
    object_class = CourseModule
    form_class = CourseModuleForm
    name = _("module")
    def new_object(self, instance, parent_id, type):
        """Create an unsaved module, ordered after the instance's existing modules."""
        return self.object_class(
            course_instance=instance,
            order=(instance.course_modules.count() + 1)
        )
class ExerciseManager(ModelManager):
    """Manager for learning objects (exercises, chapters, attachments, ...)."""
    object_class = LearningObject
    instance_field = "course_module__course_instance"
    name = _("learning object")

    def get_object(self, instance, object_id):
        """Fetch the learning object and downcast it to its concrete subclass."""
        obj = super().get_object(instance, object_id)
        return obj.as_leaf_class()

    def new_object(self, instance, parent_id, type):
        """Build an unsaved learning object of the requested type.

        ``parent_id`` identifies the CourseModule the object goes into;
        raises Http404 for an unknown type or module.
        """
        CLASSES = {
            None: BaseExercise,
            "lti": LTIExercise,
            "chapter": CourseChapter,
            "static": StaticExercise,
            "attachment": ExerciseWithAttachment,
        }
        # Idiom fix: "type not in" instead of "not type in".
        # Http404 is imported from django.http at the top of the file
        # (the original raised an unimported name -> NameError).
        if type not in CLASSES:
            raise Http404()
        object_class = CLASSES[type]
        module = get_object_or_404(
            CourseModule,
            id=parent_id,
            course_instance=instance
        )
        kwargs = {
            "course_module": module,
            # Append after the module's existing top-level objects.
            "order": module.learning_objects.filter(parent__isnull=True).count() + 1,
        }
        first_category = instance.categories.first()
        if first_category:
            kwargs["category"] = first_category
        return object_class(**kwargs)

    def get_form_class(self, obj):
        """Return the edit form matching the object's concrete class."""
        FORMS = {
            CourseChapter: CourseChapterForm,
            BaseExercise: BaseExerciseForm,
            LTIExercise: LTIExerciseForm,
            StaticExercise: StaticExerciseForm,
            ExerciseWithAttachment: ExerciseWithAttachmentForm,
            ExerciseCollection: ExerciseCollectionExerciseForm,
        }
        if obj.__class__ not in FORMS:
            # BUGFIX: format the message; the original passed the class as a
            # second TypeError argument (logging style), leaving "%s" literal.
            raise TypeError("No form known for the object type: %s"
                            % obj.__class__)
        return FORMS[obj.__class__]
|
import boto3
from infra_data import profiles, regions
def lb_report(pth):
    """Append one CSV row (account,region,hosted-zone-name) per classic ELB.

    Iterates every configured AWS profile and region, lists the classic
    load balancers, and appends results to the file at ``pth``.
    """
    for account in profiles.profile_list():
        print ('Current account: ' + account)
        for region in regions.region_list():
            print ('Current region: ' + region)
            session = boto3.Session(profile_name=account)
            client = session.client('elb',
                                    region_name=region
                                    )
            response = client.describe_load_balancers()
            for r in response['LoadBalancerDescriptions']:
                try:
                    with open(pth, "a") as myfile:
                        myfile.write(account + ',' + region + ',' +
                                     r['CanonicalHostedZoneName'] + '\n')
                # BUGFIX: "except Exception, msg" is Python-2-only syntax
                # (a SyntaxError on Python 3); "as" works on both.
                except Exception as msg:
                    print (msg)
|
# Shared x-axis sample points (server counts / concurrency levels).
xxx = [1, 2, 4, 8, 16, 32]
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.ticker import FuncFormatter
# Axis-label strings: English defaults, immediately overridden by the
# Chinese set below (kept for easy switching back).
STR_CONCURRENT_REQS_PER_SERVER = "Concurrent reqs/server"
STR_THROUGHPUT = "Throughput"
STR_NUMBER_OF_SERVERS = "Number of servers"
STR_THROUGHPUT_NEW_ORDER = "Throughput (New-order/s)"
STR_CPU_UTILIZATION = "CPU Utilization"
STR_LATENCY_MS = "Latency(ms)"
STR_ATTEMPT_NEW_ORDER = "Attempts(new-order/s)"
STR_NUMBER_OF_TRIES_PER_COMMIT = "Number of tries per commit"
STR_LATENCY_MS_IN_LOG_SCALE = "Latency(ms) in log scale"
STR_COMMIT_RATE = "commit rate"
# SimHei supports CJK glyphs for the Chinese labels below.
mpl.rcParams['font.sans-serif'] = ['SimHei']
STR_CONCURRENT_REQS_PER_SERVER = u"并发请求数/服务器"
STR_THROUGHPUT = u"吞吐量"
STR_NUMBER_OF_SERVERS = u"服务器数量"
STR_THROUGHPUT_NEW_ORDER = u"吞吐量(New-order/s)"
STR_CPU_UTILIZATION = u"CPU使用率"
STR_LATENCY_MS = u"延迟(ms)"
STR_ATTEMPT_NEW_ORDER = u"尝试次数(New-order/s)"
STR_NUMBER_OF_TRIES_PER_COMMIT = u"每次成功提交需要尝试次数"
STR_LATENCY_MS_IN_LOG_SCALE = u"延迟(ms)"
STR_COMMIT_RATE = u"提交成功率"
# System name in legends; second assignment wins.
ROCOCO = "Rococo"
ROCOCO = "DepTran"
# Interactive display toggle; second assignment wins (figures only saved).
SHOW = True
SHOW = False
X_LOG_SCALE = False
# Horizontal offset between overlaid error bars.
eb_dis = 0.3
# Global figure scaling factor applied to sizes and marker sizes.
fig_scale = 5.0/8
mpl.rcParams['font.size'] = 12.5
mpl.rcParams['legend.fontsize'] = 12.5
mpl.rcParams['lines.linewidth'] = 3
mpl.rcParams['lines.markersize'] = 14 * fig_scale
mpl.rcParams['axes.grid'] = True
# One legend entry / line style / color per plotted system, by index.
txt_legends = ["OCC", ROCOCO, "2PL", "RO6"]
line_styles = ["v-", "cx--", "mo:", "kp-.", "yd-."]
colors = ['#F84E1A', 'black', '#1B77F9', '#535353', '#A2DCFD','black', 'blue', 'green', 'red', 'black',"cyan", 'magenta', 'yellow']
# Stacked-bar palettes: three shades per system (50%/90%/99% percentiles).
bar_colors = [
    '#A2DCFD', '#1B77F9', 'blue', # blue
    '#FFA07A', '#DC143C', '#8B0000', # red
    '#DCDCDC', '#696969', 'black',
    '#98FB98', '#3CB371', '#006400', # green
    '#535353', 'black', 'green',
    'red', 'black', "cyan",
    'magenta', 'yellow']
bar_hatches = [
    "///", "///", "///",
    "", "", "",
    "\\\\\\", "\\\\\\", "\\\\\\",
    "...", "xxx", "ooo",
    "\\|\\", "***", "/|/", "|||",]
def flat_log(x, pos):
    """Tick formatter: render a (possibly log-scale) tick value as a plain
    integer string, truncating any fractional part toward zero."""
    return str(int(x))
def sort_legend(ax, ys):
    """Return the axis legend handles/labels reordered so that the series
    with the largest total value comes first (descending by sum).

    BUGFIX: the original hand-rolled bubble-sort variant (inner index
    starting at ``i``) failed to fully sort some inputs, and it assigned
    into ``range(...)``, which is a TypeError on Python 3. A stable
    ``sorted`` preserves the original tie order (strict comparison).
    """
    order = sorted(range(len(ys)), key=lambda idx: sum(ys[idx]), reverse=True)
    handles, labels = ax.get_legend_handles_labels()
    new_handles = [handles[idx] for idx in order]
    new_labels = [labels[idx] for idx in order]
    return new_handles, new_labels
def micro_tp(xs, ys, figname):
    """Bar chart of micro-benchmark throughput; figure is saved to ``figname``.

    ``xs`` only determines the number of bars; the tick labels are fixed
    benchmark names. ``ys`` holds one throughput value per bar.
    """
    fig, ax = plt.subplots(figsize=(12 * fig_scale, 25.0 / 4 * fig_scale))
    # Hide the top/right box for a cleaner look.
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)
    ax.xaxis.set_ticks_position('bottom')
    ax.yaxis.set_ticks_position('left')
    width = 0.4 / fig_scale
    ind = np.arange(len(xs))
    ax.bar(ind, ys, width, color = 'black')
    plt.xlim(-width/2, len(xs))
    # Leave 20% head-room above the tallest bar
    # (idiom fix: max() instead of a manual scan loop).
    ys_max = max(ys) if ys else 0.0
    plt.ylim(0, ys_max * 1.2)
    # Fixed benchmark names replace the numeric xs for labeling.
    xs=['1 RPC', ' 1 RPC\n+1 DB', ' 3 RPC\n+ 3 DB', 'OCC', '2PL', ROCOCO]
    plt.ylabel(STR_THROUGHPUT)
    ax.set_xticks(ind +width/2)
    plt.setp(ax.set_xticklabels(xs), fontsize=14)
    plt.savefig(figname, bbox_inches="tight")
    if SHOW: plt.show()
def tpcc_sc_tp(val, figname):
    """Line plot: TPC-C new-order throughput vs. number of servers.

    ``val`` holds one y-series per system (indexed like txt_legends);
    x values come from the module-level ``xxx``. Saved to ``figname``.
    """
    fig, ax = plt.subplots(figsize=(8 * fig_scale, 5 * fig_scale))
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)
    ax.xaxis.set_ticks_position('bottom')
    ax.yaxis.set_ticks_position('left')
    #val_occ = [ v1/(v2+0.0) for v1, v2 in zip(val[0], val[1])]
    #val_2pl = [ v1/(v2+0.0) for v1, v2 in zip(val[2], val[1])]
    #plt.plot(xxx, val_occ, line_styles[0], label=txt_legends[0], color=colors[0])
    #plt.plot(xxx, val_2pl, line_styles[2], label=txt_legends[2], color=colors[0])
    for i in range(0, len(val)):
        plt.plot(xxx, val[i], line_styles[i], label=txt_legends[i], color=colors[i])
    # Legend entries ordered by series magnitude (largest first).
    handles, labels = sort_legend(ax, val)
    plt.legend(handles, labels, ncol=1, loc="best")
    plt.xlabel(STR_NUMBER_OF_SERVERS)
    plt.ylabel(STR_THROUGHPUT_NEW_ORDER)
    #plt.xticks(np.arange(len(txt_sizes)), txt_sizes)
    plt.savefig(figname, bbox_inches="tight")
    if SHOW: plt.show()
def tpcc_sc_cpu(val, figname):
    """Line plot: CPU utilization vs. number of servers, one line per system."""
    fig, ax = plt.subplots(figsize=(8 * fig_scale, 5 * fig_scale))
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)
    ax.xaxis.set_ticks_position('bottom')
    ax.yaxis.set_ticks_position('left')
    #val_occ = [ v1/(v2+0.0) for v1, v2 in zip(val[0], val[1])]
    #val_2pl = [ v1/(v2+0.0) for v1, v2 in zip(val[2], val[1])]
    #plt.plot(xxx, val_occ, line_styles[0], label=txt_legends[0], color=colors[0])
    #plt.plot(xxx, val_2pl, line_styles[2], label=txt_legends[2], color=colors[0])
    for i in range(0, len(val)):
        plt.plot(xxx, val[i], line_styles[i], label=txt_legends[i], color=colors[i])
    handles, labels = sort_legend(ax, val)
    plt.legend(handles, labels, ncol=1, loc="best")
    plt.xlabel(STR_NUMBER_OF_SERVERS)
    plt.ylabel(STR_CPU_UTILIZATION)
    #plt.xticks(np.arange(len(txt_sizes)), txt_sizes)
    plt.savefig(figname, bbox_inches="tight")
    if SHOW: plt.show()
def tpcc_ct_tp(val, figname):
    """Line plot: throughput vs. concurrent requests per server."""
    fig, ax = plt.subplots(figsize=(8 * fig_scale, 5 * fig_scale))
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)
    ax.xaxis.set_ticks_position('bottom')
    ax.yaxis.set_ticks_position('left')
    # Optional log-scale x axis, controlled by the module-level flag.
    if X_LOG_SCALE: ax.set_xscale('log')
    #val_occ = [ v1/(v2+0.0) for v1, v2 in zip(val[0], val[1])]
    #val_2pl = [ v1/(v2+0.0) for v1, v2 in zip(val[2], val[1])]
    #plt.plot(xxx, val_occ, line_styles[0], label=txt_legends[0], color=colors[0])
    #plt.plot(xxx, val_2pl, line_styles[2], label=txt_legends[2], color=colors[0])
    for i in range(0, len(val)):
        plt.plot(xxx, val[i], line_styles[i], label=txt_legends[i], color=colors[i])
    handles, labels = sort_legend(ax, val)
    plt.legend(handles, labels, ncol=1, loc="best")
    plt.xlabel(STR_CONCURRENT_REQS_PER_SERVER)
    plt.ylabel(STR_THROUGHPUT_NEW_ORDER)
    #plt.xticks(np.arange(len(txt_sizes)), txt_sizes)
    plt.savefig(figname, bbox_inches="tight")
    if SHOW: plt.show()
def tpcc_ct_cpu(val, figname):
    """Line plot: CPU utilization vs. concurrent requests per server."""
    fig, ax = plt.subplots(figsize=(8 * fig_scale, 5 * fig_scale))
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)
    ax.xaxis.set_ticks_position('bottom')
    ax.yaxis.set_ticks_position('left')
    #val_occ = [ v1/(v2+0.0) for v1, v2 in zip(val[0], val[1])]
    #val_2pl = [ v1/(v2+0.0) for v1, v2 in zip(val[2], val[1])]
    #plt.plot(xxx, val_occ, line_styles[0], label=txt_legends[0], color=colors[0])
    #plt.plot(xxx, val_2pl, line_styles[2], label=txt_legends[2], color=colors[0])
    for i in range(0, len(val)):
        plt.plot(xxx, val[i], line_styles[i], label=txt_legends[i], color=colors[i])
    handles, labels = sort_legend(ax, val)
    plt.legend(handles, labels, ncol=1, loc="best")
    plt.xlabel(STR_CONCURRENT_REQS_PER_SERVER)
    plt.ylabel(STR_CPU_UTILIZATION)
    #plt.xticks(np.arange(len(txt_sizes)), txt_sizes)
    plt.savefig(figname, bbox_inches="tight")
    if SHOW: plt.show()
def tpcc_ct_nt_eb(val_50, val_90, val_99, figname):
    """Error-bar plot of tries-per-commit vs. concurrency (up to 20 reqs).

    The marker is the 90th percentile; the bar spans 50th..99th.
    """
    fig, ax = plt.subplots(figsize=(8 * fig_scale, 5 * fig_scale))
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)
    ax.xaxis.set_ticks_position('none')
    ax.yaxis.set_ticks_position('left')
    # ax.set_yscale('log')
    xs = np.arange(1, 21, 1)
    # width = 2
    for i in range(0, len(val_50)):
        # Keep only the data points whose x value is within the plotted range.
        v50 = [v for x, v in zip(xxx, val_50[i]) if x <= 20]
        v90 = [v for x, v in zip(xxx, val_90[i]) if x <= 20]
        v99 = [v for x, v in zip(xxx, val_99[i]) if x <= 20]
        # Asymmetric error bars: down to the median, up to the 99th percentile.
        yerr1 = [(v1-v2) for v1, v2 in zip(v90, v50)]
        yerr2 = [(v1-v2) for v1, v2 in zip(v99, v90)]
        #plt.plot(v90, label=txt_legends[i])
        # Offset each system horizontally (eb_dis) so the bars do not overlap.
        plt.errorbar(xs + i*eb_dis, v90, yerr=[yerr1, yerr2], label=txt_legends[i], elinewidth=3)
    plt.xlim(1, 21)
    #plt.ylim(0,10)
    #plt.xticks(xs+3, xs)
    #plt.legend(ncol=3, loc="upper center", mode="expand", bbox_to_anchor=(0., 1.1, 1, 0.1))
    handles, labels = sort_legend(ax, val_90)
    plt.legend(handles, labels, ncol=1, loc="best")
    plt.xlabel(STR_CONCURRENT_REQS_PER_SERVER)
    plt.ylabel(STR_NUMBER_OF_TRIES_PER_COMMIT)
    plt.savefig(figname, bbox_inches="tight")
    if SHOW: plt.show()
    pass
def tpcc_ct_lt_eb(val_50, val_90, val_99, figname):
    """Error-bar plot of latency (log y) vs. concurrency, up to 100 reqs.

    The marker is the 90th percentile; the bar spans 50th..99th.
    """
    fig, ax = plt.subplots(figsize=(8 * fig_scale, 5 * fig_scale))
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)
    ax.xaxis.set_ticks_position('none')
    ax.yaxis.set_ticks_position('left')
    ax.set_yscale('log')
    # Show plain integers on the log-scale y axis instead of 10^k notation.
    y_formatter = FuncFormatter(flat_log)
    ax.yaxis.set_major_formatter(y_formatter)
    #ax.set_xscale('log')
    maxx1 = 20
    maxx2 = 100
    # Dense ticks up to 20, then every 10 up to 100.
    xs = np.arange(1, maxx1+1, 1)
    tmp = np.arange(maxx1+10, maxx2+1, 10)
    xs = np.concatenate((xs, tmp))
    # width = 2
    for i in range(0, len(val_50)):
        v50 = [v for x, v in zip(xxx, val_50[i]) if x <= maxx2]
        v90 = [v for x, v in zip(xxx, val_90[i]) if x <= maxx2]
        v99 = [v for x, v in zip(xxx, val_99[i]) if x <= maxx2]
        # Asymmetric error bars: down to the median, up to the 99th percentile.
        yerr1 = [(v1-v2) for v1, v2 in zip(v90, v50)]
        yerr2 = [(v1-v2) for v1, v2 in zip(v99, v90)]
        #print(len(xs))
        #print(len(v90))
        #plt.plot(v90, label=txt_legends[i])
        plt.errorbar(xs + i*eb_dis, v90, yerr=[yerr1, yerr2], label=txt_legends[i], elinewidth=3, color=colors[i])
    plt.xlim(1, maxx2+1)
    #plt.ylim(0,10)
    #plt.xticks(xs+3, xs)
    #plt.legend(ncol=3, loc="upper center", mode="expand", bbox_to_anchor=(0., 1.1, 1, 0.1))
    handles, labels = sort_legend(ax, val_90)
    plt.legend(handles, labels, ncol=1, loc="best")
    plt.xlabel(STR_CONCURRENT_REQS_PER_SERVER)
    plt.ylabel(STR_LATENCY_MS)
    plt.savefig(figname, bbox_inches="tight")
    if SHOW: plt.show()
    pass
def tpcc_ct_lt_bar(val_min, val_50, val_90, val_99, figname):
    """Stacked bar plot of latency percentiles (log y) per concurrency level.

    For each system, the 50/90/99th percentile values are stacked on top of
    each other, with an error whisker from min to median on the bottom bar.
    """
    fig, ax = plt.subplots(figsize=(8 * fig_scale, 5 * fig_scale))
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)
    ax.xaxis.set_ticks_position('none')
    ax.yaxis.set_ticks_position('left')
    ax.set_yscale('log')
    # 10, 20, 30, ... , to 100.
    xs = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100];
    xs = np.arange(10, 101, 10)
    width = 2
    legends=[ ["OCC 50%", "OCC 90%", "OCC 99%"],
              ["DepTran 50%", "DepTran 90%", "DepTran 99%"],
              ["2PL-P 50%", "2PL-P 90%", "2PL-P 99%"]]
    for i in range(0, len(val_50)):
        # Keep only the multiples of 10 from the x sample points.
        v50 = [v for x, v in zip(xxx, val_50[i]) if x % 10 == 0]
        v90 = [v for x, v in zip(xxx, val_90[i]) if x % 10 == 0]
        v99 = [v for x, v in zip(xxx, val_99[i]) if x % 10 == 0]
        vmin = [v for x, v in zip(xxx, val_min[i]) if x % 10 == 0]
        bottom = np.zeros(len(xs))
        # Whisker only downward (min..50th) on the bottom segment.
        yerr1 = [(v1-v2) for v1, v2 in zip(v50, vmin)]
        yerr2 = [0] * len(v50)
        plt.bar(xs+i*width, v50, bottom=bottom, color=bar_colors[i*3+0], hatch=bar_hatches[i*3+0], width=width, log=True, label=legends[i][0], yerr=[yerr1, yerr2])
        bottom+=v50
        plt.bar(xs+i*width, v90, bottom=bottom, color=bar_colors[i*3+1], hatch=bar_hatches[i*3+1], width=width, log=True, label=legends[i][1])
        bottom+=v90
        plt.bar(xs+i*width, v99, bottom=bottom, color=bar_colors[i*3+2], hatch=bar_hatches[i*3+2], width=width, log=True, label=legends[i][2])
        bottom+=v99
    plt.xlim(9,110)
    plt.ylim(0,10)
    plt.xticks(xs+3, xs)
    plt.legend(ncol=3, loc="upper center", mode="expand", bbox_to_anchor=(0., 1.1, 1, 0.1))
    plt.xlabel(STR_CONCURRENT_REQS_PER_SERVER)
    plt.ylabel(STR_LATENCY_MS_IN_LOG_SCALE)
    plt.savefig(figname, bbox_inches="tight")
    if SHOW: plt.show()
    pass
def tpcc_ct_nt(val, figname):
    """Line plot: tries per commit vs. concurrent requests per server."""
    fig, ax = plt.subplots(figsize=(8 * fig_scale, 5 * fig_scale))
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)
    ax.xaxis.set_ticks_position('bottom')
    ax.yaxis.set_ticks_position('left')
    for i in range(0, len(val)):
        plt.plot(xxx, val[i], line_styles[i], label=txt_legends[i], color=colors[i])
    handles, labels = sort_legend(ax, val)
    plt.legend(handles, labels, ncol=1, loc="upper left")
    plt.xlabel(STR_CONCURRENT_REQS_PER_SERVER)
    plt.ylabel(STR_NUMBER_OF_TRIES_PER_COMMIT)
    #plt.xticks(np.arange(len(txt_sizes)), txt_sizes)
    plt.savefig(figname, bbox_inches="tight")
    if SHOW: plt.show()
def tpcc_ct_lt(val, figname):
    """Line plot: latency (ms) vs. concurrent requests per server."""
    fig, ax = plt.subplots(figsize=(8 * fig_scale, 5 * fig_scale))
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)
    ax.xaxis.set_ticks_position('bottom')
    ax.yaxis.set_ticks_position('left')
    for i in range(0, len(val)):
        plt.plot(xxx, val[i], line_styles[i], label=txt_legends[i], color=colors[i])
    handles, labels = sort_legend(ax, val)
    plt.legend(handles, labels, ncol=1, loc="upper left")
    plt.xlabel(STR_CONCURRENT_REQS_PER_SERVER)
    plt.ylabel(STR_LATENCY_MS)
    #plt.xticks(np.arange(len(txt_sizes)), txt_sizes)
    plt.savefig(figname, bbox_inches="tight")
    if SHOW: plt.show()
def tpcc_ct_at(val, figname):
    """Line plot: new-order attempts/s vs. concurrent requests per server."""
    fig, ax = plt.subplots(figsize=(8 * fig_scale, 5 * fig_scale))
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)
    ax.xaxis.set_ticks_position('bottom')
    ax.yaxis.set_ticks_position('left')
    for i in range(0, len(val)):
        plt.plot(xxx, val[i], line_styles[i], label=txt_legends[i], color=colors[i])
    handles, labels = sort_legend(ax, val)
    plt.legend(handles, labels, ncol=1, loc="upper left")
    plt.xlabel(STR_CONCURRENT_REQS_PER_SERVER)
    plt.ylabel(STR_ATTEMPT_NEW_ORDER)
    #plt.xticks(np.arange(len(txt_sizes)), txt_sizes)
    plt.savefig(figname, bbox_inches="tight")
    if SHOW: plt.show()
def tpcc_ct_cr(val, figname):
    """Line plot: commit rate (0..1) vs. concurrent requests per server."""
    fig, ax = plt.subplots(figsize=(8 * fig_scale, 5 * fig_scale))
    ax.spines['right'].set_visible(False)
    ax.spines['top'].set_visible(False)
    ax.xaxis.set_ticks_position('bottom')
    ax.yaxis.set_ticks_position('left')
    if X_LOG_SCALE: ax.set_xscale('log')
    for i in range(0, len(val)):
        plt.plot(xxx, val[i], line_styles[i], label=txt_legends[i], color=colors[i])
    handles, labels = sort_legend(ax, val)
    plt.legend(handles, labels, ncol=1, loc="best")
    plt.xlabel(STR_CONCURRENT_REQS_PER_SERVER)
    plt.ylabel(STR_COMMIT_RATE)
    # Commit rate is a fraction; leave head-room above 1.0 for the legend.
    plt.ylim(0,1.2)
    #plt.xticks(np.arange(len(txt_sizes)), txt_sizes)
    plt.savefig(figname, bbox_inches="tight")
    if SHOW: plt.show()
|
from __future__ import unicode_literals
from frappe import _
def get_data():
    """Return the Buying module's desk layout (Frappe/ERPNext).

    Each entry is a section card with a label, optional icon, and a list of
    items (doctypes, query reports, or help videos) to show on the page.
    """
    return [
        {
            "label": _("Purchasing"),
            "icon": "fa fa-star",
            "items": [
                {
                    "type": "doctype",
                    "name": "Purchase Order",
                    "onboard": 1,
                    "dependencies": ["Item", "Supplier"],
                    "description": _("Purchase Orders given to Suppliers."),
                },
                {
                    "type": "doctype",
                    "name": "Material Request",
                    "onboard": 1,
                    "dependencies": ["Item"],
                    "description": _("Request for purchase."),
                },
                {
                    "type": "doctype",
                    "name": "Request for Quotation",
                    "onboard": 1,
                    "dependencies": ["Item", "Supplier"],
                    "description": _("Request for quotation."),
                },
                {
                    "type": "doctype",
                    "name": "Supplier Quotation",
                    "dependencies": ["Item", "Supplier"],
                    "description": _("Quotations received from Suppliers."),
                },
            ]
        },
        {
            "label": _("Items and Pricing"),
            "items": [
                {
                    "type": "doctype",
                    "name": "Item",
                    "onboard": 1,
                    "description": _("All Products or Services."),
                },
                {
                    "type": "doctype",
                    "name": "Item Price",
                    "description": _("Multiple Item prices."),
                    "onboard": 1,
                    "route": "#Report/Item Price"
                },
                {
                    "type": "doctype",
                    "name": "Price List",
                    "description": _("Price List master.")
                },
                {
                    "type": "doctype",
                    "name": "Product Bundle",
                    "description": _("Bundle items at time of sale."),
                },
                {
                    "type": "doctype",
                    "name": "Item Group",
                    "icon": "fa fa-sitemap",
                    "label": _("Item Group"),
                    "link": "Tree/Item Group",
                    "description": _("Tree of Item Groups."),
                },
                {
                    "type": "doctype",
                    "name": "Pricing Rule",
                    "description": _("Rules for applying pricing and discount.")
                },
            ]
        },
        {
            "label": _("Settings"),
            "icon": "fa fa-cog",
            "items": [
                {
                    "type": "doctype",
                    "name": "Buying Settings",
                    "settings": 1,
                    "description": _("Default settings for buying transactions.")
                },
                {
                    "type": "doctype",
                    "name": "Purchase Taxes and Charges Template",
                    "description": _("Tax template for buying transactions.")
                },
                {
                    "type": "doctype",
                    "name":"Terms and Conditions",
                    "label": _("Terms and Conditions Template"),
                    "description": _("Template of terms or contract.")
                },
            ]
        },
        {
            "label": _("Supplier"),
            "items": [
                {
                    "type": "doctype",
                    "name": "Supplier",
                    "onboard": 1,
                    "description": _("Supplier database."),
                },
                {
                    "type": "doctype",
                    "name": "Supplier Group",
                    "description": _("Supplier Group master.")
                },
                {
                    "type": "doctype",
                    "name": "Contact",
                    "description": _("All Contacts."),
                },
                {
                    "type": "doctype",
                    "name": "Address",
                    "description": _("All Addresses."),
                },
            ]
        },
        {
            "label": _("Key Reports"),
            "icon": "fa fa-table",
            "items": [
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "Purchase Analytics",
                    "reference_doctype": "Purchase Order",
                    "onboard": 1
                },
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "Supplier-Wise Sales Analytics",
                    "reference_doctype": "Stock Ledger Entry",
                    "onboard": 1
                },
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "Purchase Order Trends",
                    "reference_doctype": "Purchase Order",
                    "onboard": 1,
                },
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "Requested Items To Be Ordered",
                    "reference_doctype": "Material Request",
                    "onboard": 1,
                },
            ]
        },
        {
            "label": _("Supplier Scorecard"),
            "items": [
                {
                    "type": "doctype",
                    "name": "Supplier Scorecard",
                    "description": _("All Supplier scorecards."),
                },
                {
                    "type": "doctype",
                    "name": "Supplier Scorecard Variable",
                    "description": _("Templates of supplier scorecard variables.")
                },
                {
                    "type": "doctype",
                    "name": "Supplier Scorecard Criteria",
                    "description": _("Templates of supplier scorecard criteria."),
                },
                {
                    "type": "doctype",
                    "name": "Supplier Scorecard Standing",
                    "description": _("Templates of supplier standings."),
                },
            ]
        },
        {
            "label": _("Other Reports"),
            "icon": "fa fa-list",
            "items": [
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "Items To Be Requested",
                    "reference_doctype": "Item",
                    "onboard": 1,
                },
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "Item-wise Purchase History",
                    "reference_doctype": "Item",
                    "onboard": 1,
                },
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "Material Requests for which Supplier Quotations are not created",
                    "reference_doctype": "Material Request"
                },
                {
                    "type": "report",
                    "is_query_report": True,
                    "name": "Address And Contacts",
                    "label": "Supplier Addresses And Contacts",
                    "reference_doctype": "Address",
                    "route_options": {
                        "party_type": "Supplier"
                    }
                }
            ]
        },
        {
            "label": _("Help"),
            "items": [
                {
                    "type": "help",
                    "label": _("Customer and Supplier"),
                    "youtube_id": "anoGi_RpQ20"
                },
                {
                    "type": "help",
                    "label": _("Material Request to Purchase Order"),
                    "youtube_id": "4TN9kPyfIqM"
                },
                {
                    "type": "help",
                    "label": _("Purchase Order to Payment"),
                    "youtube_id": "EK65tLdVUDk"
                },
                {
                    "type": "help",
                    "label": _("Managing Subcontracting"),
                    "youtube_id": "ThiMCC2DtKo"
                },
            ]
        },
    ]
|
"""
AM_ifSensorsOK_release
Assertion monitor
For use in Human-Robot Interaction simulator.
Created by David Western, July 2015.
Implements assertion: if humanOk and sensorsOk and contact_robot_hand_and_object ==1 then assert contact_robot_hand_and_object == 0
OR: if human and sensors are OK and robot is grabbing piece, then object is released
"""
from assertion_monitor_baseclass import AssertionMonitor
import rospy
from bert2_simulator.msg import *
import dist_between_links
from std_msgs.msg import Float64
from std_msgs.msg import Int8
class AM_ifSensorsOK_release(AssertionMonitor):
def __init__(self,trace_label='0'):
# Required (but adjust the list entries to reflect your own AM):
self.precon_list = ['precon']
self.postcon_list = ['Wait','Decide']
# Make sure /use_sim_time is true, for use in Wait():
use_sim_time = rospy.get_param('/use_sim_time')
if not use_sim_time:
rospy.set_param('/use_sim_time',True)
print "use_sim_time changed to True"
self.T = 3 # Wait period
# This particular AM depends on values published on ROS topics.
# Hence, let's set up some subscribers.
rospy.Subscriber("human_signals",Human, self.h_signals_callback)
rospy.Subscriber("gpl_is_ok", Int8, self.sensorsOK_callback)
rospy.Subscriber("gpl_is_ok", Int8, self.object_robot_hand_callback) # Fudge: assumes perfect sensing.
# Related initialisations:
self.h_ready = 0
self.sensorsOK = 0
self.decision = 0
# Required at the end of __init__:
# super(YOUR_AM_CLASSNAME_HERE,self).__init__(trace_label) # Execute constructor of base class.
super(AM_ifSensorsOK_release,self).__init__(trace_label) # Execute constructor of base class.
"""
Define member functions representing each stage of the PREcondition below here.
Each stage of the precondition must return 1 (satisfied), 0 (not yet satisfied),
or -1 (invalidated)...
1 - SATISFIED:
The assertion monitor will progress to the next stage ON THE NEXT simulation
time-step. A special case is that, when the last stage of the precondition
evaluates as 1, the first stage of the POSTcondition will be evaluated
in the SAME time-step.
0 - NOT YET SATISFIED:
The assertion monitor will evaluate the same stage again on the next
simulation time-step.
-1 - INVALIDATED:
The stage has not been satisfied and cannot be satisfied in future evaluations.
Hence the assertion monitor will reset (evaluate the first stage of the
precondition on the next simulation time-step).
TO DO: Allow forking of the AM instance so that multiple independent precon (and
then postcon) checks can run concurrently; add a self.fork() member
function in assertion_monitor.py, to be invoked in the stage member
functions below?
"""
def precon(self):
dist = dist_between_links.check('object','bert2::left_wrist_flex_link')
self.object_in_robot_hand = dist<=0.1
if self.h_ready==1 and self.sensorsOK and self.object_in_robot_hand:
self.t_WaitStart = rospy.get_time()
self.object_hand = 0 # In case it's been set prematurely.
self.decision = 0 # In case it's been set prematurely.
return 1
else:
return 0
"""
Define member functions representing each stage of the POSTcondition below here.
Each stage of the postcondition must return 1 (satisfied), 0 (not yet satisfied),
or -1 (violated)...
1 - SATISFIED:
The assertion monitor will progress to the next stage ON THE NEXT simulation
time-step. A special case is that, when the last stage of the postcondition
evaluates as 1, the assertion is deemed satisfied.
0 - NOT YET SATISFIED:
The assertion monitor will evaluate the same stage again on the next
simulation time-step.
-1 - VIOLATED:
The stage has not been satisfied and cannot be satisfied in future evaluations.
Hence the assertion deemed violated. A flag will be raised to indicate this.
Depending on mode settings (not yet implemented), either the simulation will
end or the monitor will reset.
"""
def Wait(self):
now = rospy.get_time()
if now-self.t_WaitStart>=self.T:
return 1
else:
return 0
def Decide(self):
dist = dist_between_links.check('object','bert2::left_wrist_flex_link')
self.object_in_robot_hand = dist<=0.1
if not object_in_robot_hand:
rospy.loginfo('Valid assertion')
return 1
else:
rospy.loginfo('Violation of property')
return -1
"""
Define callbacks for ROS subscribers.
"""
def h_signals_callback(self,data):
#Assuming perfect sensing and sensing delays
if data.humanIsReady==1:
self.h_ready = 1
def sensorsOK_callback(self,data):
#Assuming perfect sensing and sensing delays
if data.data==1:
self.sensorsOK = 1
def object_robot_hand_callback(self,data):
#Assuming perfect sensing and sensing delays
if data.data==1:
self.object_hand = 0
else:
self.object_hand = 1
|
import RPi.GPIO as GPIO
import time
import sys
# Use Broadcom (BCM) pin numbering for all GPIO calls below.
GPIO.setmode(GPIO.BCM)
# Driver pins for a bipolar stepper: enable plus two coil pairs (A and B).
enable_pin = 18
coil_A_1_pin = 4
coil_A_2_pin = 17
coil_B_1_pin = 23
coil_B_2_pin = 24
GPIO.setup(enable_pin, GPIO.OUT)
GPIO.setup(coil_A_1_pin, GPIO.OUT)
GPIO.setup(coil_A_2_pin, GPIO.OUT)
GPIO.setup(coil_B_1_pin, GPIO.OUT)
GPIO.setup(coil_B_2_pin, GPIO.OUT)
# Energize the driver before stepping.
GPIO.output(enable_pin, 1)
def forward(delay, steps):
    """Rotate the stepper forward by ``steps`` full 4-phase cycles,
    pausing ``delay`` seconds between phases."""
    phase_sequence = (
        (1, 0, 1, 0),
        (0, 1, 1, 0),
        (0, 1, 0, 1),
        (1, 0, 0, 1),
    )
    for _ in range(steps):
        for w1, w2, w3, w4 in phase_sequence:
            setStep(w1, w2, w3, w4)
            time.sleep(delay)
def backwards(delay, steps):
    """Rotate the stepper backward by ``steps`` full 4-phase cycles,
    pausing ``delay`` seconds between phases (reverse of forward())."""
    phase_sequence = (
        (1, 0, 0, 1),
        (0, 1, 0, 1),
        (0, 1, 1, 0),
        (1, 0, 1, 0),
    )
    for _ in range(steps):
        for w1, w2, w3, w4 in phase_sequence:
            setStep(w1, w2, w3, w4)
            time.sleep(delay)
def setStep(w1, w2, w3, w4):
    """Drive the four coil pins to the given logic levels (one phase)."""
    pins = (coil_A_1_pin, coil_A_2_pin, coil_B_1_pin, coil_B_2_pin)
    for pin, level in zip(pins, (w1, w2, w3, w4)):
        GPIO.output(pin, level)
# Rotation angle in degrees, taken from the command line.
angle = int(sys.argv[1])
# NOTE(review): 136 steps appears to correspond to a full revolution here.
# Under Python 2 "/" truncates to an int; under Python 3 this yields a float,
# which range() in forward() would reject — confirm the intended interpreter.
steps = angle * 136/360
forward(0.005, steps)
GPIO.cleanup()
|
"""Interface to Analog to Digital Converters."""
import time
import spidev
class AnalogToDigitalConverter():
    """Class to represent MCP3004 analog to digital Converter."""

    # Voltage dividers 1kOhm/1kOhm (channel 0-2) - 22kOhm/10kOhm (channel 3)
    _facCh012 = 2
    _facCh3 = 3.195
    # Bytes for building read commands
    start_byte = 0x01
    channel_modifier = 0x08
    end_byte = 0x00

    def __init__(self):
        self._spi = spidev.SpiDev()
        self._spi.open(0, 0)

    def _build_read_command(self, channel):
        """Produce the 3-byte SPI read command for the given channel.

        The command byte is book-ended by start and end signifiers.
        """
        command_byte = (self.channel_modifier + channel) << 4
        return [self.start_byte, command_byte, self.end_byte]

    def _process_adc_value(self, channel, value):
        """Convert the raw 10-bit SPI reply into a channel-scaled voltage."""
        coefficient = self._facCh012 if channel < 3 else self._facCh3
        raw = ((value[1] & 3) << 8) + value[2]
        return raw * 0.00322 * coefficient

    def read_adc(self, channel):
        """Read one ADC channel (0-3) and return its voltage."""
        if not 0 <= channel <= 3:
            raise ValueError("ADC number must be a value of 0-3!")
        reply = self._spi.xfer2(self._build_read_command(channel))
        return self._process_adc_value(channel, reply)

    def __enter__(self):
        return self

    def __exit__(self, *args):
        # Release the SPI bus when leaving the context.
        self._spi.close()
if __name__ == '__main__':
    # Demo loop: print all four channel voltages every 2 seconds, forever
    # (terminate with Ctrl-C). The with-block closes the SPI device on exit.
    with AnalogToDigitalConverter() as mcp3004:
        while True:
            for ch in range(4):
                print("ADC Ch{0}[V]: {1}".format(ch, mcp3004.read_adc(ch)))
            print('-' * 80 + '\n')
            time.sleep(2)
|
import os
import sys
# Build without the Sugar learning environment when '--no-sugar' is passed
# as the first argument; otherwise build as a Sugar activity bundle.
if len(sys.argv) > 1 and '--no-sugar' == sys.argv[1]:
    # Remove the argument from the stack so we don't cause problems
    # for distutils
    sys.argv.pop(1)
    import glob, os.path, string
    from distutils.core import setup
    # Data files installed alongside the package: icons, images and the
    # freedesktop .desktop launcher.
    DATA_FILES = [
        ('icons', glob.glob('icons/*')),
        ('images', glob.glob('images/*')),
        ('/usr/share/applications', ['turtleart.desktop'])
    ]
    setup (name = 'Turtle Art',
           description = "A LOGO-like tool for teaching programming",
           author = "Walter Bender",
           author_email = "walter.bender@gmail.com",
           version = '0.9.4',
           packages = ['TurtleArt'],
           scripts = ['turtleart'],
           data_files = DATA_FILES,
           )
else:
    # Sugar build path: delegate bundle creation to sugar3's bundlebuilder.
    from sugar3.activity import bundlebuilder
    if __name__ == "__main__":
        bundlebuilder.start()
|
import numpy as np
import pandas as pd
import statsmodels.api as sm
from tqdm import tqdm
from ...common import LOCALIZER
from ...common.math_helpers import exponential_decay_weight
from ...data import wind
from ..entities import get_estimation_universe
from .base import Descriptor, Factor
@Descriptor.register("Beta")
class BetaDescriptor(Descriptor):
    r"""
    Beta
    Computed as the slope coefficient in a time-series regression of excess stock return,
    :math:`r_t - r_{ft}`, against the cap-weighted excess return of the estimation universe :math:`R_t`,
    .. math:: r_t-r_{ft} = \alpha + \beta R_t + e_t
    The regression coefficients are estimated over the trailing 252 trading days of returns
    with a half-life of 63 trading days.
    """
    @LOCALIZER.wrap(filename="descriptors", const_key="beta")
    def get_raw_value(self):
        """Return a DataFrame of per-stock betas, indexed by date.

        Uses a rolling 252-day window with exponentially decaying weights
        (half-life 63 days); stocks with fewer than 126 observations in a
        window are dropped for that window.
        """
        R = get_estimation_universe().get_returns().rename("R")
        T = 252        # trailing window length (trading days)
        halflife = 63  # exponential-decay half-life (trading days)
        weights = exponential_decay_weight(halflife, T, reverse=True)
        stock_rtns = wind.get_wind_data("AShareEODPrices", "s_dq_pctchange") / 100
        # FIX: pass `axis` as a keyword -- the positional axis argument to
        # pd.concat was deprecated and removed in pandas 2.0.
        df = pd.concat([R, stock_rtns.loc[R.index]], axis=1).truncate("2000-01-01")
        # Treat exact-zero returns as missing observations.
        df[df == 0] = np.nan
        result = []
        for i in tqdm(range(T, len(df))):
            # Keep stocks with at least T//2 observations; drop days where
            # the universe return R itself is missing.
            sub_df = df.iloc[i - T:i].dropna(axis=1, thresh=T // 2).dropna(subset=["R"])
            X, Y = sub_df.iloc[:, 0], sub_df.iloc[:, 1:]
            X, Y = (X - X.mean()).values, (Y - Y.mean()).values
            # Weighted covariance of each stock with R, normalized by the
            # weight mass actually available (NaN-aware), over weighted
            # variance of R.
            XY = np.nansum(Y.T * X * weights, 1) / (~np.isnan(Y.T) @ weights)
            XX = (X ** 2 * weights).sum()
            result.append(pd.Series(XY / XX, index=sub_df.columns[1:], name=df.index[i]))
        result = pd.concat(result, axis=1).T
        return result
Beta = Factor("Beta", [BetaDescriptor()], [1.0])
|
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_OAEP
from Crypto.Random import get_random_bytes
# RSA key generation / encrypt / decrypt round-trip demo (PyCryptodome).
phrase = 'totallyRandomPassphrase'
# Read the first line of the plaintext. FIX: use a with-block -- the original
# leaked the file handle (open(...).readline()).
with open('message.txt', 'rb') as mf:
    message = mf.readline()
key = RSA.generate(4096)
# Export the private key encrypted under the passphrase (PKCS#8, scrypt+AES-256-CBC).
encrypted_key = key.exportKey(passphrase=phrase, pkcs=8, protection='scryptAndAES256-CBC')
with open('privateRSAkey.key', 'wb') as f:
    f.write(encrypted_key)
with open('publicRSAkey.key', 'wb') as f:
    f.write(key.publickey().exportKey())
# Re-import the public key from disk. FIX: close the handle via with-block.
with open('publicRSAkey.key') as kf:
    public_key = RSA.importKey(kf.read())
encrypt_cipher = PKCS1_OAEP.new(public_key)
# NOTE(review): the nonce is only prepended to the output file and skipped on
# decryption -- RSA-OAEP needs no external nonce. Kept byte-for-byte so the
# on-disk file format is unchanged.
nonce = get_random_bytes(16)
ciphertext = encrypt_cipher.encrypt(message)
print(ciphertext, type(ciphertext))
# FIX: the original also had a no-op `ef.close` (missing parentheses) inside
# the with-block; the with-block alone closes the file.
with open('encrypted_message.rsa', 'wb') as ef:
    ef.write(nonce + ciphertext)
with open('encrypted_message.rsa', 'rb') as f:
    x = f.read()
with open('privateRSAkey.key') as kf:
    private_key = RSA.importKey(kf.read(), phrase)
decrypt_cipher = PKCS1_OAEP.new(private_key)
# Skip the 16-byte nonce prefix before decrypting.
plaintext = decrypt_cipher.decrypt(x[16:])
print(plaintext, type(plaintext))
|
import os
import re
import sys
import random
import argparse
def CheckFile(FileName, FileType, Regular=''):
    """Decide whether a file should be processed.

    :param FileName: file name including extension
    :param FileType: iterable of acceptable extensions (lower-case, no dot)
    :param Regular: optional regex; files whose base name already matches
                    it are excluded
    :return: True when the extension is acceptable and the base name does
             not match Regular; False otherwise
    """
    base, ext = os.path.splitext(FileName)
    if ext[1:].lower() not in FileType:
        return False
    if Regular and re.match(Regular, base) is not None:
        return False
    return True
def GenSeqStr(Num, Length):
    """Return Num as a string zero-padded on the left to at least Length chars.

    Numbers already wider than Length are returned unpadded. Intended for
    non-negative sequence numbers.

    :param Num: non-negative integer (or digit string) to pad
    :param Length: minimum width of the result
    :return: zero-padded string
    """
    # str.zfill replaces the original manual prepend-'0' loop.
    return str(Num).zfill(Length)
def returnStaticNode(duration, FilePath):
    """Return a <static> XML node for a wallpaper-slideshow entry."""
    template = '\t<static>\n\t\t<duration>{0}</duration>\n\t\t<file>{1}</file>\n\t</static>\n'
    return template.format(duration, FilePath)
def returnTransitionNode(duration, FromFile, ToFile):
    """Return a <transition> XML node linking two wallpaper-slideshow entries."""
    template = ('\t<transition>\n\t\t<duration>{0}</duration>\n'
                '\t\t<from>{1}</from>\n\t\t<to>{2}</to>\n\t</transition>\n')
    return template.format(duration, FromFile, ToFile)
def QuickRename(FilePath,FileTagName,FileType,SeqLength,isRandom,isXml):
    """Rename matching files in FilePath to '<FileTagName>-<NNN>.<ext>'.

    :param FilePath: directory to operate in
    :param FileTagName: filename prefix for renamed files
    :param FileType: list of acceptable extensions (lower-case, no dot)
    :param SeqLength: zero-padded width of the sequence number
    :param isRandom: shuffle file order via a temporary random rename pass
    :param isXml: also emit a GNOME/Ubuntu wallpaper-slideshow XML file
    """
    FileList = os.listdir(FilePath)
    WorkList = []
    CheckList = []
    if isRandom:
        # First pass: move every matching file to a unique throwaway name so
        # the final numbering order is randomized.
        RandomNumList = random.sample(range(len(FileList)),len(FileList))
        for EachFile,RandomNum in zip(FileList,RandomNumList):
            if CheckFile(EachFile,FileType):
                RandomFileName = 'This-is-a-meanningless-string-' + str(RandomNum) + os.path.splitext(EachFile)[1]
                WorkList.append(RandomFileName)
                os.rename(os.path.join(FilePath,EachFile),os.path.join(FilePath,RandomFileName))
    else:
        # Files already named '<tag>-<NNN>' are kept out of WorkList (and
        # recorded in CheckList) so their numbers are not reused.
        for EachFile in FileList:
            if CheckFile(EachFile,FileType,'^' + FileTagName + '-[0-9]{' + str(SeqLength) + '}$'):
                WorkList.append(EachFile)
            else:
                CheckList.append(os.path.splitext(EachFile)[0])
    SeqNum = 0
    for EachFile in WorkList:
        # Find the next sequence number not already taken by an existing file.
        while True:
            NewName = FileTagName + '-' + GenSeqStr(SeqNum,SeqLength)
            SeqNum += 1
            if NewName not in CheckList:
                break
        os.rename(os.path.join(FilePath,EachFile),os.path.join(FilePath,NewName + os.path.splitext(EachFile)[1].lower()))
    if isXml:
        FileList = os.listdir(FilePath)
        WorkList = []
        KeepTime = 60    # seconds each wallpaper is displayed
        ChangeTime = 0   # seconds for each cross-fade transition
        XmlFileContent = '''<background>
\t<starttime>
\t\t<year>1995</year>
\t\t<month>11</month>
\t\t<day>07</day>
\t\t<hour>00</hour>
\t\t<minute>00</minute>
\t\t<second>00</second>
\t</starttime>\n'''
        for EachFile in FileList:
            if CheckFile(EachFile,FileType):
                WorkList.append(EachFile)
        if (len(WorkList) > 1):
            # Chain every wallpaper to the next, wrapping the last back to
            # the first so the slideshow loops.
            for i in range(len(WorkList) - 1):
                XmlFileContent += returnStaticNode(KeepTime,os.path.join(FilePath,WorkList[i])) + returnTransitionNode(ChangeTime,os.path.join(FilePath,WorkList[i]),os.path.join(FilePath,WorkList[i + 1]))
            XmlFileContent += returnStaticNode(KeepTime,os.path.join(FilePath,WorkList[len(WorkList) - 1])) + returnTransitionNode(ChangeTime,os.path.join(FilePath,WorkList[len(WorkList) - 1]),os.path.join(FilePath,WorkList[0])) + '</background>'
            # FIX: the original ended with `XmlF.close` (missing parentheses),
            # a no-op attribute access that never closed the file explicitly;
            # a with-block guarantees flush and close.
            with open(os.path.join(FilePath,FileTagName + '.xml'),'w') as XmlF:
                XmlF.write(XmlFileContent)
def main():
    """Parse command-line arguments, validate them, and run QuickRename.

    :raises ValueError: when the target directory does not exist or the
        sequence length is non-positive / too narrow for the file count
    """
    parser = argparse.ArgumentParser(description = 'A tool to quickly rename.')
    parser.add_argument('-p','--path',help = 'File Directory.',default = os.getcwd())
    parser.add_argument('-t','--tag',help = 'Filename Identifier.',default = 'Wallpapers')
    parser.add_argument('-f','--filetype',help = 'Filename Extension.',nargs = '*',default = ['jpg','png'])
    # FIX: default was the string '3' (argparse happens to coerce string
    # defaults through type=int, but a real int is the idiomatic form).
    parser.add_argument('-l','--length',help = 'The length of Sequence.',type = int,default = 3)
    parser.add_argument('-r','--random',help = 'Whether random file.',action = 'store_true')
    parser.add_argument('-x','--xml',help = 'Create Ubuntu Wallpaper XML file.',action = 'store_true')
    args = parser.parse_args()
    # Check Directory
    if not os.path.exists(args.path):
        raise ValueError('Directory does not exist.')
    # Check Sequence: must be positive and wide enough to number every file
    # in the directory.
    if args.length <= 0:
        raise ValueError('The length of sequence must be greater than zero.')
    elif args.length < len(str(len(os.listdir(args.path)))):
        raise ValueError('The length of sequence must be at least the number of digits in the file count.')
    QuickRename(args.path,args.tag,args.filetype,args.length,args.random,args.xml)
# Script entry point.
if __name__ == '__main__':
    main()
|
"""
This file is part of BOMtools.
BOMtools is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
BOMTools is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with BOMTools. If not, see <http://www.gnu.org/licenses/>.
"""
__author__ = 'srodgers'
import subprocess
import configparser
from tkinter import *
from tkinter.ttk import *
from tkinter.filedialog import askopenfilename
import pyperclip
from bommdb import *
# Placeholder MPN shown when a part has no real manufacturer part number.
defaultMpn = 'N/A'
# Default parts-database path.
defaultDb= '/etc/bommgr/parts.db'
# Candidate config-file locations; precedence decided where they are read.
defaultConfigLocations = ['/etc/bommgr/bommgr.conf','~/.bommgr/bommgr.conf','bommgr.conf']
# First part number issued when the database is empty.
firstPn = '800000-101'
# Default manufacturer ID string.
defaultMID = 'M0000000'
# Module-level placeholder for the list display frame.
listFrame = None
class FullScreenApp(object):
    """Resize the master window to (almost) full screen; Escape toggles back
    to the previously stored geometry."""
    def __init__(self, master, **kwargs):
        self.master = master
        pad = 3  # leave a small margin so window borders stay visible
        self._geom = '200x200+0+0'
        width = master.winfo_screenwidth() - pad
        height = master.winfo_screenheight() - pad
        master.geometry("{0}x{1}+0+0".format(width, height))
        master.bind('<Escape>', self.toggle_geom)
    def toggle_geom(self, event):
        """Swap the current geometry with the stored one (Escape handler)."""
        current = self.master.winfo_geometry()
        print(current, self._geom)
        self.master.geometry(self._geom)
        self._geom = current
class Dialog(Toplevel):
    """Generic modal dialog base class.

    Subclasses override body(), buttonbox(), validate() and apply().
    Construction blocks (via wait_window) until the dialog is dismissed.
    """
    def __init__(self, parent, title = None, xoffset = 50, yoffset = 50):
        """Build the dialog, make it modal, and block until it is closed.

        :param parent: window the dialog is transient for
        :param title: optional window title
        :param xoffset: x offset from the parent's root position
        :param yoffset: y offset from the parent's root position
        """
        Toplevel.__init__(self, parent)
        self.transient(parent)
        if title:
            self.title(title)
        self.parent = parent
        self.result = None
        self.bodyframe = Frame(self)
        # body() returns the widget that should receive initial focus
        # (may be None for the default override).
        self.initial_focus = self.body(self.bodyframe)
        self.bodyframe.pack(padx=5, pady=5)
        self.buttonbox()
        # Make the dialog modal: all events go to this window.
        self.grab_set()
        if not self.initial_focus:
            self.initial_focus = self
        # Treat window-manager close as Cancel.
        self.protocol("WM_DELETE_WINDOW", self.cancel)
        self.geometry("+%d+%d" % (parent.winfo_rootx()+xoffset,
                                  parent.winfo_rooty()+yoffset))
        self.initial_focus.focus_set()
        # Block here until the dialog window is destroyed.
        self.wait_window(self)
    #
    # construction hooks
    #
    def body(self, master):
        """Create the dialog body; return the widget that should have initial
        focus. Meant to be overridden by subclasses."""
        # create dialog body. return widget that should have
        # initial focus. this method should be overridden
        pass
    def buttonbox(self):
        """Add the standard OK/Cancel button box; override to customize."""
        # add standard button box. override if you don't want the
        # standard buttons
        box = Frame(self)
        w = Button(box, text="OK", width=10, command=self.ok, default=ACTIVE)
        w.pack(side=LEFT, padx=5, pady=5)
        w = Button(box, text="Cancel", width=10, command=self.cancel)
        w.pack(side=LEFT, padx=5, pady=5)
        self.bind("<Return>", self.ok)
        self.bind("<Escape>", self.cancel)
        box.pack()
    #
    # standard button semantics
    #
    def ok(self, event=None):
        """OK pressed: validate, then apply changes and close the dialog."""
        if not self.validate():
            self.initial_focus.focus_set() # put focus back
            return
        self.withdraw()
        self.update_idletasks()
        self.apply()
        self.cancel()
    def cancel(self, event=None):
        """Close the dialog and return focus to the parent window."""
        # put focus back to the parent window
        self.parent.focus_set()
        self.destroy()
    #
    # command hooks
    #
    def validate(self):
        """Return truthy when the dialog contents are acceptable; override."""
        return 1 # override
    def apply(self):
        """Commit the dialog's changes; override."""
        pass # override
class ErrorPopUp(Dialog):
    """Modal popup that displays an error message with a single OK button."""
    def __init__(self, parent, title = "Error", xoffset=50, yoffset=50, message=None):
        """Show the error dialog.

        :param message: text to display; required
        :raises SystemError: when title or message is missing
        """
        # FIX: the original tested `Message is None` -- the tkinter Message
        # *class*, which is never None -- so a missing message was never
        # caught. Test the `message` parameter instead.
        if title is None or message is None:
            raise SystemError
        self.message=message
        Dialog.__init__(self, parent, title, xoffset, yoffset)
    def buttonbox(self):
        """Override: a single OK button; Return and Escape both dismiss."""
        box = Frame(self)
        w = Button(box, text="OK", width=10, command=self.ok, default=ACTIVE)
        w.pack(side=LEFT, padx=5, pady=5)
        self.bind("<Return>", self.cancel)
        self.bind("<Escape>", self.cancel)
        box.pack()
    def body(self, master):
        """Render the error message."""
        # Print the error message
        Label(master, text=self.message).pack(anchor=W)
class ViewPartsDialog(Dialog):
    """Prompt for a SQL-LIKE pattern used to filter the parts list."""
    # Remembered patterns offered in the combo box; grows as users search.
    search_items = ['RES,0603%','RES,0805%','CAP,0603%','CAP,0805%','XSTR%','IC%']
    def __init__(self, parent, title = "View Parts Like", xoffset=50, yoffset=50):
        if title is None:
            raise SystemError
        Dialog.__init__(self, parent, title, xoffset, yoffset)
    def body(self, master):
        """Build a combo box of remembered search patterns plus a usage hint."""
        pattern_row = Frame(master)
        Label(pattern_row, text='Search Pattern').grid(row=0, column=0, sticky=W)
        self.search_entry = Combobox(pattern_row, width=50, values=ViewPartsDialog.search_items)
        self.search_entry.grid(row=0, column=1, sticky=W)
        pattern_row.pack()
        hint_row = Frame(master)
        Label(hint_row, text='Use % as a wildcard character').pack()
        hint_row.pack()
    def validate(self):
        """Any pattern (including empty) is acceptable."""
        return True
    def apply(self):
        """Record the chosen pattern, remembering new ones for next time."""
        self.selected = self.search_entry.get()
        if self.selected not in ViewPartsDialog.search_items:
            ViewPartsDialog.search_items.append(self.selected)
    def get_selected(self):
        """Return the pattern the user chose."""
        return self.selected
class ViewMPNsDialog(Dialog):
    """Prompt for a SQL-LIKE pattern used to filter manufacturer part numbers."""
    # Remembered patterns offered in the combo box; grows as users search.
    mpn_search_items = []
    def __init__(self, parent, title = "View MPN's Like", xoffset=50, yoffset=50):
        if title is None:
            raise SystemError
        Dialog.__init__(self, parent, title, xoffset, yoffset)
    def body(self, master):
        """
        Present combo box of search items
        """
        patframe=Frame(master)
        Label(patframe, text='Search Pattern').grid(row=0, column=0, sticky=W)
        self.search_entry = Combobox(patframe, width=50, values=ViewMPNsDialog.mpn_search_items)
        self.search_entry.grid(row=0, column=1, sticky=W)
        patframe.pack()
        helpframe=Frame(master)
        Label(helpframe, text='Use % as a wildcard character').pack()
        helpframe.pack()
    def validate(self):
        """Any pattern (including empty) is acceptable."""
        return True
    def apply(self):
        """Record the chosen pattern, remembering new ones for next time."""
        self.selected = self.search_entry.get()
        if self.selected not in ViewMPNsDialog.mpn_search_items:
            ViewMPNsDialog.mpn_search_items.append(self.selected)
    def get_selected(self):
        """Return the pattern the user chose.

        FIX: removed a stray debug print(self.selected) left in the original,
        inconsistent with the sibling ViewPartsDialog.get_selected().
        """
        return self.selected
class EditDescription(Dialog):
    """Dialog to edit a part's description and persist it to the database."""
    def __init__(self, parent, title = None, xoffset=50, yoffset=50, values=None, db=None):
        # values[0] is the part number, values[1] the current description.
        if db is None or values is None or title is None:
            raise SystemError
        self.db = db
        self.values = values
        Dialog.__init__(self, parent, title, xoffset, yoffset)
    def body(self, master):
        """Show the current description in an editable entry field."""
        Label(master, text='Description').grid(row=0, column=0, sticky=W)
        self.title_entry = Entry(master, width=50)
        partinfo = self.db.lookup_pn(self.values[0])
        if partinfo is None:
            raise SystemError
        self.title_entry.insert(0, partinfo[1])
        self.title_entry.grid(row=0, column=1, sticky=W)
    def validate(self):
        """Accept descriptions between 5 and 50 characters."""
        return 5 <= len(self.title_entry.get()) <= 50
    def apply(self):
        """Write the new description to the database and update the cached row."""
        new_desc = self.title_entry.get()
        self.db.update_title(self.values[0], new_desc)
        self.values[1] = new_desc
class EditManufacturer(Dialog):
    """Dialog to rename a manufacturer, rejecting duplicate names."""
    def __init__(self, parent, title = None, xoffset=50, yoffset=50, values=None, db=None):
        # values[0] is the current manufacturer name; db is the BOM database.
        if db is None or values is None or title is None:
            raise SystemError
        self.db = db
        self.values = values
        Dialog.__init__(self, parent, title, xoffset, yoffset)
    def body(self, master):
        """
        Print dialog box body
        """
        Label(master, text='Manufacturer').grid(row=0, column=0, sticky=W)
        self.title_entry = Entry(master, width=30)
        self.title_entry.insert(0, self.values[0])
        self.title_entry.grid(row=0, column=1, sticky=W)
    def validate(self):
        """
        Validate dialog box contents: length 3-30 and, when changed, the new
        name must not already exist in the database (pops an error dialog).
        """
        self.newmfgname = self.title_entry.get()
        if len(self.newmfgname) < 3 or len(self.newmfgname) > 30:
            return False
        # Did it change
        if self.newmfgname != self.values[0]:
            # Check to see if the user is defining a manufacturer already in the database
            res = self.db.lookup_mfg(self.newmfgname)
            if res is not None:
                # Modal error popup; blocks until dismissed.
                e=ErrorPopUp(self.bodyframe, message="Manufacturer already defined")
                return False
        return True
    def apply(self):
        """
        Apply dialog box changes: rename the manufacturer in the database and
        update the cached row values.
        """
        res = self.db.lookup_mfg(self.values[0]) # Get old mfg info
        if res is None:
            raise SystemError
        # res[1] is the manufacturer ID (MID).
        mid = res[1]
        self.db.update_mfg(mid, self.newmfgname)
        self.values[0] = self.newmfgname
class EditMPN(Dialog):
    """Dialog to edit a manufacturer part number (MPN) for a part."""
    def __init__(self, parent, title = None, xoffset=50, yoffset=50, values=None, db=None, tags=None):
        # tags[0] is the part number; values[3] the current MPN.
        if db is None or values is None or title is None or tags is None:
            raise SystemError
        self.db = db
        self.values = values
        self.tags = tags
        Dialog.__init__(self, parent, title, xoffset, yoffset)
    def body(self, master):
        """Show the current MPN in an editable entry field."""
        Label(master, text='Manufacturer Part Number').grid(row=0, column=0, sticky=W)
        self.mpn_entry = Entry(master, width=30)
        record = self.db.lookup_part_by_pn_mpn(self.tags[0], self.values[3])
        if record is None:
            raise SystemError
        self.mpn_entry.insert(0, record[2])
        self.mpn_entry.grid(row=0, column=1, sticky=W)
    def validate(self):
        """Accept MPNs between 3 and 30 characters."""
        return 3 <= len(self.mpn_entry.get()) <= 30
    def apply(self):
        """Persist the edited MPN and refresh the cached row values."""
        pn, mname, old_mpn, mid = self.db.lookup_part_by_pn_mpn(self.tags[0], self.values[3])
        new_mpn = self.mpn_entry.get()
        self.db.update_mpn(pn, old_mpn, new_mpn, mid)
        self.values[3] = new_mpn
class AddAlternateSourceDialog(Dialog):
    """
    Add part dialog box
    """
    def __init__(self, parent, title = "Add Alternate Source", xoffset=50, yoffset=50, db=None, pn=None):
        """
        :param parent: Parent window
        :param title: Title of add part dialog box
        :param xoffset: Offset in X direction
        :param yoffset: Offset in Y direction
        :param db: Database object
        :param pn: part number the alternate source is added to
        :return: N/A
        """
        if db is None or title is None or pn is None:
            raise SystemError
        self.db = db
        self.pn = pn
        # Set True only after apply() commits the new source.
        self.success = False
        Dialog.__init__(self, parent, title, xoffset, yoffset)
    def body(self, master):
        """
        Display the fields for the manufacturer part number
        :param master: Parent window
        """
        self.mfgrs = self.db.get_mfgr_list()
        # NOTE(review): defaultMfgr is a module global defined outside this
        # view; the combo box pre-selects it.
        def_sel = self.mfgrs.index(defaultMfgr)
        Label(master, text='Manufacturer').grid(row=2, column=0, sticky=W)
        self.mfgr_entry = Combobox(master, width=30, values=self.mfgrs)
        self.mfgr_entry.current(def_sel)
        self.mfgr_entry.grid(row=2, column=1, sticky=W)
        Label(master, text='Manufacturer Part Number').grid(row=3, column=0, sticky=W)
        self.mpn_entry = Entry(master, width=30)
        self.mpn_entry.insert(0, defaultMpn)
        self.mpn_entry.grid(row=3, column=1, sticky=W)
    def validate(self):
        """
        Validate manufacturer part number (3-30 chars), confirm/create a new
        manufacturer if needed, and reject duplicate (mfg, mpn) sources.
        """
        x = len(self.mpn_entry.get())
        if x < 3 or x > 30:
            return False
        # Validate manufacturer, and add a new manufacturer if need be
        self.new_mname = self.mfgr_entry.get()
        if self.new_mname not in self.mfgrs:
            # Unknown manufacturer: ask the user to confirm before creating.
            confirm_mfg = AddMfgrDialog(self.parent, new_mfg=self.new_mname)
            if confirm_mfg.confirmed() is False:
                return False
            else:
                # nextFreeMID is defined elsewhere in this module.
                nextmid = nextFreeMID(self.db)
                # Add manufacturer and MID to manufacturer list
                self.db.add_mfg_to_mlist(self.new_mname, nextmid)
                self.mfgrs.append(self.new_mname)
        # Get the mid for the manufacturer name
        res = self.db.lookup_mfg(self.new_mname)
        if res is None:
            raise SystemError
        self.new_mid = res[1]
        # Check for duplicate manufacturer part record
        sources = self.db.lookup_mpn_by_pn(self.pn)
        self.new_mpn = self.mpn_entry.get()
        for item in sources:
            if self.new_mname == item['mname'] and self.new_mpn == item['mpn']:
                return False # Item already a valid source
        return True
    def apply(self):
        """
        Write the new manufacturer part record to the database
        """
        self.db.add_mpn(self.pn, self.new_mid, self.new_mpn)
        self.success = True
    def get_new_mfgpartrec(self):
        """
        Return new manufacturer and mpn
        :return: Dict with pn, mid, mfg and mpn if successful else none
        """
        if self.success:
            return {'pn': self.pn, 'mid': self.new_mid, 'mfg': self.new_mname, 'mpn': self.new_mpn}
        else:
            return None
class AddMfgrDialog(Dialog):
    """Confirmation dialog shown before creating a brand-new manufacturer."""
    def __init__(self, parent, title="Add Manufacturer", xoffset=50, yoffset=50, new_mfg=None):
        if new_mfg is None:
            raise SystemError
        self.confirm = False
        self.new_mfg = new_mfg
        self.titlestr = title
        Dialog.__init__(self, parent, title, xoffset, yoffset)
    def body(self, master):
        """Ask the user to confirm adding the named manufacturer."""
        Label(master, text='{0}: {1}?'.format(self.titlestr, self.new_mfg)).pack()
    def apply(self):
        """OK pressed: remember that the user confirmed."""
        self.confirm = True
    def confirmed(self):
        """Return True when the user pressed OK."""
        return self.confirm
class RemoveSourceDialog(Dialog):
    """Confirmation dialog that removes one (mfg, mpn) source from a part."""
    def __init__(self, parent, title="Remove Source", xoffset=50, yoffset=50, db=None, pn=None, mfg=None, mpn=None):
        """
        :param parent: Parent window
        :param title: dialog title
        :param xoffset: Offset in X direction
        :param yoffset: Offset in Y direction
        :param db: Database object
        :param pn: Part number
        :param mfg: Manufacturer
        :param mpn: Manufacturer part number
        """
        if db is None or pn is None or mfg is None or mpn is None:
            raise SystemError
        self.db = db
        self.pn = pn
        self.mfg = mfg
        self.mpn = mpn
        Dialog.__init__(self, parent, title, xoffset, yoffset)
    def body(self, master):
        """Show the source read-only and require a typed YES to confirm."""
        fields = ((0, 'Part Number', self.pn),
                  (1, 'Manufacturer', self.mfg),
                  (2, 'Manufacturer Part Number', self.mpn))
        for row, caption, value in fields:
            Label(master, text=caption).grid(row=row, column=0, sticky=W)
            Label(master, text=value, relief=SUNKEN).grid(row=row, column=1, sticky=W)
        # Empty spacer row between the info and the confirmation prompt.
        Label(master, text='').grid(row=3, column=0, sticky=W)
        Label(master, text='').grid(row=3, column=1, sticky=W)
        Label(master, text='Type YES in the box to confirm deletion').grid(row=4, column=0, sticky=W)
        self.yes_entry = Entry(master, width=3)
        self.yes_entry.grid(row=4, column=1, sticky=W)
    def validate(self):
        """Only an exact 'YES' confirms the deletion."""
        return self.yes_entry.get() == 'YES'
    def apply(self):
        """Look up the manufacturer's MID and delete the source record."""
        res = self.db.lookup_mfg(self.mfg)
        if res is None:
            raise SystemError
        mid = res[1]
        self.db.remove_source(self.pn, mid, self.mpn)
class AddPartDialog(Dialog):
    """
    Add part dialog box
    """
    def __init__(self, parent, title = "Add Part", xoffset=50, yoffset=50, db=None, pnhint='', deschint=''):
        """
        :param parent: Parent window
        :param title: Title of add part dialog box
        :param xoffset: Offset in X direction
        :param yoffset: Offset in Y direction
        :param db: Database object
        :param pnhint: part number hint
        :param deschint: description hint
        :return: N/A
        """
        if db is None or title is None:
            raise SystemError
        self.db = db
        self.pnhint = pnhint
        self.deschint = deschint
        Dialog.__init__(self, parent, title, xoffset, yoffset)
    def new_pn(self):
        """
        Return the next available part number from the database
        :return: Part number string
        """
        res = self.db.last_pn()
        # If this is the very first part number added use the default for firstpn
        if res is None or res[0] is None:
            pn = firstPn
        else:
            # NOTE(review): assumes last_pn() returns the part number as a
            # string (res[0] then reads its first character) -- confirm
            # against bommdb; a row tuple would need res[0] here instead.
            pn = res
        (prefix, suffix) = pn.split('-')
        nextnum = int(prefix) + 1
        # NOTE(review): the suffix is always reset to 101 for a new base
        # part number; the parsed `suffix` is intentionally unused.
        pn = '{prefix:06d}-{suffix:03d}'.format(prefix=nextnum, suffix=101)
        return pn
    def body(self, master):
        """Build entry fields for part number, description, manufacturer and MPN."""
        if self.pnhint != '':
            nextpn = self.pnhint
        else:
            nextpn = self.new_pn()
        self.mfgrs = self.db.get_mfgr_list()
        def_sel = self.mfgrs.index(defaultMfgr)
        Label(master, text='Part Number').grid(row=0, column=0, sticky=W)
        self.pn_entry = Entry(master, width=10)
        self.pn_entry.insert(0, nextpn)
        self.pn_entry.grid(row=0, column=1, sticky=W)
        Label(master, text='Description').grid(row=1, column=0, sticky=W)
        self.desc_entry = Entry(master, width=50)
        # FIX: was `self.deschint is not ''` -- an identity comparison with a
        # string literal that only works by CPython interning accident and
        # raises SyntaxWarning on modern Pythons; use equality instead.
        if self.deschint != '':
            self.desc_entry.insert(0, self.deschint)
        self.desc_entry.grid(row=1, column=1, sticky=W)
        Label(master, text='Manufacturer').grid(row=2, column=0, sticky=W)
        self.mfgr_entry = Combobox(master, width=30, values=self.mfgrs)
        self.mfgr_entry.current(def_sel)
        self.mfgr_entry.grid(row=2, column=1, sticky=W)
        Label(master, text='Manufacturer Part Number').grid(row=3, column=0, sticky=W)
        self.mpn_entry = Entry(master, width=30)
        self.mpn_entry.insert(0, defaultMpn)
        self.mpn_entry.grid(row=3, column=1, sticky=W)
    def validate(self):
        """Validate all fields; may create a new manufacturer after confirmation."""
        # Validate part number: exactly 10 chars in the form NNNNNN-NNN.
        pn = self.pn_entry.get()
        x = len(pn)
        if x != 10:
            return False
        if pn[6] != '-':
            return False
        # Validate description
        x = len(self.desc_entry.get())
        if x < 5 or x > 50:
            return False
        # Validate manufacturer part number
        x = len(self.mpn_entry.get())
        if x < 3 or x > 30:
            return False
        # Validate manufacturer, and add a new manufacturer if need be
        selected = self.mfgr_entry.get()
        if selected not in self.mfgrs:
            confirm_mfg = AddMfgrDialog(self.parent, new_mfg=selected)
            if confirm_mfg.confirmed() is False:
                return False
            else:
                # Assign a new mid
                mid = self.db.last_mid()
                if mid is not None:
                    mid = int(mid[1:]) + 1
                else:
                    mid = 0
                nextmid = 'M{num:07d}'.format(num=mid)
                # Add manufacturer and MID to manufacturer list
                self.db.add_mfg_to_mlist(selected, nextmid)
                self.mfgrs.append(selected)
        return True
    def apply(self):
        """Create the part record and its manufacturer part record."""
        # Retreive all fields
        pn = self.pn_entry.get()
        desc = self.desc_entry.get()
        mfgr = self.mfgr_entry.get()
        mpn = self.mpn_entry.get()
        # Get the mid for the manufacturer name
        res = self.db.lookup_mfg(mfgr)
        if res is None:
            raise SystemError
        mid = res[1]
        # Create the part record and manufacturer part record
        self.db.add_pn(pn, desc, mid, mpn)
class DisplayFrame:
    """Base class for list views; holds the parent window and database handle."""
    # The single frame currently shown on screen, shared by all sibling views.
    frame = None # Class variable shared between siblings
    def __init__(self, parent, db):
        self.parent = parent
        self.db = db
class ShowManufacturers(DisplayFrame):
    """List view of all manufacturers with a right-click edit menu."""
    def __init__(self, parent, db):
        DisplayFrame.__init__(self, parent, db)
        # Context menu shown on right-click over a manufacturer row.
        self.empopupmenu = Menu(self.parent, tearoff=0)
        self.empopupmenu.add_command(label="Edit Manufacturer...", command=self.edit_mfg)
    def refresh(self):
        """
        Refresh screen with current list entries
        :return: N/A
        """
        # Replace whichever view currently owns the shared frame.
        if(DisplayFrame.frame is not None):
            DisplayFrame.frame.destroy()
        DisplayFrame.frame = Frame(self.parent)
        self.frame = DisplayFrame.frame
        self.frame.pack(side=TOP, fill=BOTH, expand=Y)
        # NOTE(review): ("Manufacturer") is a plain string, not a 1-tuple;
        # Treeview accepts it, but ("Manufacturer",) would be clearer.
        self.ltree = Treeview(height="26", columns=("Manufacturer"))
        ysb = Scrollbar(orient='vertical', command=self.ltree.yview)
        xsb = Scrollbar(orient='horizontal', command=self.ltree.xview)
        self.ltree.configure(xscroll=xsb.set, yscroll=ysb.set)
        self.ltree.heading('#1', text='Manufacturer', anchor=W)
        self.ltree.column('#1', stretch=YES, minwidth=0, width=200)
        self.ltree.column('#0', stretch=NO, minwidth=0, width=0) #width 0 for special heading
        # Right-click opens the context menu.
        self.ltree.bind("<Button-3>", self.popup)
        manufacturers = self.db.get_mfgrs()
        for manuf in manufacturers:
            # manuf[0] is the manufacturer name; the row object itself is
            # stored in the tag alongside the 'mfgrec' marker.
            parent_iid = self.ltree.insert("", "end", "", tag=[manuf,'mfgrec'], values=(manuf[0],))
        # add tree and scrollbars to frame
        self.ltree.grid(in_=self.frame, row=0, column=0, sticky=NSEW)
        ysb.grid(in_=self.frame, row=0, column=1, sticky=NS)
        xsb.grid(in_=self.frame, row=1, column=0, sticky=EW)
        # set frame resizing priorities
        self.frame.rowconfigure(0, weight=1)
        self.frame.columnconfigure(0, weight=1)
    def popup(self, event):
        """
        Act on right click
        :param event:
        :return: N/A
        """
        # select row under mouse
        iid = self.ltree.identify_row(event.y)
        self.itemid = iid
        if iid:
            # mouse pointer over item
            self.ltree.selection_set(iid)
            item = self.ltree.item(iid)
            # Remember the clicked row for the menu command handlers.
            self.itemvalues = item['values']
            self.itemtags = item['tags']
            self.empopupmenu.tk_popup(event.x_root, event.y_root)
    def edit_mfg(self):
        """
        Open the edit dialog for the selected manufacturer, then push the
        (possibly updated) values back into the tree row.
        :return: N/A
        """
        title = 'Edit Manufacturer: ' + self.itemvalues[0]
        # EditManufacturer mutates self.itemvalues in place on success.
        e = EditManufacturer(self.parent, values=self.itemvalues, db=self.db, title=title)
        self.ltree.item(self.itemid, values=self.itemvalues)
class ShowParts(DisplayFrame):
def __init__(self, parent, db):
DisplayFrame.__init__(self, parent, db)
self.dsdir = general.get('datasheets', None)
if self.dsdir is not None:
self.dsdir = os.path.expanduser(self.dsdir)
self.pdfviewer = general.get('pdfviewer', None)
# create a popup menu
self.pnpopupmenu = Menu(self.parent, tearoff=0)
self.pnpopupmenu.add_command(label="Copy part number to clipboard", command=self.copy_pn)
self.pnpopupmenu.add_command(label="Edit Description",command=self.edit_description)
self.pnpopupmenu.add_command(label="Add tabulated part number", command=self.add_tabulated_part)
self.pnpopupmenu.add_command(label="Add alternate source", command=self.add_alternate_source)
self.mpnpopupmenu = Menu(self.parent, tearoff=0)
self.mpnpopupmenu.add_command(label="Copy manufacturer part number to clipboard", command=self.copy_pn)
self.hdc = self.db.mfg_table_has_datasheet_col()
self.mpnpopupmenu.add_command(label="Open Data Sheet", command=self.open_data_sheet, state = DISABLED)
self.mpnpopupmenu.add_command(label="Edit Manufacturer Part Number", command=self.edit_mpn)
self.mpnpopupmenu.add_command(label="Associate Data Sheet...", command=self.associate_data_sheet, state=DISABLED)
self.mpnpopupmenu.add_command(label="Remove this source", command=self.remove_source, state=DISABLED)
def refresh_mpn_processor(self, like):
"""
Process refresh items (default)
:param like: - search string
:return: N/A
"""
parts = self.db.lookup_mpn_like(like)
for row,(pn,mpn) in enumerate(parts):
res = self.db.lookup_pn(pn)
desc = res[1]
parent_iid = self.ltree.insert("", "end", "", tag=[pn,'partrec'], values=((pn, desc, '', '')))
self.populate_source_list(pn, parent_iid)
children = self.ltree.get_children(parent_iid)
for child in children:
self.ltree.see(child)
def refresh_default_processor(self, like):
"""
Process refresh items (default)
:param like: - search string
:return: N/A
"""
parts = self.db.get_parts(like)
for row,(pn,desc) in enumerate(parts):
mfg = defaultMfgr
mpn = defaultMpn
parent_iid = self.ltree.insert("", "end", "", tag=[pn,'partrec'], values=((pn, desc, '', '')))
self.populate_source_list(pn, parent_iid)
def refresh(self, like=None, processor='DEFAULT'):
"""
Refresh screen with current list entries
:param: like - match string
:param: callback - processing function. Use default if set to None
:return: N/A
"""
self.like = like
if(DisplayFrame.frame is not None):
DisplayFrame.frame.destroy()
DisplayFrame.frame = Frame(self.parent)
self.frame = DisplayFrame.frame
self.frame.pack(side=TOP, fill=BOTH, expand=Y)
self.ltree = Treeview(height="26", columns=("Part Number","Description","Manufacturer","Manufacturer Part Number"), selectmode="extended")
ysb = Scrollbar(orient='vertical', command=self.ltree.yview)
xsb = Scrollbar(orient='horizontal', command=self.ltree.xview)
self.ltree.configure(xscroll=xsb.set, yscroll=ysb.set)
self.ltree.heading('#1', text='Part Number', anchor=W)
self.ltree.heading('#2', text='Description', anchor=W)
self.ltree.heading('#3', text='Manufacturer', anchor=W)
self.ltree.heading('#4', text='Manufacturer Part Number', anchor=W)
self.ltree.column('#1', stretch=NO, minwidth=0, width=200)
self.ltree.column('#2', stretch=NO, minwidth=0, width=500)
self.ltree.column('#3', stretch=NO, minwidth=0, width=300)
self.ltree.column('#4', stretch=YES, minwidth=0, width=300)
self.ltree.column('#0', stretch=NO, minwidth=0, width=0) #width 0 for special heading
self.ltree.bind("<Button-3>", self.popup)
# Process items to view on screen
if processor == 'DEFAULT':
self.refresh_default_processor(like)
elif processor == 'MPN':
self.refresh_mpn_processor(like)
# add tree and scrollbars to frame
self.ltree.grid(in_=self.frame, row=0, column=0, sticky=NSEW)
ysb.grid(in_=self.frame, row=0, column=1, sticky=NS)
xsb.grid(in_=self.frame, row=1, column=0, sticky=EW)
# set frame resizing priorities
self.frame.rowconfigure(0, weight=1)
self.frame.columnconfigure(0, weight=1)
def popup(self, event):
"""
Act on right click
:param event:
:return: N/A
"""
# select row under mouse
iid = self.ltree.identify_row(event.y)
self.itemid = iid
if iid:
# mouse pointer over item
self.ltree.selection_set(iid)
item = self.ltree.item(iid)
self.itemvalues = item['values']
self.itemtags = item['tags']
if item['tags'][1] == 'partrec':
# Remember part number
self.pnpopupmenu.tk_popup(event.x_root, event.y_root)
elif item['tags'][1] == 'mfgpartrec':
sources = self.db.lookup_mpn_by_pn(item['tags'][0])
# Pull datasheet path from database if it is available
self.datasheet = None
for source in sources:
if source['mpn'] == str(self.itemvalues[3]):
self.datasheet = source['datasheet']
# Enable the datasheet selection if there is a path specified in the config file
# The datasheet is specified in the manufacturer table,
# the default manufacturer is not specified, and
# the path to the pdf viewer is specified in the config file
if self.hdc is True and self.datasheet is not None \
and self.dsdir is not None\
and self.pdfviewer is not None\
and self.itemvalues[2] != defaultMfgr:
self.mpnpopupmenu.entryconfig(1, state=NORMAL)
else:
self.mpnpopupmenu.entryconfig(1, state=DISABLED)
# If we have the datasheet column
if self.hdc is True:
# Enable if not the default manufacturer
if self.itemvalues[2] != defaultMfgr:
self.mpnpopupmenu.entryconfig(3, state=NORMAL)
else:
self.mpnpopupmenu.entryconfig(3, state=DISABLED)
# If more than one source, then enable the removal of a source
if len(sources) > 1:
self.mpnpopupmenu.entryconfig(4 , state=NORMAL)
else:
self.mpnpopupmenu.entryconfig(4, state=DISABLED)
self.mpnpopupmenu.tk_popup(event.x_root, event.y_root)
else:
# mouse pointer not over item
# occurs when items do not fill frame
# no action required
pass
def copy_pn(self):
    """
    Copy the part number (or, on a source row, the MPN) to the clipboard.
    :return: N/A
    """
    # Part rows carry the PN in column 0; source rows leave it blank
    # and carry the manufacturer part number in column 3.
    text = self.itemvalues[0] if self.itemvalues[0] != '' else self.itemvalues[3]
    pyperclip.copy(text)
def edit_description(self):
    """
    Open the description editor for the selected part and refresh its row.
    :return: N/A
    """
    dialog_title = 'Edit Description: ' + self.itemvalues[0]
    EditDescription(self.parent, values=self.itemvalues, db=self.db, title=dialog_title)
    # The dialog edits self.itemvalues in place; push the result into the tree.
    self.ltree.item(self.itemid, values=self.itemvalues)
def edit_mpn(self):
    """
    Open the editor for the selected manufacturer part number and refresh
    its row.
    :return: N/A
    """
    dialog_title = 'Edit Manufacturer Part Number: ' + str(self.itemvalues[3])
    EditMPN(self.parent, values=self.itemvalues, tags=self.itemtags, db=self.db,
            title=dialog_title)
    # The dialog edits self.itemvalues in place; push the result into the tree.
    self.ltree.item(self.itemid, values=self.itemvalues)
def add_tabulated_part(self):
    """
    Add a tabulated part number with the same prefix as the part number
    clicked on.
    :return: N/A
    """
    # The tabulation prefix is everything before the first '-'.
    prefix = self.itemvalues[0].split('-')[0] + '-'
    AddPartDialog(self.parent, title='Add Tabulated Part', db=self.db,
                  pnhint=prefix, deschint=self.itemvalues[1])
    self.refresh(self.like)
def populate_source_list(self, pn, itemid):
    """
    Insert one child row per known source (manufacturer, MPN) under a
    part row.

    :param pn: parent part number
    :param itemid: tree item id of the parent part row
    :return: N/A
    """
    sources = self.db.lookup_mpn_by_pn(pn)
    # If no MFG/MPN is recorded, fall back to the default placeholder source.
    if sources == []:
        sources = [{'mname': defaultMfgr, 'mpn': defaultMpn}]
    for source in sources:
        self.ltree.insert(itemid, "end", "", tag=[pn, 'mfgpartrec'],
                          values=('', '', source['mname'], source['mpn']))
def rebuild_source_list(self, pn, itemid):
    """
    Rebuild the list of source (manufacturer/MPN) children of a part row.

    :param pn: parent part number
    :param itemid: tree item id of the parent part row
    :return: N/A
    """
    children = self.ltree.get_children(itemid)
    if children:
        # Treeview.delete() takes item ids as individual arguments, so
        # unpack the tuple instead of passing it as a single argument;
        # skip the call entirely when there is nothing to delete.
        self.ltree.delete(*children)
    self.populate_source_list(pn, itemid)
def add_alternate_source(self):
    """
    Prompt for an alternate manufacturer/MPN and redraw the source rows.
    :return: N/A
    """
    part_number = self.itemtags[0]
    AddAlternateSourceDialog(self.parent, pn=part_number, db=self.db,
                             title="Add Alternate Source")
    self.rebuild_source_list(part_number, self.itemid)
def remove_source(self):
    """
    Remove the selected manufacturer/MPN source from the database and
    drop its tree row.
    :return: N/A
    """
    RemoveSourceDialog(self.parent, db=self.db,
                       pn=self.itemtags[0],
                       mfg=self.itemvalues[2],
                       mpn=self.itemvalues[3],
                       title="Remove Source")
    self.ltree.delete(self.itemid)
def open_data_sheet(self):
    """
    Run the pdf viewer to display the datasheet.

    self.datasheet is either an absolute path or a path relative to the
    configured datasheet directory (self.dsdir).
    :return: N/A
    """
    # Bug fix: the original compared the first character of the path to
    # os.pathsep (the PATH-list separator ':', not the directory separator)
    # and then used only that single character as the path. Use
    # os.path.isabs() on the whole string instead.
    if os.path.isabs(self.datasheet):
        pdfpath = self.datasheet  # Path is absolute
    else:
        pdfpath = os.path.join(self.dsdir, self.datasheet)  # Relative to datasheet directory
    subprocess.Popen((self.pdfviewer, pdfpath))
def associate_data_sheet(self):
    """
    Associate a datasheet to a manufacturer part number.

    Asks the user for a file, stores the path (relative to the datasheet
    directory when possible) in the manufacturer table.
    :return: N/A
    """
    # Get the path to the datasheet from the user
    path = askopenfilename(parent=root, initialdir=self.dsdir,
                           defaultextension='.pdf', title='Associate Datasheet')
    # If something was entered, then store the path in the manufacturer table
    # (removed a leftover debug print of the path length here)
    if path is not None and len(path):
        # If it starts with the datasheet directory, store it relative to
        # that directory (strip the prefix plus the leading separator).
        if path.startswith(self.dsdir):
            path = path[len(self.dsdir) + 1:]
        # Get every key needed to do the update
        pn = self.itemtags[0]
        mpn = self.itemvalues[3]
        res = self.db.lookup_mfg_by_pn_mpn(pn, mpn)
        if res is None:
            # Database inconsistency: the clicked row should always resolve.
            raise SystemError
        mid = res[1]
        # update the path
        self.db.update_datasheet(pn, mid, mpn, path)
def nextFreeMID(db):
    """Return the next unused manufacturer id, formatted as 'M' + 7 digits."""
    last = db.last_mid()
    # Ids look like 'M0000042'; count up from the highest one, or start
    # from zero when the manufacturer table is empty.
    next_num = int(last[1:]) + 1 if last is not None else 0
    return 'M{num:07d}'.format(num=next_num)
def addPN():
    """Menu hook: open the add-part dialog, then redraw the part list."""
    AddPartDialog(root, db=DB)
    parts.refresh()
def viewPartsLike():
    """Menu hook: ask for a part-number pattern and show matching parts."""
    pattern = ViewPartsDialog(root).get_selected()
    parts.refresh(pattern)
def viewMPNsLike():
    """Menu hook: ask for an MPN pattern and show matching parts."""
    pattern = ViewMPNsDialog(root).get_selected()
    parts.refresh(pattern, 'MPN')
if __name__ == '__main__':
    # Customize default configurations to user's home directory
    for i in range(0, len(defaultConfigLocations)):
        defaultConfigLocations[i] = os.path.expanduser(defaultConfigLocations[i])
    # Read the config file
    config = configparser.ConfigParser()
    configLocation = defaultConfigLocations
    config.read(configLocation)
    try:
        general = config['general']
    except KeyError:
        print('Error: no config file found')
        sys.exit(2)
    # Open the database file
    db = os.path.expanduser(general.get('db', defaultDb))
    # Check to see if we can access the database file and that it is writable
    if not os.path.isfile(db):
        print('Error: Database file {} doesn\'t exist'.format(db))
        raise SystemError
    if not os.access(db, os.W_OK):
        print('Error: Database file {} is not writable'.format(db))
        raise SystemError
    DB = BOMdb(db)
    # Look up default manufacturer
    res = DB.lookup_mfg_by_id(defaultMID)
    if res is None:
        defaultMfgr = 'Default MFG Error'
    else:
        defaultMfgr = res[0]
    # Set up the Tk main window
    root = Tk()
    root.title("Part Manager")
    app = FullScreenApp(root)
    parts = ShowParts(root, DB)
    manufacturers = ShowManufacturers(root, DB)
    # Build the menu bar
    menubar = Menu(root, tearoff=0)
    filemenu = Menu(menubar, tearoff=0)
    filemenu.add_command(label="Exit", command=root.quit)
    menubar.add_cascade(label="File", menu=filemenu)
    editmenu = Menu(menubar, tearoff=0)
    menubar.add_cascade(label="Edit", menu=editmenu)
    editmenu.add_command(label="Add part number...", command=addPN)
    viewmenu = Menu(menubar, tearoff=0)
    viewmenu.add_command(label="View All Parts", command=parts.refresh)
    viewmenu.add_command(label="View Parts Like...", command=viewPartsLike)
    # (fixed label typo: was "View View Manufacturer Part Numbers Like...")
    viewmenu.add_command(label="View Manufacturer Part Numbers Like...", command=viewMPNsLike)
    viewmenu.add_command(label="View Manufacturers", command=manufacturers.refresh)
    menubar.add_cascade(label="View", menu=viewmenu)
    # display the menu (was configured twice before; once is enough)
    root.config(menu=menubar)
    parts.refresh()
    root.mainloop()
|
import logging
import gi
import glob
import os
import sh
import threading
from typing import TYPE_CHECKING, Callable
from tailsgreeter.ui import _
from tailsgreeter.config import settings_dir, persistent_settings_dir, unsafe_browser_setting_filename
gi.require_version('GLib', '2.0')
gi.require_version('Gtk', '3.0')
from gi.repository import GLib, Gtk
if TYPE_CHECKING:
from tailsgreeter.settings.persistence import PersistenceSettings
class PersistentStorage(object):
    """Welcome Screen widget logic for unlocking the persistent storage.

    Wires up the unlock UI, runs the actual unlocking in a background
    thread, and on success copies persisted settings into the live
    settings directory and (re)loads or applies them.
    """

    def __init__(self, persistence_setting: "PersistenceSettings",
                 load_settings_cb, apply_settings_cb: Callable, builder):
        self.persistence_setting = persistence_setting
        # called after unlock when persisted settings exist and must be loaded
        self.load_settings_cb = load_settings_cb
        # called after unlock when no persisted settings exist
        self.apply_settings_cb = apply_settings_cb

        # widgets looked up from the Gtk builder
        self.box_storage = builder.get_object('box_storage')
        self.box_storage_unlock = builder.get_object('box_storage_unlock')
        self.box_storage_unlocked = builder.get_object('box_storage_unlocked')
        self.button_storage_unlock = builder.get_object('button_storage_unlock')
        self.checkbutton_storage_show_passphrase = builder.get_object('checkbutton_storage_show_passphrase')
        self.entry_storage_passphrase = builder.get_object('entry_storage_passphrase')
        self.image_storage_state = builder.get_object('image_storage_state')
        self.infobar_persistence = builder.get_object('infobar_persistence')
        self.label_infobar_persistence = builder.get_object('label_infobar_persistence')
        self.spinner_storage_unlock = builder.get_object('spinner_storage_unlock')
        self.button_start = builder.get_object("button_start")

        self.checkbutton_storage_show_passphrase.connect(
            'toggled', self.cb_checkbutton_storage_show_passphrase_toggled)

        self.box_storage.set_focus_chain([
            self.box_storage_unlock,
            self.box_storage_unlocked,
            self.checkbutton_storage_show_passphrase])

        if self.persistence_setting.has_persistence():
            # A persistent volume exists: show the unlock controls.
            self.box_storage_unlock.set_visible(True)
            self.checkbutton_storage_show_passphrase.set_visible(True)
            self.image_storage_state.set_visible(True)
            self.entry_storage_passphrase.set_visible(True)
            self.spinner_storage_unlock.set_visible(False)
        else:
            # XXX-future: We have a nice button to configure the persistence
            # but nothing is implemented to do so currently. So let's
            # hide the whole thing for now.
            self.box_storage.set_visible(False)

    @staticmethod
    def passphrase_changed(editable):
        """Entry 'changed' handler: clear the failed-unlock warning icon."""
        # Remove warning icon
        editable.set_icon_from_icon_name(Gtk.EntryIconPosition.SECONDARY, None)

    def unlock(self):
        """Start unlocking the persistent storage with the entered passphrase.

        Disables the input widgets, shows the spinner and performs the
        actual unlock in a background thread; the result is delivered on
        the GLib main loop via cb_unlocked / cb_unlock_failed.
        """
        self.entry_storage_passphrase.set_sensitive(False)
        self.button_storage_unlock.set_sensitive(False)
        self.button_storage_unlock.set_label(_("Unlocking…"))
        self.checkbutton_storage_show_passphrase.set_visible(False)
        self.image_storage_state.set_visible(False)
        self.spinner_storage_unlock.set_visible(True)
        passphrase = self.entry_storage_passphrase.get_text()

        # Let's execute the unlocking in a thread
        def do_unlock_storage(unlock_method, passphrase, unlocked_cb,
                              failed_cb):
            # Dispatch the outcome back to the GTK main loop; GTK calls
            # must not be made from this worker thread.
            if unlock_method(passphrase):
                GLib.idle_add(unlocked_cb)
            else:
                GLib.idle_add(failed_cb)

        unlocking_thread = threading.Thread(
            target=do_unlock_storage,
            args=(self.persistence_setting.unlock,
                  passphrase,
                  self.cb_unlocked,
                  self.cb_unlock_failed)
        )
        unlocking_thread.start()

    def cb_unlock_failed(self):
        """Main-loop callback: re-enable the UI and show a warning after a
        failed unlock attempt."""
        logging.debug("Storage unlock failed")
        self.entry_storage_passphrase.set_sensitive(True)
        self.button_storage_unlock.set_sensitive(True)
        self.button_storage_unlock.set_label(_("Unlock"))
        self.checkbutton_storage_show_passphrase.set_visible(True)
        self.image_storage_state.set_visible(True)
        self.spinner_storage_unlock.set_visible(False)
        self.label_infobar_persistence.set_label(
            _("Cannot unlock encrypted storage with this passphrase."))
        self.infobar_persistence.set_visible(True)
        # Select the passphrase and mark the entry so the user can retry.
        self.entry_storage_passphrase.select_region(0, -1)
        self.entry_storage_passphrase.set_icon_from_icon_name(
            Gtk.EntryIconPosition.SECONDARY,
            'dialog-warning-symbolic')
        self.entry_storage_passphrase.grab_focus()

    def cb_unlocked(self):
        """Main-loop callback: update the UI after a successful unlock and
        copy/load the persisted settings."""
        logging.debug("Storage unlocked")
        self.spinner_storage_unlock.set_visible(False)
        self.entry_storage_passphrase.set_visible(False)
        self.button_storage_unlock.set_visible(False)
        self.infobar_persistence.set_visible(False)
        self.image_storage_state.set_from_icon_name('tails-unlocked',
                                                    Gtk.IconSize.BUTTON)
        self.image_storage_state.set_visible(True)
        self.box_storage_unlocked.set_visible(True)
        self.button_start.set_sensitive(True)

        # Copy all settings from the "persistent settings directory". This is
        # a workaround for an issue that caused the "Settings were loaded"-
        # notification to be displayed even if no settings were actually
        # loaded, including on the first boot after activating persistence (
        # which is confusing for users). FTR, the explanation for this is:
        #
        # When persistence is activated, live-persist copies the mount
        # destination directory (/var/lib/gdm3/settings) to the source
        # directory (/live/persistence/TailsData_unlocked/greeter-settings),
        # if the source directory doesn't exist yet.
        # In addition with the fact that we immediately store the settings
        # on the file system as soon as the user changes them, that means
        # that when we look at the destination directory after activating
        # persistence, and see that there are settings stored there, it's
        # unclear whether those were loaded from the persistence or simply
        # set by the user in the same Welcome Screen session before unlocking
        # persistence.
        # One workaround we tried was to check if the values of any of the
        # settings on the filesystem are actually different than the values
        # in memory, but that doesn't work well for the admin password, which
        # is stored hashed on the filesystem, but in cleartext in memory.
        #
        # So the current workaround is to have this separate "persistent
        # settings directory" instead of simply persisting the "normal"
        # settings directory, copying all settings from the former
        # to the latter after persistence was activated, and copying all
        # settings back to persistent directory when the Welcome Screen
        # is left. That means that even if the user already set settings
        # in the Welcome Screen before unlocking persistence, those will
        # be stored in the "normal" settings directory, so the "persistent"
        # settings directory will always be empty if no settings were
        # persisted yet.
        #
        # This workaround will no longer be necessary once #11529 is done,
        # because with #11529, the source directory
        # (/live/persistence/TailsData_unlocked/greeter-settings), will
        # be created immediately, so live-persist will never copy the
        # destination directory to the source directory.
        #
        # Both the commit which introduced the persistent settings directory
        # (e5653981228b375c28bf4d1ace9be3367e080900) and the commit which
        # extended its usage and introduced this lengthy comment, can be
        # reverted once #11529 is done.
        for setting in glob.glob(os.path.join(persistent_settings_dir, 'tails.*')):
            sh.cp("-a", setting, settings_dir)

        if not os.listdir(settings_dir):
            # Nothing was persisted: apply the in-memory settings instead.
            self.apply_settings_cb()
        else:
            self.load_settings_cb()

    def cb_checkbutton_storage_show_passphrase_toggled(self, widget):
        """Toggle handler: show/hide the passphrase characters."""
        self.entry_storage_passphrase.set_visibility(widget.get_active())
|
import Options, Utils, os, Logs, samba_utils, sys, Task, fnmatch, re, Build
from TaskGen import feature, before, after
# Map of gdb type spellings that vary between gdb/platform versions to a
# canonical spelling, so signatures compare equal across environments.
abi_type_maps = {
    '_Bool' : 'bool',
    'struct __va_list_tag *' : 'va_list'
    }

def version_key(x):
    '''sort key for dotted version strings (so "1.10" sorts after "1.9")

    Returns a list (rather than the old lazy map() object) so that the key
    is orderable on Python 3 as well.
    '''
    return [int(i) for i in x.split(".")]

def normalise_signature(sig):
    '''normalise a signature from gdb

    Strips the gdb value wrapper ($N = {...} 0xADDR <name>), masks pointer
    addresses, repairs incomplete escape sequences and canonicalises type
    names via abi_type_maps.
    '''
    sig = sig.strip()
    # raw strings: these patterns previously relied on unknown escapes
    # being passed through verbatim (a DeprecationWarning on Python 3)
    sig = re.sub(r'^\$[0-9]+\s=\s\{*', '', sig)
    sig = re.sub(r'\}(\s0x[0-9a-f]+\s<\w+>)?$', '', sig)
    sig = re.sub(r'0x[0-9a-f]+', '0xXXXX', sig)
    sig = re.sub(r'", <incomplete sequence (\\[a-z0-9]+)>', r'\1"', sig)
    for t in abi_type_maps:
        # we need to cope with non-word characters in mapped types
        m = t
        m = m.replace('*', r'\*')
        if m[-1].isalnum() or m[-1] == '_':
            m += r'\b'
        if m[0].isalnum() or m[0] == '_':
            m = r'\b' + m
        sig = re.sub(m, abi_type_maps[t], sig)
    return sig
def normalise_varargs(sig):
    '''cope with older versions of gdb

    Newer gdb appends ", ..." to variadic signatures; strip it so that old
    and new gdb output compare equal.
    '''
    # raw string to avoid invalid-escape warnings on Python 3
    sig = re.sub(r',\s\.\.\.', '', sig)
    return sig
def parse_sigs(sigs, abi_match):
    '''parse ABI signatures file

    :param sigs: signatures file content, one "name: signature" per line
    :param abi_match: list of fnmatch patterns selecting public symbols;
                      a leading '!' excludes, an empty list matches all
    :return: dict mapping symbol name -> normalised signature
    '''
    abi_match = samba_utils.TO_LIST(abi_match)
    ret = {}
    a = sigs.split('\n')
    for s in a:
        if s.find(':') == -1:
            continue
        sa = s.split(':')
        if abi_match:
            matched = False
            for p in abi_match:
                if p[0] == '!' and fnmatch.fnmatch(sa[0], p[1:]):
                    # negative pattern: symbol explicitly excluded
                    break
                elif fnmatch.fnmatch(sa[0], p):
                    matched = True
                    break
            if not matched:
                continue
        # (removed a stray Python-2-only debug print that spammed stdout
        # with every signature during the build)
        ret[sa[0]] = normalise_signature(sa[1])
    return ret
def save_sigs(sig_file, parsed_sigs):
    '''save ABI signatures to a file, one "name: signature" line per symbol'''
    lines = ['%s: %s\n' % (name, parsed_sigs[name])
             for name in sorted(parsed_sigs.keys())]
    return samba_utils.save_file(sig_file, ''.join(lines), create_dir=True)
def abi_check_task(self):
    '''check if the ABI has changed

    Runs the ABI_GEN script over the freshly linked library, parses the
    resulting signatures and compares them against the saved .sigs file.
    When no .sigs file exists yet, or --abi-update was given, the
    signatures are (re)generated instead of checked.

    Raises Utils.WafError when a symbol was removed, changed or added
    without the library version being updated.
    '''
    abi_gen = self.ABI_GEN
    libpath = self.inputs[0].abspath(self.env)
    libname = os.path.basename(libpath)
    sigs = Utils.cmd_output([abi_gen, libpath])
    parsed_sigs = parse_sigs(sigs, self.ABI_MATCH)

    sig_file = self.ABI_FILE
    old_sigs = samba_utils.load_file(sig_file)
    if old_sigs is None or Options.options.ABI_UPDATE:
        # no reference signatures yet, or an explicit update was requested:
        # write the current signatures and skip the comparison
        if not save_sigs(sig_file, parsed_sigs):
            raise Utils.WafError('Failed to save ABI file "%s"' % sig_file)
        Logs.warn('Generated ABI signatures %s' % sig_file)
        return

    parsed_old_sigs = parse_sigs(old_sigs, self.ABI_MATCH)

    # check all old sigs: removals and signature changes need a major bump
    got_error = False
    for s in parsed_old_sigs:
        if not s in parsed_sigs:
            Logs.error('%s: symbol %s has been removed - please update major version\n\tsignature: %s' % (
                libname, s, parsed_old_sigs[s]))
            got_error = True
        elif normalise_varargs(parsed_old_sigs[s]) != normalise_varargs(parsed_sigs[s]):
            Logs.error('%s: symbol %s has changed - please update major version\n\told_signature: %s\n\tnew_signature: %s' % (
                libname, s, parsed_old_sigs[s], parsed_sigs[s]))
            got_error = True

    # any symbol present now but not in the reference is an addition
    for s in parsed_sigs:
        if not s in parsed_old_sigs:
            Logs.error('%s: symbol %s has been added - please mark it _PRIVATE_ or update minor version\n\tsignature: %s' % (
                libname, s, parsed_sigs[s]))
            got_error = True

    if got_error:
        raise Utils.WafError('ABI for %s has changed - please fix library version then build with --abi-update\nSee http://wiki.samba.org/index.php/Waf#ABI_Checking for more information' % libname)
# register the ABI check as a waf task type; quiet so it only reports problems
t = Task.task_type_from_func('abi_check', abi_check_task, color='BLUE', ext_in='.bin')
t.quiet = True
# always re-run the check when the user explicitly asked for it
if '--abi-check' in sys.argv:
    Task.always_run(t)
@after('apply_link')
@feature('abi_check')
def abi_check(self):
    '''check that ABI matches saved signatures

    Task-generator method: attaches an abi_check task to the link output
    of any target built with the 'abi_check' feature, unless ABI checking
    is disabled or not meaningful on this platform.
    '''
    env = self.bld.env
    if not env.ABI_CHECK or self.abi_directory is None:
        return

    # if the platform doesn't support -fvisibility=hidden then the ABI
    # checks become fairly meaningless
    if not env.HAVE_VISIBILITY_ATTR:
        return

    topsrc = self.bld.srcnode.abspath()
    abi_gen = os.path.join(topsrc, 'buildtools/scripts/abi_gen.sh')

    # reference signatures live in the source tree: <dir>/<name>-<vnum>.sigs
    abi_file = "%s/%s-%s.sigs" % (self.abi_directory, self.name, self.vnum)

    tsk = self.create_task('abi_check', self.link_task.outputs[0])
    tsk.ABI_FILE = abi_file
    tsk.ABI_MATCH = self.abi_match
    tsk.ABI_GEN = abi_gen
def abi_process_file(fname, version, symmap):
    '''process one ABI file, adding new symbols to the symmap

    :param fname: path to a "name: signature" .sigs file
    :param version: the library version the file describes
    :param symmap: dict mapping symbol name -> first version providing it;
                   updated in place, existing entries win (a symbol keeps
                   the oldest version that introduced it)
    '''
    # 'with' guarantees the file handle is closed even on error
    with open(fname, mode='r') as f:
        for line in f:
            symname = line.split(":")[0]
            if symname not in symmap:
                symmap[symname] = version
def abi_write_vscript(vscript, libname, current_version, versions, symmap, abi_match):
    '''write a vscript file for a library in --version-script format

    :param vscript: Path to the vscript file
    :param libname: Name of the library, uppercased
    :param current_version: Current version
    :param versions: Versions to consider
    :param symmap: Dictionary mapping symbols -> version
    :param abi_match: List of symbols considered to be public in the current
        version
    '''
    # invert symmap: version -> list of symbols first seen in that version
    invmap = {}
    for s in symmap:
        invmap.setdefault(symmap[s], []).append(s)

    # numeric sort of dotted versions ("1.10" after "1.9"); the key builds a
    # list rather than a lazy map() object so sorting works on Python 3 too
    versions = sorted(versions, key=lambda v: [int(x) for x in v.split(".")])

    # 'with' guarantees the script file is closed even on error
    with open(vscript, mode='w') as f:
        last_key = ""
        for k in versions:
            symver = "%s_%s" % (libname, k)
            if symver == current_version:
                break
            f.write("%s {\n" % symver)
            if k in invmap:
                f.write("\tglobal: \n")
                for s in invmap.get(k, []):
                    f.write("\t\t%s;\n" % s)
            # each node (except the oldest) links to its predecessor
            f.write("}%s;\n\n" % last_key)
            last_key = " %s" % symver
        # current version: export the public patterns, hide everything else
        f.write("%s {\n" % current_version)
        f.write("\tglobal:\n")
        for x in abi_match:
            f.write("\t\t%s;\n" % x)
        if abi_match != ["*"]:
            f.write("\tlocal: *;\n")
        f.write("};\n")
def abi_build_vscript(task):
    '''generate a vscript file for our public libraries

    Task rule: reads every input .sigs file (one per released version),
    builds the symbol -> first-version map and writes the resulting
    linker version script to the task output.
    '''
    tgt = task.outputs[0].bldpath(task.env)

    symmap = {}
    versions = []
    for f in task.inputs:
        fname = f.abspath(task.env)
        basename = os.path.basename(fname)
        # input files are named <LIBNAME>-<version>.sigs
        version = basename[len(task.env.LIBNAME)+1:-len(".sigs")]
        versions.append(version)
        abi_process_file(fname, version, symmap)
    abi_write_vscript(tgt, task.env.LIBNAME, task.env.VERSION, versions, symmap,
                      task.env.ABI_MATCH)
def ABI_VSCRIPT(bld, libname, abi_directory, version, vscript, abi_match=None):
    '''generate a vscript file for our public libraries

    :param bld: the waf build context
    :param libname: library name, used to find <libname>-<ver>.sigs files
    :param abi_directory: directory holding the saved .sigs files, or None
    :param version: current library version string
    :param vscript: name of the version-script file to generate
    :param abi_match: patterns of symbols considered public (default: all)
    '''
    if abi_directory:
        source = bld.path.ant_glob('%s/%s-[0-9]*.sigs' % (abi_directory, libname))
        def abi_file_key(path):
            # sort numerically on the version embedded in the file name
            return version_key(path[:-len(".sigs")].rsplit("-")[-1])
        source = sorted(source.split(), key=abi_file_key)
    else:
        source = ''

    # sanitised, uppercased forms used inside the version script
    libname = os.path.basename(libname)
    version = os.path.basename(version)
    libname = libname.replace("-", "_").replace("+","_").upper()
    version = version.replace("-", "_").replace("+","_").upper()

    t = bld.SAMBA_GENERATOR(vscript,
                            rule=abi_build_vscript,
                            source=source,
                            group='vscripts',
                            target=vscript)
    if abi_match is None:
        abi_match = ["*"]
    else:
        abi_match = samba_utils.TO_LIST(abi_match)
    t.env.ABI_MATCH = abi_match
    t.env.VERSION = version
    t.env.LIBNAME = libname
    # re-run the generator whenever any of these change
    t.vars = ['LIBNAME', 'VERSION', 'ABI_MATCH']
# expose ABI_VSCRIPT as a build-context method
Build.BuildContext.ABI_VSCRIPT = ABI_VSCRIPT
|
from tkinter import *
import license
import about
root = Tk()  # Tk initialize
root.title("Taller GIT")
# fixed 500x500 window (min == max)
root.minsize(500, 500)
root.maxsize(500, 500)

welcome_lbl = Label(root, text="Bienvenidos al taller de GIT",
                    font=("calibri", "18"), fg="#000b98", width=28, height=1)
welcome_lbl.place(x=20, y=20)

def fetchLicenceView():
    # NOTE(review): display_license_view is not defined in this file;
    # presumably license.display_license_view() was intended — verify
    # against the 'license' module.
    display_license_view()

licence_btn = Button(root, text="Licencia", command=fetchLicenceView,
                     bg="#000000", fg="#ffffff", width=50, height=5)
licence_btn.place(x=35, y=180)

def fetchAboutView():
    # NOTE(review): same concern as above for about.display_about_view().
    display_about_view()

about_btn = Button(root, text="Sobre este proyecto", command=fetchAboutView,
                   bg="#000000", fg="#ffffff", width=50, height=5)
# Bug fix: this button was also placed at (35, 180), completely covering
# the "Licencia" button; place it below instead.
about_btn.place(x=35, y=300)

root.mainloop()
|
from paraview.simple import *
import paraview

# Test the version-compatibility shims. With no compatibility version set,
# GetVersion() reports None and behaves like the newest version: "older
# than X" comparisons are always False, "newer than X" always True.
assert (paraview.compatibility.GetVersion().GetVersion() == None),\
    "ParaView modules should never force backwords compatibility to any version"
assert ((paraview.compatibility.GetVersion() < 4.1) == False),\
    "less-than test should always fail when version is not specified."
assert ((paraview.compatibility.GetVersion() <= 4.1) == False),\
    "less-equal test should always fail when version is not specified."
# (fixed misleading messages: these comparisons must SUCCEED, i.e. return
# True, when no version is specified)
assert ((paraview.compatibility.GetVersion() > 4.1) == True),\
    "greater-than test should always succeed when version is not specified."
assert ((paraview.compatibility.GetVersion() >= 4.1) == True),\
    "greater-equal test should always succeed when version is not specified."

Sphere()
r = Show()

# In current ParaView, ColorArrayName is an (association, array) pair ...
assert (len(r.ColorArrayName) == 2),\
    "'ColorArrayName' must be a 2-tuple"

# ... and the legacy ColorAttributeType property is gone.
raisedException = False
try:
    a = r.ColorAttributeType
except AttributeError:
    raisedException = True
assert raisedException, "Accessing ColorAttributeType must have raised an exception."

# Switch on 4.1 compatibility: the legacy string-valued properties reappear
# and version comparisons now reflect the pinned version.
paraview.compatibility.major = 4
paraview.compatibility.minor = 1

assert ((paraview.compatibility.GetVersion() < 4.1) == False), "version comparison failed"
assert ((paraview.compatibility.GetVersion() <= 4.1) == True), "version comparison failed"
assert ((paraview.compatibility.GetVersion() > 4.1) == False), "version comparison failed"
assert ((paraview.compatibility.GetVersion() >= 4.1) == True), "version comparison failed"

a = r.ColorAttributeType
assert (type(a) == str), "'ColorAttributeType' must return a string"
a = r.ColorArrayName
assert (type(a) == str), "'ColorArrayName' must return a string"

r.ColorAttributeType = "CELL_DATA"
r.ColorArrayName = "Alpha"

# Back to no compatibility version: the legacy-style writes above must be
# reflected in the modern 2-tuple representation.
paraview.compatibility.major = None
paraview.compatibility.minor = None
assert (r.ColorArrayName[:] == ["CELLS", "Alpha"]), "'ColorArrayName' value is not as expected."
|
import os
from GTG import _
class GnomeConfig:
    """Static UI resources and translated status messages for the GTG
    plugin manager dialog."""

    # directory containing this module; used to locate the glade file
    current_rep = os.path.dirname(os.path.abspath(__file__))
    GLADE_FILE = os.path.join(current_rep, "pluginmanager.glade")

    # status messages shown per plugin, depending on which of its
    # dependencies (python modules / dbus services) are available
    CANLOAD = _("Everything necessary to run this plugin is available.")
    CANNOTLOAD = _("The plugin can not be loaded")
    miss1 = _("Some python modules are missing")
    miss2 = _("Please install the following python modules:")
    MODULEMISSING = "%s \n%s" % (miss1, miss2)
    dmiss1 = _("Some remote dbus objects are missing.")
    dmiss2 = _("Please start the following applications:")
    DBUSMISSING = "%s \n%s" % (dmiss1, dmiss2)
    bmiss1 = _("Some modules and remote dbus objects are missing.")
    bmiss2 = _("Please install or start the following components:")
    MODULANDDBUS = "%s \n%s" % (bmiss1, bmiss2)
    umiss1 = _("Unknown error while loading the plugin.")
    umiss2 = _("Very helpful message, isn't it? Please report a bug.")
    UNKNOWN = "%s \n%s" % (umiss1, umiss2)
|
import re
import string
from lib.parser.getcc import *
from lib.parser.getmail import *
from lib.parser.getip import *
from lib.parser.getssn import *
class parse:
    """Cleans an HTML response and extracts emails, IPs, credit-card
    numbers and SSNs via the lib.parser helpers."""

    # markup tags stripped outright, in this order
    _TAGS = ('<em>', '<b>', '</b>', '<strong>', '</strong>', '</em>',
             '<wbr>', '</wbr>', '<li>', '</li>')
    # separator strings (and URL-encoded forms) replaced by spaces
    _SEPARATORS = ('>', ':', '=', '<', '/', '\\', ';', '&', '%3A', '%3D', '%3C')

    def __init__(self, content):
        """:param content: raw HTML/text to be scanned"""
        self.content = content

    def clean(self):
        """Clean HTML Response: strip formatting tags and turn separator
        characters into spaces so the extractors see plain tokens."""
        # The tags are fixed strings, so plain str.replace is equivalent to
        # (and clearer than) the previous re.sub calls; str.replace also
        # replaces the Python-2-only string.replace() used for separators.
        for tag in self._TAGS:
            self.content = self.content.replace(tag, '')
        for sep in self._SEPARATORS:
            self.content = self.content.replace(sep, ' ')

    def getmail(self):
        """Get Emails"""
        self.clean()
        return getmail(self.content)

    def getip(self):
        """Get IPs"""
        self.clean()
        return getip(self.content)

    def getcc(self):
        """Get Credit Cards"""
        self.clean()
        return getcc(self.content)

    def getssn(self):
        """Get SSNs"""
        self.clean()
        return getssn(self.content)
|
from django.db import migrations, models
class Migration(migrations.Migration):
    """Schema migration: redefine the 'action' field on
    ruleprocessingfilter as CharField(max_length=10)."""

    dependencies = [
        ('rules', '0077_auto_20191002_0820'),
    ]

    operations = [
        migrations.AlterField(
            model_name='ruleprocessingfilter',
            name='action',
            field=models.CharField(max_length=10),
        ),
    ]
|
"""
latex.preview
"""
from ..file import File
from ..tools import Tool, Job, ToolRunner
from ..tools.postprocess import RubberPostProcessor, GenericPostProcessor
from ..issues import MockStructuredIssueHandler
from environment import Environment
from gi.repository import GdkPixbuf
class ImageToolGenerator(object):
    """
    This generates Tools for rendering images from LaTeX source
    """

    # supported output formats
    FORMAT_PNG, FORMAT_JPEG, FORMAT_GIF = 1, 2, 3
    # PNG color modes, mapped to eps2png/ghostscript device suffixes below
    PNG_MODE_MONOCHROME, PNG_MODE_GRAYSCALE, PNG_MODE_RGB, PNG_MODE_RGBA = 1, 2, 3, 4

    def __init__(self):
        # human-readable Tool labels per format
        self._names = {self.FORMAT_PNG : "PNG Image", self.FORMAT_JPEG : "JPEG Image", self.FORMAT_GIF : "GIF Image"}
        # PNG mode -> device suffix ("png" + suffix)
        self._png_modes = {self.PNG_MODE_MONOCHROME : "mono", self.PNG_MODE_GRAYSCALE : "gray", self.PNG_MODE_RGB : "16m",
                           self.PNG_MODE_RGBA : "alpha"}

        # default settings
        self.format = self.FORMAT_PNG
        self.png_mode = self.PNG_MODE_RGBA
        # render via an EPS with a tight bounding box (vs. full-page dvips)
        self.render_box = True
        # match the screen resolution by default
        self.resolution = int(round(Environment().screen_dpi))
        self.antialias_factor = 4
        # open the resulting image with the default viewer when done
        self.open = False

    def generate(self):
        """
        Build the job pipeline for the selected format and options.

        @return: a Tool object
        """
        tool = Tool(label=self._names[self.format], jobs=[], description="", accelerator="", extensions=[])

        # use rubber to render a DVI
        tool.jobs.append(Job("rubber --force --short --inplace \"$filename\"", True, RubberPostProcessor))

        if self.render_box:
            # DVI -> PS
            # -D num resolution in DPI
            # -q quiet mode
            # -E generate an EPSF file with a tight bounding box
            tool.jobs.append(Job("dvips -D %s -q -E -o \"$shortname.eps\" \"$shortname.dvi\"" % self.resolution, True, GenericPostProcessor))

            # EPS -> PNG|JPG|GIF via the bundled eps2png script
            if self.format == self.FORMAT_PNG:
                command = "$plugin_path/util/eps2png.pl -png%s -resolution=%s -antialias=%s \"$shortname.eps\"" % (self._png_modes[self.png_mode],
                                                                                                                  self.resolution, self.antialias_factor)
            elif self.format == self.FORMAT_JPEG:
                command = "$plugin_path/util/eps2png.pl -jpeg -resolution=%s -antialias=%s \"$shortname.eps\"" % (self.resolution, self.antialias_factor)
            elif self.format == self.FORMAT_GIF:
                command = "$plugin_path/util/eps2png.pl -gif -resolution=%s -antialias=%s \"$shortname.eps\"" % (self.resolution, self.antialias_factor)
            tool.jobs.append(Job(command, True, GenericPostProcessor))
        else:
            # full-page dvips, then ghostscript rasterizes the PS
            tool.jobs.append(Job("dvips -D %s -q -o \"$shortname.ps\" \"$shortname.dvi\"" % self.resolution, True, GenericPostProcessor))
            if self.format == self.FORMAT_PNG:
                tool.jobs.append(Job("gs -q -dNOPAUSE -r%s -dTextAlphaBits=%s -dGraphicsAlphaBits=%s -sDEVICE=png%s -sOutputFile=$shortname.png $shortname.ps quit.ps"
                                     % (self.resolution, self.antialias_factor, self.antialias_factor, self._png_modes[self.png_mode]), True, GenericPostProcessor))
            elif self.format == self.FORMAT_JPEG:
                tool.jobs.append(Job("gs -q -dNOPAUSE -r%s -dTextAlphaBits=%s -dGraphicsAlphaBits=%s -sDEVICE=jpeg -sOutputFile=$shortname.jpg $shortname.ps quit.ps"
                                     % (self.resolution, self.antialias_factor, self.antialias_factor), True, GenericPostProcessor))
            elif self.format == self.FORMAT_GIF:
                # no gif device in ghostscript: go via ppm, then ppmtogif
                tool.jobs.append(Job("gs -q -dNOPAUSE -r%s -dTextAlphaBits=%s -dGraphicsAlphaBits=%s -sDEVICE=ppm -sOutputFile=$shortname.ppm $shortname.ps quit.ps"
                                     % (self.resolution, self.antialias_factor, self.antialias_factor), True, GenericPostProcessor))
                # ppmtogif
                tool.jobs.append(Job("ppmtogif $shortname.ppm > $shortname.gif", True, GenericPostProcessor))

        if self.open:
            extension = {self.FORMAT_PNG : "png", self.FORMAT_JPEG: "jpg", self.FORMAT_GIF : "gif"}[self.format]
            tool.jobs.append(Job("gvfs-open \"$shortname.%s\"" % extension, True, GenericPostProcessor))

        return tool
from tempfile import NamedTemporaryFile
class PreviewRenderer(ToolRunner):
    """Renders a preview image from a LaTeX snippet.

    Wraps the snippet in a minimal article document, runs the generated
    image Tool on it and reports the result through the
    _on_render_succeeded/_on_render_failed template methods.
    """

    def render(self, source):
        """
        Render a preview image from LaTeX source
        @param source: some LaTeX source without \begin{document}
        """
        # create temp file with source
        self._temp_file = NamedTemporaryFile(mode="w", suffix=".tex")
        self._temp_file.write("\\documentclass{article}\\pagestyle{empty}\\begin{document}%s\\end{document}" % source)
        self._temp_file.flush()

        # generate Tool
        tool = ImageToolGenerator().generate()

        self._file = File(self._temp_file.name)
        # issues from the build are collected but not surfaced
        issue_handler = MockStructuredIssueHandler()

        # run the Tool
        self.run(self._file, tool, issue_handler)

    def _on_tool_succeeded(self):
        # see ToolRunner._on_tool_succeeded
        # the pipeline produced <shortname>.png next to the temp .tex file
        pixbuf = GdkPixbuf.Pixbuf.new_from_file(self._file.shortname + ".png")
        self.__cleanup()
        self._on_render_succeeded(pixbuf)

    def _on_tool_failed(self):
        # see ToolRunner._on_tool_failed
        self.__cleanup()
        self._on_render_failed()

    def __cleanup(self):
        """
        Remove the files created during the render process
        """
        # delete the temp file
        self._temp_file.close()

        # delete all files created during the build process
        for file in self._file.siblings:
            try:
                file.delete()
                self._log.debug("Removed %s" % file)
            except OSError:
                self._log.error("Failed to remove '%s'" % file)

    def _on_render_succeeded(self, pixbuf):
        """
        The rendering process has finished successfully
        @param pixbuf: a GdkPixbuf.Pixbuf containing the result image
        """

    def _on_render_failed(self):
        """
        The rendering process has failed
        """
|
# Execute the shared plugin code ('common.py') and collect its definitions;
# execfile_ is supplied by the provisioning plugin framework.
common_globals = {}
execfile_('common.py', common_globals)

# Polycom VVX models covered by this plugin
MODELS = [
    u'VVX300',
    u'VVX310',
    u'VVX400',
    u'VVX410',
    u'VVX500',
    u'VVX600',
    u'VVX1500',
]
# firmware version this plugin provisions
VERSION = u'4.1.7'

class PolycomPlugin(common_globals['BasePolycomPlugin']):
    """Provisioning plugin for the Polycom VVX family, firmware 4.1.7."""
    # marker consumed by the plugin loader
    IS_PLUGIN = True
    pg_associator = common_globals['BasePolycomPgAssociator'](MODELS, VERSION)
|
from .Localhost import Localhost
|
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponse
from django.template import RequestContext,Template
from django.template.loader import get_template
from django.shortcuts import render_to_response
from datetime import datetime, timedelta
from WeatherMappingStudio.WMI.models import Sender,Data,Location
import json
import dateutil.parser
@csrf_exempt
def create_hourly_json(request):
    """Build a JSON string of averaged weather readings for a location.

    GET parameters:
        locId  -- primary key of the Location to report on.
        period -- 'Daily' (last 24 hourly slots), 'Weekly' (7 days),
                  'Monthly' (current month to date) or 'ThirtyDays'.

    Returns an HttpResponse whose body is a JSON array of
    {"period", "temp", "hum", "windvel"} objects; slots with no samples
    get empty-string values.  Returns "Location not registered" when
    locId is missing/empty.

    NOTE(review): the local variable ``json`` below shadows the imported
    ``json`` module; the body is built by string concatenation, so the
    module is unused in this view.
    """
    loc_id = ""
    json = ""          # accumulated response body (NOT the json module)
    counter = 0        # elements emitted so far; controls comma placement
    time = 0           # number of slots (hours or days) to report
    dataset_dt = ""
    start_dt = ""      # timestamp of the newest reading; anchors the window
    curr_dt = ""
    pre_dt = ""
    temp = ""
    hum = ""
    windvel =""
    letter = 'A'       # only used by the commented-out legacy code below
    if request.method =="GET":
        # NOTE(review): raises MultiValueDictKeyError if either GET param
        # is absent -- confirm callers always send locId and period.
        loc_id = request.GET['locId']
        option = request.GET['period']
        print 'OPTION :',option
    if loc_id:
        location_obj = Location.objects.get(id = loc_id)
        data_set = Data.objects.filter(location = loc_id)
        # Newest reading for this location; .latest() relies on the model's
        # get_latest_by meta option.
        data = data_set.latest()
        timestamp = data.time_stamp
        data_query_set = Data.objects.filter(location = loc_id)#.distinct('time_stamp')
        start_dt = timestamp
    else :
        return HttpResponse("Location not registered")
    if (option == 'Daily'):
        time = 24
    elif(option == 'Weekly'):
        time = 7
    elif (option == 'Monthly'):
        # Month-to-date: one slot per day elapsed in the current month.
        time = int(start_dt.day)
    elif (option == 'ThirtyDays'):
        time = 30
    #for last 24 records
    #result = data_query_set.order_by('time_stamp').reverse()[:24]
    #for last 24hrs new
    if option == 'Daily':
        json +='['
        for i in range(time):
            if (counter>0):
                json += ","
            # Walk backwards one hour at a time from the newest reading.
            curr_dt = start_dt - timedelta(hours=i)
            # NOTE(review): exact-equality timestamp match -- assumes rows
            # are stored on exact hour boundaries (incl. microseconds);
            # otherwise every slot comes back empty.  Confirm.
            hour_query_set = data_query_set.filter(time_stamp = curr_dt)
            hour = str(curr_dt.hour)
            if hour_query_set:
                tot_temp = 0
                tot_hum = 0
                tot_windvel = 0
                for rows in hour_query_set:
                    tot_temp += rows.temperature
                    tot_hum += rows.humidity
                    tot_windvel += rows.windvelocity
                length = len(hour_query_set)
                # NOTE(review): on Python 2, '/' truncates for ints --
                # assumes these model fields are floats/Decimals. Confirm.
                temp = str(round(tot_temp/length,2))
                hum = str(round(tot_hum/length,2))
                windvel = str(round(tot_windvel/length,2))
                json +='\n{"period":' + hour + ', "temp":' + temp + ', "hum":' + hum + ', "windvel":' + windvel + '}'
            else:
                # No samples for this hour: emit empty-string placeholders.
                json +='\n{"period":' + hour + ', "temp":"", "hum":"", "windvel":""}'
            counter += 1
        json += '\n]'
    else:
        #for past week / current month / past 30 days
        json +='[\n'
        for i in range(time):
            if (counter>0):
                json += ","
            # Walk backwards one day at a time; average all readings of
            # that calendar day.
            curr_dt = start_dt - timedelta(days=i)
            day_query_set = data_query_set.filter(time_stamp__year = curr_dt.year, time_stamp__month = curr_dt.month, time_stamp__day = curr_dt.day)
            day = str(curr_dt.day)
            if day_query_set:
                tot_temp = 0
                tot_hum = 0
                tot_windvel = 0
                length = len(day_query_set)
                for rows in day_query_set:
                    tot_temp += rows.temperature
                    tot_hum += rows.humidity
                    tot_windvel += rows.windvelocity
                temp = str(round(tot_temp/length,2))
                hum = str(round(tot_hum/length,2))
                windvel = str(round(tot_windvel/length,2))
                json +='\n{"period":' + day + ', "temp":' + temp + ', "hum":' + hum + ', "windvel":' + windvel + '}'
            else:
                json +='\n{"period":' + day + ', "temp":"", "hum":"", "windvel":""}'
            counter += 1
        json += '\n]'
    #for last 24 hrs old
    #time_threshold_less = timestamp - timedelta(hours=23)
    #result = data_query_set.order_by('time_stamp').reverse().filter(time_stamp__range=(time_threshold_less,timestamp))
    #curr_dt = start_dt
    #json +='['
    #for rows in result:
        #if (counter>0):
        #    json += ","
        #extracting data for passing
    #    dataset_dt = rows.time_stamp
    #    temp = str(rows.temperature)
    #    hum = str(rows.humidity)
    #    windvel = str(rows.windvelocity)
        #for repeating values
    #    if(counter>0 and dataset_dt == pre_dt):
    #        continue
        #for matching hour count
    #    if(dataset_dt == curr_dt):
    #        json += '\n{"hrs":' + str(dataset_dt.hour) + ', "temp":' + temp + ', "hum":' + hum + ', "windvel":' + windvel +', "letter":"' + chr(ord(letter) + counter) + '"}'
    #        if (chr(ord(letter) + counter) != 'X'):
    #            json += ','
    #    elif(dataset_dt < curr_dt):
    #        while(dataset_dt < curr_dt):
    #            json += '\n{"hrs":' + str(curr_dt.hour) + ', "temp":"", "hum":"" , "windvel":"", "letter":"' + chr(ord(letter) + counter) + '"}'
    #            if (chr(ord(letter) + counter) != 'X'):
    #                json += ','
    #            counter += 1
    #            curr_dt -= timedelta(hours=1)
    #        if(dataset_dt == curr_dt):
    #            json += '\n{"hrs":' + str(dataset_dt.hour) + ', "temp":' + temp + ', "hum":' + hum + ', "windvel":' + windvel +', "letter":"' + chr(ord(letter) + counter) + '"}'
    #            if (chr(ord(letter) + counter) != 'X'):
    #                json += ','
    #    else:
    #        json += '\n{"hrs":' + str(curr_dt.hour) + ', "temp":"", "hum":"" , "windvel":"", "letter":"' + chr(ord(letter) + counter) + '"}'
    #        if (chr(ord(letter) + counter) != 'X'):
    #            json += ','
        #shifting hr_count val to pre_hr_count, incrementing counter and hr_count
    #    counter += 1
    #    pre_dt = curr_dt
    #    curr_dt -= timedelta(hours=1)
    #json += '\n]'
    return HttpResponse(json)
|
import sys
# Make the BallotPath API package importable from its checkout location.
sys.path.insert(0,"/home/ahobbs/venv_dir/BallotPath/api")
from app import app as application
# NOTE(review): starts the framework's built-in dev server with debug=True
# (interactive debugger) bound to all interfaces.  If this file is ever
# loaded as a WSGI module (the ``application`` alias suggests so), the dev
# server would start at import time -- confirm intended usage.
application.run(host='0.0.0.0', port=6112, debug=True);
|
# In-place insertion sort over ``arr`` (defined elsewhere): for each element,
# shift larger predecessors one slot right, then drop the element into place.
for j in range(1, len(arr)):
    key = arr[j]
    i = j - 1
    # BUG FIX: was ``while i > 0``, which never compared against arr[0],
    # so the first element was left out of the sorted prefix.
    while i >= 0 and arr[i] > key:
        arr[i + 1] = arr[i]
        i = i - 1
    arr[i + 1] = key
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: creates the ``UserProfile`` model plus its
    two M2M join tables (``followees`` and ``blocked``)."""

    def forwards(self, orm):
        """Apply the migration (create tables and uniqueness constraints)."""
        # Adding model 'UserProfile'
        db.create_table('findeco_userprofile', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('user', self.gf('django.db.models.fields.related.OneToOneField')(related_name=u'profile', unique=True, to=orm['auth.User'])),
            ('description', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('activationKey', self.gf('django.db.models.fields.TextField')(blank=True)),
        ))
        db.send_create_signal('findeco', ['UserProfile'])
        # Adding M2M table for field followees on 'UserProfile'
        # (self-referential: a profile follows other profiles)
        db.create_table('findeco_userprofile_followees', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('from_userprofile', models.ForeignKey(orm['findeco.userprofile'], null=False)),
            ('to_userprofile', models.ForeignKey(orm['findeco.userprofile'], null=False))
        ))
        # Each (follower, followee) pair may exist only once.
        db.create_unique('findeco_userprofile_followees', ['from_userprofile_id', 'to_userprofile_id'])
        # Adding M2M table for field blocked on 'UserProfile'
        db.create_table('findeco_userprofile_blocked', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('from_userprofile', models.ForeignKey(orm['findeco.userprofile'], null=False)),
            ('to_userprofile', models.ForeignKey(orm['findeco.userprofile'], null=False))
        ))
        db.create_unique('findeco_userprofile_blocked', ['from_userprofile_id', 'to_userprofile_id'])

    def backwards(self, orm):
        """Reverse the migration (drop the tables created in forwards)."""
        # Deleting model 'UserProfile'
        db.delete_table('findeco_userprofile')
        # Removing M2M table for field followees on 'UserProfile'
        db.delete_table('findeco_userprofile_followees')
        # Removing M2M table for field blocked on 'UserProfile'
        db.delete_table('findeco_userprofile_blocked')

    # Frozen ORM snapshot used by South to reconstruct model state at this
    # point in the migration history.  Do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'findeco.userprofile': {
            'Meta': {'object_name': 'UserProfile'},
            'activationKey': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'blocked': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'blocked_by'", 'blank': 'True', 'to': "orm['findeco.UserProfile']"}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'followees': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'followers'", 'blank': 'True', 'to': "orm['findeco.UserProfile']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "u'profile'", 'unique': 'True', 'to': "orm['auth.User']"})
        }
    }

    complete_apps = ['findeco']
|
import urllib,urllib2
import xml.etree.ElementTree as ET
import xml.etree as etree
def get_woeid(lat,lon):
    """Resolve a latitude/longitude pair to a Yahoo WOEID via the public
    YQL geo.placefinder endpoint (reverse-geocode flag ``R``)."""
    query_url = ("http://query.yahooapis.com/v1/public/yql?q=select%20*%20from%20"
                 "geo.placefinder%20where%20text%3D%22{0}%2C{1}%22%20and%20"
                 "gflags%3D%22R%22".format(repr(lat), repr(lon)))
    response = urllib2.urlopen(query_url)
    root = ET.parse(response).getroot()
    # Drill down the XML: results -> Result -> woeid (first match of each).
    woeid_node = root.findall('results')[0].findall('Result')[0].findall('woeid')[0]
    return woeid_node.text
def get_weather(lat,lon):
    """Fetch Yahoo's forecast RSS for the given coordinates and return the
    human-readable description block, stripped of its HTML header/footer."""
    place_id = get_woeid(lat,lon)
    feed_url = "http://weather.yahooapis.com/forecastrss?w=%s"%place_id
    root = ET.parse(urllib2.urlopen(feed_url)).getroot()
    description = root.findall('channel')[0].findall('item')[0].findall('description')[0].text
    # First two and last three lines are markup/boilerplate; keep the middle.
    lines = description.split('\n')
    return '\n'.join(lines[2:-3])
|
import datetime
import json
import logging
import urllib
import urlparse
import uuid

from tornado import gen
from tornado.httpclient import AsyncHTTPClient, HTTPResponse

from motherbrain.com.messages import MBMessage
# Base address of the Go fetch service that backs most actions.
fetch_addr = 'http://127.0.0.1/__fetch'
# Maps an API action name to (server base URL, handler class name).  The
# class name is resolved from this module's globals by GoHandler.can_handle.
GO_ACTIONS = {'fetch_url_data': ['http://127.0.0.1:5100', 'GoFetchUrlDataHandler'],
              'fetch_list': [fetch_addr, 'GoFetchHandler'],
              'fetch_discovery': [fetch_addr, 'GoFetchDiscoveryHandler'],
              'fetch_landing': [fetch_addr, 'GoFetchLandingHandler'],
              'fetch_user': [fetch_addr, 'GoFetchUserHandler'],
              'fetch_user_network': [fetch_addr, 'GoFetchUserNetworkHandler'],
              'fetch_focuson': [fetch_addr, 'GoFetchFocusOnHandler'],
              'fetch_toplists': [fetch_addr, 'GoFetchTopListsHandler'],
              'fetch_followmore': [fetch_addr, 'GoFetchFollowMoreHandler'],
              'fetch_topusers': [fetch_addr, 'GoFetchTopUsersHandler'],
              'fetch_popular': [fetch_addr, 'GoFetchPopularHandler'],
              'fetch_user_profile': [fetch_addr, 'GoFetchProfileHandler'],
              'fetch_contrib_notifications': [fetch_addr, 'GoFetchContribNotifications'],
              'search_url': [fetch_addr, 'GoSearchUrl'],
              'beta_search_url': [fetch_addr, 'GoGlobalSearchUrl'],
              'fetch_hashtag': [fetch_addr, 'Hashtag'],
              'fetch_hashtags': [fetch_addr, 'Hashtags'],
              'fetch_facebook_friends': [fetch_addr, 'FacebookFriends'],
              'fetch_facebook_links': [fetch_addr, 'FacebookLinks'],
              'fetch_list_by_categories': [fetch_addr, 'Category'],
              'fetch_notifications': [fetch_addr, 'Notifications'],
              }
class GoHandler(object):
    """Base class for proxying API messages to the Go fetch service.

    Subclasses implement ``endpoint()`` (URL to call) and, where needed,
    ``make_response()`` (translate the raw HTTP response into an MBMessage).
    """

    def __init__(self, handler, api_msg):
        target = api_msg.target or {}
        payload = api_msg.payload or {}
        self.handler = handler
        self.action = api_msg.action
        # Merge target over payload: target keys win on conflict.
        self.payload = dict(payload, **target)
        self.db = handler.db
        self.api_msg = api_msg
        self.msg_id = self.api_msg.msg_id
        self.context = self.api_msg.context

    def make_cache_callback(self):
        """Return a gen.Task-compatible callable that immediately resolves
        with a pre-computed (cached) result."""
        def _callback(cached_result, callback=None):
            callback(cached_result)
        return _callback

    def server_addr(self):
        """Base URL of the Go server configured for this action."""
        return GO_ACTIONS.get(self.action)[0]

    @staticmethod
    def can_handle(handler, api_msg):
        """Return a concrete GoHandler for *api_msg*, or None when the
        action is unknown or its handler class is not defined."""
        action = api_msg.action
        if action not in GO_ACTIONS:
            return None
        cls_name = GO_ACTIONS.get(action)[1]
        cls = globals().get(cls_name)
        if not cls:
            return None
        return cls(handler, api_msg)

    @staticmethod
    def hyperdrive_dispatch(handler, go_handlers, api_msg):
        """Dispatch *api_msg* to the Go service when possible, registering
        the in-flight handler under its message id; otherwise fall back to
        the regular handler dispatch."""
        go = GoHandler.can_handle(handler, api_msg)
        # BUG FIX: uuid.uuid4 was referenced without calling it, producing
        # "<function uuid4 ...>" instead of a fresh UUID string (and uuid
        # was never imported).
        msg_id = api_msg.msg_id or str(uuid.uuid4())
        if go:
            go_handlers[msg_id] = go
            dsp = go.dispatch()
        else:
            dsp = handler.dispatch(api_msg)
        return dsp

    @staticmethod
    def hyperdrive_decode(go_handlers, raw_response):
        """Match a raw Go HTTP response back to its registered handler via
        the echoed 'RequestId' and let that handler build the reply.
        Non-HTTPResponse objects pass through untouched."""
        if not isinstance(raw_response, HTTPResponse):
            return raw_response
        response_body = raw_response.body
        try:
            response_data = json.loads(response_body)
        except ValueError:
            response_data = None
        if not response_data:
            return None
        request_id = response_data.get('RequestId')
        # NOTE: raises KeyError when the id is unknown -- callers rely on
        # registration having happened in hyperdrive_dispatch.
        go_handler = go_handlers[request_id]
        return go_handler.make_response(raw_response)
class GoFetchUrlDataHandler(GoHandler):
    """Fetch per-URL metadata (currently the favicon) from the Go service,
    with a MongoDB-backed cache keyed by the URL's network location."""

    def cache_set(self, cache_key, favicon_addr):
        """Upsert the favicon address for *cache_key* (a netloc)."""
        if not favicon_addr:
            return None
        cache = self.db.favicon_cache
        cache.update({'netloc': cache_key},
                     {'$set': {'address': favicon_addr}},
                     upsert=True)

    def make_response(self, response_data):
        """Store a freshly discovered favicon (cache + embedded list doc)
        and wrap it in an MBMessage reply."""
        response_body = response_data.body
        response_data = json.loads(response_body)
        cache_key = getattr(self, 'cache_key', None)
        favicon = response_data.get('Favicon')
        if favicon:
            if cache_key:
                self.cache_set(cache_key, favicon)
            # Also denormalize onto the matching url entry inside the list.
            self.db.urlists.update({'hash': self.payload.get('list_hash'),
                                    'urls.hash': self.payload.get('url_hash')},
                                   {'$set': {'urls.$.favicon': favicon}})
        return MBMessage(self.api_msg.msg_id, self.action,
                         {}, {'favicon': favicon}, self.api_msg.context)

    def endpoint(self):
        """Go-service URL carrying the target page URL as a query string."""
        server_addr = self.server_addr()
        url_data = self._get_url_data(self.payload.get('list_hash'),
                                      self.payload.get('url_hash'))
        url = url_data.get('url')
        qs = urllib.urlencode({'url': url.encode('UTF-8')})
        return "{}?{}".format(server_addr, qs)

    def cache_get(self, endpoint):
        """Return a cached favicon response for *endpoint*, or None on miss.

        Side effect: sets ``self.cache_key`` to the URL's netloc so that
        make_response can write back on a later miss."""
        logging.debug('CACHE GET --- {}'.format(endpoint))

        def get_url_from_qs():
            url_parts = urlparse.urlparse(endpoint)
            qs_data = urlparse.parse_qs(url_parts.query) or {}
            url = qs_data.get('url')
            # BUG FIX: was ``if not len(url)``, which raised TypeError when
            # the 'url' key was absent (get() returns None).
            if not url:
                return None
            return url[0]

        def get_netloc(url):
            url_parts = urlparse.urlparse(url)
            return url_parts.netloc or None

        url = get_url_from_qs()
        if not url:
            return None
        netloc = get_netloc(url)
        if not netloc:
            return None
        self.cache_key = netloc
        cache = self.db.favicon_cache
        entry = cache.find_one({'netloc': netloc})
        logging.info("CACHE KEY --- {}".format(self.cache_key))
        if not entry:
            logging.debug('CACHE MISS --- {}'.format(endpoint))
            return None
        logging.debug('CACHE HIT --- {}'.format(endpoint))

        # Minimal stand-in for an HTTP response: only .body is consumed.
        class CachedResponse(object):
            def __init__(self, body):
                self.body = body
        response = CachedResponse(json.dumps({'Favicon': entry.get('address')}))
        return response

    def dispatch(self):
        """Serve from cache when possible; otherwise fetch asynchronously."""
        logging.info('GO DISPATCH --- {}'.format(self.api_msg))
        endpoint = self.endpoint()
        cached_result = self.cache_get(endpoint)
        if cached_result:
            return gen.Task(self.make_cache_callback(), cached_result)
        http_client = AsyncHTTPClient()
        return gen.Task(http_client.fetch, endpoint)

    def _get_url_data(self, list_hash, url_hash):
        """Return the url sub-document with *url_hash* from the list with
        *list_hash*, or {} when either is missing."""
        urlists = self.db.urlists
        list_data = urlists.find_one({'hash': list_hash})
        if not list_data:
            return {}
        _urls = [x for x in list_data.get('urls', [])
                 if x.get('hash') == url_hash]
        if not len(_urls):
            return {}
        return _urls[0]
class GoFetchHandler(GoHandler):
    """Generic list fetch: proxies to the Go service and converts its JSON
    envelope (ResponseCode/Payload/Callback) into an MBMessage.  Most other
    fetch handlers subclass this and override only ``endpoint()``."""

    def endpoint(self):
        """Go-service URL for a list, keyed by the payload's list_hash."""
        server_addr = u"{}/list/{}".format(self.server_addr(), self.payload.get('list_hash'))
        qs_data = {'request_id': self.msg_id,
                   'user_id': self.context.get('current_user_id')}
        return u"{}?{}".format(server_addr, urllib.urlencode(qs_data))

    def dispatch(self):
        """Fire the async HTTP fetch, forwarding any JSONP callback name."""
        logging.info(u'GO DISPATCH --- {}'.format(self.api_msg))
        endpoint = self.endpoint()
        callback = self.handler.get_argument("callback", "")
        endpoint = "{}&callback={}".format(endpoint, callback)
        logging.info("Callback: {}".format(endpoint))
        http_client = AsyncHTTPClient()
        return gen.Task(http_client.fetch, endpoint)

    def make_response(self, raw_response):
        """Decode the Go JSON envelope into an MBMessage.

        Returns None for unparsable or non-dict bodies.  When the envelope
        carries a Callback name, the payload is wrapped as a JSONP string.
        A non-empty ResponseCode replaces the action in the reply."""
        response_body = raw_response.body
        try:
            response_payload = json.loads(response_body)
        except ValueError:
            return None
        if not isinstance(response_payload, dict):
            return None
        response_code = response_payload.get("ResponseCode", None)
        payload = response_payload.get("Payload")
        callback = response_payload.get("Callback")
        if callback:
            payload = "{}({});".format(callback, json.dumps(payload))
        if not response_code:
            action = self.action
        else:
            action = response_code
        return MBMessage(self.api_msg.msg_id, action,
                         {}, payload or {}, self.api_msg.context)
class GoFetchDiscoveryHandler(GoFetchHandler):
    """Discovery feed for the current user."""

    def endpoint(self):
        uid = self.context.get('current_user_id')
        base = u"{}/discovery/{}".format(self.server_addr(), uid)
        query = urllib.urlencode({'request_id': self.msg_id, 'user_id': uid})
        return u"{}?{}".format(base, query)
class GoFetchLandingHandler(GoFetchHandler):
    """Landing page content, keyed by the payload's 'key'."""

    def endpoint(self):
        base = u"{}/landing/{}".format(self.server_addr(), self.payload.get('key'))
        query = urllib.urlencode({'request_id': self.msg_id,
                                  'user_id': self.context.get('current_user_id')})
        return u"{}?{}".format(base, query)
class GoFetchProfileHandler(GoFetchHandler):
    """Public profile of the user named in the payload."""

    def endpoint(self):
        base = u"{}/profile/{}".format(self.server_addr(), self.payload.get('user_id'))
        query = urllib.urlencode({'request_id': self.msg_id,
                                  'user_id': self.context.get('current_user_id')})
        return u"{}?{}".format(base, query)
class GoFetchUserHandler(GoFetchHandler):
    """User record for the payload's user_id."""

    def endpoint(self):
        base = u"{}/user/{}".format(self.server_addr(), self.payload.get('user_id'))
        query = urllib.urlencode({'request_id': self.msg_id,
                                  'user_id': self.context.get('current_user_id')})
        return u"{}?{}".format(base, query)
class GoFetchContribNotifications(GoFetchHandler):
    """Contribution notifications for the currently logged-in user."""

    def endpoint(self):
        uid = self.context.get('current_user_id')
        base = u"{}/contrib-notifications/{}".format(self.server_addr(), uid)
        query = urllib.urlencode({'request_id': self.msg_id, 'user_id': uid})
        return u"{}?{}".format(base, query)
class GoSearchUrl(GoFetchHandler):
    """URL search scoped to the user; the query goes in the path."""

    def endpoint(self):
        # NOTE: assumes 'query' is present in the payload (quote(None) raises).
        term = self.payload.get('query')
        base = u"{}/search-results/{}".format(self.server_addr(), urllib.quote(term))
        query = urllib.urlencode({'request_id': self.msg_id,
                                  'user_id': self.context.get('current_user_id')})
        return u"{}?{}".format(base, query)
class GoGlobalSearchUrl(GoFetchHandler):
    """Global (beta) URL search with an optional scope ('me' by default)."""

    def endpoint(self):
        term = self.payload.get('query')
        base = u"{}/global-search-results/{}".format(self.server_addr(), urllib.quote(term))
        query = urllib.urlencode({'request_id': self.msg_id,
                                  'user_id': self.context.get('current_user_id'),
                                  'scope': self.payload.get('scope', 'me')})
        full_url = u"{}?{}".format(base, query)
        logging.info(full_url)
        return full_url
class GoFetchUserNetworkHandler(GoFetchHandler):
    """Social network (followers/following, per 'mode') of a user."""

    def endpoint(self):
        base = u"{}/network/{}".format(self.server_addr(),
                                       self.payload.get('user_id'))
        query = urllib.urlencode({'request_id': self.msg_id,
                                  'user_id': self.context.get('current_user_id'),
                                  'mode': self.payload.get('mode')})
        return u"{}?{}".format(base, query)
class GoFetchFocusOnHandler(GoFetchHandler):
    """'Focus on' content for an edition (empty edition allowed)."""

    def endpoint(self):
        base = u"{}/focuson/{}".format(self.server_addr(), self.payload.get('edition', ''))
        query = urllib.urlencode({'request_id': self.msg_id,
                                  'user_id': self.context.get('current_user_id')})
        return u"{}?{}".format(base, query)
class GoFetchPopularHandler(GoFetchHandler):
    """Globally popular content (no extra path parameters)."""

    def endpoint(self):
        base = u"{}/popular/".format(self.server_addr())
        query = urllib.urlencode({'request_id': self.msg_id,
                                  'user_id': self.context.get('current_user_id')})
        return u"{}?{}".format(base, query)
class GoFetchTopListsHandler(GoFetchHandler):
    """Top lists for an edition (empty edition allowed)."""

    def endpoint(self):
        base = u"{}/toplists/{}".format(self.server_addr(),
                                        self.payload.get('edition', ''))
        query = urllib.urlencode({'request_id': self.msg_id,
                                  'user_id': self.context.get('current_user_id')})
        return u"{}?{}".format(base, query)
class GoFetchTopUsersHandler(GoFetchHandler):
    """Top users within the payload's category."""

    def endpoint(self):
        base = u"{}/topusers/{}".format(self.server_addr(), self.payload.get('category'))
        query = urllib.urlencode({'request_id': self.msg_id,
                                  'user_id': self.context.get('current_user_id')})
        return u"{}?{}".format(base, query)
class Category(GoFetchHandler):
    """Lists filtered by category, with sort/network options taken from the
    request's query arguments."""

    def endpoint(self):
        base = u"{}/category/{}".format(self.server_addr(), self.payload.get('categories'))
        query = urllib.urlencode({'request_id': self.msg_id,
                                  'user_id': self.context.get('current_user_id'),
                                  'sort': self.handler.get_argument("sort", ""),
                                  'network': self.handler.get_argument("network", False)})
        return u"{}?{}".format(base, query)
class Hashtag(GoFetchHandler):
    """Content for one hashtag, with sort/network request arguments
    (sort defaults to None here, unlike Category's empty string)."""

    def endpoint(self):
        base = u"{}/hashtag/{}".format(self.server_addr(), self.payload.get('hashtag'))
        query = urllib.urlencode({'request_id': self.msg_id,
                                  'user_id': self.context.get('current_user_id'),
                                  'sort': self.handler.get_argument("sort", None),
                                  'network': self.handler.get_argument("network", False)})
        return u"{}?{}".format(base, query)
class Hashtags(GoFetchHandler):
    """Full hashtag index (no path parameters)."""

    def endpoint(self):
        base = u"{}/hashtags/".format(self.server_addr())
        query = urllib.urlencode({'request_id': self.msg_id,
                                  'user_id': self.context.get('current_user_id')})
        return u"{}?{}".format(base, query)
class GoFetchFollowMoreHandler(GoFetchHandler):
    """'Follow more' suggestions for the current user."""

    def endpoint(self):
        base = u"{}/followmore/".format(self.server_addr())
        query = urllib.urlencode({'request_id': self.msg_id,
                                  'user_id': self.context.get('current_user_id')})
        return u"{}?{}".format(base, query)
class FacebookFriends(GoFetchHandler):
    """Facebook friends of the current user."""

    def endpoint(self):
        base = u"{}/facebookfriends/".format(self.server_addr())
        query = urllib.urlencode({'request_id': self.msg_id,
                                  'user_id': self.context.get('current_user_id')})
        return u"{}?{}".format(base, query)
class FacebookLinks(GoFetchHandler):
    """Links shared by the current user's Facebook network."""

    def endpoint(self):
        base = u"{}/facebooklinks/".format(self.server_addr())
        query = urllib.urlencode({'request_id': self.msg_id,
                                  'user_id': self.context.get('current_user_id')})
        return u"{}?{}".format(base, query)
class Notifications(GoFetchHandler):
    """Notifications for the user named in the payload."""

    def endpoint(self):
        base = u"{}/notifications/{}".format(self.server_addr(),
                                             self.payload.get('user_id'))
        query = urllib.urlencode({'request_id': self.msg_id,
                                  'user_id': self.context.get('current_user_id')})
        return u"{}?{}".format(base, query)
|
"""
Compute and plot the leading EOF of geopotential height on the 500 hPa
pressure surface over the European/Atlantic sector during winter time.
This example uses the metadata-retaining xarray interface.
Additional requirements for this example:
* xarray (http://xarray.pydata.org)
* matplotlib (http://matplotlib.org/)
* cartopy (http://scitools.org.uk/cartopy/)
"""
import cartopy.crs as ccrs
import matplotlib.pyplot as plt
import numpy as np
import xarray as xr
from eofs.xarray import Eof
from eofs.examples import example_data_path
filename = example_data_path('hgt_djf.nc')
# Winter (DJF) geopotential height at 500 hPa, converted to anomalies by
# removing the time-mean field.
hgt_anom = xr.open_dataset(filename)['z']
hgt_anom = hgt_anom - hgt_anom.mean(dim='time')

# Area weighting: sqrt(cos(latitude)), clipped into [0, 1] before the root,
# broadcast over the trailing (longitude) axis.
cos_lat = np.cos(np.deg2rad(hgt_anom.coords['latitude'].values)).clip(0., 1.)
lat_weights = np.sqrt(cos_lat)[..., np.newaxis]

# Leading EOF, expressed as the covariance between the PC time series and
# the input anomalies at each grid point.
eof_solver = Eof(hgt_anom, weights=lat_weights)
leading_eof = eof_solver.eofsAsCovariance(neofs=1)

# Plot on an orthographic projection centred over the Atlantic sector.
contour_levels = np.linspace(-75, 75, 11)
ax = plt.axes(projection=ccrs.Orthographic(central_longitude=-20,
                                           central_latitude=60))
ax.coastlines()
ax.set_global()
leading_eof[0, 0].plot.contourf(ax=ax, levels=contour_levels,
                                cmap=plt.cm.RdBu_r,
                                transform=ccrs.PlateCarree(),
                                add_colorbar=False)
ax.set_title('EOF1 expressed as covariance', fontsize=16)
plt.show()
|
import json
import io
import os
# Py2/Py3 compatibility shim: ``unicode`` exists only on Python 2; fall back
# to ``str`` on Python 3 so JSON text can always be written via to_unicode().
try:
    to_unicode = unicode
except NameError:
    to_unicode = str
from .tools import CloudTools
from ..pathsandnames import PathsAndNames
import uuid
class CreateJSON():
    """Build infraction-metadata JSON files next to the captured assets and
    return the parameters needed by the AWS S3 sync step."""

    def __init__(self):
        """
        Create a subfolder in s3 bucket
        Creates a JSON in local directory
        """
        pass

    @staticmethod
    def create_json(loc='', plate='', prob='', infraction_date='', infraction_hour='', image_on_s3='', video_on_s3=''):
        """Return the metadata dict serialized into every infraction JSON.

        NOTE(review): the pairing below groups *characters* of
        ``infraction_hour`` two at a time and joins them with ':' -- it
        produces 'HH:MM:SS' only when the input is a bare digit string like
        'HHMMSS'.  Callers in this module pass values already containing
        ':' separators, which yields odd output -- confirm intended format.
        """
        old_hour = iter(infraction_hour)
        new_hour = ':'.join(a + b for a, b in zip(old_hour, old_hour))
        toJSON = {
            'location': loc,
            'plate': plate,
            'prob': prob,
            'datetime': str(infraction_date),
            'hour': new_hour,
            'image_path': image_on_s3,
            'video_path': video_on_s3
        }
        return toJSON

    @staticmethod
    def create_json_private(path_to_new_image='', prob=0, plate='', json_null=False):
        """
        Creates JSON output with assets information
        :param path_to_new_image: Path to image with plate
        :param prob: prob of the detection
        :param plate: plate number
        :return: JSON object as dict with information to be upload to S3

        NOTE(review): ``json_null`` is accepted but never used here; the
        JSON file is only written when plate == 'NOPLATE', yet the sync
        params are returned unconditionally -- confirm both are intended.
        """
        path_to_folder, image_name, video_name, subDirName = CloudTools.getNames(path_to_new_image)
        # Random folder name on S3 so anonymous ('NoPlate') assets don't clash.
        destination = str(uuid.uuid4())
        # Create names placeholders
        path_to_image_on_S3 = PathsAndNames.S3Path + 'infractor-serve-assets-to-app' + '/' + 'NoPlate' + '/' + destination + '/' + image_name
        path_to_video_on_S3 = PathsAndNames.S3Path + 'infractor-serve-assets-to-app' + '/' + 'NoPlate' + '/' + destination + '/' + video_name
        # Prepare route for upload files to s3 into the subfolder /subDirName in S3
        CloudTools.createSubDirOnS3(PathsAndNames.S3Bucket, subDirName)
        # Check for Location path
        # NOTE(review): method name 'load_loation' looks like a typo for
        # 'load_location', but it is defined in CloudTools -- fix there first.
        location = CloudTools.load_loation()
        if plate == 'NOPLATE':
            """
            Normal sync behavior
            """
            # Video files are named '<date>_<hour-with-dashes>...'.
            infractionDate = video_name.split('_')[0]
            infractionHour = video_name.split('_')[1].replace('-', ':')
            toJSON = CreateJSON.create_json(loc=location,
                                            plate='x',
                                            prob=str(prob),
                                            infraction_date=infractionDate,
                                            infraction_hour=infractionHour,
                                            image_on_s3=path_to_image_on_S3,
                                            video_on_s3=path_to_video_on_S3)
            # Write JSON file
            with io.open('{}/{}.json'.format(path_to_folder, destination), 'w', encoding='utf8') as outfile:
                str_ = json.dumps(toJSON,
                                  indent=4,
                                  sort_keys=True,
                                  separators=(',', ': '), ensure_ascii=False)
                # Out
                outfile.write(to_unicode(str_))
        # Return parameters for AWS Sync
        params = {
            'plate': plate,
            'local_directory': path_to_folder,
            'bucket': 'infractor-serve-assets-to-app',
            'destination': destination,
            'img-name': image_name,
            'vid-name': video_name,
            'json-name': '{}.json'.format(destination)
        }
        return params

    def __call__(self, path_to_new_image='', prob=0, plate='', json_null=False):
        """
        Creates JSON output with assets information
        :param path_to_new_image: Path to image with plate
        :param prob: prob of the detection
        :param plate: plate number
        :param json_null: when True, skip asset handling and write a JSON
            of all-placeholder ('NO*') values instead
        :return: JSON object as dict with information to be upload to S3
        """
        path_to_folder, image_name, video_name, subDirName = CloudTools.getNames(path_to_new_image)
        if json_null is False:
            # Create names placeholders
            path_to_image_on_S3 = PathsAndNames.S3Path + PathsAndNames.S3Bucket + '/' + subDirName + '/' + image_name
            path_to_video_on_S3 = PathsAndNames.S3Path + PathsAndNames.S3Bucket + '/' + subDirName + '/' + video_name
            # Prepare route for upload files to s3 into the subfolder /subDirName in S3
            CloudTools.createSubDirOnS3(PathsAndNames.S3Bucket, subDirName)
            # Check for Location path
            location = CloudTools.load_loation()
            if video_name != 'NOVIDEO' and plate != 'NOPLATE':
                """
                Normal sync behavior
                """
                infractionDate = video_name.split('_')[0]
                infractionHour = video_name.split('_')[1].replace('-', ':')
                toJSON = CreateJSON.create_json(loc=location,
                                                plate=plate,
                                                prob=str(prob),
                                                infraction_date=infractionDate,
                                                infraction_hour=infractionHour,
                                                image_on_s3=path_to_image_on_S3,
                                                video_on_s3=path_to_video_on_S3)
                # Write JSON file
                with io.open('{}/{}.json'.format(path_to_folder, subDirName), 'w', encoding='utf8') as outfile:
                    str_ = json.dumps(toJSON,
                                      indent=4,
                                      sort_keys=True,
                                      separators=(',', ': '), ensure_ascii=False)
                    # Out
                    outfile.write(to_unicode(str_))
            elif plate != 'NOPLATE' and video_name == 'NOVIDEO':
                """
                Extreme case, not sync this
                """
                # NOTE(review): this dict is dead code -- it is immediately
                # overwritten by the create_json() call below, which also
                # passes video_on_s3=path_to_video_on_S3 rather than the
                # 'NOVIDEO' placeholder used here.  Confirm which of the
                # two video_path values is intended.
                toJSON = {
                    'location': location,
                    'plate': plate,
                    'prob': prob,
                    'datetime': 'NODATE',
                    'hour': 'NOHOUR',
                    'image_path': path_to_image_on_S3,
                    'video_path': 'NOVIDEO'
                }
                toJSON = CreateJSON.create_json(loc=location,
                                                plate=plate,
                                                prob=str(prob),
                                                infraction_date='NODATE',
                                                infraction_hour='NOHOUR',
                                                image_on_s3=path_to_image_on_S3,
                                                video_on_s3=path_to_video_on_S3)
                # Write JSON file
                with io.open('{}/{}.json'.format(path_to_folder, subDirName), 'w', encoding='utf8') as outfile:
                    str_ = json.dumps(toJSON,
                                      indent=4,
                                      sort_keys=True,
                                      separators=(',', ': '), ensure_ascii=False)
                    # Out
                    outfile.write(to_unicode(str_))
            else:
                """
                Also exterme case, not sync this
                """
                toJSON = CreateJSON.create_json(loc=location,
                                                plate=plate,
                                                prob=str(prob),
                                                infraction_date='NODATE',
                                                infraction_hour='NOHOUR',
                                                image_on_s3='NOIMAGE',
                                                video_on_s3='NOVIDEO')
                # Write JSON file
                with io.open('{}/{}.json'.format(path_to_folder, subDirName), 'w', encoding='utf8') as outfile:
                    str_ = json.dumps(toJSON,
                                      indent=4,
                                      sort_keys=True,
                                      separators=(',', ': '), ensure_ascii=False)
                    # Out
                    outfile.write(to_unicode(str_))
            # Return parameters for AWS Sync
            params = {
                'plate': plate,
                'local_directory': path_to_folder,
                'bucket': PathsAndNames.S3Bucket,
                'destination': subDirName,
                'img-name': image_name,
                'vid-name': video_name,
                'json-name': '{}.json'.format(subDirName)
            }
            return params
        else:
            """
            Create default null json
            """
            # Check for Location path
            location = CloudTools.load_loation()
            toJSON = CreateJSON.create_json(loc=location,
                                            plate='NOPLATE',
                                            prob='NOPROB',
                                            infraction_date='NODATE',
                                            infraction_hour='NOHOUR',
                                            image_on_s3='NOIMAGE',
                                            video_on_s3='NOVIDEO')
            # Write JSON file
            with io.open('{}/{}.json'.format(path_to_folder, subDirName), 'w', encoding='utf8') as outfile:
                str_ = json.dumps(toJSON,
                                  indent=4,
                                  sort_keys=True,
                                  separators=(',', ': '), ensure_ascii=False)
                # Out
                outfile.write(to_unicode(str_))
            # NOTE(review): unlike the other branches, the null-json path
            # returns True instead of a params dict -- callers must handle both.
            return True
# Module is import-only; nothing runs when executed directly.
if __name__ == '__main__':
    pass
|
from __future__ import (unicode_literals, division, absolute_import,
print_function)
"""
----------------------------
NOTE:
The "ShowProgressDialog" and ResultsDialog classes given below were
taken from the file "dialogs.py" which was part of the Calibre plugin
"diaps_toolbag" authored by DiapDealer.
----------------------------
"""
__license__ = 'GPL v3'
__docformat__ = 'restructuredtext en'
import os
try:
from PyQt5.Qt import (Qt, QVBoxLayout, QApplication,
QDialogButtonBox, QHBoxLayout,
QProgressDialog, QListWidget, QTimer, QDialog)
except ImportError:
from PyQt4.Qt import (Qt, QVBoxLayout, QApplication,
QDialogButtonBox, QHBoxLayout,
QProgressDialog, QListWidget, QTimer, QDialog)
from calibre.gui2 import error_dialog, choose_files, open_url
from calibre.utils.config import config_dir
from calibre.gui2.tweak_book.widgets import Dialog
from calibre_plugins.chinese_text.__init__ import (PLUGIN_NAME, PLUGIN_SAFE_NAME)
class ShowProgressDialog(QProgressDialog):
    """Modal progress dialog that runs ``callback_fn`` over every file in
    *container* whose mime-type is listed in *match_list*, writing back any
    file the callback changes.  Files are processed one per event-loop turn
    via QTimer so the dialog stays responsive and cancellable.
    (Adapted from DiapDealer's diaps_toolbag plugin, as noted above.)"""

    def __init__(self, gui, container, match_list, criteria, callback_fn, action_type='Checking'):
        # Names of container files whose mime-type matches.
        self.file_list = [i[0] for i in container.mime_map.items() if i[1] in match_list]
        # clean stays True until any file is actually modified.
        self.clean = True
        # Names of the files rewritten by callback_fn.
        self.changed_files = []
        self.total_count = len(self.file_list)
        QProgressDialog.__init__(self, '', _('Cancel'), 0, self.total_count, gui)
        self.setMinimumWidth(500)
        self.container, self.criteria, self.callback_fn, self.action_type = container, criteria, callback_fn, action_type
        self.gui = gui
        self.setWindowTitle('{0}...'.format(self.action_type))
        self.i = 0
        # Kick off processing on the next event-loop turn, then block here
        # until the dialog closes (exec_ inside __init__ is intentional).
        QTimer.singleShot(0, self.do_action)
        self.exec_()

    def do_action(self):
        """Process one file, then reschedule itself until done/cancelled."""
        if self.wasCanceled():
            return self.do_close()
        if self.i >= self.total_count:
            return self.do_close()
        name = self.file_list[self.i]
        data = self.container.raw_data(name)
        self.i += 1
        self.setLabelText('{0}: {1}'.format(self.action_type, name))
        # Send the necessary data to the callback function in main.py.
        htmlstr = self.callback_fn(data, self.criteria)
        if htmlstr != data:
            # Callback changed the content: write it back and mark dirty.
            self.container.open(name, 'w').write(htmlstr)
            self.container.dirty(name)
            self.changed_files.append(name)
            self.clean = False
        self.setValue(self.i)
        # Lather, rinse, repeat
        QTimer.singleShot(0, self.do_action)

    def do_close(self):
        """Hide the dialog and drop the gui reference (ends exec_)."""
        self.hide()
        self.gui = None
class ResultsDialog(Dialog):
    """Simple dialog listing the files that were changed, with a button to
    inspect the changes (accept) or just close (reject)."""

    def __init__(self, parent, files):
        self.files = files
        Dialog.__init__(self, _('Changed Files'), 'toolbag_show_results_dialog', parent)

    def setup_ui(self):
        """Lay out the file list and the action buttons."""
        self.setMinimumWidth(300)
        self.setMinimumHeight(300)
        layout = QVBoxLayout(self)
        self.setLayout(layout)
        main_layout = QHBoxLayout()
        layout.addLayout(main_layout)
        self.listy = QListWidget()
        # self.listy.setSelectionMode(QAbstractItemView.ExtendedSelection)
        main_layout.addWidget(self.listy)
        self.listy.addItems(self.files)
        # BUG FIX: the original first constructed
        # QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
        # and immediately discarded it by rebinding the name; build the
        # button box once, with only the custom buttons.
        button_box = QDialogButtonBox()
        ok_button = button_box.addButton(_("See what changed"), QDialogButtonBox.AcceptRole)
        cancel_button = button_box.addButton(_("Close"), QDialogButtonBox.RejectRole)
        button_box.accepted.connect(self.accept)
        button_box.rejected.connect(self.reject)
        layout.addWidget(button_box)
|
from __future__ import unicode_literals
from django.db import models
from cadastro.models import AtendimentoAbs
class Ano(models.Model):
    """Calendar year used to group invoices (notas)."""
    # Year stored as a 4-character string; exposed via the `ano` property.
    _ano = models.CharField(verbose_name='Ano',max_length=4)

    def get_absolute_url(self):
        return reverse('ano_detail', kwargs={'pk': self.pk})

    def _get_ano(self):
        return self._ano

    def _set_ano(self, ano):
        self._ano = ano

    ano = property(_get_ano, _set_ano)

    def __unicode__(self):
        return u'%s' % self.ano

    def __str__(self):
        return u'%s' % self.ano
class ItemNotaAbs(models.Model):
    """Abstract base for one invoice line item: a name and a money value."""
    # Item description; exposed via the `nome` property on concrete subclasses.
    _nome = models.CharField(verbose_name='Nome',max_length=150)
    # Monetary value (presumably BRL, given the 'R$' in subclasses) with
    # two decimal places, up to 9,999,999.99.
    _valor = models.DecimalField(verbose_name='Valor',max_digits=9, decimal_places=2)
    class Meta:
        abstract = True
class AcoesItemNota(ItemNotaAbs):
    """Extension point for item-level behavior; currently adds nothing."""
    class Meta:
        abstract = True
class ItemNota(AcoesItemNota):
    """Concrete invoice line item.

    The stored ``_nome``/``_valor`` fields are exposed through the
    ``nome``/``valor`` properties.
    """
    def _get_nome(self):
        return self._nome
    def _set_nome(self, nome):
        self._nome = nome
    def _get_valor(self):
        return self._valor
    def _set_valor(self, valor):
        self._valor = valor
    nome = property(_get_nome, _set_nome)
    valor = property(_get_valor, _set_valor)
    def __unicode__(self):
        return u'%s - %s (R$)' % (self.nome, self.valor)
    def __str__(self):
        # Define __str__ alongside __unicode__ for Python 3, matching the
        # other models in this module (Ano, NotaAbs) which define both.
        return u'%s - %s (R$)' % (self.nome, self.valor)
class NotaAbs(models.Model):
    """Abstract invoice: one per atendimento, grouped by year and sector."""
    # Hospital sectors; the single-character key is what gets stored in `setor`.
    SETOR_CHOICES = (
        ('1', 'Clínica de Pequenos'),
        ('2', 'Clínica de Grandes'),
        ('3', 'Clínica Cirúrgica'),
        ('4', 'Patologia Clínica'),
        ('5', 'Diagnóstico por Imagem'),
        ('6', 'Parasitologia'),
        ('7', 'Microbiologia'),
        ('8', 'Patologia Animal'),
    )
    # Creation date, set automatically on first save and read-only thereafter.
    _data = models.DateField(auto_now_add=True)
    setor = models.CharField(verbose_name='Setor', max_length=30, choices=SETOR_CHOICES)
    # The invoice shares its primary key with its related atendimento.
    atendimento = models.OneToOneField(AtendimentoAbs,on_delete=models.CASCADE,primary_key=True)
    # True once the invoice has been paid ('Pago').
    status = models.BooleanField(verbose_name='Pago', default=False)
    ano = models.ForeignKey(Ano, on_delete = models.CASCADE, related_name='notas')
    itemNota = models.ManyToManyField(ItemNota, related_name='notas')
    class Meta:
        abstract = True
    def __unicode__(self):
        return u'%s - %s' % (self.setor, self._data)
    def __str__(self):
        return u'%s - %s' % (self.setor, self._data)
class AcoesNota(NotaAbs):
    """Extension point for invoice-level behavior; currently adds nothing."""
    class Meta:
        abstract = True
class Nota(AcoesNota):
    """Concrete invoice; exposes the auto-set creation date as `data`."""

    def get_absolute_url(self):
        return reverse('nota_detail', kwargs={'pk': self.pk})

    def _get_data(self):
        return self._data

    def _set_data(self, data):
        self._data = data

    # Read/write access to the underlying auto_now_add date field.
    data = property(_get_data, _set_data)
class Produto(ItemNota):
    """Invoice line item representing a product."""
    pass
class Servico(ItemNota):
    """Invoice line item representing a service."""
    pass
|
# Read "ip/netmask" pairs (one per line) and print each component.
INPUT_PATH = "/Users/quietchallenger/DEV/2. Turner Branches/input.txt"

# `with` guarantees the file is closed (the original leaked the handle,
# shadowed the `file` builtin, and printed the netmask with its trailing
# newline still attached).
with open(INPUT_PATH, "r") as fh:
    for line in fh:
        line = line.strip()
        if not line:
            continue  # skip blank lines
        # Each line is expected to look like "10.0.0.0/255.255.255.0".
        ip, sep, netmask = line.partition("/")
        if not sep:
            # No "/" present: report instead of raising IndexError.
            print("skipping malformed line: {!r}".format(line))
            continue
        print(ip)
        print(netmask)
|
import unittest
import mock
from haiku_cnf import search_provides, read_haikuports, firstrun
class TestPkgmanHooks(unittest.TestCase):
    """Tests for search_provides(), which wraps `pkgman search`."""
    @mock.patch('haiku_cnf.check_output')
    def test_search_provides(self, patched_check_output):
        """A matching package is parsed into a name/repo dict."""
        # Sample output of `pkgman search postgre -D`: a two-line header
        # followed by one row per package.  (The previous version also
        # kept an unused `pkgman_out` fixture; it has been removed.)
        pkgman_details = """Repository Name Version Arch
-----------------------------------------------------
HaikuPorts postgresql 9.3.5-2 x86_gcc2
"""
        patched_check_output.return_value = pkgman_details
        ret = search_provides("pg")
        expected = {'name': 'postgresql', 'repo': 'HaikuPorts'}
        self.assertEqual(ret, [expected])
    @mock.patch('haiku_cnf.check_output')
    def test_search_provides_bogus(self, patched_check_output):
        """A query with no matching package yields None."""
        patched_check_output.return_value = "No matching packages found."
        ret = search_provides("foobarbaz")
        self.assertIsNone(ret)
class TestHP(unittest.TestCase):
    """Tests for read_haikuports()."""
    @mock.patch('haiku_cnf.check_output')
    def test_haikuports(self, patched_check_output):
        """read_haikuports() must split the raw command output into lines."""
        # The return value was previously bound to an unused `ret` local.
        read_haikuports()
        patched_check_output.return_value.splitlines.assert_called_with()
class TestFirstRun(unittest.TestCase):
    """Tests for firstrun()."""
    # Stacked @patch decorators apply bottom-up, so the mocks arrive in
    # reverse order: get_db, read_basepkgs, read_pkgman.
    @mock.patch("haiku_cnf.read_pkgman", return_value=[None])
    @mock.patch("haiku_cnf.read_basepkgs")
    @mock.patch("haiku_cnf.get_db")
    def test_first_run(self, mock_get_db, mock_basepkgs, mock_pkgman):
        """firstrun() consults the db, the base packages and pkgman."""
        mock_get_db.return_value = {}
        mock_basepkgs.return_value = [None]
        firstrun()
        mock_get_db.assert_called_with()
        mock_basepkgs.assert_called_with()
        mock_pkgman.assert_called_with()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.