code
stringlengths 1
199k
|
|---|
from GUIComponent import GUIComponent
from enigma import eEPGCache, eListbox, eListboxPythonMultiContent, gFont, RT_HALIGN_LEFT, RT_HALIGN_RIGHT, RT_HALIGN_CENTER, RT_VALIGN_CENTER
from Tools.Alternatives import CompareWithAlternatives
from Tools.LoadPixmap import LoadPixmap
from time import localtime, time
from Components.config import config
from ServiceReference import ServiceReference
from Tools.Directories import resolveFilename, SCOPE_CURRENT_SKIN
from skin import parseFont
EPG_TYPE_SINGLE = 0
EPG_TYPE_MULTI = 1
EPG_TYPE_SIMILAR = 2
class Rect:
	"""Plain rectangle record (x, y, width, height).

	The accessor methods mirror enigma's eRect API so callers can use
	either type interchangeably ("silly, but backward compatible").
	"""

	def __init__(self, x, y, width, height):
		self.x, self.y, self.w, self.h = x, y, width, height

	def left(self):
		return self.x

	def top(self):
		return self.y

	def height(self):
		return self.h

	def width(self):
		return self.w
class EPGList(GUIComponent):
def __init__(self, type=EPG_TYPE_SINGLE, selChangedCB=None, timer=None):
	"""EPG list widget.

	type -- EPG_TYPE_SINGLE / EPG_TYPE_MULTI / EPG_TYPE_SIMILAR; selects
	        the row build function.
	selChangedCB -- optional callback invoked on every selection change.
	timer -- timer list used to decorate events with clock icons
	         (queried via timer.isInTimer in getClockTypesForEntry).
	"""
	self.days = (_("Mon"), _("Tue"), _("Wed"), _("Thu"), _("Fri"), _("Sat"), _("Sun"))
	self.timer = timer
	self.onSelChanged = []
	if selChangedCB is not None:
		self.onSelChanged.append(selChangedCB)
	GUIComponent.__init__(self)
	self.type = type
	self.l = eListboxPythonMultiContent()
	self.eventItemFont = gFont("Regular", 22)
	self.eventTimeFont = gFont("Regular", 16)
	self.iconSize = 21
	self.iconDistance = 2
	self.colGap = 10
	self.skinColumns = False
	self.tw = 90
	self.dy = 0
	if type == EPG_TYPE_SINGLE:
		self.l.setBuildFunc(self.buildSingleEntry)
	elif type == EPG_TYPE_MULTI:
		self.l.setBuildFunc(self.buildMultiEntry)
	else:
		assert(type == EPG_TYPE_SIMILAR)
		self.l.setBuildFunc(self.buildSimilarEntry)
	self.epgcache = eEPGCache.getInstance()
	# Clock pixmaps, indexed as <timer kind> * 5 + <state>.  The kind and
	# state orders below reproduce exactly the original hand-written list
	# (epgclock_add.png, epgclock_pre.png, epgclock.png, ...); the indices
	# must match what timer.isInTimer() returns (see getClockTypesForEntry).
	self.clocks = []
	for base in ('epgclock', 'zapclock', 'zaprecclock', 'repepgclock',
			'repzapclock', 'repzaprecclock', 'pipclock'):
		for state in ('_add', '_pre', '', '_prepost', '_post'):
			self.clocks.append(LoadPixmap(cached=True,
				path=resolveFilename(SCOPE_CURRENT_SKIN, 'icons/%s%s.png' % (base, state))))
def getEventFromId(self, service, eventid):
	"""Resolve an event id on a service to an event object via the EPG
	cache; returns None when either the cache or the id is missing."""
	if self.epgcache is None or eventid is None:
		return None
	return self.epgcache.lookupEventId(service.ref, eventid)
def getCurrentChangeCount(self):
	"""Return the changecount of the selected multi-EPG row (0 otherwise)."""
	current = self.l.getCurrentSelection()
	if self.type == EPG_TYPE_MULTI and current is not None:
		return current[0]
	return 0
def getCurrent(self):
	# Return (event, service) for the selected row, or (None, None)
	# when nothing is selected.
	idx = 0
	if self.type == EPG_TYPE_MULTI:
		# multi-EPG rows carry a leading changecount field; skip it
		idx += 1
	tmp = self.l.getCurrentSelection()
	if tmp is None:
		return (None, None)
	eventid = tmp[idx + 1]
	service = ServiceReference(tmp[idx])
	event = self.getEventFromId(service, eventid)
	return (event, service)
def moveUp(self):
	# Move the selection one row up (wraps around, see postWidgetCreate).
	self.instance.moveSelection(self.instance.moveUp)
def moveDown(self):
	# Move the selection one row down (wraps around, see postWidgetCreate).
	self.instance.moveSelection(self.instance.moveDown)
def connectSelectionChanged(self, func):
	# BUG FIX: 'self' was missing from the signature, so every call
	# raised a NameError on self.onSelChanged (and misbound 'func').
	# Register a selection-change callback, ignoring duplicates.
	if func not in self.onSelChanged:
		self.onSelChanged.append(func)
def disconnectSelectionChanged(self, func):
	# BUG FIX: 'self' was missing from the signature (same defect as
	# connectSelectionChanged).  Unregister a selection-change callback;
	# raises ValueError if it was never registered, as list.remove does.
	self.onSelChanged.remove(func)
def selectionChanged(self):
	# Fire every registered selection-change callback.
	for x in self.onSelChanged:
		if x is not None:
			x()
# The widget type GUIComponent instantiates for this component.
GUI_WIDGET = eListbox

def postWidgetCreate(self, instance):
	# Wire up the freshly created listbox: wrap-around selection,
	# selection-change notification and the multi-content source.
	instance.setWrapAround(True)
	instance.selectionChanged.get().append(self.selectionChanged)
	instance.setContent(self.l)
def preWidgetRemove(self, instance):
	# Undo postWidgetCreate before the widget is destroyed.
	instance.selectionChanged.get().remove(self.selectionChanged)
	instance.setContent(None)
def recalcEntrySize(self):
	# Recompute the per-row column rectangles from the current listbox
	# item size.  Must be called whenever widget geometry or skin changes.
	esize = self.l.getItemSize()
	width = esize.width()
	height = esize.height()
	try:
		# prefer the real pixmap height over the default iconSize
		self.iconSize = self.clocks[0].size().height()
	except:
		# pixmap failed to load; keep the default (21 px)
		pass
	self.space = self.iconSize + self.iconDistance
	# vertical offset to centre the icons in the row (float division,
	# then truncated)
	self.dy = int((height - self.iconSize) / 2.)
	if self.type == EPG_TYPE_SINGLE:
		if self.skinColumns:
			# explicit column widths from the skin (see setColWidths)
			x = 0
			self.weekday_rect = Rect(0, 0, self.gap(self.col[0]), height)
			x += self.col[0]
			self.datetime_rect = Rect(x, 0, self.gap(self.col[1]), height)
			x += self.col[1]
			self.descr_rect = Rect(x, 0, width - x, height)
		else:
			# legacy proportional layout: 2/20, 5/20 and 13/20 of width
			self.weekday_rect = Rect(0, 0, width / 20 * 2 - 10, height)
			self.datetime_rect = Rect(width / 20 * 2, 0, width / 20 * 5 - 15, height)
			self.descr_rect = Rect(width / 20 * 7, 0, width / 20 * 13, height)
	elif self.type == EPG_TYPE_MULTI:
		if self.skinColumns:
			x = 0
			self.service_rect = Rect(x, 0, self.gap(self.col[0]), height)
			x += self.col[0]
			# progress bar is inset 8 px top and bottom within the row
			self.progress_rect = Rect(x, 8, self.gap(self.col[1]), height - 16)
			self.start_end_rect = Rect(x, 0, self.gap(self.col[1]), height)
			x += self.col[1]
			self.descr_rect = Rect(x, 0, width - x, height)
		else:
			xpos = 0
			w = width / 10 * 3
			self.service_rect = Rect(xpos, 0, w - 10, height)
			xpos += w
			w = width / 10 * 2
			self.start_end_rect = Rect(xpos, 0, w - 10, height)
			self.progress_rect = Rect(xpos, 4, w - 10, height - 8)
			xpos += w
			w = width / 10 * 5
			self.descr_rect = Rect(xpos, 0, width, height)
	else: # EPG_TYPE_SIMILAR
		if self.skinColumns:
			x = 0
			self.weekday_rect = Rect(0, 0, self.gap(self.col[0]), height)
			x += self.col[0]
			self.datetime_rect = Rect(x, 0, self.gap(self.col[1]), height)
			x += self.col[1]
			self.service_rect = Rect(x, 0, width - x, height)
		else:
			self.weekday_rect = Rect(0, 0, width / 20 * 2 - 10, height)
			self.datetime_rect = Rect(width / 20 * 2, 0, width / 20 * 5 - 15, height)
			self.service_rect = Rect(width / 20 * 7, 0, width / 20 * 13, height)
def gap(self, width):
	# Usable width of a column after subtracting the inter-column gap.
	return width - self.colGap
def getClockTypesForEntry(self, service, eventId, beginTime, duration):
	"""Return the clock-icon indices for an event, or None when the event
	has no begin time or no timer matches it."""
	if not beginTime:
		return None
	match = self.timer.isInTimer(eventId, beginTime, duration, service)
	return match[1] if match is not None else None
def buildSingleEntry(self, service, eventId, beginTime, duration, EventName):
	# Row builder for the single-service EPG: weekday, "DD.MM, HH:MM",
	# then optional clock icons followed by the event name.
	clock_types = self.getClockTypesForEntry(service, eventId, beginTime, duration)
	r1 = self.weekday_rect
	r2 = self.datetime_rect
	r3 = self.descr_rect
	t = localtime(beginTime)
	res = [
		None, # no private data needed
		(eListboxPythonMultiContent.TYPE_TEXT, r1.x, r1.y, r1.w, r1.h, 0, RT_HALIGN_RIGHT | RT_VALIGN_CENTER, self.days[t[6]]),
		(eListboxPythonMultiContent.TYPE_TEXT, r2.x, r2.y, r2.w, r1.h, 0, RT_HALIGN_RIGHT | RT_VALIGN_CENTER, "%02d.%02d, %02d:%02d" % (t[2], t[1], t[3], t[4]))
	]
	if clock_types:
		# icons left-to-right, then the title shifted right past them
		for i in range(len(clock_types)):
			res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND, r3.x + i * self.space, r3.y + self.dy, self.iconSize, self.iconSize, self.clocks[clock_types[i]]))
		res.append((eListboxPythonMultiContent.TYPE_TEXT, r3.x + (i + 1) * self.space, r3.y, r3.w, r3.h, 0, RT_HALIGN_LEFT | RT_VALIGN_CENTER, EventName))
	else:
		res.append((eListboxPythonMultiContent.TYPE_TEXT, r3.x, r3.y, r3.w, r3.h, 0, RT_HALIGN_LEFT | RT_VALIGN_CENTER, EventName))
	return res
def buildSimilarEntry(self, service, eventId, beginTime, service_name, duration):
	# Row builder for the "similar broadcasts" view: weekday, date/time,
	# then optional clock icons followed by the service name.
	clock_types = self.getClockTypesForEntry(service, eventId, beginTime, duration)
	r1 = self.weekday_rect
	r2 = self.datetime_rect
	r3 = self.service_rect
	t = localtime(beginTime)
	res = [
		None, # no private data needed
		(eListboxPythonMultiContent.TYPE_TEXT, r1.x, r1.y, r1.w, r1.h, 0, RT_HALIGN_RIGHT | RT_VALIGN_CENTER, self.days[t[6]]),
		# BUG FIX: day was formatted "%2d" (space padded) here but "%02d"
		# (zero padded) in buildSingleEntry; use the same format in both.
		(eListboxPythonMultiContent.TYPE_TEXT, r2.x, r2.y, r2.w, r1.h, 0, RT_HALIGN_RIGHT | RT_VALIGN_CENTER, "%02d.%02d, %02d:%02d" % (t[2], t[1], t[3], t[4]))
	]
	if clock_types:
		# icons left-to-right, then the service name shifted past them
		for i in range(len(clock_types)):
			res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND, r3.x + i * self.space, r3.y + self.dy, self.iconSize, self.iconSize, self.clocks[clock_types[i]]))
		res.append((eListboxPythonMultiContent.TYPE_TEXT, r3.x + (i + 1) * self.space, r3.y, r3.w, r3.h, 0, RT_HALIGN_LEFT | RT_VALIGN_CENTER, service_name))
	else:
		res.append((eListboxPythonMultiContent.TYPE_TEXT, r3.x, r3.y, r3.w, r3.h, 0, RT_HALIGN_LEFT | RT_VALIGN_CENTER, service_name))
	return res
def buildMultiEntry(self, changecount, service, eventId, beginTime, duration, EventName, nowTime, service_name):
	# Row builder for the multi-EPG view: service name (with clock icons
	# on its right edge), then either start/end times plus total duration
	# (upcoming event) or a progress bar plus remaining minutes (running
	# event), then the event name.
	clock_types = self.getClockTypesForEntry(service, eventId, beginTime, duration)
	r1 = self.service_rect
	r2 = self.progress_rect
	r3 = self.descr_rect
	r4 = self.start_end_rect
	res = [None] # no private data needed
	if clock_types:
		# shrink the text area so the icons fit on the right
		res.append((eListboxPythonMultiContent.TYPE_TEXT, r1.x, r1.y, r1.w - self.space * len(clock_types), r1.h, 0, RT_HALIGN_LEFT | RT_VALIGN_CENTER, service_name))
		for i in range(len(clock_types)):
			# icons are placed right-to-left, hence the reversed index
			res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND, r1.x + r1.w - self.space * (i + 1), r1.y + self.dy, self.iconSize, self.iconSize, self.clocks[clock_types[len(clock_types) - 1 - i]]))
	else:
		res.append((eListboxPythonMultiContent.TYPE_TEXT, r1.x, r1.y, r1.w, r1.h, 0, RT_HALIGN_LEFT | RT_VALIGN_CENTER, service_name))
	if beginTime is not None:
		if nowTime < beginTime:
			# upcoming event: "HH.MM - HH.MM" plus total duration
			begin = localtime(beginTime)
			end = localtime(beginTime + duration)
			res.extend((
				(eListboxPythonMultiContent.TYPE_TEXT, r4.x, r4.y, r4.w, r4.h, 1, RT_HALIGN_CENTER | RT_VALIGN_CENTER, "%02d.%02d - %02d.%02d" % (begin[3], begin[4], end[3], end[4])),
				(eListboxPythonMultiContent.TYPE_TEXT, r3.x, r3.y, self.gap(self.tw), r3.h, 1, RT_HALIGN_RIGHT | RT_VALIGN_CENTER, _("%d min") % (duration / 60)),
				(eListboxPythonMultiContent.TYPE_TEXT, r3.x + self.tw, r3.y, r3.w, r3.h, 0, RT_HALIGN_LEFT | RT_VALIGN_CENTER, EventName)
			))
		else:
			# running event: integer division throughout -- this module
			# is Python 2 (print statements elsewhere in the file)
			percent = (nowTime - beginTime) * 100 / duration
			prefix = "+"
			remaining = ((beginTime + duration) - int(time())) / 60
			if remaining <= 0:
				prefix = ""
			res.extend((
				(eListboxPythonMultiContent.TYPE_PROGRESS, r2.x, r2.y, r2.w, r2.h, percent),
				(eListboxPythonMultiContent.TYPE_TEXT, r3.x, r3.y, self.gap(self.tw), r3.h, 1, RT_HALIGN_RIGHT | RT_VALIGN_CENTER, _("%s%d min") % (prefix, remaining)),
				(eListboxPythonMultiContent.TYPE_TEXT, r3.x + self.tw, r3.y, r3.w, r3.h, 0, RT_HALIGN_LEFT | RT_VALIGN_CENTER, EventName)
			))
	return res
def queryEPG(self, list, buildFunc=None):
	"""Run an EPG cache query; returns [] when no cache is available."""
	if self.epgcache is None:
		return []
	if buildFunc is not None:
		return self.epgcache.lookupEvent(list, buildFunc)
	return self.epgcache.lookupEvent(list)
def fillMultiEPG(self, services, stime=-1):
	# Fill the list with one event per service at time 'stime'
	# (-1 = now).  'X0RIBDTCn' is the eEPGCache query format string; the
	# leading '0' presumably seeds each row's changecount field -- verify
	# against the eEPGCache documentation.
	test = [(service.ref.toString(), 0, stime) for service in services]
	test.insert(0, 'X0RIBDTCn')
	self.list = self.queryEPG(test)
	self.l.setList(self.list)
	self.selectionChanged()
def updateMultiEPG(self, direction):
	# Step every multi-EPG row 'direction' events forward (+1) or
	# backward (-1) and track the per-row changecount.
	test = [x[3] and (x[1], direction, x[3]) or (x[1], direction, 0) for x in self.list]
	test.insert(0, 'XRIBDTCn')
	tmp = self.queryEPG(test)
	cnt = 0
	for x in tmp:
		changecount = self.list[cnt][0] + direction
		if changecount >= 0:
			if x[2] is not None:
				# only replace rows that actually got a new event
				self.list[cnt] = (changecount, x[0], x[1], x[2], x[3], x[4], x[5], x[6])
		cnt += 1
	self.l.setList(self.list)
	self.selectionChanged()
def fillSingleEPG(self, service):
	# Fill the list with all events of one service, starting
	# config.epg.histminutes before now, then select the running event.
	t = time()
	epg_time = t - config.epg.histminutes.getValue() * 60
	test = ['RIBDT', (service.ref.toString(), 0, epg_time, -1)]
	self.list = self.queryEPG(test)
	self.l.setList(self.list)
	if t != epg_time:
		# history enabled: skip past events (x[2] = begin, x[3] = duration)
		idx = 0
		for x in self.list:
			idx += 1
			if t < x[2] + x[3]:
				break
		self.instance.moveSelectionTo(idx - 1)
	self.selectionChanged()
def sortSingleEPG(self, type):
	# Re-sort the single-EPG list in place: type 1 = by event name (then
	# begin time), type 0 = by begin time.  Keeps the current event
	# selected across the sort.
	list = self.list
	if list:
		event_id = self.getSelectedEventId()
		if type == 1:
			# x[4] is the event name, x[2] the begin time
			list.sort(key=lambda x: (x[4] and x[4].lower(), x[2]))
		else:
			assert(type == 0)
			list.sort(key=lambda x: x[2])
		self.l.invalidate()
		self.moveToEventId(event_id)
def getSelectedEventId(self):
	# Event id is the second field of a row; returns None if no selection.
	x = self.l.getCurrentSelection()
	return x and x[1]
def moveToService(self, serviceref):
	"""Select the first row whose service matches 'serviceref'
	(alternatives included); no-op when serviceref is falsy or absent."""
	if not serviceref:
		return
	refstr = serviceref.toString()
	for index, entry in enumerate(self.list):
		if CompareWithAlternatives(entry[1], refstr):
			self.instance.moveSelectionTo(index)
			return
def moveToEventId(self, eventId):
	"""Select the first row carrying the given event id; no-op when
	eventId is falsy or not present in the list."""
	if not eventId:
		return
	for index, entry in enumerate(self.list):
		if entry[1] == eventId:
			self.instance.moveSelectionTo(index)
			return
def fillSimilarList(self, refstr, event_id):
	# Search the EPG cache for broadcasts similar to the given event and
	# show them sorted by begin time.
	t = time()
	# search similar broadcastings
	if event_id is None:
		return
	l = self.epgcache.search(('RIBND', 1024, eEPGCache.SIMILAR_BROADCASTINGS_SEARCH, refstr, event_id))
	if l and len(l):
		l.sort(key=lambda x: x[2])
	self.l.setList(l)
	self.selectionChanged()
	print time() - t  # debug timing (this module is Python 2)
def applySkin(self, desktop, parent):
	# Consume EPGList-specific skin attributes, then delegate the rest to
	# GUIComponent.applySkin.  Each setter below is found by the
	# attribute's name through locals() -- a CPython-specific but
	# long-standing enigma2 idiom.
	def warningWrongSkinParameter(string):
		print "[EPGList] wrong '%s' skin parameters" % string
	def setEventItemFont(value):
		self.eventItemFont = parseFont(value, ((1, 1), (1, 1)))
	def setEventTimeFont(value):
		self.eventTimeFont = parseFont(value, ((1, 1), (1, 1)))
	def setIconDistance(value):
		self.iconDistance = int(value)
	def setIconShift(value):
		self.dy = int(value)
	def setTimeWidth(value):
		self.tw = int(value)
	def setColWidths(value):
		# expects exactly two comma-separated widths, e.g. "100,180"
		self.col = map(int, value.split(','))
		if len(self.col) == 2:
			self.skinColumns = True
		else:
			warningWrongSkinParameter(attrib)
	def setColGap(value):
		self.colGap = int(value)
	for (attrib, value) in self.skinAttributes[:]:
		try:
			locals().get(attrib)(value)
			self.skinAttributes.remove((attrib, value))
		except:
			# broad on purpose: attributes without a setter here stay in
			# skinAttributes and are handled by GUIComponent.applySkin
			pass
	self.l.setFont(0, self.eventItemFont)
	self.l.setFont(1, self.eventTimeFont)
	return GUIComponent.applySkin(self, desktop, parent)
|
"""
OpenVZ container-type virtualization installation functions.
Copyright 2012 Artem Kanarev <kanarev AT tncc.ru>, Sergey Podushkin <psv AT tncc.ru>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
from __future__ import print_function
import os
from cexceptions import OVZCreateException
def start_install(*args, **kwargs):
    """Create, configure and start an OpenVZ container from cobbler data.

    kwargs must contain 'name' and a 'profile_data' dict (autoinst, breed,
    hostname, ip_address_eth0, name_servers, virt_file_size, virt_ram,
    virt_cpus, virt_auto_boot, ks_meta).  Container parameters may be
    overridden through 'vz_*' entries in ks_meta; 'vz_ctid' is mandatory.

    Returns 1 on an invalid CTID; raises OVZCreateException on any other
    failure.
    """
    # check for OpenVZ tools presence
    # can be this apps installed in some other place?
    vzcfgvalidate = '/usr/sbin/vzcfgvalidate'
    vzctl = '/usr/sbin/vzctl'
    if not os.path.exists(vzcfgvalidate) or not os.path.exists(vzctl):
        raise OVZCreateException(
            "Cannot find %s and/or %s! Are OpenVZ tools installed?" %
            (vzcfgvalidate, vzctl)
        )
    # params, that can be defined/redefined through ks_meta
    keys_for_meta = [
        'KMEMSIZE', # "14372700:14790164",
        'LOCKEDPAGES', # "2048:2048",
        'PRIVVMPAGES', # "65536:69632",
        'SHMPAGES', # "21504:21504",
        'NUMPROC', # "240:240",
        'VMGUARPAGES', # "33792:unlimited",
        'OOMGUARPAGES', # "26112:unlimited",
        'NUMTCPSOCK', # "360:360",
        'NUMFLOCK', # "188:206",
        'NUMPTY', # "16:16",
        'NUMSIGINFO', # "256:256",
        'TCPSNDBUF', # "1720320:2703360",
        'TCPRCVBUF', # "1720320:2703360",
        'OTHERSOCKBUF', # "1126080:2097152",
        'DGRAMRCVBUF', # "262144:262144",
        'NUMOTHERSOCK', # "120",
        'DCACHESIZE', # "3409920:3624960",
        'NUMFILE', # "9312:9312",
        'AVNUMPROC', # "180:180",
        'NUMIPTENT', # "128:128",
        'DISKINODES', # "200000:220000",
        'QUOTATIME', # "0",
        'VE_ROOT', # "/vz/root/$VEID",
        'VE_PRIVATE', # "/vz/private/$VEID",
        'SWAPPAGES', # "0:1G",
        'ONBOOT', # "yes"
    ]
    sysname = kwargs['name']
    autoinst = kwargs['profile_data']['autoinst']
    # we use it for --ostemplate parameter
    template = kwargs['profile_data']['breed']
    hostname = kwargs['profile_data']['hostname']
    ipadd = kwargs['profile_data']['ip_address_eth0']
    nameserver = kwargs['profile_data']['name_servers'][0]
    diskspace = kwargs['profile_data']['virt_file_size']
    physpages = kwargs['profile_data']['virt_ram']
    cpus = kwargs['profile_data']['virt_cpus']
    onboot = kwargs['profile_data']['virt_auto_boot']
    # we get [0,1] or [False,True] and have to map it to [no,yes]
    onboot = 'yes' if onboot == '1' or onboot else 'no'
    CTID = None
    vz_meta = {}
    # get all vz_ parameters from ks_meta; partition instead of split so
    # values containing '=' survive, and bare 'vz_foo' tokens are skipped
    for item in kwargs['profile_data']['ks_meta'].split():
        key, sep, value = item.partition('=')
        if sep and key.startswith('vz_'):
            vz_meta[key.replace('vz_', '').upper()] = value
    if 'CTID' in vz_meta and vz_meta['CTID']:
        try:
            CTID = int(vz_meta['CTID'])
            del vz_meta['CTID']
        except ValueError:
            print("Invalid CTID in ks_meta. Exiting...")
            return 1
    else:
        raise OVZCreateException(
            'Mandatory "vz_ctid" parameter not found in ks_meta!')
    confiname = '/etc/vz/conf/%d.conf' % CTID
    # this is the minimal config. we can define additional parameters or
    # override some of them in ks_meta
    min_config = {
        'PHYSPAGES': "0:%sM" % physpages,
        'SWAPPAGES': "0:1G",
        'DISKSPACE': "%sG:%sG" % (diskspace, diskspace),
        'DISKINODES': "200000:220000",
        'QUOTATIME': "0",
        'CPUUNITS': "1000",
        'CPUS': cpus,
        'VE_ROOT': "/vz/root/$VEID",
        'VE_PRIVATE': "/vz/private/$VEID",
        'OSTEMPLATE': template,
        'NAME': sysname,
        'HOSTNAME': hostname,
        'IP_ADDRESS': ipadd,
        'NAMESERVER': nameserver,
        # BUG FIX: 'onboot' was computed above but never written out
        'ONBOOT': onboot,
    }
    # merge with override.  BUG FIX: the old merge raised KeyError for any
    # vz_* key that was neither whitelisted nor in min_config; unknown
    # keys are now ignored.  It also used dict.keys() + dict.keys(),
    # which breaks on Python 3.
    full_config = dict(min_config)
    for k, v in vz_meta.items():
        if k in keys_for_meta:
            full_config[k] = v
    # write config file for container ('with' guarantees it is closed)
    with open(confiname, 'w+') as f:
        for key, val in full_config.items():
            f.write('%s="%s"\n' % (key, val))
    # validate the config file (os.system returns 0 on success)
    cmd = '%s %s' % (vzcfgvalidate, confiname)
    if os.system(cmd.strip()):
        raise OVZCreateException(
            "Container %s config file is not valid" %
            CTID)
    # now install the container tree
    cmd = '/usr/bin/ovz-install %s %s %s' % (
        sysname,
        autoinst,
        full_config['VE_PRIVATE'].replace('$VEID', '%d' % CTID)
    )
    if os.system(cmd.strip()):
        raise OVZCreateException("Container creation %s failed" % CTID)
    # if everything fine, start the container
    cmd = '%s start %s' % (vzctl, CTID)
    if os.system(cmd.strip()):
        raise OVZCreateException("Start container %s failed" % CTID)
|
# Instance name used when the caller does not specify one.
DEFAULT_INSTANCE_NAME = "booth"
# Keys recognised in the global section of a booth configuration file.
GLOBAL_KEYS = (
    "transport",
    "port",
    "name",
    "authfile",
    "maxtimeskew",
    "site",
    "arbitrator",
    "site-user",
    "site-group",
    "arbitrator-user",
    "arbitrator-group",
    "debug",
    "ticket",
)
# Keys recognised inside a ticket section of a booth configuration file.
TICKET_KEYS = (
    "acquire-after",
    "attr-prereq",
    "before-acquire-handler",
    "expire",
    "renewal-freq",
    "retries",
    "timeout",
    "weights",
)
|
import sys
import sublime
import sublime_plugin
import subprocess
import json
from os import path, name
__file__ = path.normpath(path.abspath(__file__))
__path__ = path.dirname(__file__)
libs_path = path.join(__path__, 'libs')
csscomb_path = path.join(libs_path, 'call_string.php')
is_python3 = sys.version_info[0] > 2
def to_unicode_or_bust(obj, encoding='utf-8'):
    # Python 2 only: decode a byte string to unicode, passing anything
    # else through unchanged.  Relies on the py2-only 'basestring' and
    # 'unicode' builtins, so it must never be called when is_python3 is
    # true (CssSorter.run guards this).
    if isinstance(obj, basestring):
        if not isinstance(obj, unicode):
            obj = unicode(obj, encoding)
    return obj
class CssSorter(sublime_plugin.TextCommand):
def __init__(self, view):
    self.view = view
    # On Windows, prepare STARTUPINFO so the php subprocess does not
    # flash a console window; None elsewhere.
    self.startupinfo = None
    self.error = False
    if name == 'nt':
        self.startupinfo = subprocess.STARTUPINFO()
        self.startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
        self.startupinfo.wShowWindow = subprocess.SW_HIDE
def run(self, edit):
    """Sort the CSS in every selection (or the whole file) through the
    bundled PHP CSScomb script and replace the text in place."""
    self.check_php_on_path()
    self.sortorder = False
    self.order_settings = sublime.load_settings('CSScomb.sublime-settings')
    if self.order_settings.has('custom_sort_order') and self.order_settings.get('custom_sort_order') is True:
        self.sortorder = json.dumps(self.order_settings.get('sort_order'))
        sublime.status_message('Sorting with custom sort order...')
    else:
        self.sortorder = ''
    selections = self.get_selections()
    for sel in selections:
        selbody = self.view.substr(sel)
        if is_python3:
            selbody = str(selbody)
        else:
            selbody = selbody.encode('utf-8')
        myprocess = subprocess.Popen(['php', csscomb_path, selbody, self.sortorder], shell=False, stdout=subprocess.PIPE, startupinfo=self.startupinfo)
        (sout, serr) = myprocess.communicate()
        myprocess.wait()
        # NOTE(review): stderr is not piped, so serr is always None here;
        # kept for safety should stderr capture be enabled later.
        if serr:
            # BUG FIX: previously showed self.status, an attribute that
            # was never set, raising AttributeError on the error path.
            sublime.error_message(serr if isinstance(serr, str) else serr.decode('utf-8', 'replace'))
            return
        elif sout is None:
            sublime.error_message('There was an error sorting CSS.')
            return
        if is_python3:
            result = str(sout, encoding='utf-8')
        else:
            result = to_unicode_or_bust(sout)
        self.view.replace(edit, sel, result)
    sublime.status_message('Successfully sorted')
def get_selections(self):
    selections = self.view.sel()
    # check if the user has any actual selections
    has_selections = False
    for region in selections:
        if region.empty() is False:
            has_selections = True
    # if not, add the entire file as a selection
    if not has_selections:
        full_region = sublime.Region(0, self.view.size())
        selections.add(full_region)
    return selections
def check_php_on_path(self):
    """Probe for a usable 'php' executable; pop an error dialog if absent."""
    try:
        subprocess.call(['php', '-v'], shell=False, startupinfo=self.startupinfo)
    except OSError:
        # BUG FIX: message read "Unable find php.exe" -- ungrammatical and
        # Windows-specific although this runs on every platform.
        sublime.error_message('Unable to find php. Make sure it is available in your PATH.')
        return
|
import os, sys
if __name__ == '__main__':
execfile(os.path.join(sys.path[0], 'framework.py'))
from Products.UWOshOIE.tests.uwoshoietestcase import UWOshOIETestCase
class TestWorkflowsInstalled(UWOshOIETestCase):
    """Test all workflows"""
    def afterSetUp(self):
        # grab the workflow under test from the portal's workflow tool
        self.workflow = self.portal.portal_workflow['OIEStudentApplicationWorkflow']
    def test_added_permissions(self):
        # every permission the OIE application workflow is expected to manage
        permissions = [ 'list',
            'Modify portal content',
            'View',
            'Access contents information',
            'UWOshOIE: Review OIE Application',
            'UWOshOIE: Modify revisable fields',
            'UWOshOIE: Modify Financial Aid fields',
            'UWOshOIE: Modify Office Use Only fields',
            'UWOshOIE: Modify normal fields'
            ]
        for permission in permissions:
            # failUnless is the pre-unittest-2.7 spelling of assertTrue
            self.failUnless(permission in self.workflow.permissions)
def test_suite():
    """Build the unittest suite for this module (Zope test-runner hook)."""
    from unittest import TestSuite, makeSuite
    tests = TestSuite()
    tests.addTest(makeSuite(TestWorkflowsInstalled))
    return tests
if __name__ == '__main__':
framework()
|
import os, sys, getopt
import threading, signal, time
import confluent_kafka
class Producer(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
def run(self):
try:
sent = 0
if not debug:
producer = confluent_kafka.Producer(**conf)
print "conf:", conf, "topic:", topic,
elif logfile:
l = open(logfile, 'w+', 4000)
#a = time.time()
if not fname:
print("log2kafka is reading input from stdin..")
while 1:
_lines = b''
n_lines = 0
for _line in sys.stdin.readlines(8192): #stdin not feeding EOF, must has a size hint
if not debug:
if not multi_lines:
try:
producer.produce(topic, value=_line, callback=delivery_callback)
except BufferError as e:
#producer queue is full,
#just ignore new incoming messages
pass
except Exception as e:
if debug:
print(e)
else:
_lines += _line
n_lines += 1
if n_lines > max_lines - 1:
try:
producer.produce(topic, value=_lines, callback=delivery_callback)
except BufferError as e:
#producer queue is full,
#just ignore new incoming messages
pass
except Exception as e:
if debug:
print(e)
_lines = b''
n_lines = 0
else:
if logfile:
_lstat = os.fstat(l.fileno())
if _lstat.st_nlink == 0:
print "output file deleted, try to re-open it..",
l = open(logfile, 'w+', 4000)
elif _lstst.st_size < l.tell():
print "output file truncated, try to re-seek it..",
l.seek(0, 2)
l.write(_line)
else:
print "[%d] %s" % (sent+1, _line),
sent += 1
if not debug and multi_lines and n_lines > 0:
try:
producer.produce(topic, value=_lines, callback=delivery_callback)
except BufferError as e:
#producer queue is full,
#just ignore new incoming messages
pass
except Exception as e:
if debug:
print(e)
if not debug:
producer.flush()
else:
print("log2kafka is reading input from "+fname+"..")
while 1:
try:
time.sleep(2)
f = open(fname, 'r')
if f:
f.seek(0, 2)
sent = 0
while 1:
time.sleep(0.333) #sleep 333ms, 3 times per second
_flines = b''
n_flines = 0
if logfile:
_lstat = os.fstat(l.fileno())
if _lstat.st_nlink == 0:
print "output file deleted, try to re-open it..",
l = open(logfile, 'w+', 4000)
elif _lstst.st_size < l.tell():
print "output file truncated, try to re-seek it..",
l.seek(0, 2)
_fstat = os.fstat(f.fileno())
if _fstat.st_nlink == 0:
print "input file deleted, try to re-open it..",
f = open(fname, 'r')
if f:
f.seek(0, 2)
sent = 0
elif _fstat.st_size < f.tell():
print "input file truncated, try to re-seek it..",
f.seek(0, 2)
sent = 0
else:
#file has EOF, it's ok not having size hint
for _fline in f.readlines():
if not debug:
if not multi_lines:
try:
producer.produce(topic, value=_fline, callback=delivery_callback)
except BufferError as e:
#producer queue is full,
#just ignore new incoming messages
pass
except Exception as e:
if debug:
print(e)
else:
_flines += _fline
n_flines += 1
if n_flines > max_lines - 1:
try:
producer.produce(topic, value=_flines, callback=delivery_callback)
_flines = b''
n_flines = 0
except BufferError as e:
pass
except Exception as e:
if debug:
print(e)
else:
if logfile:
l.write(_fline)
else:
print"[%d] %s" % (sent+1, _fline),
sent += 1
if not debug and multi_lines and n_flines > 0:
try:
producer.produce(topic, value=_flines, callback=delivery_callback)
except BufferError as e:
pass
except Exception as e:
if debug:
print(e)
if not debug:
producer.flush()
except Exception as e:
if debug:
print(e)
except Exception as e:
if debug:
print(e)
finally:
#b = time.time()
#print "starting producer at:", a, ", ending producer at:", b, ",sending duration:", b-a, "secs"
#print "sent", sent, "messages at", sent/(b-a), "messages/sec"
if not debug:
producer.flush()
elif logfile and l:
l.close()
if f:
f.close()
def delivery_callback(err, msg):
	# Kafka delivery report callback; delivery results are ignored.
	pass
def main():
	# Start the producer as a daemon thread and idle forever; the signal
	# handlers installed in __main__ terminate the process.
	threads = [
		Producer(),
	]
	for t in threads:
		t.daemon = True
		t.start()
	while 1:
		time.sleep(100)
def sigint_handler(signum, frame):
	# SIGINT/SIGTERM handler: announce and exit (Python 2 print statement).
	print 'log2kafka.py is interrupted by Ctrl-C or Kill',
	sys.exit()
if __name__ == "__main__":
opts, args = getopt.getopt(sys.argv[1:], "cmudvho:t:f:l:n:b:q:k:")
# Producer configuration
# See https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md
daemon = False
debug = False
conf = { 'queue.buffering.max.messages': 100000,
'queue.buffering.max.kbytes' : 100000,
'queue.buffering.max.ms' : 500,
'max.in.flight.requests.per.connection' : 1000,
'batch.num.messages': 50,
'compression.type': 'none' }
#'default.topic.config': {'acks': 0}}
topic = b''
fname = b''
logfile = b''
multi_lines = False
max_lines = 10
for o, a in opts:
if o == "-o":
for item in a.split(';'):
if not item:
continue
_name, _value = item.split('=')
conf[_name] = _value
elif o == "-t":
topic = a
elif o == "-f":
fname = a
elif o == "-l":
logfile = a
elif o == "-u":
debug = True
elif o == "-d":
daemon = True
elif o == "-m":
multi_lines = True
elif o == "-n":
max_lines = int(a)
elif o == "-b":
conf['batch.num.messages'] = int(a)
elif o == "-q":
conf['queue.buffering.max.messages'] = int(a)
elif o == "-k":
conf['queue.buffering.max.kbytes'] = int(a)
elif o == "-c":
conf['compression.type'] = 'gzip'
elif o == "-v":
print("log2kafka version 0.0.2")
sys.exit()
elif o == "-h":
print("usage:")
print("log2kafka <options>\n")
print("options:")
print("-o <kafka_params>: parameters pass to librdkafka, refer to librdkafka manual(required)")
print("-t <topic>: message topic to produce(required)")
print("-f <logfile>: logfile name to read(input)")
print("-l <logfile>: logfile name to write(output)")
print("-m: send multi lines in one kafka messages(absence:no)")
print("-n <max_lines>: max lines in one kafka message(default:10)")
print("-b <num>: batch.num.messages of kafka conf(default:50)")
print("-q <num>: queue.buffering.max.messages of kafka conf(default:100000)")
print("-k <num>: queue.buffering.max.kbytes of kafka conf(default:100000(100MB))")
print("-c: set compression.type of kafka conf to gzip(absence:none)")
print("-u: debug mode, not actually send out message")
print("-d: run as daemon")
print("-v: show version")
print("-h: this help")
print("\nexample:")
print("log2kafka.py -o 'bootstrap.servers=127.0.0.1:9092,127.0.0.2:9092;queue.buffering.max.messages=1000000'")
sys.exit()
error = False
if not conf and not debug:
print "must have a conf, use -o"
error = True
if not topic and not debug:
print "must have a topic, use -t"
error = True
if error:
sys.exit()
if daemon:
pid = os.fork()
if pid > 0:
sys.exit()
pid = os.fork()
if pid > 0:
sys.exit()
signal.signal(signal.SIGINT, sigint_handler)
signal.signal(signal.SIGTERM, sigint_handler)
main()
|
"""
Unit testing for determining paramters of the intersections of two cuboids
(C) Martin Green 2014
"""
from unittest import TestCase, skip
from cuboid_set_operation_results import *
class IntersectionVolumesTests(TestCase):
    """Checks intersection_volume() on identical, disjoint, nested and
    partially overlapping axis-aligned cuboids (given as three (lo, hi)
    interval pairs)."""
    def test_complete_intersection(self):
        self.assertAlmostEqual(intersection_volume(((0, 1), (0, 1), (0, 1)),
                                                   ((0, 1), (0, 1), (0, 1))),
                               1)
        # NOTE(review): these intervals are given high-to-low ((-9,-11),
        # (1.5,1)); the expected value 1 implies intersection_volume uses
        # absolute interval widths -- confirm against the implementation.
        self.assertAlmostEqual(intersection_volume(((4, 5), (-9, -11), (1.5, 1)),
                                                   ((4, 5), (-9, -11), (1.5, 1))),
                               1)
    def test_no_intersection(self):
        # cuboids touching on a face along each axis: zero volume
        self.assertAlmostEqual(intersection_volume(((0, 1), (0, 1), (0, 1)),
                                                   ((1, 2), (0, 1), (0, 1))),
                               0)
        self.assertAlmostEqual(intersection_volume(((0, 1), (0, 1), (0, 1)),
                                                   ((0, 1), (1, 2), (0, 1))),
                               0)
        self.assertAlmostEqual(intersection_volume(((0, 1), (0, 1), (0, 1)),
                                                   ((0, 1), (0, 1), (1, 2))),
                               0)
    def test_internal_intersection(self):
        # one cuboid fully inside the other, in either argument order
        self.assertAlmostEqual(intersection_volume(((0, 1), (0, 1), (0, 1)),
                                                   ((-1, 2), (-1, 2), (-1, 2))),
                               1)
        self.assertAlmostEqual(intersection_volume(((-1, 2), (-1, 2), (-1, 2)),
                                                   ((0, 1), (0, 1), (0, 1))),
                               1)
    def test_overlap_intersection(self):
        # half-overlaps along each axis, both argument orders
        self.assertAlmostEqual(intersection_volume(((0, 1), (0, 1), (0, 1)),
                                                   ((0.5, 2), (0, 1), (0, 1))),
                               0.5)
        self.assertAlmostEqual(intersection_volume(((0, 1), (0, 1), (0, 1)),
                                                   ((0, 1), (0.5, 2), (0, 1))),
                               0.5)
        self.assertAlmostEqual(intersection_volume(((0, 1), (0, 1), (0, 1)),
                                                   ((0, 1), (0, 1), (0.5, 2))),
                               0.5)
        self.assertAlmostEqual(intersection_volume(((0.5, 2), (0, 1), (0, 1)),
                                                   ((0, 1), (0, 1), (0, 1))),
                               0.5)
        self.assertAlmostEqual(intersection_volume(((0, 1), (0.5, 2), (0, 1)),
                                                   ((0, 1), (0, 1), (0, 1))),
                               0.5)
        self.assertAlmostEqual(intersection_volume(((0, 1), (0, 1), (0.5, 2)),
                                                   ((0, 1), (0, 1), (0, 1))),
                               0.5)
class DifferenceVolumesTests(TestCase):
    """Exercises difference_volume() (volume of A not covered by B)."""

    def _assert_volume(self, cuboid_a, cuboid_b, expected):
        # Single assertion helper keeps the case lists compact below.
        self.assertAlmostEqual(difference_volume(cuboid_a, cuboid_b), expected)

    def test_complete_difference(self):
        # Subtracting a cuboid from itself leaves nothing.
        unit = ((0, 1), (0, 1), (0, 1))
        self._assert_volume(unit, unit, 0)
        other = ((4, 5), (-9, -11), (1.5, 1))
        self._assert_volume(other, other, 0)

    def test_no_difference(self):
        # Disjoint cuboids: A keeps its full unit volume.
        unit = ((0, 1), (0, 1), (0, 1))
        for shifted in (((1, 2), (0, 1), (0, 1)),
                        ((0, 1), (1, 2), (0, 1)),
                        ((0, 1), (0, 1), (1, 2))):
            self._assert_volume(unit, shifted, 1)

    def test_internal_difference(self):
        # Inner minus outer is empty; outer (3x3x3) minus inner leaves 27 - 1.
        inner = ((0, 1), (0, 1), (0, 1))
        outer = ((-1, 2), (-1, 2), (-1, 2))
        self._assert_volume(inner, outer, 0)
        self._assert_volume(outer, inner, 26)

    def test_overlap_difference(self):
        # Half overlap: the unit cuboid loses half; the 1.5-long cuboid keeps 1.
        unit = ((0, 1), (0, 1), (0, 1))
        half_shifted = (((0.5, 2), (0, 1), (0, 1)),
                        ((0, 1), (0.5, 2), (0, 1)),
                        ((0, 1), (0, 1), (0.5, 2)))
        for shifted in half_shifted:
            self._assert_volume(unit, shifted, 0.5)
            self._assert_volume(shifted, unit, 1)
|
"""
gateway tests - Testing various methods on a Big image when renderingEngine.load() etc throws MissingPyramidException
"""
import exceptions
import unittest
import omero
import time
import gatewaytest.library as lib
class PyramidTest (lib.GTest):
    """Tests Big-image methods when the rendering engine's load() raises
    omero.ConcurrencyException (MissingPyramidException)."""

    def setUp (self):
        # Log in and cache the shared test image used by every test below.
        super(PyramidTest, self).setUp()
        self.loginAsAuthor()
        self.TESTIMG = self.getTestImage()

    def testThrowException(self):
        """ test that image._prepareRE() throws MissingPyramidException """
        image = self.TESTIMG
        # Swap in the mock factory so any created rendering engine raises
        # from load().
        image._conn.createRenderingEngine = lambda: MockRenderingEngine()
        try:
            image._prepareRE()
            self.assertTrue(False, "_prepareRE should have thrown an exception")
        except omero.ConcurrencyException, ce:
            print "Handling MissingPyramidException with backoff: %s secs" % (ce.backOff/1000)

    def testPrepareRenderingEngine(self):
        """ We need image._prepareRenderingEngine() to raise MissingPyramidException"""
        image = self.TESTIMG
        image._conn.createRenderingEngine = lambda: MockRenderingEngine()
        try:
            image._prepareRenderingEngine()
            self.assertTrue(False, "_prepareRenderingEngine() should have thrown an exception")
        except omero.ConcurrencyException, ce:
            print "Handling MissingPyramidException with backoff: %s secs" % (ce.backOff/1000)

    def testGetChannels(self):
        """ Missing Pyramid shouldn't stop us from getting Channel Info """
        image = self.TESTIMG
        image._conn.createRenderingEngine = lambda: MockRenderingEngine()
        channels = image.getChannels()
        for c in channels:
            print c.getLabel()
class MockRenderingEngine(object):
    """ Should throw on re.load() """

    def lookupPixels(self, id):
        pass

    def lookupRenderingDef(self, id):
        pass

    def loadRenderingDef(self, id):
        pass

    def resetDefaults(self):
        pass

    def getRenderingDefId(self):
        return 1

    def load(self):
        # Simulate the server-side MissingPyramidException.
        e = omero.ConcurrencyException("MOCK MissingPyramidException")
        # backOff is in milliseconds: 3 h + 20 min + 45 s
        # (the previous "3 hours" comment understated it).
        e.backOff = (3 * 60 * 60 * 1000) + (20 * 60 * 1000) + (45 * 1000)
        raise e
# Allow running this test module directly with the stdlib unittest runner.
if __name__ == '__main__':
    unittest.main()
|
import threading
import time
class ThreadRunner(threading.Thread):
def __init__(self, thread_id, divisor):
self.thread_id = thread_id
self.divisor = divisor
threading.Thread.__init__(self)
def run(self):
for i in range(1, 100):
if i % self.divisor == 0:
print "=> Thread {}: {}".format(self.thread_id, i)
time.sleep(0.2)
|
import os, sys, inspect
import sublime
# Resolve the plugin's own directory and put it on sys.path so the bundled
# `base` module can be imported regardless of how Sublime loaded this file.
BASE_PATH = os.path.abspath(os.path.dirname(__file__))
# packages_path() may be unavailable at import time; fall back to our parent.
PACKAGES_PATH = sublime.packages_path() or os.path.dirname(BASE_PATH)
sys.path += [BASE_PATH]
import base

class Grep (base.Base):
    """Engine backed entirely by the shared base implementation (no overrides)."""
    pass

# Name the loader looks up to instantiate this engine.
engine_class = Grep
|
from django.contrib import admin
from finance_game.models import Stock
from finance_game.models import StockOption
from finance_game.models import Portfolio
from finance_game.models import Wallet
from finance_game.models import Currency
from finance_game.models import HistoricStockPrice
from finance_game.models import APIKey
class WalletInline(admin.StackedInline):
    """Stacked inline so Wallet rows can be edited on a parent admin page."""
    model = Wallet
    # Show one extra blank wallet form by default.
    extra = 1
    # fieldsets = [
    # (None, {'fields': ['name']}),
    # ('Wallet details', {'fields': ['money', 'currency']}),
    # ]
class StockOptionInline(admin.StackedInline):
    """Stacked inline so StockOption rows can be edited on the Portfolio admin page.

    NOTE: this class was previously declared twice back-to-back with identical
    bodies; the redundant duplicate definition has been removed.
    """
    model = StockOption
    # Show one extra blank stock-option form by default.
    extra = 1
class PortfolioAdmin(admin.ModelAdmin):
    """Portfolio admin page with its stock options editable inline."""
    inlines = [StockOptionInline]
class UserAdmin(admin.ModelAdmin):
    """Admin form exposing name/email with inline wallets.

    NOTE(review): this class is defined but never registered with
    admin.site.register below — confirm whether that is intentional.
    """
    fields = ['name', 'email']
    inlines = [WalletInline, ]
# Register every model with the admin site; Portfolio gets its customized
# admin class, the rest use the default ModelAdmin.
admin.site.register(Stock)
admin.site.register(StockOption)
admin.site.register(Portfolio, PortfolioAdmin)
admin.site.register(Wallet)
admin.site.register(Currency)
admin.site.register(HistoricStockPrice)
admin.site.register(APIKey)
|
from Tkinter import *
from Creep import *
class Generateur:
    """Spawns waves of creeps on timers scheduled on the controller's Tk root.

    tempsVagues is the pause between waves (ms); tempsCreep is the delay
    between creeps within a wave (ms); nbrCreepsParVague is the wave size.
    """

    def __init__(self, x=0, y=0, tempsVagues=10000, tempsCreep=1000, leController=None, actif=True, partieEnCours=False, nbrCreepsParVague=5):
        self.x = x
        self.y = y
        self.tempsVagues = tempsVagues
        self.tempsCreep = tempsCreep
        # Controller owning the Tk interface; used for scheduling and spawning.
        self.leController = leController
        self.actif=actif
        self.partieEnCours = partieEnCours
        self.creepsCrees = 0
        self.nbrCreepsParVague = nbrCreepsParVague
        self.nbrVagues = 1
        self.jouer = True
        self.generer()

    def change(self): # toggles the generator's active/paused status
        if self.actif == True:
            self.actif = False
        else:
            self.actif = True
            self.generer()

    def generer(self):
        if self.jouer == True: # if the player is alive
            if self.partieEnCours == True: # if a game is in progress
                if self.actif == True:
                    self.leController.creerCreep()
                    #print "Creep cree" #debug
                    self.leController.interface.root.after(self.tempsCreep, self.generer) # one creep every tempsCreep milliseconds
                    self.creepsCrees = self.creepsCrees+1
                    if self.creepsCrees == self.nbrCreepsParVague: # enough creeps for this wave: pause until the next one
                        self.change()
                else: # reset for the next wave
                    # print "En pause" #debug
                    self.creepsCrees = 0
                    self.nbrVagues = self.nbrVagues + 1
                    self.leController.interface.root.after(self.tempsVagues, self.change)
|
# Connection settings for the LRS (Learning Record Store) xAPI endpoint.
baseURL = 'http://lrs:8080'
endpoint = 'http://lrs:8080/xapi/'
# NOTE(review): placeholder credentials hard-coded in source; real values
# should come from the environment or an untracked config file.
username = 'username'
password = 'password'
|
"""ShutIt module. See http://shutit.tk/
"""
from shutit_module import ShutItModule
class sqlite(ShutItModule):
    """ShutIt module that builds sqlite-autoconf-3080701 from source and
    installs it under /usr."""

    def is_installed(self, shutit):
        # ShutIt records finished builds under module_record/<id>/built.
        return shutit.file_exists('/root/shutit_build/module_record/' + self.module_id + '/built')

    def build(self, shutit):
        # Fetch, unpack and run the standard autoconf build sequence.
        shutit.send('mkdir -p /tmp/build/sqlite')
        shutit.send('cd /tmp/build/sqlite')
        shutit.send('curl http://www.sqlite.org/2014/sqlite-autoconf-3080701.tar.gz | gunzip -c - | tar -xf -')
        shutit.send('cd sqlite-autoconf-*')
        shutit.send('./configure --prefix=/usr')
        shutit.send('make')
        shutit.send('make install')
        return True

    #def get_config(self, shutit):
    #    return True
    #def check_ready(self, shutit):
    #    return True
    #def start(self, shutit):
    #    return True
    #def stop(self, shutit):
    #    return True
    def finalize(self, shutit):
        # Build-tree cleanup is currently disabled:
        #shutit.send('rm -rf /tmp/build/sqlite')
        return True
    #def remove(self, shutit):
    #    return True
    #def test(self, shutit):
    #    return True
def module():
    """Factory entry point ShutIt looks for; returns the configured module.

    The float is the module's unique run-order ID (must sort after its
    dependencies).
    """
    return sqlite(
        'shutit.tk.sd.sqlite.sqlite', 158844782.0027,
        description='',
        maintainer='ian.miell@gmail.com',
        depends=['shutit.tk.sd.pkg_config.pkg_config']
    )
|
import socket
import sys
port = 70
host = sys.argv[1]
filename = sys.argv[2]
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.connect((host, port))
except socket.gaierror as e:
print "Error connecting to server: %s" % e
sys.exit(1)
s.sendall(filename + "\r\n")
while True:
buf = s.recv(2048)
if not len(buf):
break
sys.stdout.write(buf)
|
# Package metadata.
__author__ = 'Olga Botvinnik'
__email__ = 'olga.botvinnik@gmail.com'
__version__ = '0.1.0'

# Re-export the single public entry point at package level.
from .visualize import wasabiplot

__all__ = ['wasabiplot']
|
import re
content = open('regex_sum_42.txt', 'r')
integers = re.findall('\d+', content.read())
sum = 0
for item in integers:
sum += int(item)
print sum
|
from django.db import models
from tech.models import Tech
class Race(models.Model):
    """A playable race linked to the technologies available to it."""
    name = models.CharField(max_length=250)
    # Many-to-many: races share techs and each race can have many techs.
    techs = models.ManyToManyField(Tech)
|
"""
Salamander ALM
Copyright (c) 2016 Djuro Drljaca
This Python module is free software; you can redistribute it and/or modify it under the terms of the
GNU General Public License as published by the Free Software Foundation; either version 2 of the
License, or (at your option) any later version.
This Python module is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Lesser General Public License along with this library. If
not, see <http://www.gnu.org/licenses/>.
"""
from database.connection import Connection
from database.database import DatabaseInterface
from database.tables.tracker_information import TrackerSelection
import datetime
from typing import List, Optional
class TrackerManagementInterface(object):
    """
    Tracker management

    Dependencies:

    - DatabaseInterface

    All read methods return "tracker information" dictionaries containing the
    items: id, project_id, short_name, full_name, description, active and
    revision_id.
    """

    def __init__(self):
        """
        Constructor is disabled!
        """
        raise RuntimeError()

    @staticmethod
    def read_all_tracker_ids(project_id: int,
                             tracker_selection=TrackerSelection.Active,
                             max_revision_id=None) -> List[int]:
        """
        Reads all tracker IDs from the database

        :param project_id: ID of the project
        :param tracker_selection: Search for active, inactive or all trackers
        :param max_revision_id: Maximum revision ID for the search ("None" for latest revision)

        :return: List of tracker IDs ("None" when no revision is available)
        """
        connection = DatabaseInterface.create_connection()
        max_revision_id = TrackerManagementInterface.__resolve_revision(connection,
                                                                        max_revision_id)

        trackers = None

        if max_revision_id is not None:
            trackers = DatabaseInterface.tables().tracker_information.read_all_tracker_ids(
                connection,
                project_id,
                tracker_selection,
                max_revision_id)

        return trackers

    @staticmethod
    def read_tracker_by_id(tracker_id: int, max_revision_id=None) -> Optional[dict]:
        """
        Reads a tracker (active or inactive) that matches the specified tracker ID

        :param tracker_id: ID of the tracker
        :param max_revision_id: Maximum revision ID for the search ("None" for latest revision)

        :return: Tracker information object or "None" if not exactly one match was found
        """
        connection = DatabaseInterface.create_connection()
        max_revision_id = TrackerManagementInterface.__resolve_revision(connection,
                                                                        max_revision_id)

        tracker = None

        if max_revision_id is not None:
            tracker = TrackerManagementInterface.__read_tracker_by_id(connection,
                                                                      tracker_id,
                                                                      max_revision_id)

        return tracker

    @staticmethod
    def read_tracker_by_short_name(short_name: str, max_revision_id=None) -> Optional[dict]:
        """
        Reads an active tracker that matches the specified short name

        :param short_name: Tracker's short name
        :param max_revision_id: Maximum revision ID for the search ("None" for latest revision)

        :return: Tracker information object or "None" if not exactly one match was found
        """
        connection = DatabaseInterface.create_connection()
        max_revision_id = TrackerManagementInterface.__resolve_revision(connection,
                                                                        max_revision_id)

        tracker = None

        if max_revision_id is not None:
            tracker = TrackerManagementInterface.__read_tracker_by_short_name(
                connection,
                short_name,
                max_revision_id)

        return tracker

    @staticmethod
    def read_trackers_by_short_name(short_name: str,
                                    max_revision_id=None) -> List[dict]:
        """
        Reads all active and inactive trackers that match the specified short name

        :param short_name: Tracker's short name
        :param max_revision_id: Maximum revision ID for the search ("None" for latest revision)

        :return: Tracker information of all trackers that match the search attribute
        """
        return TrackerManagementInterface.__read_trackers_by_attribute("short_name",
                                                                       short_name,
                                                                       max_revision_id)

    @staticmethod
    def read_tracker_by_full_name(full_name: str,
                                  max_revision_id=None) -> Optional[dict]:
        """
        Reads an active tracker that matches the specified full name

        :param full_name: Tracker's full name
        :param max_revision_id: Maximum revision ID for the search ("None" for latest revision)

        :return: Tracker information object or "None" if not exactly one match was found
        """
        connection = DatabaseInterface.create_connection()
        max_revision_id = TrackerManagementInterface.__resolve_revision(connection,
                                                                        max_revision_id)

        tracker = None

        if max_revision_id is not None:
            tracker = TrackerManagementInterface.__read_tracker_by_full_name(
                connection,
                full_name,
                max_revision_id)

        return tracker

    @staticmethod
    def read_trackers_by_full_name(full_name: str,
                                   max_revision_id=None) -> List[dict]:
        """
        Reads all active and inactive trackers that match the specified full name

        :param full_name: Tracker's full name
        :param max_revision_id: Maximum revision ID for the search ("None" for latest revision)

        :return: Tracker information of all trackers that match the search attribute
        """
        return TrackerManagementInterface.__read_trackers_by_attribute("full_name",
                                                                       full_name,
                                                                       max_revision_id)

    @staticmethod
    def create_tracker(requested_by_user: int,
                       project_id: int,
                       short_name: str,
                       full_name: str,
                       description: str) -> Optional[int]:
        """
        Creates a new tracker

        :param requested_by_user: ID of the user that requested creation of the new tracker
        :param project_id: ID of the project
        :param short_name: Tracker's short name
        :param full_name: Tracker's full name
        :param description: Tracker's description

        :return: Tracker ID of the new tracker or "None" on failure
        """
        tracker_id = None
        connection = DatabaseInterface.create_connection()

        try:
            success = connection.begin_transaction()

            # Start a new revision
            revision_id = None

            if success:
                revision_id = DatabaseInterface.tables().revision.insert_row(
                    connection,
                    datetime.datetime.utcnow(),
                    requested_by_user)

                if revision_id is None:
                    success = False

            # Create the tracker
            if success:
                tracker_id = TrackerManagementInterface.__create_tracker(connection,
                                                                         project_id,
                                                                         short_name,
                                                                         full_name,
                                                                         description,
                                                                         revision_id)

                if tracker_id is None:
                    success = False

            if success:
                connection.commit_transaction()
            else:
                connection.rollback_transaction()
        except:
            # Never leave the transaction open on an unexpected error.
            connection.rollback_transaction()
            raise

        return tracker_id

    @staticmethod
    def update_tracker_information(requested_by_user: int,
                                   tracker_to_modify: int,
                                   short_name: str,
                                   full_name: str,
                                   description: str,
                                   active: bool) -> bool:
        """
        Updates tracker's information

        :param requested_by_user: ID of the user that requested modification of the user
        :param tracker_to_modify: ID of the tracker that should be modified
        :param short_name: Tracker's new short name
        :param full_name: Tracker's new full name
        :param description: Tracker's new description
        :param active: Tracker's new state (active or inactive)

        :return: Success or failure
        """
        connection = DatabaseInterface.create_connection()

        try:
            success = connection.begin_transaction()

            # Start a new revision
            revision_id = None

            if success:
                revision_id = DatabaseInterface.tables().revision.insert_row(
                    connection,
                    datetime.datetime.utcnow(),
                    requested_by_user)

                if revision_id is None:
                    success = False

            # Both names must stay unique among active trackers (a match is
            # acceptable only when it is the tracker being modified itself).
            if success:
                tracker = TrackerManagementInterface.__read_tracker_by_short_name(
                    connection,
                    short_name,
                    revision_id)

                if (tracker is not None) and (tracker["id"] != tracker_to_modify):
                    success = False

            if success:
                tracker = TrackerManagementInterface.__read_tracker_by_full_name(
                    connection,
                    full_name,
                    revision_id)

                if (tracker is not None) and (tracker["id"] != tracker_to_modify):
                    success = False

            # Update tracker's information in the new revision
            if success:
                row_id = DatabaseInterface.tables().tracker_information.insert_row(
                    connection,
                    tracker_to_modify,
                    short_name,
                    full_name,
                    description,
                    active,
                    revision_id)

                if row_id is None:
                    success = False

            if success:
                connection.commit_transaction()
            else:
                connection.rollback_transaction()
        except:
            connection.rollback_transaction()
            raise

        return success

    @staticmethod
    def activate_tracker(requested_by_user: int, tracker_id: int) -> bool:
        """
        Activates an inactive tracker

        :param requested_by_user: ID of the user that requested modification of the user
        :param tracker_id: ID of the tracker that should be activated

        :return: Success or failure
        """
        return TrackerManagementInterface.__set_tracker_active(requested_by_user,
                                                               tracker_id,
                                                               True)

    @staticmethod
    def deactivate_tracker(requested_by_user: int, tracker_id: int) -> bool:
        """
        Deactivates an active tracker

        :param requested_by_user: ID of the user that requested modification of the user
        :param tracker_id: ID of the tracker that should be deactivated

        :return: Success or failure
        """
        return TrackerManagementInterface.__set_tracker_active(requested_by_user,
                                                               tracker_id,
                                                               False)

    @staticmethod
    def __resolve_revision(connection: Connection, max_revision_id) -> Optional[int]:
        """
        Returns the given revision ID, or the current (latest) revision ID when
        "None" was supplied

        :param connection: Database connection
        :param max_revision_id: Revision ID or "None"

        :return: Revision ID to use for the search (may still be "None")
        """
        if max_revision_id is None:
            return DatabaseInterface.tables().revision.read_current_revision_id(connection)

        return max_revision_id

    @staticmethod
    def __read_single_tracker(connection: Connection,
                              attribute: str,
                              value,
                              tracker_selection: TrackerSelection,
                              max_revision_id: int) -> Optional[dict]:
        """
        Reads the tracker whose "attribute" column matches "value"

        A tracker is returned only when exactly one row matched the search;
        this shared helper replaces three near-identical private readers that
        each re-implemented the dictionary construction.

        :param connection: Database connection
        :param attribute: Column to search by
        :param value: Value the column must match
        :param tracker_selection: Search for active, inactive or all trackers
        :param max_revision_id: Maximum revision ID for the search

        :return: Tracker information object or "None"
        """
        trackers = DatabaseInterface.tables().tracker_information.read_information(
            connection,
            attribute,
            value,
            tracker_selection,
            max_revision_id)

        if (trackers is not None) and (len(trackers) == 1):
            return TrackerManagementInterface.__parse_tracker_information(trackers[0])

        return None

    @staticmethod
    def __read_tracker_by_id(connection: Connection,
                             tracker_id: int,
                             max_revision_id: int) -> Optional[dict]:
        """
        Reads a tracker (active or inactive) that matches the specified tracker ID
        """
        return TrackerManagementInterface.__read_single_tracker(connection,
                                                                "tracker_id",
                                                                tracker_id,
                                                                TrackerSelection.All,
                                                                max_revision_id)

    @staticmethod
    def __read_tracker_by_short_name(connection: Connection,
                                     short_name: str,
                                     max_revision_id: int) -> Optional[dict]:
        """
        Reads an active tracker that matches the specified short name
        """
        return TrackerManagementInterface.__read_single_tracker(connection,
                                                                "short_name",
                                                                short_name,
                                                                TrackerSelection.Active,
                                                                max_revision_id)

    @staticmethod
    def __read_tracker_by_full_name(connection: Connection,
                                    full_name: str,
                                    max_revision_id: int) -> Optional[dict]:
        """
        Reads an active tracker that matches the specified full name
        """
        return TrackerManagementInterface.__read_single_tracker(connection,
                                                                "full_name",
                                                                full_name,
                                                                TrackerSelection.Active,
                                                                max_revision_id)

    @staticmethod
    def __read_trackers_by_attribute(attribute: str,
                                     value,
                                     max_revision_id) -> List[dict]:
        """
        Reads all active and inactive trackers whose "attribute" matches "value"

        :param attribute: Column to search by
        :param value: Value the column must match
        :param max_revision_id: Maximum revision ID ("None" for latest revision)

        :return: Tracker information of all trackers that match the search attribute
        """
        connection = DatabaseInterface.create_connection()
        max_revision_id = TrackerManagementInterface.__resolve_revision(connection,
                                                                        max_revision_id)

        trackers = list()

        if max_revision_id is not None:
            tracker_information_list = \
                DatabaseInterface.tables().tracker_information.read_information(
                    connection,
                    attribute,
                    value,
                    TrackerSelection.All,
                    max_revision_id)

            for tracker_information in tracker_information_list:
                trackers.append(TrackerManagementInterface.__parse_tracker_information(
                    tracker_information))

        return trackers

    @staticmethod
    def __set_tracker_active(requested_by_user: int,
                             tracker_id: int,
                             active: bool) -> bool:
        """
        Shared implementation of activate_tracker() / deactivate_tracker()

        Fixes: the previous implementations returned the raw insert_row()
        result (a row ID or "None") instead of the declared bool, and never
        checked it for "None" the way the other writers do.

        :param requested_by_user: ID of the user that requested the change
        :param tracker_id: ID of the tracker to change
        :param active: New state (True = activate, False = deactivate)

        :return: Success or failure
        """
        connection = DatabaseInterface.create_connection()

        try:
            success = connection.begin_transaction()

            # Start a new revision
            revision_id = None

            if success:
                revision_id = DatabaseInterface.tables().revision.insert_row(
                    connection,
                    datetime.datetime.utcnow(),
                    requested_by_user)

                if revision_id is None:
                    success = False

            # Read the tracker and reject a no-op state change
            tracker = None

            if success:
                tracker = TrackerManagementInterface.__read_tracker_by_id(connection,
                                                                          tracker_id,
                                                                          revision_id)

                if tracker is None:
                    success = False
                elif tracker["active"] == active:
                    # Error, the tracker is already in the requested state
                    success = False

            # Write the new state in the new revision
            if success:
                row_id = DatabaseInterface.tables().tracker_information.insert_row(
                    connection,
                    tracker_id,
                    tracker["short_name"],
                    tracker["full_name"],
                    tracker["description"],
                    active,
                    revision_id)

                if row_id is None:
                    success = False

            if success:
                connection.commit_transaction()
            else:
                connection.rollback_transaction()
        except:
            connection.rollback_transaction()
            raise

        return success

    @staticmethod
    def __create_tracker(connection: Connection,
                         project_id: int,
                         short_name: str,
                         full_name: str,
                         description: str,
                         revision_id: int) -> Optional[int]:
        """
        Creates a new tracker

        :param connection: Database connection
        :param project_id: ID of the project
        :param short_name: Tracker's short name
        :param full_name: Tracker's full name
        :param description: Tracker's description
        :param revision_id: Revision ID

        :return: Tracker ID of the newly created tracker or "None" on failure
        """
        # Both names must be unique among the currently active trackers
        if TrackerManagementInterface.__read_tracker_by_short_name(connection,
                                                                   short_name,
                                                                   revision_id) is not None:
            return None

        if TrackerManagementInterface.__read_tracker_by_full_name(connection,
                                                                  full_name,
                                                                  revision_id) is not None:
            return None

        # Create the tracker in the new revision
        tracker_id = DatabaseInterface.tables().tracker.insert_row(connection, project_id)

        if tracker_id is None:
            return None

        # Add tracker information to the tracker
        tracker_information_id = DatabaseInterface.tables().tracker_information.insert_row(
            connection,
            tracker_id,
            short_name,
            full_name,
            description,
            True,
            revision_id)

        if tracker_information_id is None:
            return None

        return tracker_id

    @staticmethod
    def __parse_tracker_information(raw_tracker_information: dict) -> dict:
        """
        Parse raw tracker information object and convert it to a tracker information object

        :param raw_tracker_information: Raw row dictionary (contains items:
                                        project_id, tracker_id, short_name,
                                        full_name, description, active,
                                        revision_id)

        :return: Tracker information object ("tracker_id" exposed as "id")
        """
        return {"id": raw_tracker_information["tracker_id"],
                "project_id": raw_tracker_information["project_id"],
                "short_name": raw_tracker_information["short_name"],
                "full_name": raw_tracker_information["full_name"],
                "description": raw_tracker_information["description"],
                "active": raw_tracker_information["active"],
                "revision_id": raw_tracker_information["revision_id"]}
|
'''
By Jeremy Mill
jeremymill@gmail.com
github.com/livinginsyn
licensed until the GPL V2
'''
import subprocess
import sys
import os
class Wifi_Manager:
    """Queries wpa_cli for the currently associated wifi network and holds
    the trust-policy settings."""

    def __init__(self):
        # Safe defaults: with no trusted networks, trusted_time can be 0.
        # Bug fix: these were previously assigned to local variables, so the
        # instance never actually got the attributes.
        self.trusted_networks = []
        self.trusted_time = 0
        self.no_trust_time = 300

    def current_network(self):
        """Return the SSID from `wpa_cli status` output, or None when no
        "ssid=" line is present (e.g. not associated)."""
        info = self.get_wpa_cli_status()
        # Bug fix: test find() for -1 BEFORE adding the offset; the old code
        # added 6 first, so the not-found branch could never trigger.
        marker_index = info.find("\nssid")
        if marker_index == -1:
            return None
        start_index = marker_index + 6  # skip past "\nssid="
        stop_index = info.find("\n", start_index)
        if stop_index == -1:
            # ssid is the last line and has no trailing newline; the old
            # slice [start:-1] silently dropped the final character.
            stop_index = len(info)
        return info[start_index:stop_index]

    def get_wpa_cli_status(self):
        """Return the raw output of `wpa_cli status`.

        NOTE(review): under Python 3 check_output returns bytes while the
        parsing above assumes text — confirm the target interpreter.
        """
        info = subprocess.check_output(['wpa_cli', 'status'])
        return info
|
import inkex
import re
class C(inkex.Effect):
    """Inkscape extension that resizes the document canvas to the requested
    width/height and optionally recolors the page background/border."""

    def __init__(self):
        inkex.Effect.__init__(self)
        # Options supplied by the companion .inx UI definition.
        self.OptionParser.add_option("-w", "--width", action="store", type="int", dest="generic_width", default="1920", help="Custom width")
        self.OptionParser.add_option("-z", "--height", action="store", type="int", dest="generic_height", default="1080", help="Custom height")
        self.OptionParser.add_option("-u", "--unit", action="store", type="string", dest="generic_unit", default="px", help="SVG Unit")
        self.OptionParser.add_option("-b", "--background", action="store", type="string", dest="generic_background", default="normal", help="Canvas background")
        self.OptionParser.add_option("-n", "--noborder", action="store", type="inkbool", dest="generic_noborder", default=False)
        # self.OptionParser.add_option("-l", "--layer", action="store", type="inkbool", dest="generic_layer", default=True)

    def effect(self):
        """Resize the SVG root, sync the sodipodi:namedview (units, zoom,
        view center) and apply the chosen background preset."""
        width = self.options.generic_width
        height = self.options.generic_height
        unit = self.options.generic_unit
        root = self.document.getroot()
        root.set("id", "SVGRoot")
        root.set("width", str(width) + unit)
        root.set("height", str(height) + unit)
        root.set("viewBox", "0 0 " + str(width) + " " + str(height) )
        # Create the namedview carrier element if the document lacks one.
        namedview = root.find(inkex.addNS('namedview', 'sodipodi'))
        if namedview is None:
            namedview = inkex.etree.SubElement( root, inkex.addNS('namedview', 'sodipodi') );
        namedview.set(inkex.addNS('document-units', 'inkscape'), unit)
        # Until units are supported in 'cx', etc.
        namedview.set(inkex.addNS('zoom', 'inkscape'), str(512.0/self.uutounit( width, 'px' )) )
        namedview.set(inkex.addNS('cx', 'inkscape'), str(self.uutounit( width, 'px' )/2.0 ) )
        namedview.set(inkex.addNS('cy', 'inkscape'), str(self.uutounit( height, 'px' )/2.0 ) )
        # Background presets; any other value (e.g. "normal") leaves colors alone.
        if self.options.generic_background == "white":
            namedview.set( 'pagecolor', "#ffffff" )
            namedview.set( 'bordercolor', "#666666" )
            namedview.set(inkex.addNS('pageopacity', 'inkscape'), "1.0" )
            namedview.set(inkex.addNS('pageshadow', 'inkscape'), "0" )
        if self.options.generic_background == "gray":
            namedview.set( 'pagecolor', "#808080" )
            namedview.set( 'bordercolor', "#444444" )
            namedview.set(inkex.addNS('pageopacity', 'inkscape'), "1.0" )
            namedview.set(inkex.addNS('pageshadow', 'inkscape'), "0" )
        if self.options.generic_background == "black":
            namedview.set( 'pagecolor', "#000000" )
            namedview.set( 'bordercolor', "#999999" )
            namedview.set(inkex.addNS('pageopacity', 'inkscape'), "1.0" )
            namedview.set(inkex.addNS('pageshadow', 'inkscape'), "0" )
        # Hide the page border by painting it in the page color.
        if self.options.generic_noborder:
            pagecolor = namedview.get( 'pagecolor' )
            namedview.set( 'bordercolor', pagecolor )
            namedview.set( 'borderopacity', "0" )
        # This needs more thought... we need to set "Current layer" to (root), how?
        # if self.options.generic_layer:
        # # Add layer
        # inkex.debug( "We want a layer" )
        # else:
        # # Remove layer id default document (assuming only one)
        # inkex.debug( "We don't want a layer" )
        # layer_node = self.current_layer
        # if layer_node is not None:
        # inkex.debug( "We have layer" )
        # root.remove(layer_node)
        # try:
        # del namedview.attrib[ inkex.addNS('current-layer', 'inkscape') ]
        # except:
        # pass
# Instantiate and run the extension; affect() parses argv and calls effect().
c = C()
c.affect()
|
import sys
import code5 as c
from random import Random
def read_training_set(file_name):
    """Read a whitespace-separated training file.

    Each line becomes a tuple of its whitespace-separated fields.

    :param file_name: path of the training-set file
    :return: list of tuples, one per line
    """
    with open(file_name, 'r') as ts_file:
        # Build an explicit list (under Python 3 the previous bare map()
        # returned a lazy iterator rather than the intended list).
        return [tuple(line.strip().split()) for line in ts_file]
def train_and_predict(activation_func="th"):
    """Train a NeuralNet on the module-level training set, then classify testSeq.

    Uses a fixed RNG seed (0) so repeated runs are reproducible. Reads the
    module-level globals ``big_training_set`` and ``testSeq``.

    :param activation_func: activation name forwarded to c.NeuralNet
                            ("th" by default; also called with "logistic" below)
    """
    rand = Random(0)
    nn = c.NeuralNet(rand, activation_func)
    sys.stdout.write('Train: ')
    nn.train(big_training_set, 10)
    sys.stdout.write('done\n')
    sys.stdout.write('Predict: ')
    predictedClass = nn.predict(testSeq)
    sys.stdout.write('done\n')
    print("Test prediction: %s" % predictedClass)
# Load the shared training data once at import time.
sys.stdout.write('Read training set: ')
big_training_set = read_training_set("SecondStructureTrainData.txt")
sys.stdout.write('done\n')
# Short test sequence to classify, then run once per activation function.
testSeq = 'DLLSA'
train_and_predict()
train_and_predict("logistic")
|
from threading import Thread
from threading import Lock
import Queue
from ftplib import FTP
from utils import config
from console import thread_manager_processes
from console import threads_map_key
import os
import uuid
import time
# Set to 1 by the main thread to tell worker threads to stop (read in run()).
exitFlag = 0
class LayerDownloadThread(Thread):
    """Worker thread that pulls layer (file) names from the shared work queue
    and downloads each one over FTP into the configured target directory."""
    # Class-level defaults; overwritten per instance / per downloaded file.
    layer_name = None
    source_name = None
    total_size = 0
    download_size = 0

    def __init__(self, source_name, product, year, day, thread_name, q, key):
        # q: shared queue of file names; key: UUID used to register this
        # thread with the console's thread manager.
        Thread.__init__(self)
        self.source_name = source_name
        self.thread_name = thread_name
        self.product = product
        self.year = year
        self.day = day
        self.config = config.Config(self.source_name)
        self.q = q
        self.key = key

    def run(self):
        # Keep pulling work until the module-level exitFlag is set.
        while not exitFlag:
            queueLock.acquire()
            if not workQueue.empty():
                self.layer_name = self.q.get()
                queueLock.release()
                # One FTP session per file: anonymous login, then change to
                # the <ftp_dir>/<product>/<year>/<day>/ directory.
                ftp = FTP(self.config.get('ftp'))
                ftp.login()
                ftp.cwd(self.config.get('ftp_dir') + self.product + '/' + self.year + '/' + self.day + '/')
                # Binary mode so ftp.size() works on the image file.
                ftp.sendcmd('TYPE i')
                self.total_size = ftp.size(self.layer_name)
                file = self.layer_name
                local_file = os.path.join(self.config.get('targetDir'), file)
                if not os.path.isfile(local_file):
                    # NOTE(review): this branch looks inverted — when the file
                    # is missing, os.stat() below raises, so the download
                    # actually happens via the except branch. Confirm intent.
                    try:
                        # NOTE(review): str + int concatenation raises
                        # TypeError here, so this try body never completes.
                        print '>>> ' + os.stat(local_file).st_size + ' <<<'
                        fileSize = os.stat(local_file).st_size
                        if fileSize < self.total_size:
                            with open(local_file, 'w') as f:
                                def callback(chunk):
                                    f.write(chunk)
                                    self.download_size += len(chunk)
                                ftp.retrbinary('RETR %s' %file, callback)
                    except:
                        # Fallback: download the whole file from scratch.
                        with open(local_file, 'w') as f:
                            def callback(chunk):
                                f.write(chunk)
                                self.download_size += len(chunk)
                            ftp.retrbinary('RETR %s' %file, callback)
                ftp.quit()
            else:
                # Queue empty: release the lock and idle briefly.
                queueLock.release()
                time.sleep(1)

    def percent_done(self):
        """Print and return download progress of the current file as a percentage."""
        print str(self.download_size) + ' / ' + str(self.total_size)
        return float(self.download_size) / float(self.total_size) * 100
# MODIS HDF tiles to fetch (product MOD13Q1, year 2014, day-of-year 001).
nameList = ["MOD13Q1.A2014001.h10v02.005.2014018100659.hdf", "MOD13Q1.A2014001.h10v03.005.2014018122614.hdf",
            "MOD13Q1.A2014001.h10v04.005.2014018095113.hdf", "MOD13Q1.A2014001.h10v05.005.2014018095025.hdf",
            "MOD13Q1.A2014001.h10v06.005.2014018090148.hdf", "MOD13Q1.A2014001.h10v07.005.2014018084024.hdf",
            "MOD13Q1.A2014001.h10v08.005.2014018090845.hdf", "MOD13Q1.A2014001.h10v09.005.2014018095633.hdf",
            "MOD13Q1.A2014001.h10v10.005.2014018083448.hdf", "MOD13Q1.A2014001.h10v11.005.2014018090632.hdf"]
# One worker thread per name below; there are more names here than files,
# so idle workers simply sleep until exitFlag is set.
threadList = ['Alpha', 'Bravo', 'Charlie', 'Delta', 'Echo', 'Foxtrot', 'Golf', 'Hotel', 'India', 'Juliet', 'Kilo']
queueLock = Lock()  # guards access to workQueue across workers
workQueue = Queue.Queue(len(nameList))
threads = []
# Start the workers first; they sleep until the queue is populated below.
for tName in threadList:
    key = uuid.uuid4()  # unique handle for the thread-manager registry
    thread = LayerDownloadThread('MODIS', 'MOD13Q1', '2014', '001', tName, workQueue, key)
    thread.start()
    # Register the worker in the shared console registry so it can be
    # inspected externally.
    if not threads_map_key in thread_manager_processes:
        thread_manager_processes[threads_map_key] = {}
    thread_manager_processes[threads_map_key][key] = thread
    print 'MGR | ' + str(thread_manager_processes[threads_map_key])
    threads.append(thread)
# Fill the queue under the lock so workers see a consistent state.
queueLock.acquire()
for word in nameList:
    workQueue.put(word)
queueLock.release()
# Busy-wait until every item has been taken off the queue.
# NOTE(review): this spins at 100% CPU; a short sleep would be kinder.
while not workQueue.empty():
    pass
exitFlag = 1  # signal workers to exit their run() loop
for t in threads:
    t.join()
print 'Exiting Main Thread - Do Some More Stuff...'
|
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
    """Adjust default values and help text of the notice date fields."""

    dependencies = [
        ('notices', '0002_auto_20150211_2015'),
    ]

    operations = [
        migrations.AlterField(
            model_name='notice',
            name='event_date',
            # Typo fixed in help_text: "Whern" -> "When" (help_text changes
            # do not affect the database schema).
            field=models.DateField(default=datetime.datetime(2015, 2, 19, 21, 36, 35, 208766), help_text='When is the event going to happen?'),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='notice',
            name='publish_date',
            field=models.DateTimeField(default=datetime.datetime(2015, 2, 19, 21, 36, 35, 208826), help_text='When do you want this published'),
            preserve_default=True,
        ),
    ]
|
import os.path
import logging
import shutil
from chimera.core.constants import (SYSTEM_CONFIG_DIRECTORY,
SYSTEM_CONFIG_DEFAULT_FILENAME,
SYSTEM_CONFIG_DEFAULT_SAMPLE,
SYSTEM_CONFIG_LOG_NAME)
logging.getLogger().setLevel(logging.DEBUG)
def init_sysconfig ():
if not os.path.exists(SYSTEM_CONFIG_DIRECTORY):
try:
logging.info("Default configuration directory not found (%s). Creating a new one." % SYSTEM_CONFIG_DIRECTORY)
os.mkdir(SYSTEM_CONFIG_DIRECTORY)
except IOError, e:
logging.error("Couldn't create default configuration directory at %s (%s)" % (SYSTEM_CONFIG_DIRECTORY, e))
if not os.path.exists(SYSTEM_CONFIG_DEFAULT_FILENAME):
logging.info("Default chimera.config not found. Creating a sample at %s." % SYSTEM_CONFIG_DEFAULT_FILENAME)
try:
shutil.copyfile(SYSTEM_CONFIG_DEFAULT_SAMPLE, SYSTEM_CONFIG_DEFAULT_FILENAME)
except IOError, e:
logging.error("Couldn't create default chimera.config at %s (%s)" % (SYSTEM_CONFIG_DEFAULT_FILENAME, e))
if not os.path.exists(SYSTEM_CONFIG_LOG_NAME):
try:
open(SYSTEM_CONFIG_LOG_NAME, 'w').close()
except IOError, e:
logging.error("Couldn't create initial log file %s (%s)" % (SYSTEM_CONFIG_LOG_NAME, e))
init_sysconfig()
|
"""
Provide the management of databases from CLI. This includes opening, renaming,
creating, and deleting of databases.
"""
import re
import os
import sys
import ast
import time
from urllib.parse import urlparse
from urllib.request import urlopen, url2pathname
import tempfile
import logging
from gprime.plug import BasePluginManager
from gprime.config import config
from gprime.constfunc import win
from gprime.db.dbconst import DBLOGNAME
from gprime.const import LOCALE as glocale
# Translation shortcut used throughout this module.
_ = glocale.translation.gettext
LOG = logging.getLogger(".clidbman")
_LOG = logging.getLogger(DBLOGNAME)
# Basis for auto-generated tree names ("Family Tree 1", "Family Tree 2", ...).
DEFAULT_TITLE = _("Family Tree")
# Per-tree files: proper name of the tree, and the metadata database.
NAME_FILE = "name.txt"
META_NAME = "meta_data.db"
def _errordialog(title, errormessage):
    """
    Show the error. A title for the error and an errormessage
    """
    message = _('ERROR: %(title)s \n %(message)s') % {
        'title': title,
        'message': errormessage,
    }
    print(message)
    sys.exit()
class CLIDbManager:
    """
    Database manager without GTK functionality, allows users to create and
    open databases
    """
    # Indexes into the tuples stored in self.current_names.
    IND_NAME = 0
    IND_PATH = 1
    IND_PATH_NAMEFILE = 2
    IND_TVAL_STR = 3
    IND_TVAL = 4
    IND_USE_ICON_BOOL = 5
    IND_STOCK_ID = 6

    ICON_NONE = 0
    ICON_RECOVERY = 1
    ICON_LOCK = 2
    ICON_OPEN = 3

    # GUI subclasses replace these with real icons; CLI has none.
    ICON_MAP = {
        ICON_NONE : None,
        ICON_RECOVERY : None,
        ICON_LOCK : None,
        ICON_OPEN : None,
    }

    # Error reporting hook; prints and exits in CLI mode.
    ERROR = _errordialog

    def __init__(self, dbstate):
        self.dbstate = dbstate
        self.msg = None

        if dbstate and dbstate.is_open():
            self.active = dbstate.db.get_save_path()
        else:
            self.active = None

        self.current_names = []
        if dbstate:
            self._populate_cli()

    def empty(self, val):
        """
        Callback that does nothing
        """
        pass

    def get_dbdir_summary(self, dirpath, name):
        """
        dirpath: full path to database
        name: proper name of family tree

        Returns dictionary of summary item.
        Should include at least, if possible:

        _("Path")
        _("Family Tree")
        _("Last accessed")
        _("Database")
        _("Locked?")

        and these details:

        _("Number of people")
        _("Version")
        _("Schema version")
        """
        dbid = "dbapi"
        dbid_path = os.path.join(dirpath, "database.txt")
        if os.path.isfile(dbid_path):
            with open(dbid_path) as file:
                dbid = file.read().strip()
        if not self.is_locked(dirpath):
            try:
                database = self.dbstate.make_database(dbid)
                database.load(dirpath, None, update=False)
                retval = database.get_summary()
                database.close(update=False)
            except Exception as msg:
                retval = {_("Unavailable"): str(msg)[:74] + "..."}
        else:
            retval = {_("Unavailable"): "locked"}
        retval.update({_("Family Tree"): name,
                       _("Path"): dirpath,
                       _("Database"): dbid,
                       _("Last accessed"): time_val(dirpath)[1],
                       _("Locked?"): self.is_locked(dirpath),
                      })
        return retval

    def print_family_tree_summaries(self, database_names=None):
        """
        Prints a detailed list of the known family trees.
        """
        print(_('Gramps Family Trees:'))
        for item in self.current_names:
            (name, dirpath, path_name, last,
             tval, enable, stock_id, backend_type, version) = item
            if (database_names is None or
                    any([(re.match("^" + dbname + "$", name) or
                          dbname == name)
                         for dbname in database_names])):
                summary = self.get_dbdir_summary(dirpath, name)
                print(_("Family Tree \"%s\":") % summary[_("Family Tree")])
                for item in sorted(summary):
                    # BUG FIX: summary keys are translated, so compare
                    # against the translated key, not the English literal.
                    if item != _("Family Tree"):
                        # translators: needed for French, ignore otherwise
                        print(_(" %(item)s: %(summary)s") % {
                            'item' : item,
                            'summary' : summary[item]})

    def family_tree_summary(self, database_names=None):
        """
        Return a list of dictionaries of the known family trees.
        """
        # make the default directory if it does not exist
        summary_list = []
        for item in self.current_names:
            (name, dirpath, path_name, last,
             tval, enable, stock_id, backend_type, version) = item
            if (database_names is None or
                    any([(re.match("^" + dbname + "$", name) or
                          dbname == name)
                         for dbname in database_names])):
                retval = self.get_dbdir_summary(dirpath, name)
                summary_list.append(retval)
        return summary_list

    def _populate_cli(self):
        """
        Get the list of current names in the database dir
        """
        # make the default directory if it does not exist
        dbdir = os.path.expanduser("./gprime-test") # config.get('database.path'))
        db_ok = make_dbdir(dbdir)

        self.current_names = []
        if db_ok:
            for dpath in os.listdir(dbdir):
                dirpath = os.path.join(dbdir, dpath)
                path_name = os.path.join(dirpath, NAME_FILE)
                try:
                    with open(os.path.join(dirpath, "database.txt")) as file:
                        backend_type = file.read()
                except (OSError, IOError):
                    # Missing/unreadable backend marker: assume the default.
                    backend_type = "dbapi"
                try:
                    with open(os.path.join(dirpath, "bdbversion.txt")) as file:
                        version = file.read()
                except (OSError, IOError):
                    version = "(0, 0, 0)"
                try:
                    version = ast.literal_eval(version)
                except (ValueError, SyntaxError):
                    # Malformed version string: fall back to "unversioned".
                    version = (0, 0, 0)
                if os.path.isfile(path_name):
                    with open(path_name, 'r', encoding='utf8') as file:
                        name = file.readline().strip()
                    (tval, last) = time_val(dirpath)
                    (enable, stock_id) = self.icon_values(
                        dirpath, self.active, self.dbstate.is_open())
                    if stock_id == 'gramps-lock':
                        last = find_locker_name(dirpath)
                    self.current_names.append(
                        (name, os.path.join(dbdir, dpath), path_name,
                         last, tval, enable, stock_id, backend_type, version))
        self.current_names.sort()

    def get_family_tree_path(self, name):
        """
        Given a name, return None if name not existing or the path to the
        database if it is a known database name.
        """
        for data in self.current_names:
            if data[0] == name:
                return data[1]
        return None

    def family_tree_list(self):
        """
        Return a list of name, dirname of the known family trees
        """
        lst = [(x[0], x[1]) for x in self.current_names]
        return lst

    def __start_cursor(self, msg):
        """
        Do needed things to start import visually, eg busy cursor
        """
        print(_('Starting Import, %s') % msg)

    def __end_cursor(self):
        """
        Set end of a busy cursor
        """
        print(_('Import finished...'))

    def create_new_db_cli(self, title=None, create_db=True, dbid=None):
        """
        Create a new database.
        """
        new_path = find_next_db_dir()

        os.mkdir(new_path)
        path_name = os.path.join(new_path, NAME_FILE)

        if title is None:
            name_list = [name[0] for name in self.current_names]
            title = find_next_db_name(name_list)

        with open(path_name, "w", encoding='utf8') as name_file:
            name_file.write(title)

        if create_db:
            # write the version number into metadata
            if dbid is None:
                dbid = "dbapi"
            newdb = self.dbstate.make_database(dbid)
            newdb.write_version(new_path)

        # BUG FIX: compute the timestamp unconditionally; the original only
        # did so inside the create_db branch, leaving tval/last undefined
        # when create_db was False.
        (tval, last) = time_val(new_path)

        # BUG FIX: append a 9-element tuple matching _populate_cli so the
        # 9-way unpacking in the summary methods does not fail. The backend
        # id goes in the backend_type slot; the version is unknown here.
        self.current_names.append((title, new_path, path_name,
                                   last, tval, False, "", dbid, (0, 0, 0)))
        return new_path, title

    def _create_new_db(self, title=None, dbid=None, edit_entry=False):
        """
        Create a new database, do extra stuff needed
        """
        return self.create_new_db_cli(title, dbid=dbid)

    def import_new_db(self, filename, user):
        """
        Attempt to import the provided file into a new database.
        A new database will only be created if an appropriate importer was
        found.

        :param filename: a fully-qualified path, filename, and
                         extension to open.

        :param user: a :class:`.cli.user.User` or :class:`.gui.user.User`
                     instance for managing user interaction.

        :returns: A tuple of (new_path, name) for the new database
                  or (None, None) if no import was performed.
        """
        pmgr = BasePluginManager.get_instance()
        # check to see if it isn't a filename directly:
        if not os.path.isfile(filename):
            # Allow URL names here; make temp file if necessary
            url = urlparse(filename)
            if url.scheme != "":
                if url.scheme == "file":
                    filename = url2pathname(filename[7:])
                else:
                    url_fp = urlopen(filename) # open URL
                    # make a temp local file:
                    ext = os.path.splitext(url.path)[1]
                    fd, filename = tempfile.mkstemp(suffix=ext)
                    # BUG FIX: urlopen() yields bytes, so open the temp file
                    # in binary mode. The stray version-writing block that
                    # referenced undefined names (name, dbase, BDBVERSFN)
                    # was removed.
                    temp_fp = os.fdopen(fd, "wb")
                    # read from URL:
                    data = url_fp.read()
                    # write locally:
                    temp_fp.write(data)
                    url_fp.close()
                    temp_fp.close()

        (name, ext) = os.path.splitext(os.path.basename(filename))
        format = ext[1:].lower()

        for plugin in pmgr.get_import_plugins():
            if format == plugin.get_extension():
                new_path, name = self._create_new_db(name, edit_entry=False)

                # Create a new database
                self.__start_cursor(_("Importing data..."))

                dbid = "dbapi" ## config.get('database.backend')
                dbase = self.dbstate.make_database(dbid)
                dbase.load(new_path, user.callback)

                import_function = plugin.get_import_function()
                import_function(dbase, filename, user)

                # finish up
                self.__end_cursor()
                dbase.close()

                return new_path, name
        return None, None

    def is_locked(self, dbpath):
        """
        Returns True if there is a lock file in the dirpath
        """
        if os.path.isfile(os.path.join(dbpath, "lock")):
            return True
        return False

    def needs_recovery(self, dbpath):
        """
        Returns True if the database in dirpath needs recovery
        """
        if os.path.isfile(os.path.join(dbpath, "need_recover")):
            return True
        return False

    def remove_database(self, dbname, user=None):
        """
        Deletes a database folder given a pattern that matches
        its proper name.
        """
        dbdir = os.path.expanduser("./gprime-test") # config.get('database.path'))
        match_list = []
        for dpath in os.listdir(dbdir):
            dirpath = os.path.join(dbdir, dpath)
            path_name = os.path.join(dirpath, NAME_FILE)
            if os.path.isfile(path_name):
                with open(path_name, 'r', encoding='utf8') as file:
                    name = file.readline().strip()
                if re.match("^" + dbname + "$", name) or dbname == name:
                    match_list.append((name, dirpath))
        if len(match_list) == 0:
            CLIDbManager.ERROR("Family tree not found",
                               "No matching family tree found: '%s'" % dbname)
        # now delete them:
        for (name, directory) in match_list:
            if user is None or user.prompt(
                    _('Remove family tree warning'),
                    _('Are you sure you want to remove '
                      'the family tree named\n"%s"?'
                     ) % name,
                    _('yes'), _('no'), default_label=_('no')):
                try:
                    for (top, dirs, files) in os.walk(directory):
                        for filename in files:
                            os.unlink(os.path.join(top, filename))
                    os.rmdir(directory)
                except (IOError, OSError) as msg:
                    CLIDbManager.ERROR(_("Could not delete Family Tree"),
                                       str(msg))

    def rename_database(self, filepath, new_text):
        """
        Renames the database by writing the new value to the name.txt file
        Returns old_name, new_name if success, None, None if no success
        """
        try:
            with open(filepath, "r", encoding='utf8') as name_file:
                old_text = name_file.read()
            with open(filepath, "w", encoding='utf8') as name_file:
                name_file.write(new_text)
        except (OSError, IOError) as msg:
            CLIDbManager.ERROR(_("Could not rename Family Tree"), str(msg))
            return None, None
        return old_text, new_text

    def break_lock(self, dbpath):
        """
        Breaks the lock on a database
        """
        if os.path.exists(os.path.join(dbpath, "lock")):
            os.unlink(os.path.join(dbpath, "lock"))

    def icon_values(self, dirpath, active, is_open):
        """
        If the directory path is the active path, then return values
        that indicate to use the icon, and which icon to use.
        """
        if os.path.isfile(os.path.join(dirpath, "need_recover")):
            return (True, self.ICON_MAP[self.ICON_RECOVERY])
        elif dirpath == active and is_open:
            return (True, self.ICON_MAP[self.ICON_OPEN])
        elif os.path.isfile(os.path.join(dirpath, "lock")):
            return (True, self.ICON_MAP[self.ICON_LOCK])
        else:
            return (False, self.ICON_MAP[self.ICON_NONE])
def make_dbdir(dbdir):
    """
    Create the default database directory, as defined by dbdir
    """
    if os.path.isdir(dbdir):
        return True
    try:
        os.makedirs(dbdir)
    except (IOError, OSError) as msg:
        LOG.error(_("\nERROR: Wrong database path in Edit Menu->Preferences.\n"
                    "Open preferences and set correct database path.\n\n"
                    "Details: Could not make database directory:\n %s\n\n"),
                  str(msg))
        return False
    return True
def find_next_db_name(name_list):
    """
    Scan the name list, looking for names that do not yet exist.
    Use the DEFAULT_TITLE as the basis for the database name.
    """
    taken = set(name_list)
    counter = 1
    while True:
        candidate = "%s %d" % (DEFAULT_TITLE, counter)
        if candidate not in taken:
            return candidate
        counter += 1
def find_next_db_dir():
    """
    Searches the default directory for the first available default
    database name. Base the name off the current time. In all actuality,
    the first should be valid.
    """
    dbdir = os.path.expanduser("./gprime-test") # config.get('database.path'))
    while True:
        candidate = os.path.join(dbdir, "%x" % int(time.time()))
        if not os.path.isdir(candidate):
            return candidate
def time_val(dirpath):
    """
    Return the last modified time of the database. We do this by looking
    at the modification time of the meta db file. If this file does not
    exist, we indicate that database as never modified.
    """
    meta = os.path.join(dirpath, META_NAME)
    if not os.path.isfile(meta):
        return (0, _("Never"))
    stat_info = os.stat(meta)
    tval = stat_info[9]
    # This gives creation date in Windows, but correct date in Linux
    if win():
        # Try to use last modified date instead in Windows
        # and check that it is later than the creation date.
        tval = max(tval, stat_info[8])
    last = time.strftime('%x %X', time.localtime(tval))
    return (tval, last)
def find_locker_name(dirpath):
    """
    Opens the lock file if it exists, reads the contexts which is "USERNAME"
    and returns the contents, with correct string before "USERNAME",
    so the message can be printed with correct locale.
    If a file is encountered with errors, we return 'Unknown'
    This data can eg be displayed in the time column of the manager
    """
    fname = os.path.join(dirpath, "lock")
    try:
        with open(fname, 'r', encoding='utf8') as ifile:
            username = ifile.read().strip()
    except (OSError, IOError, UnicodeDecodeError):
        return _("Unknown")
    # feature request 2356: avoid genitive form
    return _("Locked by %s") % username
|
import os
import sys
import types
from zope.interface import implements
from feat.common import log, decorator, fiber, manhole, mro
from feat.interface import generic, agent, protocols
from feat import applications
from feat.agencies import retrying, recipient
from feat.agents.base import (replay, requester, alert,
replier, partners, dependency, manager, )
from feat.agents.common import monitor, rpc
from feat.agents.application import feat
from feat.configure import configure
from feat.interface.agent import AgencyAgentState
@decorator.simple_function
def update_descriptor(function):
    """Decorator for agent methods that mutate the descriptor document.

    The wrapped *function* is executed through
    ``state.medium.update_descriptor`` inside a fiber; judging by the usage
    in BaseAgent.update_descriptor below, the medium calls it with the
    current descriptor as its first argument.
    """
    @replay.immutable
    def decorated(self, state, *args, **kwargs):
        # Re-wrap the target so it runs under the replay machinery, then
        # bind it to this instance so update_descriptor can invoke it.
        immfun = replay.immutable(function)
        method = types.MethodType(immfun, self, self.__class__)
        f = fiber.succeed(method, debug_depth=2, debug_call=function)
        f.add_callback(state.medium.update_descriptor, *args, **kwargs)
        return f
    return decorated
@feat.register_restorator
class BasePartner(partners.BasePartner):
    # Default partner class for agents in this module; behavior is
    # inherited unchanged from partners.BasePartner.
    pass
@feat.register_restorator
class MonitorPartner(monitor.PartnerMixin, BasePartner):
    # Partner entry representing a monitor agent watching this agent.
    type_name = "agent->monitor"
@feat.register_restorator
class HostPartner(BasePartner):
    """Partner entry representing the host agent this agent runs on."""

    type_name = "agent->host"

    def on_buried(self, agent):
        # Without its host the agent cannot keep running.
        if self.role == u"host":
            agent.info("Received host agent %s buried, committing suicide.",
                       self.recipient.key)
            agent.terminate_hard()

    def on_restarted(self, agent):
        '''
        This called also after host agent has switched shard.
        The agents which have been initialized before it happened should be
        restarted accordingly.
        '''
        # BUG FIX: get_shard_id is a method; the original compared the bound
        # method object itself to 'lobby', which is always False, so the
        # shard switch never happened.
        if agent.get_shard_id() == 'lobby':
            return agent.switch_shard(self.recipient.shard)
class Partners(partners.Partners):
    # Declare the standard relations every agent keeps track of.
    partners.has_many("monitors", "monitor_agent", MonitorPartner)
    partners.has_many("hosts", "host_agent", HostPartner)
class MetaAgent(type(replay.Replayable), type(manhole.Manhole)):
    """Metaclass merging the Replayable and Manhole metaclasses so
    BaseAgent can inherit from both; agent classes are thereby declared
    to implement IAgentFactory."""

    implements(agent.IAgentFactory)

    ### used by partnership protocol ###

    @property
    def identity_for_partners(cls):
        # Prefer an override declared on the partners class; fall back to
        # the agent's descriptor type.
        return getattr(cls.partners_class, 'identity_for_partners',
                       cls.descriptor_type)
class BaseAgent(mro.FiberMroMixin, log.Logger, log.LogProxy, replay.Replayable,
                manhole.Manhole, rpc.AgentMixin,
                dependency.AgentDependencyMixin, monitor.AgentMixin,
                alert.AgentMixin):
    """Common base class of all feat agents.

    Hooks the agent into the replay/journaling machinery and provides the
    generic lifecycle (initiate/startup/shutdown), partnership management
    and thin wrappers around the agency medium shared by every agent type.
    """

    __metaclass__ = MetaAgent

    # 'medium' is agency-side state and must not be snapshotted/replayed.
    ignored_state_keys = ['medium']

    implements(agent.IAgent, generic.ITimeProvider)

    partners_class = Partners

    application = feat

    standalone = False

    categories = {'access': agent.Access.none,
                  'address': agent.Address.none,
                  'storage': agent.Storage.none}

    # resources required to run the agent
    resources = {'epu': 1}

    def __init__(self, medium):
        manhole.Manhole.__init__(self)
        log.Logger.__init__(self, medium)
        log.LogProxy.__init__(self, medium)
        replay.Replayable.__init__(self, medium)

    @replay.immutable
    def restored(self, state):
        # Re-bind the logging mixins to the medium after a replay restore.
        log.Logger.__init__(self, state.medium)
        log.LogProxy.__init__(self, state.medium)
        replay.Replayable.restored(self)

    def init_state(self, state, medium):
        state.medium = agent.IAgencyAgent(medium)
        state.partners = self.partners_class(self)

    ### Used by gateway model ###

    @replay.immutable
    def get_agent_status(self, state):
        return state.medium.state

    ### IAgent Methods ###

    @replay.journaled
    def initiate_agent(self, state, **keywords):
        # Run 'initiate' across the MRO, then wire up existing partners.
        f = fiber.succeed()
        f.add_callback(fiber.drop_param, self.call_mro, 'initiate', **keywords)
        f.add_callback(fiber.drop_param, self._initiate_partners)
        return f

    @replay.journaled
    def startup_agent(self, state):
        return self.call_mro('startup')

    @replay.journaled
    def shutdown_agent(self, state):
        return self.call_mro('shutdown')

    @replay.journaled
    def on_agent_killed(self, state):
        return self.call_mro('on_killed')

    @replay.journaled
    def on_agent_disconnect(self, state):
        return self.call_mro('on_disconnect')

    @replay.journaled
    def on_agent_reconnect(self, state):
        return self.call_mro('on_reconnect')

    @replay.journaled
    def on_agent_configuration_change(self, state, config):
        return self.call_mro_ex('on_configuration_change',
                                dict(config=config),
                                raise_on_unconsumed=False)

    ### Methods called as a result of agency calls ###

    @replay.immutable
    def initiate(self, state):
        state.medium.register_interest(replier.Ping)

    @replay.journaled
    def shutdown(self, state):
        # Give every partner the chance to react before we go away.
        desc = self.get_descriptor()
        self.info('Agent shutdown, partners: %r', desc.partners)
        fibers = [x.call_mro('on_shutdown', agent=self)
                  for x in desc.partners]
        f = fiber.FiberList(fibers)
        return f.succeed()

    def startup(self):
        pass

    def on_killed(self):
        pass

    def on_disconnect(self):
        pass

    def on_reconnect(self):
        pass

    ### Public methods ###

    @replay.immutable
    def get_descriptor(self, state):
        '''Returns a copy of the agent descriptos.'''
        return state.medium.get_descriptor()

    @replay.immutable
    def get_configuration(self, state):
        '''Returns a copy of the agent config.'''
        return state.medium.get_configuration()

    @replay.immutable
    def get_agent_id(self, state):
        '''Returns a global unique identifier for the agent.
        Do not change when the agent is restarted.'''
        desc = state.medium.get_descriptor()
        return desc.doc_id

    @replay.immutable
    def get_hostname(self, state):
        '''Returns a hostname the agent is running on'''
        return state.medium.get_hostname()

    @replay.immutable
    def get_instance_id(self, state):
        """Returns the agent instance identifier.
        Changes when the agent is restarted.
        It's unique only for the agent."""
        desc = state.medium.get_descriptor()
        return desc.instance_id

    @replay.immutable
    def get_full_id(self, state):
        """Return a global unique identifier for this agent instance.
        It's a combination of agent_id and instance_id:
        full_id = agent_id + '/' + instance_id
        """
        desc = state.medium.get_descriptor()
        return desc.doc_id + u"/" + unicode(desc.instance_id)

    @replay.immutable
    def get_shard_id(self, state):
        '''Returns current shard identifier.'''
        return state.medium.get_descriptor().shard

    @replay.immutable
    def get_agent_type(self, state):
        return state.medium.get_descriptor().type_name

    def get_cmd_line(self, *args, **kwargs):
        # BUG FIX: NotImplemented is a constant, not an exception type;
        # raising it would fail with a TypeError. Use NotImplementedError.
        raise NotImplementedError('To be used for standalone agents!')

    @rpc.publish
    @replay.journaled
    def switch_shard(self, state, shard):
        """Move the agent to another shard: leave the old one, persist the
        new shard in the descriptor, join, and notify partners."""
        self.debug('Switching shard to %r.', shard)
        desc = state.medium.get_descriptor()
        if desc.shard == shard:
            self.debug("switch_shard(%s) called, but we are already member "
                       "of this shard, ignoring.", shard)
            return fiber.succeed()

        def save_change(desc, shard):
            desc.shard = shard

        f = fiber.Fiber()
        f.add_callback(fiber.drop_param, state.medium.leave_shard, desc.shard)
        f.add_callback(fiber.drop_param, self.update_descriptor,
                       save_change, shard)
        f.add_callback(fiber.drop_param, state.medium.join_shard, shard)
        f.add_callback(fiber.drop_param, self._fix_alert_poster, shard)
        f.add_callback(fiber.drop_param,
                       self._notify_partners_about_shard_switch)
        return f.succeed()

    @replay.immutable
    def _notify_partners_about_shard_switch(self, state):
        fibers = list()
        own = self.get_own_address()
        for partner in state.partners.all:
            fibers.append(requester.notify_restarted(
                self, partner.recipient, own, own))
        if fibers:
            return fiber.FiberList(fibers, consumeErrors=True).succeed()

    ### ITimeProvider Methods ###

    @replay.immutable
    def get_time(self, state):
        return generic.ITimeProvider(state.medium).get_time()

    ### Public Methods ###

    @rpc.publish
    @replay.journaled
    def terminate_hard(self, state):
        self.call_next(state.medium.terminate_hard)

    @rpc.publish
    @replay.journaled
    def terminate(self, state):
        self.call_next(state.medium.terminate)

    @manhole.expose()
    @replay.journaled
    def wait_for_ready(self, state):
        return fiber.wrap_defer(state.medium.wait_for_state,
                                AgencyAgentState.ready)

    @manhole.expose()
    def propose_to(self, recp, partner_role=None, our_role=None):
        return self.establish_partnership(recipient.IRecipient(recp),
                                          partner_role=partner_role,
                                          our_role=our_role)

    @replay.journaled
    def establish_partnership(self, state, recp, allocation_id=None,
                              partner_allocation_id=None,
                              partner_role=None, our_role=None,
                              substitute=None, allow_double=False,
                              max_retries=0, **options):
        """Initiate the Propose protocol towards *recp*, optionally
        substituting an existing partner."""
        f = fiber.succeed()
        found = state.partners.find(recp)
        default_role = getattr(self.partners_class, 'default_role', None)
        our_role = our_role or default_role
        if not allow_double and found:
            msg = ('establish_partnership() called for %r which is already '
                   'our partner with the class %r.' % (recp, type(found), ))
            self.debug(msg)

            if substitute:
                f.add_callback(fiber.drop_param, state.partners.remove,
                               substitute)

            f.chain(fiber.fail(partners.DoublePartnership(msg)))
            return f
        factory = retrying.RetryingProtocolFactory(requester.Propose,
                                                   max_retries=max_retries)
        f.add_callback(fiber.drop_param, self.initiate_protocol,
                       factory, recp, allocation_id,
                       partner_allocation_id,
                       our_role, partner_role, substitute, options)
        f.add_callback(fiber.call_param, "notify_finish")
        return f

    @replay.journaled
    def substitute_partner(self, state, partners_recp, recp, alloc_id):
        '''
        Establish the partnership to recp and, when it is successfull
        remove partner with recipient partners_recp.

        Use with caution: The partner which we are removing is not notified
        in any way, so he still keeps link in his description. The correct
        usage of this method requires calling it from two agents which are
        divorcing.
        '''
        partner = state.partners.find(recipient.IRecipient(partners_recp))
        if not partner:
            msg = 'subsitute_partner() did not find the partner %r' %\
                  partners_recp
            self.error(msg)
            return fiber.fail(partners.FindPartnerError(msg))
        return self.establish_partnership(recp, partner.allocation_id,
                                          alloc_id, substitute=partner)

    @manhole.expose()
    @replay.journaled
    def breakup(self, state, recp):
        '''Order the agent to break the partnership with the given
        recipient'''
        recp = recipient.IRecipient(recp)
        partner = self.find_partner(recp)
        if partner:
            return state.partners.breakup(partner)
        else:
            self.warning('We were trying to break up with agent recp %r.,'
                         'but apparently he is not our partner!.', recp)

    @replay.immutable
    def create_partner(self, state, partner_class, recp, allocation_id=None,
                       role=None, substitute=None, options=None):
        return state.partners.create(partner_class, recp, allocation_id, role,
                                     substitute, options)

    @replay.immutable
    def remove_partner(self, state, partner):
        return state.partners.remove(partner)

    @replay.mutable
    def partner_sent_notification(self, state, recp, notification_type,
                                  payload, sender):
        return state.partners.receive_notification(
            recp, notification_type, payload, sender)

    @manhole.expose()
    @replay.immutable
    def query_partners(self, state, name_or_class):
        '''Query the partners by the relation name or partner class.'''
        return state.partners.query(name_or_class)

    @manhole.expose()
    @replay.immutable
    def query_partners_with_role(self, state, name, role):
        return state.partners.query_with_role(name, role)

    @replay.immutable
    def find_partner(self, state, recp_or_agent_id):
        return state.partners.find(recp_or_agent_id)

    @replay.immutable
    def query_partner_handler(self, state, partner_type, role=None):
        return state.partners.query_handler(partner_type, role)

    @manhole.expose()
    @replay.immutable
    def get_medium(self, state):
        return state.medium

    @manhole.expose()
    @replay.immutable
    def get_own_address(self, state):
        '''Return IRecipient representing the agent.'''
        return state.medium.get_own_address()

    @replay.immutable
    def initiate_protocol(self, state, *args, **kwargs):
        return state.medium.initiate_protocol(*args, **kwargs)

    @replay.immutable
    def register_interest(self, state, *args, **kwargs):
        return state.medium.register_interest(*args, **kwargs)

    @replay.immutable
    def revoke_interest(self, state, *args, **kwargs):
        return state.medium.revoke_interest(*args, **kwargs)

    @replay.immutable
    def get_document(self, state, doc_id):
        return fiber.wrap_defer(state.medium.get_document, doc_id)

    @replay.immutable
    def delete_document(self, state, doc):
        return fiber.wrap_defer(state.medium.delete_document, doc)

    @replay.immutable
    def register_change_listener(self, state, filter_, callback, **kwargs):
        return fiber.wrap_defer(state.medium.register_change_listener,
                                filter_, callback, **kwargs)

    @replay.immutable
    def cancel_change_listener(self, state, filter_):
        state.medium.cancel_change_listener(filter_)

    @replay.immutable
    def query_view(self, state, factory, **options):
        return fiber.wrap_defer(state.medium.query_view, factory, **options)

    @replay.immutable
    def get_attachment_body(self, state, attachment):
        return fiber.wrap_defer(state.medium.get_attachment_body, attachment)

    @replay.immutable
    def save_document(self, state, doc):
        return fiber.wrap_defer(state.medium.save_document, doc)

    @replay.immutable
    def update_document(self, state, doc_or_id, *args, **kwargs):
        db = state.medium.get_database()
        return fiber.wrap_defer(db.update_document, doc_or_id, *args, **kwargs)

    @update_descriptor
    def update_descriptor(self, state, desc, method, *args, **kwargs):
        return method(desc, *args, **kwargs)

    @replay.journaled
    def discover_service(self, state, string_or_factory,
                         timeout=3, shard='lobby'):
        initiator = manager.DiscoverService(string_or_factory, timeout)
        recp = recipient.Broadcast(route=shard,
                                   protocol_id=initiator.protocol_id)

        f = fiber.succeed(initiator)
        f.add_callback(self.initiate_protocol, recp)
        # this contract will always finish in expired state as it is blindly
        # rejecting all it gets
        f.add_callback(manager.ServiceDiscoveryManager.notify_finish)
        f.add_errback(self._expire_handler)
        return f

    @replay.immutable
    def get_database(self, state):
        return state.medium.get_database()

    @replay.immutable
    def call_next(self, state, method, *args, **kwargs):
        return state.medium.call_next(method, *args, **kwargs)

    @replay.immutable
    def call_later(self, state, time_left, method, *args, **kwargs):
        return state.medium.call_later(time_left, method, *args, **kwargs)

    @replay.immutable
    def cancel_delayed_call(self, state, call_id):
        state.medium.cancel_delayed_call(call_id)

    @replay.immutable
    def observe(self, state, _method, *args, **kwargs):
        return state.medium.observe(_method, *args, **kwargs)

    @replay.immutable
    def get_tunneling_url(self, state):
        return state.medium.get_tunneling_url()

    @replay.journaled
    def add_tunneling_route(self, state, recp, url):
        state.medium.create_external_route('tunnel', recipient=recp, uri=url)

    @replay.journaled
    def remove_tunneling_route(self, state, recp, url):
        state.medium.remove_external_route('tunnel', recipient=recp, uri=url)

    ### used by model api ###

    def get_description(self):
        '''
        Override this to give an description specific for the instance of the
        agent. This will be shown in the the /agents section of the gateway.
        '''

    ### Private Methods ###

    def _expire_handler(self, fail):
        # A ProtocolFailed carries the partial result as its first argument;
        # anything else is re-raised.
        if fail.check(protocols.ProtocolFailed):
            return fail.value.args[0]
        else:
            fail.raiseException()

    @replay.journaled
    def _initiate_partners(self, state):
        desc = self.get_descriptor()
        results = [state.partners.initiate_partner(x) for x in desc.partners]
        fibers = [x for x in results if isinstance(x, fiber.Fiber)]
        f = fiber.FiberList(fibers)
        f.add_callback(fiber.drop_param,
                       state.medium.register_interest,
                       replier.PartnershipProtocol)
        f.add_callback(fiber.drop_param,
                       state.medium.register_interest,
                       replier.ProposalReceiver)
        return f.succeed()
class StandalonePartners(Partners):
    # Standalone agents present themselves to partners with this role.
    default_role = u'standalone'
class Standalone(BaseAgent):
    """Base class for agents that run in their own dedicated process."""

    partners_class = StandalonePartners

    standalone = True

    @staticmethod
    def get_cmd_line(desc):
        # Build the (command, args, env) triple used to spawn this agent
        # as a separate 'feat' process.
        python_path = ":".join(sys.path)
        path = os.environ.get("PATH", "")
        command = os.path.join(configure.bindir, 'feat')
        args = ['-X', '--agent-id', str(desc.doc_id)]
        agent = applications.lookup_agent(desc.type_name)
        # Agents outside the core 'feat' application need their application
        # module passed explicitly on the command line.
        if agent and agent.application.name != 'feat':
            app = agent.application
            args += ['--application', '.'.join([app.module, app.name])]
        env = dict(PYTHONPATH=python_path,
                   FEAT_DEBUG=log.FluLogKeeper.get_debug(), PATH=path)
        return command, args, env
|
from objects import NamedEntity
class User(NamedEntity):
    """A user account entity.

    Attributes mirror the constructor arguments; ``status`` is expected
    to be one of the values in ``STATUS``.
    """

    yaml_tag = "!User"

    # Life-cycle states a user can be in.
    STATUS = [
        'new',
        'alive',
        'dead',
    ]

    def __init__(
        self,
        name=None,
        id=None,
        password=None,
        token=None,
        status='new',
        location_id=None,
        credits=0,
        is_business=False,
    ):
        self.name = name
        self.id = id
        self.password = password
        self.token = token
        self.status = status
        self.location_id = location_id
        # BUG FIX: this was hard-coded to 0, silently discarding the
        # ``credits`` constructor argument.
        self.credits = credits
        self.is_business = is_business
        # Parent init should be called at end of __init__
        super(User, self).__init__(name, id)
|
import argparse
import gettext
import sys
import os
import wx
import socket
from subprocess import call
import math
import cmath
import time
import traceback
from math import pi, sin, cos, log, sqrt, atan2
import gettext
import __builtin__
__builtin__.__dict__['_'] = wx.GetTranslation
# Unique wx widget IDs, one per selectable op-amp circuit type.
# NOTE(review): BTN_AMPLIFICADOR_DE_CORRENTE is assigned to two radio
# buttons (rb3 and rb6) in SelAmpOpDesign below -- confirm whether rb6
# was meant to get its own ID.
BTN_AMPLIFICADOR_INVERSOR = wx.NewId()
BTN_AMPLIFICADOR_NAO_INVERSOR = wx.NewId()
BTN_AMPLIFICADOR_DE_CORRENTE = wx.NewId()
BTN_AMPLIFICADOR_DE_TRANSCONDUTANCIA = wx.NewId()
BTN_AMPLIFICADOR_INVERSOR_AC = wx.NewId()
BTN_AMPLIFICADOR_SENSIVEL__CARGA = wx.NewId()
BTN_AMPLIFICADOR_SOMADOR = wx.NewId()
class SelAmpOpDesign(wx.Dialog):
    """Modal dialog that lets the user pick an op-amp circuit type via
    radio buttons; the choice is read back with GetSelection().
    """

    def __init__(self):
        wx.Dialog.__init__(self, None, -1, _("Operational Amplifier Circuits Selection"), size=(300, 150))  # dialog initialization
        # Top-level vertical sizer holding: title row, amplifier box, buttons row.
        self.sizer1 = wx.BoxSizer(wx.VERTICAL)
        self.sizer10 = wx.BoxSizer(wx.HORIZONTAL);
        self.sizer11 = wx.StaticBoxSizer(wx.StaticBox(self, -1, "AMPLIFIERS"), wx.VERTICAL);
        # Three rows of radio buttons inside the "AMPLIFIERS" box.
        self.sizer111 = wx.BoxSizer(wx.HORIZONTAL);
        self.sizer112 = wx.BoxSizer(wx.HORIZONTAL);
        self.sizer113 = wx.BoxSizer(wx.HORIZONTAL);
        self.sizer11.Add(self.sizer111, flag=wx.ALL | wx.ALIGN_CENTER_HORIZONTAL, border=10)
        self.sizer11.Add(self.sizer112, flag=wx.ALL | wx.ALIGN_CENTER_HORIZONTAL, border=10)
        self.sizer11.Add(self.sizer113, flag=wx.ALL | wx.ALIGN_CENTER_HORIZONTAL, border=10)
        #self.sizer12 = wx.BoxSizer(wx.HORIZONTAL);
        self.sizer13 = wx.BoxSizer(wx.HORIZONTAL);
        self.staticTxtTitulo = wx.StaticText(self, -1, _("Select the circuit type"))
        self.staticTxtTitulo.SetFont(wx.Font(18, wx.SWISS, wx.NORMAL, wx.BOLD))
        self.sizer10.Add(self.staticTxtTitulo)  # add contents to the horizontal sizers
        # rb1 starts the radio group (wx.RB_GROUP); the rest join it.
        self.rb1 = wx.RadioButton(self, BTN_AMPLIFICADOR_INVERSOR, _("Inverter amplifier"), style=wx.RB_GROUP)
        self.rb2 = wx.RadioButton(self, BTN_AMPLIFICADOR_NAO_INVERSOR, _("Non inverter amplifier"))
        self.rb3 = wx.RadioButton(self, BTN_AMPLIFICADOR_DE_CORRENTE, _("Current amplifier"))
        self.rb4 = wx.RadioButton(self, BTN_AMPLIFICADOR_DE_TRANSCONDUTANCIA, _("Transcondutance amplifier"))
        self.rb5 = wx.RadioButton(self, BTN_AMPLIFICADOR_INVERSOR_AC, _("AC inverter amplifier"))
        # NOTE(review): rb6 duplicates rb3's ID and label -- confirm it
        # was not meant to be a different circuit type.
        self.rb6 = wx.RadioButton(self, BTN_AMPLIFICADOR_DE_CORRENTE, _("Current amplifier"))
        self.rb7 = wx.RadioButton(self, BTN_AMPLIFICADOR_SENSIVEL__CARGA, _("Charge amplifier"))
        self.rb8 = wx.RadioButton(self, BTN_AMPLIFICADOR_SOMADOR, _("Summing amplifier"))
        self.sizer111.Add(self.rb1, flag=wx.EXPAND | wx.ALL, border=5)  # add contents to the horizontal sizers
        self.sizer111.Add(self.rb2, flag=wx.EXPAND | wx.ALL, border=5)
        self.sizer111.Add(self.rb3, flag=wx.EXPAND | wx.ALL, border=5)
        self.sizer112.Add(self.rb4, flag=wx.EXPAND | wx.ALL, border=5)  # add contents to the horizontal sizers
        self.sizer112.Add(self.rb5, flag=wx.EXPAND | wx.ALL, border=5)
        self.sizer112.Add(self.rb6, flag=wx.EXPAND | wx.ALL, border=5)
        self.sizer113.Add(self.rb7, flag=wx.EXPAND | wx.ALL, border=5)  # add contents to the horizontal sizers
        self.sizer113.Add(self.rb8, flag=wx.EXPAND | wx.ALL, border=5)
        self.okButton = wx.Button(self, wx.ID_OK, "OK")  # OK button
        self.cancelButton = wx.Button(self, wx.ID_CANCEL, "Cancel")  # Cancel button
        self.sizer13.Add(self.okButton, flag=wx.EXPAND | wx.ALL, border=5)
        self.sizer13.Add(self.cancelButton, flag=wx.EXPAND | wx.ALL, border=5)
        self.sizer1.Add(self.sizer10, flag=wx.ALL | wx.ALIGN_CENTER_HORIZONTAL, border=10)  # add the horizontal sizers to the vertical one
        self.sizer1.Add(self.sizer11, flag=wx.ALL | wx.ALIGN_CENTER_HORIZONTAL, border=10)
        #self.sizer1.Add(self.sizer12,flag= wx.ALL | wx.ALIGN_CENTER_HORIZONTAL, border = 10)
        self.sizer1.Add(self.sizer13, flag=wx.ALL | wx.ALIGN_CENTER_HORIZONTAL, border=10)
        self.SetSizer(self.sizer1)  # attach the top-level sizer to the dialog area
        self.Fit()

    def GetSelection(self):
        """Return the BTN_* ID of the selected radio button.

        Falls through (returning None) if somehow no button is selected.
        Note rb3 and rb6 both map to BTN_AMPLIFICADOR_DE_CORRENTE.
        """
        if self.rb1.GetValue() == True:
            return BTN_AMPLIFICADOR_INVERSOR
        if self.rb2.GetValue() == True:
            return BTN_AMPLIFICADOR_NAO_INVERSOR
        if self.rb3.GetValue() == True:
            return BTN_AMPLIFICADOR_DE_CORRENTE
        if self.rb4.GetValue() == True:
            return BTN_AMPLIFICADOR_DE_TRANSCONDUTANCIA
        if self.rb5.GetValue() == True:
            return BTN_AMPLIFICADOR_INVERSOR_AC
        if self.rb6.GetValue() == True:
            return BTN_AMPLIFICADOR_DE_CORRENTE
        if self.rb7.GetValue() == True:
            return BTN_AMPLIFICADOR_SENSIVEL__CARGA
        if self.rb8.GetValue() == True:
            return BTN_AMPLIFICADOR_SOMADOR
|
from copy import copy
from poemtube.errors import InvalidRequest
def getpoem(db, id):
    """Return a copy of poem *id* from *db* with internal fields removed.

    Raises InvalidRequest (HTTP 404) when no poem with that ID exists.
    The returned dict carries the poem's public fields plus an ``id``
    key; the internal ``_id``/``_rev`` bookkeeping keys are dropped.
    """
    if id not in db.poems:
        raise InvalidRequest(
            '"%s" is not the ID of an existing poem.' % id, 404 )
    ans = copy(db.poems[id])
    ans["id"] = id
    # Robustness: tolerate documents lacking the internal keys instead of
    # raising KeyError (previously a hard ``del``).
    ans.pop("_id", None)
    ans.pop("_rev", None)
    return ans
|
"""
Holding Pen is a web interface overlay for all BibWorkflowObject's.
This area is targeted to catalogers and administrators for inspecting
and reacting to workflows executions. More importantly, allowing users to deal
with halted workflows.
For example, accepting submissions or other tasks.
"""
import os
import json
from six import text_type
from flask import (render_template, Blueprint, request, jsonify,
url_for, flash, session, send_from_directory)
from flask.ext.login import login_required
from flask.ext.breadcrumbs import default_breadcrumb_root, register_breadcrumb
from flask.ext.menu import register_menu
from invenio.base.decorators import templated, wash_arguments
from invenio.base.i18n import _
from invenio.ext.principal import permission_required
from invenio.utils.date import pretty_date
from ..models import BibWorkflowObject, Workflow, ObjectVersion
from ..registry import actions
from ..utils import (sort_bwolist, extract_data, get_action_list,
get_formatted_holdingpen_object,
get_holdingpen_objects,
get_rendered_task_results,
get_previous_next_objects)
from ..api import continue_oid_delayed, start_delayed
from ..acl import viewholdingpen
# Blueprint serving the Holding Pen UI under /admin/holdingpen.
blueprint = Blueprint('holdingpen', __name__, url_prefix="/admin/holdingpen",
                      template_folder='../templates',
                      static_folder='../static')
default_breadcrumb_root(blueprint, '.holdingpen')

# Maps each ObjectVersion state to the label and CSS context class used
# when rendering an object's status badge.
HOLDINGPEN_WORKFLOW_STATES = {
    ObjectVersion.HALTED: {'message': _('Need Action'), 'class': 'danger'},
    ObjectVersion.WAITING: {'message': _('Waiting'), 'class': 'warning'},
    ObjectVersion.ERROR: {'message': _('Error'), 'class': 'danger'},
    ObjectVersion.COMPLETED: {'message': _('Done'), 'class': 'success'},
    ObjectVersion.INITIAL: {'message': _('New'), 'class': 'info'},
    ObjectVersion.RUNNING: {'message': _('In process'), 'class': 'warning'}
}
@blueprint.route('/', methods=['GET', 'POST'])
@blueprint.route('/index', methods=['GET', 'POST'])
@login_required
@register_menu(blueprint, 'personalize.holdingpen', _('Your Pending Actions'))
@register_breadcrumb(blueprint, '.', _('Holdingpen'))
@templated('workflows/index.html')
def index():
    """
    Display main interface of Holdingpen.

    Acts as a hub for catalogers (may be removed)
    """
    # FIXME: Add user filtering
    return dict(tasks=get_action_list(get_holdingpen_objects()))
@blueprint.route('/maintable', methods=['GET', 'POST'])
@register_breadcrumb(blueprint, '.records', _('Records'))
@login_required
@permission_required(viewholdingpen.name)
@templated('workflows/maintable.html')
def maintable():
    """Display main table interface of Holdingpen."""
    bwolist = get_holdingpen_objects()
    action_list = get_action_list(bwolist)
    tags = session.get("holdingpen_tags", list())
    # An explicit ?version= argument adds the matching state name to
    # the active tag list.
    if 'version' in request.args:
        wanted_version = int(request.args.get('version'))
        for key, value in ObjectVersion.MAPPING.items():
            if value == wanted_version and key not in tags:
                tags.append(key)
    tags_to_print = [{"text": str(_(tag)), "value": tag}
                     for tag in tags if tag]
    return dict(bwolist=bwolist,
                action_list=action_list,
                tags=json.dumps(tags_to_print))
@blueprint.route('/details/<int:objectid>', methods=['GET', 'POST'])
@register_breadcrumb(blueprint, '.details', _("Record Details"))
@login_required
@permission_required(viewholdingpen.name)
def details(objectid):
    """Display info about the object."""
    from ..utils import get_workflow_info
    from invenio.ext.sqlalchemy import db
    from itertools import groupby
    of = "hd"  # output format used for the record preview
    bwobject = BibWorkflowObject.query.get(objectid)
    # Previous/next navigation uses the id list that load_table() stored
    # in the session.
    previous_object, next_object = get_previous_next_objects(
        session.get("holdingpen_current_ids"),
        objectid
    )
    formatted_data = bwobject.get_formatted_data(of)
    extracted_data = extract_data(bwobject)
    action_name = bwobject.get_action()
    if action_name:
        action = actions[action_name]
        rendered_actions = action().render(bwobject)
    else:
        rendered_actions = {}
    # Gather the object's "history": itself, its parent (if any), and
    # all siblings that share the same parent.
    if bwobject.id_parent:
        history_objects_db_request = BibWorkflowObject.query.filter(
            db.or_(BibWorkflowObject.id_parent == bwobject.id_parent,
                   BibWorkflowObject.id == bwobject.id_parent,
                   BibWorkflowObject.id == bwobject.id)).all()
    else:
        history_objects_db_request = BibWorkflowObject.query.filter(
            db.or_(BibWorkflowObject.id_parent == bwobject.id,
                   BibWorkflowObject.id == bwobject.id)).all()
    history_objects = {}
    # NOTE(review): groupby() only groups *consecutive* items and the
    # query result is not explicitly ordered by version first -- confirm
    # the ordering assumption holds.
    temp = groupby(history_objects_db_request,
                   lambda x: x.version)
    for key, value in temp:
        if key != ObjectVersion.RUNNING:
            value = list(value)
            value.sort(key=lambda x: x.modified, reverse=True)
            history_objects[key] = value
    # Flatten the per-version lists into one list for the template.
    history_objects = sum(history_objects.values(), [])
    for obj in history_objects:
        # Attach display metadata (badge class and label) per object.
        obj._class = HOLDINGPEN_WORKFLOW_STATES[obj.version]["class"]
        obj.message = HOLDINGPEN_WORKFLOW_STATES[obj.version]["message"]
    results = get_rendered_task_results(bwobject)
    workflow_definition = get_workflow_info(extracted_data['workflow_func'])
    task_history = bwobject.get_extra_data().get('_task_history', [])
    return render_template('workflows/details.html',
                           bwobject=bwobject,
                           rendered_actions=rendered_actions,
                           history_objects=history_objects,
                           bwparent=extracted_data['bwparent'],
                           info=extracted_data['info'],
                           log=extracted_data['logtext'],
                           data_preview=formatted_data,
                           workflow=extracted_data['w_metadata'],
                           task_results=results,
                           previous_object=previous_object,
                           next_object=next_object,
                           task_history=task_history,
                           workflow_definition=workflow_definition,
                           versions=ObjectVersion,
                           pretty_date=pretty_date,
                           )
@blueprint.route('/files/<int:object_id>/<path:filename>',
                 methods=['POST', 'GET'])
@login_required
@permission_required(viewholdingpen.name)
def get_file_from_task_result(object_id=None, filename=None):
    """Send the requested file to user from a workflow task result.

    Expects a certain file meta-data structure in task result:

    .. code-block:: python

        {
            "type": "Fulltext",
            "filename": "file.pdf",
            "full_path": "/path/to/file",
        }
    """
    bwobject = BibWorkflowObject.query.get(object_id)
    task_results = bwobject.get_tasks_results()
    if filename not in task_results or not task_results[filename]:
        # No matching task result: return None, as the original
        # implicit fall-through did.
        return None
    fileinfo = task_results[filename][0].get("result", dict())
    directory, actual_filename = os.path.split(fileinfo.get("full_path", ""))
    return send_from_directory(directory, actual_filename)
@blueprint.route('/restart_record', methods=['GET', 'POST'])
@login_required
@permission_required(viewholdingpen.name)
@wash_arguments({'objectid': (int, 0)})
def restart_record(objectid, start_point='continue_next'):
    """Restart the initial object in its workflow."""
    obj = BibWorkflowObject.query.get(objectid)
    # Look up the workflow this object belongs to and start it anew
    # (asynchronously) with the object's current data.
    parent_workflow = Workflow.query.filter(
        Workflow.uuid == obj.id_workflow).first()
    start_delayed(parent_workflow.name, [obj.get_data()])
    return 'Record Restarted'
@blueprint.route('/continue_record', methods=['GET', 'POST'])
@login_required
@permission_required(viewholdingpen.name)
@wash_arguments({'objectid': (int, 0)})
def continue_record(objectid):
    """Continue workflow for current object.

    Delegates to continue_oid_delayed() with start_point='continue_next'.
    """
    continue_oid_delayed(oid=objectid, start_point='continue_next')
    return 'Record continued workflow'
@blueprint.route('/restart_record_prev', methods=['GET', 'POST'])
@login_required
@permission_required(viewholdingpen.name)
@wash_arguments({'objectid': (int, 0)})
def restart_record_prev(objectid):
    """Restart the last task for current object.

    Delegates to continue_oid_delayed() with start_point="restart_task".
    """
    continue_oid_delayed(oid=objectid, start_point="restart_task")
    return 'Record restarted current task'
@blueprint.route('/delete', methods=['GET', 'POST'])
@login_required
@permission_required(viewholdingpen.name)
@wash_arguments({'objectid': (int, 0)})
def delete_from_db(objectid):
    """Delete the object from the db."""
    BibWorkflowObject.delete(objectid)
    return 'Record Deleted'
@blueprint.route('/delete_multi', methods=['GET', 'POST'])
@login_required
@permission_required(viewholdingpen.name)
@wash_arguments({'bwolist': (text_type, "")})
def delete_multi(bwolist):
    """Delete list of objects from the db."""
    from ..utils import parse_bwids
    # parse_bwids() turns the serialized id list into individual ids.
    for oid in parse_bwids(bwolist):
        delete_from_db(oid)
    return 'Records Deleted'
@blueprint.route('/resolve', methods=['GET', 'POST'])
@login_required
@permission_required(viewholdingpen.name)
@wash_arguments({'objectid': (text_type, '-1')})
def resolve_action(objectid):
    """Resolve the action taken.

    Will call the resolve() function of the specific action.
    """
    bwobject = BibWorkflowObject.query.get(int(objectid))
    action_name = bwobject.get_action()
    # Look up the registered action class and let it resolve itself.
    action_form = actions[action_name]
    res = action_form().resolve(bwobject)
    return jsonify(res)
@blueprint.route('/entry_data_preview', methods=['GET', 'POST'])
@login_required
@permission_required(viewholdingpen.name)
@wash_arguments({'objectid': (text_type, '0'),
                 'of': (text_type, None)})
def entry_data_preview(objectid, of):
    """Present the data in a human readable form or in xml code."""
    bwobject = BibWorkflowObject.query.get(int(objectid))
    if not bwobject:
        flash("No object found for %s" % (objectid,))
        return jsonify(data={})
    formatted_data = bwobject.get_formatted_data(of)
    return jsonify(data=formatted_data)
@blueprint.route('/get_context', methods=['GET', 'POST'])
@login_required
@permission_required(viewholdingpen.name)
def get_context():
    """Return a JSON structure with URL maps and actions."""
    holdingpen_urls = {
        "url_load": url_for('holdingpen.load_table'),
        "url_preview": url_for('holdingpen.entry_data_preview'),
        "url_restart_record": url_for('holdingpen.restart_record'),
        "url_restart_record_prev": url_for('holdingpen.restart_record_prev'),
        "url_continue_record": url_for('holdingpen.continue_record'),
    }
    return jsonify({'url_prefix': blueprint.url_prefix,
                    'holdingpen': holdingpen_urls})
@blueprint.route('/load_table', methods=['GET', 'POST'])
@login_required
@permission_required(viewholdingpen.name)
@templated('workflows/maintable.html')
def load_table():
    """Get JSON data for the Holdingpen table.

    Function used for the passing of JSON data to DataTables:

    1. First checks for what record version to show
    2. Then the sorting direction.
    3. Then if the user searched for something.

    :return: JSON formatted str from dict of DataTables args.
    """
    tags = session.setdefault("holdingpen_tags", list())
    if request.method == "POST":
        if request.json and "tags" in request.json:
            tags = request.json["tags"]
            session["holdingpen_tags"] = tags
        # This POST came from tags-input.
        # We return here as DataTables will call a GET here after.
        return None
    # DataTables sorting parameters, falling back to values remembered
    # in the session from a previous request.
    i_sortcol_0 = request.args.get('iSortCol_0',
                                   session.get('iSortCol_0', 0))
    s_sortdir_0 = request.args.get('sSortDir_0',
                                   session.get('sSortDir_0', None))
    # NOTE(review): the session fallback key here is 'iDisplayLength',
    # not 'iDisplayStart' -- looks like a copy-paste slip; confirm.
    session["holdingpen_iDisplayStart"] = int(request.args.get(
        'iDisplayStart', session.get('iDisplayLength', 10))
    )
    session["holdingpen_iDisplayLength"] = int(
        request.args.get('iDisplayLength', session.get('iDisplayLength', 0))
    )
    # sEcho is echoed back (incremented) so DataTables can pair
    # responses with requests.
    session["holdingpen_sEcho"] = int(
        request.args.get('sEcho', session.get('sEcho', 0))
    ) + 1
    bwobject_list = get_holdingpen_objects(tags)
    # Re-sort only when the requested column/direction differs from what
    # the session remembers; otherwise reuse the stored ordering.
    if (i_sortcol_0 and s_sortdir_0)\
            or ("holdingpen_iSortCol_0" in session
                and "holdingpen_sSortDir_0" in session):
        if i_sortcol_0:
            i_sortcol = int(str(i_sortcol_0))
        else:
            i_sortcol = session["holdingpen_iSortCol_0"]
        if not ('holdingpen_iSortCol_0' in session
                and "holdingpen_sSortDir_0" in session):
            bwobject_list = sort_bwolist(bwobject_list, i_sortcol, s_sortdir_0)
        elif i_sortcol != session['holdingpen_iSortCol_0']\
                or s_sortdir_0 != session['holdingpen_sSortDir_0']:
            bwobject_list = sort_bwolist(bwobject_list, i_sortcol, s_sortdir_0)
        else:
            bwobject_list = sort_bwolist(bwobject_list,
                                         session["holdingpen_iSortCol_0"],
                                         session["holdingpen_sSortDir_0"])
    session["holdingpen_iSortCol_0"] = i_sortcol_0
    session["holdingpen_sSortDir_0"] = s_sortdir_0
    table_data = {'aaData': [],
                  'iTotalRecords': len(bwobject_list),
                  'iTotalDisplayRecords': len(bwobject_list),
                  'sEcho': session["holdingpen_sEcho"]}
    # Add current ids in table for use by previous/next
    record_ids = [o.id for o in bwobject_list]
    session['holdingpen_current_ids'] = record_ids
    records_showing = 0
    # Only render the rows of the currently displayed page.
    display_start = session["holdingpen_iDisplayStart"]
    display_end = display_start + session["holdingpen_iDisplayLength"]
    for bwo in bwobject_list[display_start:display_end]:
        records_showing += 1
        action_name = bwo.get_action()
        action_message = bwo.get_action_message()
        if not action_message:
            action_message = ""
        preformatted = get_formatted_holdingpen_object(bwo)
        action = actions.get(action_name, None)
        mini_action = None
        if action:
            mini_action = getattr(action, "render_mini", None)
        extra_data = bwo.get_extra_data()
        record = bwo.get_data()
        # The row template expects a mapping; coerce non-dict payloads.
        if not hasattr(record, "get"):
            try:
                record = dict(record)
            except (ValueError, TypeError):
                record = {}
        # Attach display metadata (badge class and label) per object.
        bwo._class = HOLDINGPEN_WORKFLOW_STATES[bwo.version]["class"]
        bwo.message = HOLDINGPEN_WORKFLOW_STATES[bwo.version]["message"]
        row = render_template('workflows/row_formatter.html',
                              title=preformatted["title"],
                              object=bwo,
                              record=record,
                              extra_data=extra_data,
                              description=preformatted["description"],
                              action=action,
                              mini_action=mini_action,
                              action_message=action_message,
                              pretty_date=pretty_date,
                              version=ObjectVersion,
                              )
        # The rendered row is one string; split it into per-cell values.
        row = row.split("<!--sep-->")
        table_data['aaData'].append(row)
    return jsonify(table_data)
|
"""
Created 9/2/15 by Greg Griffes
"""
import time
from zilog_ZDMII_constants import *
from zilog_ZDMII import zilog_ZDMII
import Tkinter as tk
class App:
def __init__(self, master):
frame = tk.Frame(master)
frame.pack()
zdmii = zilog_ZDMII("/dev/ttyAMA0")
sw_text = tk.StringVar()
app, zdm = zdmii.get_sw_revision()
sw_text.set('SW Rev - App:'+"%d"%app+' ZDM:'+"%d"%zdm)
entry_sw = tk.Entry(master, text="Software Revision", justify=tk.CENTER, textvariable=sw_text)
entry_sw.pack()
entry_sw.focus_set
ll_text = tk.StringVar()
current_light_level = zdmii.read_integer(ZDMII_READ_LIGHT_LEVEL)
ll_text.set('Current light level = '+"%d"%current_light_level)
entry_sw = tk.Entry(master, text="Software Revision", justify=tk.CENTER, textvariable=ll_text)
entry_sw.pack()
entry_sw.focus_set
self.button = tk.Button(
frame, text="QUIT", fg="red", command=frame.quit
)
self.button.pack(side=tk.LEFT)
self.hi_there = tk.Button(frame, text="Hello", command=self.say_hi)
self.hi_there.pack(side=tk.LEFT)
def say_hi(self):
print "hi there, everyone!"
# Script entry point: build the UI and run the Tk main loop.
root = tk.Tk()
myapp = App(root)
root.mainloop()
root.destroy() # optional; see description below
|
from Plugins.Plugin import PluginDescriptor
from Screens.Screen import Screen
from Components.ActionMap import ActionMap
from Components.Label import Label
from Components.Pixmap import Pixmap
from Components.Sources.List import List
from Components.Sources.StaticText import StaticText
from Components.Ipkg import IpkgComponent
from Components.MultiContent import MultiContentEntryText, MultiContentEntryPixmapAlphaTest
from Tools.LoadPixmap import LoadPixmap
from enigma import ePixmap
from Tools.Directories import resolveFilename, SCOPE_CURRENT_PLUGIN, SCOPE_CURRENT_SKIN, SCOPE_METADIR
import os
class SoftwarePanel(Screen):
    """Enigma2 screen that checks the feed 'traffic light' status, runs an
    ipkg update and lists upgradeable packages; the green button hands
    over to the SoftwareManager UpdatePlugin.
    """

    def __init__(self, session, *args):
        Screen.__init__(self, session)
        Screen.setTitle(self, _("Software Panel"))
        # Embedded skin: traffic-light indicator on top, package list below.
        skin = """
            <screen name="SoftwarePanel" position="center,center" size="650,605" title="Software Panel">
                <widget name="a_off" pixmap="/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/pics/aoff.png" position="10,10" zPosition="1" size="36,97" alphatest="on" />
                <widget name="a_red" pixmap="/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/pics/ared.png" position="10,10" zPosition="1" size="36,97" alphatest="on" />
                <widget name="a_yellow" pixmap="/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/pics/ayellow.png" position="10,10" zPosition="1" size="36,97" alphatest="on" />
                <widget name="a_green" pixmap="/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/pics/agreen.png" position="10,10" zPosition="1" size="36,97" alphatest="on" />
                <widget name="feedstatusRED" position="60,14" size="200,30" zPosition="1" font="Regular;25" halign="left" transparent="1" />
                <widget name="feedstatusYELLOW" position="60,46" size="200,30" zPosition="1" font="Regular;25" halign="left" transparent="1" />
                <widget name="feedstatusGREEN" position="60,78" size="200,30" zPosition="1" font="Regular;25" halign="left" transparent="1" />
                <widget name="packagetext" position="180,50" size="350,30" zPosition="1" font="Regular;25" halign="right" transparent="1" />
                <widget name="packagenr" position="511,50" size="50,30" zPosition="1" font="Regular;25" halign="right" transparent="1" />
                <widget source="list" render="Listbox" position="10,120" size="630,365" scrollbarMode="showOnDemand">
                    <convert type="TemplatedMultiContent">
                        {"template": [
                            MultiContentEntryText(pos = (5, 1), size = (540, 28), font=0, flags = RT_HALIGN_LEFT, text = 0), # index 0 is the name
                            MultiContentEntryText(pos = (5, 26), size = (540, 20), font=1, flags = RT_HALIGN_LEFT, text = 2), # index 2 is the description
                            MultiContentEntryPixmapAlphaTest(pos = (545, 2), size = (48, 48), png = 4), # index 4 is the status pixmap
                            MultiContentEntryPixmapAlphaTest(pos = (5, 50), size = (610, 2), png = 5), # index 4 is the div pixmap
                        ],
                        "fonts": [gFont("Regular", 22),gFont("Regular", 14)],
                        "itemHeight": 52
                        }
                    </convert>
                </widget>
                <ePixmap pixmap="skin_default/buttons/red.png" position=" 30,570" size="35,27" alphatest="blend" />
                <widget name="key_green_pic" pixmap="skin_default/buttons/green.png" position="290,570" size="35,27" alphatest="blend" />
                <widget name="key_red" position=" 80,573" size="200,26" zPosition="1" font="Regular;22" halign="left" transparent="1" />
                <widget name="key_green" position="340,573" size="200,26" zPosition="1" font="Regular;22" halign="left" transparent="1" />
            </screen> """
        self.skin = skin
        self.list = []
        self.statuslist = []
        self["list"] = List(self.list)
        self['a_off'] = Pixmap()
        self['a_red'] = Pixmap()
        self['a_yellow'] = Pixmap()
        self['a_green'] = Pixmap()
        self['key_green_pic'] = Pixmap()
        # NOTE(review): the skin above has no widget named 'key_red_pic'
        # (the red button is a plain ePixmap) -- confirm this component
        # is intentional.
        self['key_red_pic'] = Pixmap()
        self['key_red'] = Label(_("Cancel"))
        self['key_green'] = Label(_("Update"))
        self['packagetext'] = Label(_("Updates Available:"))
        self['packagenr'] = Label("0")
        self['feedstatusRED'] = Label("< " + _("feed status"))
        self['feedstatusYELLOW'] = Label("< " + _("feed status"))
        self['feedstatusGREEN'] = Label("< " + _("feed status"))
        # Green (update) action stays hidden until updates are found.
        self['key_green'].hide()
        self['key_green_pic'].hide()
        self.update = False  # False until the package-list update ran
        self.packages = 0
        self.ipkg = IpkgComponent()
        self.ipkg.addCallback(self.ipkgCallback)
        self["actions"] = ActionMap(["OkCancelActions", "DirectionActions", "ColorActions", "SetupActions"],
                                    {
                                        "cancel": self.Exit,
                                        "green": self.Green,
                                        "red": self.Exit,
                                    }, -2)
        self.onLayoutFinish.append(self.layoutFinished)

    def Exit(self):
        """Abort any running ipkg command and close the screen."""
        self.ipkg.stop()
        self.close()

    def Green(self):
        """Launch the SoftwareManager updater if updates are available."""
        if self.packages > 0:
            from Plugins.SystemPlugins.SoftwareManager.plugin import UpdatePlugin
            self.session.open(UpdatePlugin)
        self.close()

    def layoutFinished(self):
        """Kick off the feed-status check and package list refresh."""
        self.checkTraficLight()
        self.rebuildList()

    def UpdatePackageNr(self):
        """Refresh the update counter label and green-button visibility."""
        self.packages = len(self.list)
        print self.packages
        print"packagenr" + str(self.packages)
        self["packagenr"].setText(str(self.packages))
        if self.packages == 0:
            self['key_green'].hide()
            self['key_green_pic'].hide()
        else:
            self['key_green'].show()
            self['key_green_pic'].show()

    def checkTraficLight(self):
        """Fetch the feed 'traffic light' page and show the matching lamp.

        Best-effort: on any error the 'off' lamp is shown instead.
        """
        print"checkTraficLight"
        from urllib import urlopen
        import socket
        # Hide everything first; only the detected state is re-shown.
        self['a_red'].hide()
        self['a_yellow'].hide()
        self['a_green'].hide()
        self['feedstatusRED'].hide()
        self['feedstatusYELLOW'].hide()
        self['feedstatusGREEN'].hide()
        # Use a short socket timeout so a dead feed server cannot block
        # the UI for long; the previous default is restored afterwards.
        currentTimeoutDefault = socket.getdefaulttimeout()
        socket.setdefaulttimeout(3)
        try:
            urlopenATV = "http://ampel.mynonpublic.com/Ampel/index.php"
            d = urlopen(urlopenATV)
            tmpStatus = d.read()
            # The page embeds an image name that encodes the status.
            if 'rot.png' in tmpStatus:
                self['a_off'].hide()
                self['a_red'].show()
                self['feedstatusRED'].show()
            elif 'gelb.png' in tmpStatus:
                self['a_off'].hide()
                self['a_yellow'].show()
                self['feedstatusYELLOW'].show()
            elif 'gruen.png' in tmpStatus:
                self['a_off'].hide()
                self['a_green'].show()
                self['feedstatusGREEN'].show()
        except:
            # Deliberate best-effort: any network/parse error just leaves
            # the light off.
            self['a_off'].show()
        socket.setdefaulttimeout(currentTimeoutDefault)

    def setStatus(self, status=None):
        """Replace the list contents with a single status entry
        ('update', 'error' or 'noupdate')."""
        if status:
            self.statuslist = []
            divpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/div-h.png"))
            if status == 'update':
                statuspng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/upgrade.png"))
                self.statuslist.append(( _("Package list update"), '', _("Trying to download a new updatelist. Please wait..." ),'',statuspng, divpng ))
            elif status == 'error':
                statuspng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/remove.png"))
                self.statuslist.append(( _("Error"), '', _("There was an error downloading the updatelist. Please try again." ),'',statuspng, divpng ))
            elif status == 'noupdate':
                statuspng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/installed.png"))
                self.statuslist.append(( _("Nothing to upgrade"), '', _("There are no updates available." ),'',statuspng, divpng ))
            self['list'].setList(self.statuslist)

    def rebuildList(self):
        """Start the asynchronous package-list update."""
        self.setStatus('update')
        self.ipkg.startCmd(IpkgComponent.CMD_UPDATE)

    def ipkgCallback(self, event, param):
        """Ipkg event handler: after CMD_UPDATE finishes, request the
        upgrade list; after that finishes, build the package list."""
        if event == IpkgComponent.EVENT_ERROR:
            self.setStatus('error')
        elif event == IpkgComponent.EVENT_DONE:
            if self.update == False:
                self.update = True
                self.ipkg.startCmd(IpkgComponent.CMD_UPGRADE_LIST)
            else:
                self.buildPacketList()
        pass

    def buildEntryComponent(self, name, version, description, state):
        """Build one list row tuple (name, version, description, state,
        state pixmap, divider pixmap) for the given package state."""
        divpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/div-h.png"))
        if not description:
            description = "No description available."
        if state == 'installed':
            installedpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/installed.png"))
            return((name, version, _(description), state, installedpng, divpng))
        elif state == 'upgradeable':
            upgradeablepng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/upgradeable.png"))
            return((name, version, _(description), state, upgradeablepng, divpng))
        else:
            installablepng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_PLUGIN, "SystemPlugins/SoftwareManager/installable.png"))
            return((name, version, _(description), state, installablepng, divpng))

    def buildPacketList(self):
        """Fill the list with upgradeable packages fetched by ipkg."""
        self.list = []
        fetchedList = self.ipkg.getFetchedList()
        excludeList = self.ipkg.getExcludeList()
        if len(fetchedList) > 0:
            for x in fetchedList:
                try:
                    self.list.append(self.buildEntryComponent(x[0], x[1], x[2], "upgradeable"))
                except:
                    print "[SOFTWAREPANEL] " + x[0] + " no valid architecture, ignoring !!"
            self['list'].setList(self.list)
        elif len(fetchedList) == 0:
            self.setStatus('noupdate')
        else:
            # NOTE(review): unreachable -- len() is never negative.
            self.setStatus('error')
        self.UpdatePackageNr()
|
class DummyCache(dict):
    """
    A dummy cache class that stores deserialized items of documents, to
    prevent serializers from repeating inefficient work such as database
    queries.
    """
    pass


# Shared module-level cache instance.
dummy_cache = DummyCache()
|
from django import forms
from braces.forms import UserKwargModelFormMixin
from defusedxml import DefusedXmlException
from defusedxml.cElementTree import fromstring
from feeds.models import Feed, URL_MAX_LEN
from subscriptions.models import Subscription, Category
# Upload limits and accepted formats for OPML import.
MAX_OPML_FILE_SIZE = 1024 * 1024 * 2  # 2 MiB
OPML_CONTENT_TYPES = ('application/xml', 'text/xml', 'text/x-opml', 'text/x-opml+xml', 'application/xml+opml')
OPML_FILE_EXTS = ('xml', 'opml')
class NewSubForm(UserKwargModelFormMixin, forms.Form):
    """Form for subscribing the current user to a new feed URL, with an
    optional existing or new category."""

    # NOTE(review): label=('Feed URL') -- the parentheses are a no-op;
    # possibly _('Feed URL') was intended.
    feed_url = forms.URLField(max_length=URL_MAX_LEN, label=('Feed URL'))
    existing_category = forms.ModelChoiceField(required=False, queryset=Category.objects.none())
    new_category = forms.CharField(required=False)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Limit category choices to the requesting user's own categories,
        # excluding the implicit 'Uncategorized' one.
        self.fields['existing_category'].queryset = Category.objects.filter(owner=self.user).exclude(name='Uncategorized').order_by('name')

    def clean_new_category(self):
        """Reject a new category name the user already has."""
        category = self.cleaned_data['new_category']
        if category and Category.objects.filter(owner=self.user, name=category).exists():
            raise forms.ValidationError("You already have a category named %(name)s", code="category_exists", params={'name': category})
        return category

    def clean(self):
        """Cross-field validation: resolve the feed, reject duplicate
        subscriptions and conflicting category choices."""
        if 'feed_url' not in self.cleaned_data:
            # feed_url failed its own field validation; nothing to add.
            return self.cleaned_data
        cleaned_data = super().clean()
        url = cleaned_data['feed_url']
        if Subscription.objects.filter(owner=self.user, feed__href=url).exists():
            raise forms.ValidationError("You are already subscribed to %(url)s", code="already_subscribed", params={'url': url})
        # get_feed() may return a feed whose canonical href differs from
        # the submitted URL, hence the second duplicate check.
        feed = Feed.objects.get_feed(url)
        if feed.href != url:
            if Subscription.objects.filter(owner=self.user, feed__href=feed.href).exists():
                raise forms.ValidationError("You are already subscribed to %(url)s", code="already_subscribed", params={'url': feed.href})
        cleaned_data['feed'] = feed
        existing_category = cleaned_data['existing_category']
        new_category = cleaned_data['new_category']
        if existing_category and new_category:
            raise forms.ValidationError("You can't add a new category and pick an existing one.", code="new_and_existing_categories")
        return cleaned_data
class UpdateSubscriptionForm(UserKwargModelFormMixin, forms.ModelForm):
    """Form for renaming or re-categorizing an existing subscription."""

    new_category = forms.CharField(required=False)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Restrict category choices to the requesting user's categories;
        # no empty "---------" choice.
        self.fields['category'].queryset = Category.objects.filter(owner=self.user).order_by('name')
        self.fields['category'].empty_label = None

    def clean_new_category(self):
        """Reject a new category name the user already has."""
        category = self.cleaned_data['new_category']
        if category and Category.objects.filter(owner=self.user, name=category).exists():
            raise forms.ValidationError("You already have a category named %(name)s", code="category_exists", params={'name': category})
        return category

    class Meta:
        model = Subscription
        fields = ('title', 'category')
        widgets = {'title': forms.TextInput}
class ImportOPMLForm(UserKwargModelFormMixin, forms.Form):
    """Form for importing subscriptions from an uploaded OPML file."""

    opml_file = forms.FileField(label='OPML File')
    mark_read = forms.BooleanField(required=False, label='Mark posts as read')

    def clean(self):
        """Validate size and content type, then parse the OPML and
        collect the feed outline elements into cleaned_data['feeds']."""
        if 'opml_file' not in self.cleaned_data:
            # The file field failed its own validation; nothing to add.
            return self.cleaned_data
        cleaned_data = super().clean()
        opml_file = cleaned_data['opml_file']
        if opml_file.size > MAX_OPML_FILE_SIZE:
            raise forms.ValidationError('Your OPML file was too large: %(size)s', code="too_large", params={'size': opml_file.size})
        if opml_file.content_type not in OPML_CONTENT_TYPES:
            raise forms.ValidationError('The file was not a valid OPML file: %(content_type)s',
                                        code="invalid", params={'content_type': opml_file.content_type})
        opml_contents = opml_file.read()
        try:
            # defusedxml with forbid_dtd=True guards against XML-bomb
            # style attacks on untrusted uploads.
            opml = fromstring(opml_contents, forbid_dtd=True)
            feeds = opml.findall('./body//outline[@xmlUrl]')
            if not feeds:
                raise forms.ValidationError('No feeds were found in the OPML file', code="no_feeds")
        except DefusedXmlException:
            raise forms.ValidationError('The file was not a valid OPML file', code="invalid")
        # Use SyntaxError, as ParseError doesn't work here
        except SyntaxError:
            raise forms.ValidationError('The file was not a valid OPML file', code="invalid")
        cleaned_data['feeds'] = feeds
        return cleaned_data
|
"""
main function.
"""
import sys
import socket
import time
from .common import options, excepthook, dprint, Stats, ErrorMessage, UsageError, ITIMEOUT, RETRIES
from .options import parse_args
from .util import random_init, get_socketparams, is_multicast
from .dnsparam import qc, qt
from .dnsmsg import DNSquery, DNSresponse
from .query import send_request_udp, send_request_tcp, send_request_tls, do_axfr
from .https import send_request_https
from .walk import zonewalk
def main(args):
    """Entry point: parse the command line, build the DNS query, send it
    over the selected transport (UDP/TCP/TLS/HTTPS), print the response
    and return its rcode."""
    sys.excepthook = excepthook
    random_init()
    qname, qtype, qclass = parse_args(args[1:])
    # Translate mnemonic query type/class into numeric values.
    try:
        qtype_val = qt.get_val(qtype)
    except KeyError:
        raise UsageError("ERROR: invalid query type: {}\n".format(qtype))
    try:
        qclass_val = qc.get_val(qclass)
    except KeyError:
        raise UsageError("ERROR: invalid query class: {}\n".format(qclass))
    query = DNSquery(qname, qtype_val, qclass_val)
    # Resolve the server once, up front.
    try:
        server_addr, port, family, _ = \
            get_socketparams(options["server"], options["port"],
                             options["af"], socket.SOCK_DGRAM)
    except socket.gaierror as e:
        raise ErrorMessage("bad server: %s (%s)" % (options["server"], e))
    if options["do_zonewalk"]:
        # Zone walking is a standalone mode; it exits when done.
        zonewalk(server_addr, port, family, qname, options)
        sys.exit(0)
    request = query.get_message()
    if (qtype == "AXFR") or (qtype == "IXFR" and options["use_tcp"]):
        # Zone transfers use their own transport handling and exit.
        do_axfr(query, request, server_addr, port, family)
        sys.exit(0)
    # the rest is for non AXFR queries ..
    response = None
    if options["https"]:
        # --- DNS over HTTPS ---
        if not options["have_https"]:
            raise ErrorMessage("DNS over HTTPS not supported")
        t1 = time.time()
        responsepkt = send_request_https(request, options["https_url"])
        t2 = time.time()
        if responsepkt:
            response = DNSresponse(family, query, responsepkt)
            print(";; HTTPS response from %s, %d bytes, in %.3f sec" %
                  (options["https_url"], response.msglen, (t2-t1)))
        else:
            print(";; HTTPS response failure from %s" % options["https_url"])
            return 2
    elif options["tls"]:
        # --- DNS over TLS; may fall back to TCP below if enabled ---
        t1 = time.time()
        responsepkt = send_request_tls(request, server_addr,
                                       options["tls_port"], family,
                                       hostname=options["tls_hostname"])
        t2 = time.time()
        if responsepkt:
            response = DNSresponse(family, query, responsepkt)
            print(";; TLS response from %s, %d bytes, in %.3f sec" %
                  ((server_addr, options["tls_port"]), response.msglen, (t2-t1)))
        else:
            print(";; TLS response failure from %s, %d" %
                  (server_addr, options["tls_port"]))
            if not options["tls_fallback"]:
                return 2
    elif not options["use_tcp"]:
        # --- plain UDP (the default transport) ---
        t1 = time.time()
        (responsepkt, responder_addr) = \
            send_request_udp(request, server_addr, port, family,
                             ITIMEOUT, RETRIES)
        t2 = time.time()
        if not responsepkt:
            raise ErrorMessage("No response from server")
        response = DNSresponse(family, query, responsepkt)
        if not response.tc:
            print(";; UDP response from %s, %d bytes, in %.3f sec" %
                  (responder_addr, response.msglen, (t2-t1)))
        # Warn when the reply came from a different address than queried
        # (expected for multicast or wildcard-bound servers, hence exempt).
        if not is_multicast(server_addr) and \
           server_addr != "0.0.0.0" and responder_addr[0] != server_addr:
            print("WARNING: Response from unexpected address %s" %
                  responder_addr[0])
    # Retry over TCP when explicitly requested, when the UDP answer was
    # truncated (TC bit set), or as the TLS-to-TCP fallback path.
    if options["use_tcp"] or (response and response.tc) \
       or (options["tls"] and options["tls_fallback"] and not response):
        if response and response.tc:
            if options["ignore"]:
                print(";; UDP Response was truncated.")
            else:
                print(";; UDP Response was truncated. Retrying using TCP ...")
        if options["tls"] and options["tls_fallback"] and not response:
            print(";; TLS fallback to TCP ...")
        if not options["ignore"]:
            t1 = time.time()
            responsepkt = send_request_tcp(request, server_addr, port, family)
            t2 = time.time()
            response = DNSresponse(family, query, responsepkt)
            print(";; TCP response from %s, %d bytes, in %.3f sec" %
                  ((server_addr, port), response.msglen, (t2-t1)))
    response.print_all()
    dprint("Compression pointer dereferences=%d" % Stats.compression_cnt)
    return response.rcode
|
from django.db import models
from influencetx.core import constants, utils
import logging
log = logging.getLogger(__name__)
class Legislator(models.Model):
    """A legislator, cross-referenced by several external id systems."""
    # Legislator ID from Open States API.
    openstates_leg_id = models.CharField(max_length=48, db_index=True)
    # TPJ filer id; 0/blank when unknown.  NOTE(review): presumably the same
    # FILER_ID that LegislatorIdMap maps from — confirm.
    tpj_filer_id = models.IntegerField(default=0, blank=True, db_index=True)
    # Texas Legislature id; blank when unknown.
    tx_lege_id = models.CharField(max_length=48, blank=True, db_index=True)
    name = models.CharField(max_length=45)
    first_name = models.CharField(max_length=20, blank=True)
    last_name = models.CharField(max_length=20, blank=True)
    # Single-letter party code; see constants.PARTY_CHOICES.
    party = models.CharField(max_length=1, choices=constants.PARTY_CHOICES)
    chamber = models.CharField(max_length=6, choices=constants.CHAMBER_CHOICES)
    district = models.IntegerField()
    # updated_at field from Open States API. Used to check whether legislator-detail needs update
    openstates_updated_at = models.DateTimeField()
    url = models.URLField(blank=True)
    photo_url = models.URLField(blank=True)
    @property
    def initial(self):
        """First initial used for placeholder image."""
        return self.name[0]
    @property
    def party_label(self):
        """User-friendly party label."""
        return utils.party_label(self.party)
    @property
    def chamber_label(self):
        """User-friendly label for chamber of congress."""
        return utils.chamber_label(self.chamber)
    def __str__(self):
        return f'{self.name} {self.tx_lege_id}'
class LegislatorIdMap(models.Model):
    """Maps a TPJ FILER_ID to an Open States legislator id."""
    # Provide mapping between TPJ FILER_ID and Legislator ID from Open States API.
    openstates_leg_id = models.CharField(max_length=48, db_index=True)
    tpj_filer_id = models.IntegerField(db_index=True)
    def __str__(self):
        return f'{self.openstates_leg_id!r} {self.tpj_filer_id}'
|
from lnst.Common.LnstError import LnstError
class MeasurementError(LnstError):
    """Error type raised by measurement code; a plain LnstError subclass."""
    pass
|
"""Blueprints for item."""
from __future__ import absolute_import
from .api_views import api_blueprint
from .rest import InventoryListResource
# Register the inventory search endpoint on the shared API blueprint.
inventory_list = InventoryListResource.as_view(
    'inventory_search'
)
api_blueprint.add_url_rule(
    '/inventory',
    view_func=inventory_list
)
# Blueprints the application factory should register.
blueprints = [
    api_blueprint,
]
# Bug fix: __all__ must be a sequence of names — a bare string is iterated
# character-by-character by "from ... import *".
__all__ = ['api_blueprint']
|
from __future__ import print_function
from __future__ import division
import math
import gmpy2
from gmpy2 import mpfr
gmpy2.get_context().precision=1024
def P(n, r):
    """Probability estimate used by the driver loop below.

    n -- longest run length; r -- length of the data sequence.
    Arithmetic is done with gmpy2 mpfr at the module's precision.
    """
    # Hoist the shared subexpression 2**(n+1).
    two_pow = mpfr(2) ** mpfr(n + 1)
    first = gmpy2.exp(-mpfr(r + 1) / (two_pow - n - 2))
    second = (two_pow - 1) / (two_pow - (mpfr(n + 2) / mpfr(2)))
    return first * second
# Driver: print P for longest-run lengths 2..29 over a 256-bit sequence.
for n in range(2, 30):
    mn = mpfr(n)
    r = mpfr(256)
    answer = P(mn, r)
    # Pass values explicitly instead of the fragile "**locals()" trick,
    # which silently depends on whatever names happen to be in scope.
    print("n={n}, r={r}, P={answer}".format(n=n, r=r, answer=answer))
|
from __future__ import print_function
import argparse
import sys
def GetArgs():
    """Build the argument parser, echo the command line to stderr, and
    return the parsed arguments with their file handles opened (see
    CheckArgs)."""
    parser = argparse.ArgumentParser(description = "Apply an lexicon edits file (output from steps/dict/select_prons_bayesian.py)to an input lexicon"
                                     "to produce a learned lexicon.",
                                     epilog = "See steps/dict/learn_lexicon.sh for example")
    parser.add_argument("in_lexicon", metavar='<in-lexicon>', type = str,
                        help = "Input lexicon. Each line must be <word> <phones>.")
    parser.add_argument("lexicon_edits_file", metavar='<lexicon-edits-file>', type = str,
                        help = "Input lexicon edits file containing human-readable & editable"
                        "pronounciation info. The info for each word is like:"
                        "------------ an 4086.0 --------------"
                        "R | Y | 2401.6 | AH N"
                        "R | Y | 640.8 | AE N"
                        "P | Y | 1035.5 | IH N"
                        "R(ef), P(hone-decoding) represents the pronunciation source"
                        "Y/N means the recommended decision of including this pron or not"
                        "and the numbers are soft counts accumulated from lattice-align-word outputs. See steps/dict/select_prons_bayesian.py for more details.")
    parser.add_argument("out_lexicon", metavar='<out-lexicon>', type = str,
                        help = "Output lexicon to this file.")
    # Log the exact invocation for reproducibility.
    print (' '.join(sys.argv), file=sys.stderr)
    args = parser.parse_args()
    args = CheckArgs(args)
    return args
def CheckArgs(args):
    """Open the file handles implied by the parsed arguments.

    "-" means stdin for <in-lexicon> and stdout for <out-lexicon>.
    Returns the same namespace with *_handle attributes attached.
    """
    if args.in_lexicon == "-":
        # Bug fix: the original assigned sys.stdin to args.in_lexicon,
        # leaving args.in_lexicon_handle undefined — Main() then crashed
        # with AttributeError when reading from stdin.
        args.in_lexicon_handle = sys.stdin
    else:
        args.in_lexicon_handle = open(args.in_lexicon)
    args.lexicon_edits_file_handle = open(args.lexicon_edits_file)
    if args.out_lexicon == "-":
        args.out_lexicon_handle = sys.stdout
    else:
        args.out_lexicon_handle = open(args.out_lexicon, "w")
    return args
def ReadLexicon(lexicon_file_handle):
    """Read (word, pronunciation) pairs from an open lexicon file.

    Blank lines are skipped; a line with fewer than two fields raises.
    Returns a set of (word, phones) tuples; empty set for a falsy handle.
    """
    entries = set()
    if not lexicon_file_handle:
        return entries
    for raw_line in lexicon_file_handle.readlines():
        fields = raw_line.strip().split()
        if not fields:
            continue
        if len(fields) < 2:
            raise Exception('Invalid format of line ' + raw_line
                            + ' in lexicon file.')
        entries.add((fields[0], ' '.join(fields[1:])))
    return entries
def ApplyLexiconEdits(lexicon, lexicon_edits_file_handle):
    """Apply Y/N decisions from an edits file to the lexicon set in place.

    Header lines like "---- WORD 200.0 ----" select the current word;
    entry lines like "P | Y | 42.0 | M AY K" add (Y) or remove (N) that
    pronunciation for the current word.  Returns the mutated lexicon.
    """
    if not lexicon_edits_file_handle:
        return lexicon
    for line in lexicon_edits_file_handle.readlines():
        # Commented lines carry no decisions.
        if line.startswith('#'):
            continue
        if line.startswith('---'):
            # Header: "---- MICROPHONES 200.0 ----" -> current word.
            header = line.strip().strip('-').strip().split()
            if len(header) != 2:
                print(header, file=sys.stderr)
                raise Exception('Invalid format of line ' + line
                                + ' in lexicon edits file.')
            word = header[0].strip()
            continue
        # Entry: 'P | Y | 42.0 | M AY K R AH F OW N Z'
        columns = line.split('|')
        if len(columns) != 4:
            raise Exception('Invalid format of line ' + line
                            + ' in lexicon edits file.')
        pron = columns[3].strip()
        decision = columns[1].strip()
        if decision == 'Y':
            lexicon.add((word, pron))
        elif decision == 'N':
            lexicon.discard((word, pron))
        else:
            raise Exception('Invalid format of line ' + line
                            + ' in lexicon edits file.')
    return lexicon
def WriteLexicon(lexicon, out_lexicon_handle):
    """Write one "<word> <phones>" line per entry, then close the handle."""
    out_lexicon_handle.writelines(
        '{0} {1}\n'.format(word, pron) for word, pron in lexicon)
    out_lexicon_handle.close()
def Main():
    """Read the input lexicon, apply the edits file, write the result."""
    args = GetArgs()
    lexicon = ReadLexicon(args.in_lexicon_handle)
    ApplyLexiconEdits(lexicon, args.lexicon_edits_file_handle)
    WriteLexicon(lexicon, args.out_lexicon_handle)
if __name__ == "__main__":
    Main()
|
import urllib
import urlparse
import subprocess
from django.conf import settings
import facebook
from instagram.client import InstagramAPI
import tweepy
class FacebookError(Exception):
    """Raised when the Facebook API cannot be reached."""

    def __init__(self, message, errors):
        # "errors" is accepted for signature compatibility; a fixed
        # description string is stored regardless.
        Exception.__init__(self, message)
        self.errors = 'Could not connect to Facebook'
class InstagramError(Exception):
    """Raised when the Instagram API cannot be reached."""

    def __init__(self, message, errors):
        # "errors" is accepted for signature compatibility; a fixed
        # description string is stored regardless.
        Exception.__init__(self, message)
        self.errors = 'Could not connect to Instagram'
class TwitterError(Exception):
    """Raised when the Twitter API cannot be reached."""

    def __init__(self, message, errors):
        # Bug fix: the original called super(InstagramError, self), which
        # raises TypeError because self is not an InstagramError instance.
        super(TwitterError, self).__init__(message)
        self.errors = 'Could not connect to Twitter'
class SocialFollowers():
def __init__(self, **options):
self.INSTAGRAM_SETTINGS = settings.INSTAGRAM_SETTINGS
self.TWITTER_SETTINGS = settings.TWITTER_SETTINGS
self.FACEBOOK_SETTINGS = settings.FACEBOOK_SETTINGS
def get_facebook_followers(self, user):
print 'get_facebook_followers', user
oauth_args = {
'client_id': self.FACEBOOK_SETTINGS['CLIENT_ID'],
'client_secret': self.FACEBOOK_SETTINGS['CLIENT_SECRET'],
'grant_type': self.FACEBOOK_SETTINGS['GRANT_TYPE']
}
oauth_curl_cmd = ['curl',
'https://graph.facebook.com/oauth/access_token?' + urllib.urlencode(oauth_args)]
oauth_response = subprocess.Popen(oauth_curl_cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE).communicate()[0]
try:
oauth_access_token = urlparse.parse_qs(str(oauth_response))['access_token'][0]
except KeyError:
raise FacebookError
exit()
facebook_graph = facebook.GraphAPI(oauth_access_token)
user = facebook_graph.get_object('/%s' % user)
return user['likes']
def get_twitter_followers(self, user):
print 'get_twitter_followers', user
try:
auth = tweepy.OAuthHandler(self.TWITTER_SETTINGS['CONSUMER_KEY'], self.TWITTER_SETTINGS['CONSUMER_SECRET_KEY'])
auth.set_access_token(self.TWITTER_SETTINGS['ACCESS_TOKEN_KEY'], self.TWITTER_SETTINGS['ACCESS_TOKEN_SECRET_KEY'])
api = tweepy.API(auth)
user = api.get_user(user)
return user.followers_count
except Exception as e:
raise TwitterError
def get_instagram_followers(self, user_id):
print 'get_instagram_followers', user_id
try:
api = InstagramAPI(client_id=self.INSTAGRAM_SETTINGS['CLIENT_ID'], client_secret=self.INSTAGRAM_SETTINGS['CLIENT_SECRET'])
user = api.user(user_id)
return user.counts['followed_by']
except Exception as e:
raise InstagramError
|
# -*- coding: utf-8 -*-
from imports import *
import mp_globals
from Screens.Screen import Screen
from Components.ActionMap import NumberActionMap
from Components.Label import Label
from Components.Sources.StaticText import StaticText
from Components.MenuList import MenuList
from Tools.Directories import SCOPE_CURRENT_SKIN, resolveFilename
from Tools.LoadPixmap import LoadPixmap
from enigma import RT_HALIGN_LEFT, RT_VALIGN_CENTER, eListboxPythonMultiContent, gFont
pwidth = 0
class ChoiceListExt(MenuList):
    """MenuList with multi-content entries that restores a saved selection."""
    def __init__(self, list, selection = 0, enableWrapAround=True):
        MenuList.__init__(self, list, enableWrapAround, eListboxPythonMultiContent)
        # Remember which entry to preselect once the widget exists.
        self.selection = selection
    def postWidgetCreate(self, instance):
        MenuList.postWidgetCreate(self, instance)
        self.moveToIndex(self.selection)
class ChoiceBoxExt(Screen):
    """Skinnable ChoiceBox variant.

    Entries may carry key shortcuts (number keys, colour keys, "text");
    entries whose label is "--" act as separator rows that the cursor
    skips over.  The skin XML is loaded from the plugin's skin folders
    with fallback to the plugin default skin.
    """
    IS_DIALOG = True
    def ChoiceEntryComponent(self, data):
        # Build one multi-content row: optional key pixmap plus label.
        width = self['list'].instance.size().width()
        height = self['list'].l.getItemSize().height()
        self.ml.l.setFont(0, gFont(mp_globals.font, height - 4 * mp_globals.sizefactor))
        res = [data[1]]
        skin_path = mp_globals.pluginPath + mp_globals.skinsPath
        key = "key_" + data[0] + ".png"
        # Look up the key pixmap: current skin -> fallback skin -> system skin.
        path = "%s/%s/images/%s" % (skin_path, mp_globals.currentskin, key)
        if not fileExists(path):
            path = "%s/%s/images/%s" % (skin_path, mp_globals.skinFallback, key)
        if not fileExists(path):
            path = resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/buttons/" + key)
        png = LoadPixmap(path)
        if png is not None:
            # pwidth is module-global so the text offset below survives
            # rows without a pixmap.
            global pwidth
            pwidth = png.size().width()
            pheight = png.size().height()
            vpos = round(float((height-pheight)/2))
            res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHABLEND, 5, vpos, pwidth, pheight, png))
        if data[1][0] == "--":
            # Separator rows render as a long dashed line.
            res.append((eListboxPythonMultiContent.TYPE_TEXT, 0, 0, 1000, height, 0, RT_HALIGN_LEFT | RT_VALIGN_CENTER, "-"*200))
        else:
            res.append((eListboxPythonMultiContent.TYPE_TEXT, pwidth+15, 0, 1000, height, 0, RT_HALIGN_LEFT | RT_VALIGN_CENTER, data[1][0]))
        return res
    def __init__(self, session, title = "", list = [], keys = None, selection = 0, titlebartext = None, allow_cancel = True):
        # Load the dialog skin from the current skin, falling back to the
        # plugin's default skin.
        self.skin_path = mp_globals.pluginPath + mp_globals.skinsPath
        path = "%s/%s/MP_ChoiceBox.xml" % (self.skin_path, mp_globals.currentskin)
        if not fileExists(path):
            path = self.skin_path + mp_globals.skinFallback + "/MP_ChoiceBox.xml"
        with open(path, "r") as f:
            self.skin = f.read()
            f.close()  # redundant: the with-block already closes f
        Screen.__init__(self, session)
        self.allow_cancel = allow_cancel
        self["text"] = Label(title)
        self["title"] = Label()
        self["bgup"] = Label()
        self["bgdown"] = Label()
        self.list = []
        self.summarylist = []
        # Default key assignment: digits, colour keys, "text", then blanks.
        if keys is None:
            self.__keys = [ "1", "2", "3", "4", "5", "6", "7", "8", "9", "0", "red", "green", "yellow", "blue", "text" ] + (len(list) - 10) * [""]
        else:
            self.__keys = keys + (len(list) - len(keys)) * [""]
        self.keymap = {}
        pos = 0
        # Pair each entry with its key and build the shortcut map.
        for x in list:
            strpos = str(self.__keys[pos])
            self.list.append((strpos, x))
            if self.__keys[pos] != "":
                self.keymap[self.__keys[pos]] = list[pos]
            self.summarylist.append((self.__keys[pos],x[0]))
            pos += 1
        self.ml = ChoiceListExt([], selection = selection)
        self["list"] = self.ml
        self["summary_list"] = StaticText()
        self.updateSummary()
        self["actions"] = NumberActionMap(["WizardActions", "InputActions", "ColorActions", "DirectionActions"],
        {
            "ok": self.go,
            "back": self.cancel,
            "1": self.keyNumberGlobal,
            "2": self.keyNumberGlobal,
            "3": self.keyNumberGlobal,
            "4": self.keyNumberGlobal,
            "5": self.keyNumberGlobal,
            "6": self.keyNumberGlobal,
            "7": self.keyNumberGlobal,
            "8": self.keyNumberGlobal,
            "9": self.keyNumberGlobal,
            "0": self.keyNumberGlobal,
            "red": self.keyRed,
            "green": self.keyGreen,
            "yellow": self.keyYellow,
            "blue": self.keyBlue,
            "text": self.keyText,
            "up": self.up,
            "down": self.down
        }, -1)
        self.titlebartext = titlebartext
        self.onLayoutFinish.append(self.layoutFinished)
    def layoutFinished(self):
        # The list rows can only be built once the widget has a size.
        self.ml.setList(map(self.ChoiceEntryComponent, self.list))
        if self.titlebartext:
            self.setTitle(self.titlebartext)
            self["title"].setText(self.titlebartext)
        else:
            self["title"].setText(_("Input"))
    def keyLeft(self):
        pass
    def keyRight(self):
        pass
    def up(self):
        # Move up, skipping "--" separator rows (stop at the top).
        if len(self["list"].list) > 0:
            while 1:
                self["list"].instance.moveSelection(self["list"].instance.moveUp)
                self.updateSummary(self["list"].l.getCurrentSelectionIndex())
                if self["list"].l.getCurrentSelection()[0][0] != "--" or self["list"].l.getCurrentSelectionIndex() == 0:
                    break
    def down(self):
        # Move down, skipping "--" separator rows (stop at the bottom).
        if len(self["list"].list) > 0:
            while 1:
                self["list"].instance.moveSelection(self["list"].instance.moveDown)
                self.updateSummary(self["list"].l.getCurrentSelectionIndex())
                if self["list"].l.getCurrentSelection()[0][0] != "--" or self["list"].l.getCurrentSelectionIndex() == len(self["list"].list) - 1:
                    break
    def keyNumberGlobal(self, number):
        self.goKey(str(number))
    def go(self):
        # OK pressed: activate the current entry, or cancel if list empty.
        cursel = self["list"].l.getCurrentSelection()
        if cursel:
            self.goEntry(cursel[0])
        else:
            self.cancel()
    def goEntry(self, entry):
        # Entries of the form (label, "CALLFUNC", fn) invoke fn with the
        # selected row; anything else closes the dialog with the entry.
        if len(entry) > 2 and isinstance(entry[1], str) and entry[1] == "CALLFUNC":
            arg = self["list"].l.getCurrentSelection()[0]
            entry[2](arg)
        else:
            self.close(entry)
    def goKey(self, key):
        # Shortcut key pressed: activate its mapped entry, if any.
        # NOTE: has_key() is Python 2 only.
        if self.keymap.has_key(key):
            entry = self.keymap[key]
            self.goEntry(entry)
    def keyRed(self):
        self.goKey("red")
    def keyGreen(self):
        self.goKey("green")
    def keyYellow(self):
        self.goKey("yellow")
    def keyBlue(self):
        self.goKey("blue")
    def keyText(self):
        self.goKey("text")
    def updateSummary(self, curpos=0):
        # Show a small window of entries around the cursor on the LCD,
        # marking the current one with ">".
        pos = 0
        summarytext = ""
        for entry in self.summarylist:
            if pos > curpos-2 and pos < curpos+5:
                if pos == curpos:
                    summarytext += ">"
                else:
                    summarytext += entry[0]
                summarytext += ' ' + entry[1] + '\n'
            pos += 1
        self["summary_list"].setText(summarytext)
    def cancel(self):
        if self.allow_cancel:
            self.close(None)
|
import GemRB
from ie_stats import *
from GUIDefines import *
import GUICommon
import CommonTables
from ie_restype import RES_BMP
# Module-level state shared between the chargen helper functions below.
CharGenWindow = 0
TextAreaControl = 0
PortraitName = ""
def PositionCharGenWin(window, offset = 0):
    """Align *window* with the bottom edge of the chargen window,
    shifted vertically by *offset*."""
    global CharGenWindow
    cg_frame = CharGenWindow.GetFrame()
    win_frame = window.GetFrame()
    window.SetPos(cg_frame['x'], offset + cg_frame['y'] + (cg_frame['h'] - win_frame['h']))
def DisplayOverview(step):
    """Sets up the primary character generation window.

    step (1-9) selects the active chargen stage: only that stage's button
    is enabled, and the text area summarizes the stages completed so far.
    """
    global CharGenWindow, TextAreaControl, PortraitName
    CharGenWindow = GemRB.LoadWindow (0, "GUICG")
    CharGenWindow.SetFlags(IE_GUI_VIEW_IGNORE_EVENTS, OP_OR)
    GemRB.SetVar ("Step", step)
    ###
    # Buttons
    ###
    # Portrait preview (locked; it is not clickable here).
    PortraitButton = CharGenWindow.GetControl (12)
    PortraitButton.SetFlags(IE_GUI_BUTTON_PICTURE|IE_GUI_BUTTON_NO_IMAGE,OP_SET)
    PortraitName = GemRB.GetToken ("LargePortrait")
    if PortraitName != None:
        if GemRB.HasResource (PortraitName, RES_BMP, 1) or GemRB.HasResource ("NOPORTMD", RES_BMP, 1):
            PortraitButton.SetPicture (PortraitName, "NOPORTMD")
    PortraitButton.SetState (IE_GUI_BUTTON_LOCKED)
    # One button per chargen stage; only the current stage's is enabled.
    GenderButton = CharGenWindow.GetControl (0)
    GenderButton.SetText (11956)
    SetButtonStateFromStep ("GenderButton", GenderButton, step)
    RaceButton = CharGenWindow.GetControl (1)
    RaceButton.SetText (11957)
    SetButtonStateFromStep ("RaceButton", RaceButton, step)
    ClassButton = CharGenWindow.GetControl (2)
    ClassButton.SetText (11959)
    SetButtonStateFromStep ("ClassButton", ClassButton, step)
    AlignmentButton = CharGenWindow.GetControl (3)
    AlignmentButton.SetText (11958)
    SetButtonStateFromStep ("AlignmentButton", AlignmentButton, step)
    AbilitiesButton = CharGenWindow.GetControl (4)
    AbilitiesButton.SetText (11960)
    SetButtonStateFromStep ("AbilitiesButton", AbilitiesButton, step)
    SkillButton = CharGenWindow.GetControl (5)
    SkillButton.SetText (17372)
    SetButtonStateFromStep ("SkillButton", SkillButton, step)
    AppearanceButton = CharGenWindow.GetControl (6)
    AppearanceButton.SetText (11961)
    SetButtonStateFromStep ("AppearanceButton", AppearanceButton, step)
    NameButton = CharGenWindow.GetControl (7)
    NameButton.SetText (11963)
    SetButtonStateFromStep ("NameButton", NameButton, step)
    BackButton = CharGenWindow.GetControl (11)
    BackButton.SetText (15416)
    BackButton.SetState (IE_GUI_BUTTON_ENABLED)
    BackButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, BackPress)
    BackButton.MakeEscape()
    # Accept button label depends on PlayMode.
    AcceptButton = CharGenWindow.GetControl (8)
    playmode = GemRB.GetVar ("PlayMode")
    if playmode>=0:
        AcceptButton.SetText (11962)
    else:
        AcceptButton.SetText (13956)
    SetButtonStateFromStep ("AcceptButton", AcceptButton, step)
    #AcceptButton.MakeDefault()
    AcceptButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, NextPress)
    # now automatically ignored and added instead
    #ScrollBar = CharGenWindow.GetControl (10)
    #ScrollBar.SetDefaultScrollBar ()
    ImportButton = CharGenWindow.GetControl (13)
    ImportButton.SetText (13955)
    ImportButton.SetState (IE_GUI_BUTTON_ENABLED)
    ImportButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, ImportPress)
    CancelButton = CharGenWindow.GetControl (15)
    if step == 1:
        CancelButton.SetText (13727) # Cancel
    else:
        CancelButton.SetText (8159) # Start over
    CancelButton.SetState (IE_GUI_BUTTON_ENABLED)
    CancelButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, CancelPress)
    # Biography is only reachable on the final step.
    BiographyButton = CharGenWindow.GetControl (16)
    BiographyButton.SetText (18003)
    BiographyButton.SetEvent (IE_GUI_BUTTON_ON_PRESS, BiographyPress)
    if step == 9:
        BiographyButton.SetState (IE_GUI_BUTTON_ENABLED)
    else:
        BiographyButton.SetState (IE_GUI_BUTTON_DISABLED)
    ###
    # Stat overview
    ###
    # Each completed stage contributes one "part" to the text summary.
    AbilityTable = GemRB.LoadTable ("ability")
    MyChar = GemRB.GetVar ("Slot")
    for part in range(1, step+1):
        if part == 1:
            # Part 1: intro text (step 1) or empty area (final step).
            TextAreaControl= CharGenWindow.GetControl (9)
            if step == 1:
                TextAreaControl.SetText (GemRB.GetString(16575))
            elif step == 9:
                TextAreaControl.SetText ("")
        elif part == 2:
            # Part 2: name (final step only) and gender.
            if step == 9:
                TextAreaControl.Append (GemRB.GetString(1047) + ": " + GemRB.GetToken ("CHARNAME") + "\n")
            if GemRB.GetPlayerStat (MyChar, IE_SEX) == 1:
                gender = GemRB.GetString (1050)
            else:
                gender = GemRB.GetString (1051)
            TextAreaControl.Append (GemRB.GetString(12135) + ": " + gender + "\n")
        elif part == 3:
            # Part 3: race.
            stat = GemRB.GetPlayerStat(MyChar, IE_RACE)
            v = CommonTables.Races.FindValue (3, stat)
            TextAreaControl.Append (GemRB.GetString (1048) + ": " + CommonTables.Races.GetValue (v, 2, GTV_REF) + "\n")
        elif part == 4:
            # Part 4: class.
            ClassTitle = GUICommon.GetActorClassTitle (MyChar)
            TextAreaControl.Append (GemRB.GetString(12136) + ": " + ClassTitle + "\n")
        elif part == 5:
            # Part 5: alignment.
            stat = GemRB.GetPlayerStat (MyChar, IE_ALIGNMENT)
            v = CommonTables.Aligns.FindValue (3, stat)
            TextAreaControl.Append (GemRB.GetString(1049) + ": " + CommonTables.Aligns.GetValue (v, 2, GTV_REF) + "\n")
        elif part == 6:
            # Part 6: the six ability scores; warriors with STR 18 also
            # show the exceptional-strength suffix.
            TextAreaControl.Append ("\n")
            ClassName = GUICommon.GetClassRowName (MyChar)
            hasextra = CommonTables.Classes.GetValue (ClassName, "SAVE") == "SAVEWAR"
            strextra = GemRB.GetPlayerStat (MyChar, IE_STREXTRA)
            for i in range(6):
                v = AbilityTable.GetValue (i, 2, GTV_REF)
                StatID = AbilityTable.GetValue (i, 3)
                stat = GemRB.GetPlayerStat (MyChar, StatID)
                if (i == 0) and hasextra and (stat==18):
                    TextAreaControl.Append (v + ": " + str(stat) + "/" + str(strextra) + "\n")
                else:
                    TextAreaControl.Append (v + ": " + str(stat) + "\n")
        elif part == 7:
            # Part 7: skills, known spells, racial enemy and weapon
            # proficiencies.  ClassName was set in part 6.
            # thieving and other skills
            info = ""
            SkillTable = GemRB.LoadTable ("skills")
            RangerSkills = CommonTables.ClassSkills.GetValue (ClassName, "RANGERSKILL")
            BardSkills = CommonTables.ClassSkills.GetValue (ClassName, "BARDSKILL")
            KitName = GUICommon.GetKitIndex (MyChar)
            if KitName == 0:
                KitName = ClassName
            else:
                KitName = CommonTables.KitList.GetValue (KitName, 0)
            if SkillTable.GetValue ("RATE", KitName) != -1:
                for skill in range(SkillTable.GetRowCount () - 2):
                    name = SkillTable.GetValue (skill+2, 1, GTV_REF)
                    available = SkillTable.GetValue (SkillTable.GetRowName (skill+2), KitName)
                    statID = SkillTable.GetValue (skill+2, 2)
                    value = GemRB.GetPlayerStat (MyChar, statID, 1)
                    if value >= 0 and available != -1:
                        info += name + ": " + str(value) + "\n"
            elif BardSkills != "*" or RangerSkills != "*":
                for skill in range(SkillTable.GetRowCount () - 2):
                    name = SkillTable.GetValue (skill+2, 1, GTV_REF)
                    StatID = SkillTable.GetValue (skill+2, 2)
                    value = GemRB.GetPlayerStat (MyChar, StatID, 1)
                    if value > 0:
                        info += name + ": " + str(value) + "\n"
            if info != "":
                TextAreaControl.Append ("\n" + GemRB.GetString(8442) + "\n" + info)
            # arcane spells
            info = ""
            for level in range(0, 9):
                for j in range(0, GemRB.GetKnownSpellsCount (MyChar, IE_SPELL_TYPE_WIZARD, level) ):
                    Spell = GemRB.GetKnownSpell (MyChar, IE_SPELL_TYPE_WIZARD, level, j)
                    Spell = GemRB.GetSpell (Spell['SpellResRef'], 1)['SpellName']
                    info += GemRB.GetString (Spell) + "\n"
            if info != "":
                TextAreaControl.Append ("\n" + GemRB.GetString(11027) + "\n" + info)
            # divine spells
            info = ""
            for level in range(0, 7):
                for j in range(0, GemRB.GetKnownSpellsCount (MyChar, IE_SPELL_TYPE_PRIEST, level) ):
                    Spell = GemRB.GetKnownSpell (MyChar, IE_SPELL_TYPE_PRIEST, level, j)
                    Spell = GemRB.GetSpell (Spell['SpellResRef'], 1)['SpellName']
                    info += GemRB.GetString (Spell) + "\n"
            if info != "":
                TextAreaControl.Append ("\n" + GemRB.GetString(11028) + "\n" + info)
            # racial enemy
            info = ""
            Race = GemRB.GetVar ("HatedRace")
            if Race:
                HateRaceTable = GemRB.LoadTable ("HATERACE")
                Row = HateRaceTable.FindValue (1, Race)
                info = HateRaceTable.GetValue (Row, 0, GTV_REF) + "\n"
            if info != "":
                TextAreaControl.Append ("\n" + GemRB.GetString(15982) + "\n" + info)
            # weapon proficiencies
            TextAreaControl.Append ("\n")
            TextAreaControl.Append (9466)
            TextAreaControl.Append ("\n")
            TmpTable=GemRB.LoadTable ("weapprof")
            ProfCount = TmpTable.GetRowCount ()
            #bg2 weapprof.2da contains the bg1 proficiencies too, skipping those
            for i in range(ProfCount-8):
                # 4294967296 overflows to -1 on some arches, so we use a smaller invalid strref
                strref = TmpTable.GetValue (i+8, 1)
                if strref == -1 or strref > 500000:
                    continue
                Weapon = GemRB.GetString (strref)
                StatID = TmpTable.GetValue (i+8, 0)
                Value = GemRB.GetPlayerStat (MyChar, StatID )
                if Value:
                    # One "+" per proficiency point.
                    pluses = " "
                    for plus in range(0, Value):
                        pluses += "+"
                    TextAreaControl.Append (Weapon + pluses + "\n")
        elif part == 8:
            # Parts beyond 7 add nothing to the summary.
            break
    CharGenWindow.Focus()
    return
def SetButtonStateFromStep (buttonName, button, step):
    """Enable *button* iff the current *step* matches it; the enabled
    button becomes the default button and triggers NextPress.

    Replaces the original nine-branch if/elif chain with a lookup table;
    the unused "global CharGenWindow" declaration was dropped.
    """
    # Step at which each overview button is selectable.
    step_for_button = {
        "GenderButton": 1,
        "RaceButton": 2,
        "ClassButton": 3,
        "AlignmentButton": 4,
        "AbilitiesButton": 5,
        "SkillButton": 6,
        "AppearanceButton": 7,
        "NameButton": 8,
        "AcceptButton": 9,
    }
    if step_for_button.get (buttonName) == step:
        state = IE_GUI_BUTTON_ENABLED
    else:
        state = IE_GUI_BUTTON_DISABLED
    button.SetState (state)
    if state == IE_GUI_BUTTON_ENABLED:
        button.MakeDefault()
        button.SetEvent (IE_GUI_BUTTON_ON_PRESS, NextPress)
    return
def CancelPress():
    """Revert back to the first step; if there, free the actor."""
    global CharGenWindow
    if CharGenWindow:
        CharGenWindow.Unload ()
    step = GemRB.GetVar ("Step")
    if step == 1:
        #free up the slot before exiting
        # NOTE(review): CreatePlayer with the 0x8000 flag appears to
        # discard the half-made character in the slot — confirm flag
        # semantics against the GemRB API.
        MyChar = GemRB.GetVar ("Slot")
        GemRB.CreatePlayer ("", MyChar | 0x8000 )
        GemRB.SetNextScript ("Start")
    else:
        # From any later step, restart chargen from the beginning.
        GemRB.SetNextScript ("CharGen")
    # Drop any chosen portrait in either case.
    GemRB.SetToken ("LargePortrait", "")
    GemRB.SetToken ("SmallPortrait", "")
    return
def ImportPress():
    """Opens the character import window."""
    step = GemRB.GetVar ("Step")
    # TODO: check why this is handled differently
    if step == 1:
        GemRB.SetNextScript("GUICG24")
        return
    GemRB.SetToken ("NextScript", "CharGen9")
    GemRB.SetNextScript ("ImportFile") #import
    return
def BackPress():
    """Moves to the previous step."""
    global CharGenWindow
    if CharGenWindow:
        CharGenWindow.Unload ()
    step = GemRB.GetVar ("Step")
    # Steps 1 and 2 have dedicated scripts; later steps go to CharGen<n-1>.
    if step == 1:
        target = "Start"
    elif step == 2:
        target = "CharGen"
    else:
        target = "CharGen" + str(step-1)
    GemRB.SetNextScript (target)
    return
def NextPress():
    """Moves to the next step."""
    step = GemRB.GetVar ("Step")
    # Fixed destinations for the irregular steps; steps 3-5 follow the
    # GUICG<step-1> naming pattern.
    special = {
        1: "GUICG1",
        2: "GUICG8",
        6: "GUICG15",
        7: "GUICG13",
        8: "GUICG5",
        9: "CharGenEnd",
    }
    GemRB.SetNextScript (special.get (step, "GUICG" + str(step-1)))
    return
def BiographyPress():
    """Opens the biography window (GUICG23)."""
    GemRB.SetNextScript("GUICG23") #biography
    return
|
"""This is just a helpful demo server for testing this site."""
import BaseHTTPServer
import sys
from SimpleHTTPServer import SimpleHTTPRequestHandler
# Serve the current directory over HTTP on 127.0.0.1 (Python 2).
HandlerClass = SimpleHTTPRequestHandler
ServerClass = BaseHTTPServer.HTTPServer
Protocol = "HTTP/1.0"
# Optional first argument overrides the listening port (default 8000).
if sys.argv[1:]:
    port = int(sys.argv[1])
else:
    port = 8000
server_address = ('127.0.0.1', port)
HandlerClass.protocol_version = Protocol
httpd = ServerClass(server_address, HandlerClass)
sa = httpd.socket.getsockname()
print "Serving HTTP on", sa[0], "port", sa[1], "..."
httpd.serve_forever()
|
"""Unittests for mysql.connector.locales
"""
from datetime import datetime
import tests
from . import PY2
from mysql.connector import errorcode, locales
def _get_client_errors():
    """Collect every CR_* client-error attribute exported by errorcode."""
    return dict(
        (name, getattr(errorcode, name))
        for name in dir(errorcode) if name.startswith('CR_')
    )
class LocalesModulesTests(tests.MySQLConnectorTests):
    """Sanity checks for the mysql.connector.locales package."""
    def test_defaults(self):
        """The 'eng' locale and its client_error module must always exist."""
        # There should always be 'eng'
        try:
            from mysql.connector.locales import eng # pylint: disable=W0612
        except ImportError:
            self.fail("locales.eng could not be imported")
        # There should always be 'eng.client_error'
        some_error = None
        try:
            from mysql.connector.locales.eng import client_error
            some_error = client_error.CR_UNKNOWN_ERROR
        except ImportError:
            self.fail("locales.eng.client_error could not be imported")
        some_error = some_error + '' # fool pylint
    def test_get_client_error(self):
        """get_client_error: bad language, int lookup, name lookup, bad type."""
        # Unknown language must raise ImportError with a clear message.
        try:
            locales.get_client_error(2000, language='spam')
        except ImportError as err:
            self.assertEqual("No localization support for language 'spam'",
                             str(err))
        else:
            self.fail("ImportError not raised")
        # The same message is reachable by numeric code and by symbol name.
        exp = "Unknown MySQL error"
        self.assertEqual(exp, locales.get_client_error(2000))
        self.assertEqual(exp, locales.get_client_error('CR_UNKNOWN_ERROR'))
        # Any other argument type is rejected with ValueError.
        try:
            locales.get_client_error(tuple())
        except ValueError as err:
            self.assertEqual(
                "error argument needs to be either an integer or string",
                str(err))
        else:
            self.fail("ValueError not raised")
class LocalesEngClientErrorTests(tests.MySQLConnectorTests):
    """Testing locales.eng.client_error"""
    def test__GENERATED_ON(self):
        """_GENERATED_ON must be a parsable, reasonably fresh ISO date."""
        try:
            from mysql.connector.locales.eng import client_error
        except ImportError:
            self.fail("locales.eng.client_error could not be imported")
        self.assertTrue(isinstance(client_error._GENERATED_ON, str))
        try:
            generatedon = datetime.strptime(client_error._GENERATED_ON,
                                            '%Y-%m-%d').date()
        except ValueError as err:
            self.fail(err)
        # Regenerate the module if it is more than ~4 months old.
        delta = datetime.now().date() - generatedon
        self.assertTrue(
            delta.days < 120, # pylint disable=E1103
            "eng/client_error.py is more than 120 days old ({0})".format(
                delta.days)) # pylint disable=E1103
    def test__MYSQL_VERSION(self):
        """_MYSQL_VERSION must be a 3-tuple at least (5, 6, 6)."""
        try:
            from mysql.connector.locales.eng import client_error
        except ImportError:
            self.fail("locales.eng.client_error could not be imported")
        minimum = (5, 6, 6)
        self.assertTrue(isinstance(client_error._MYSQL_VERSION, tuple))
        self.assertTrue(len(client_error._MYSQL_VERSION) == 3)
        self.assertTrue(client_error._MYSQL_VERSION >= minimum)
    def test_messages(self):
        """Every CR_* code in errorcode must have a string message here."""
        try:
            from mysql.connector.locales.eng import client_error
        except ImportError:
            self.fail("locales.eng.client_error could not be imported")
        errors = _get_client_errors()
        # Exactly one message attribute per error code.
        count = 0
        for name in dir(client_error):
            if name.startswith('CR_'):
                count += 1
        self.assertEqual(len(errors), count)
        # Messages must be text (unicode on Python 2).
        if PY2:
            strtype = unicode # pylint: disable=E0602
        else:
            strtype = str
        for name in errors.keys():
            self.assertTrue(isinstance(getattr(client_error, name), strtype))
|
"""Simple script that creates the first distributiongroup in Cerebrum.
If you want some other group name, you will add getopt-stuff to this script!
Or else!!1
"""
import sys
from Cerebrum.Utils import Factory
from Cerebrum.modules.Email import EmailDomain
from Cerebrum.modules.Email import EmailAddress
from Cerebrum.modules.Email import EmailPrimaryAddressTarget
db = Factory.get('Database')()
db.cl_init(change_program='create_dg_moderator')
ac = Factory.get('Account')(db)
co = Factory.get('Constants')(db)
dg = Factory.get('DistributionGroup')(db)
gr = Factory.get('Group')(db)
et = Factory.get('EmailTarget')(db)
epat = EmailPrimaryAddressTarget(db)
ed = EmailDomain(db)
ea = EmailAddress(db)
group_name = 'groupadmin'
if len(sys.argv) > 1:
group_domain = sys.argv[1]
else:
group_domain = 'groups.uio.no'
ac.clear()
ac.find_by_name('bootstrap_account')
gr.clear()
gr.populate(
creator_id=ac.entity_id,
visibility=co.group_visibility_all,
name='groupadmin',
description='Default group moderator',
group_type=co.group_type_unknown,
)
gr.write_db()
et.clear()
et.populate(co.email_target_dl_group, gr.entity_id, co.entity_group)
et.write_db()
ed.clear()
ed.find_by_domain(group_domain)
lp = 'dl-admin'
ea.clear()
ea.populate(lp, ed.entity_id, et.entity_id, expire=None)
ea.write_db()
epat.clear()
epat.populate(ea.entity_id, parent=et)
epat.write_db()
dg.clear()
dg.populate(roomlist='F', hidden='T', parent=gr)
dg.write_db()
db.commit()
|
from pyspark import SparkContext
from pyspark import SparkConf
import os
import sys

# Simple Spark word count.
# Usage: wordcount.py <spark master URL> <input file>
conf = SparkConf()
spark_master = sys.argv[1]
wordcount_file = sys.argv[2]
conf.setMaster(spark_master)
conf.setAppName("test")
sc = SparkContext(conf=conf)

# Renamed from 'file' to avoid shadowing the builtin.
lines = sc.textFile(wordcount_file)
counts = (lines.flatMap(lambda line: line.split(" "))
               .map(lambda word: (word, 1))
               .reduceByKey(lambda a, b: a + b))

output = counts.collect()
for (k, v) in output:
    # print() with a single argument works on both Python 2 and 3.
    print(k + ": " + str(v))
|
from ... import SCHEMA_VERSION
from .annotation import AnnotatableEncoder
from omero.model import ChannelI
class Channel201501Encoder(AnnotatableEncoder):
    """Encode an omero.model Channel into a dict for the 2015-01 schema."""
    TYPE = 'http://www.openmicroscopy.org/Schemas/OME/2015-01#Channel'
    def encode(self, obj):
        """Return the base (annotatable) encoding of ``obj`` extended with
        the channel colour and, when loaded, logical-channel properties."""
        v = super(Channel201501Encoder, self).encode(obj)
        # Collapse the four RGBA components into a single integer colour.
        color = self.rgba_to_int(obj.red, obj.green, obj.blue, obj.alpha)
        self.set_if_not_none(v, 'Color', color)
        self.set_if_not_none(v, 'omero:lookupTable', obj.lookupTable)
        logical_channel = obj.logicalChannel
        # Guard on isLoaded(): unloaded proxies are skipped entirely, so
        # none of the logical-channel keys appear in that case.
        if logical_channel is not None and logical_channel.isLoaded():
            self.set_if_not_none(
                v, 'omero:LogicalChannelId', logical_channel.id
            )
            self.set_if_not_none(
                v, 'EmissionWavelength', logical_channel.emissionWave
            )
            self.set_if_not_none(
                v, 'ExcitationWavelength', logical_channel.excitationWave
            )
            self.set_if_not_none(v, 'Fluor', logical_channel.fluor)
            self.set_if_not_none(v, 'Name', logical_channel.name)
            self.set_if_not_none(v, 'NDFilter', logical_channel.ndFilter)
            self.set_if_not_none(v, 'PinholeSize', logical_channel.pinHoleSize)
            self.set_if_not_none(
                v, 'PockelCellSetting', logical_channel.pockelCellSetting
            )
            self.set_if_not_none(
                v, 'SamplesPerPixel', logical_channel.samplesPerPixel
            )
            # Enumeration-valued sub-objects are delegated to the encoder
            # registered for their concrete class, when loaded.
            contrast_method = logical_channel.contrastMethod
            if contrast_method is not None and contrast_method.isLoaded():
                contrast_method_encoder = \
                    self.ctx.get_encoder(contrast_method.__class__)
                v['ContrastMethod'] = \
                    contrast_method_encoder.encode(contrast_method)
            illumination = logical_channel.illumination
            if illumination is not None and illumination.isLoaded():
                illumination_encoder = \
                    self.ctx.get_encoder(illumination.__class__)
                v['Illumination'] = \
                    illumination_encoder.encode(illumination)
            acquisition_mode = logical_channel.mode
            if acquisition_mode is not None and acquisition_mode.isLoaded():
                acquisition_mode_encoder = \
                    self.ctx.get_encoder(acquisition_mode.__class__)
                v['AcquisitionMode'] = \
                    acquisition_mode_encoder.encode(acquisition_mode)
            photometric_interpretation = \
                logical_channel.photometricInterpretation
            if photometric_interpretation is not None \
                    and photometric_interpretation.isLoaded():
                photometric_interpretation_encoder = \
                    self.ctx.get_encoder(photometric_interpretation.__class__)
                v['omero:photometricInterpretation'] = \
                    photometric_interpretation_encoder.encode(
                        photometric_interpretation
                    )
        return v
class Channel201606Encoder(Channel201501Encoder):
    """Identical encoding to 2015-01; only the schema namespace differs."""
    TYPE = 'http://www.openmicroscopy.org/Schemas/OME/2016-06#Channel'
# Select the concrete encoder for the configured schema version.
if SCHEMA_VERSION == '2015-01':
    encoder = (ChannelI, Channel201501Encoder)
elif SCHEMA_VERSION == '2016-06':
    encoder = (ChannelI, Channel201606Encoder)
else:
    # Fail loudly with a clear message instead of the bare NameError the
    # line below would otherwise raise for an unknown schema version.
    raise ImportError('Unsupported SCHEMA_VERSION: %r' % (SCHEMA_VERSION,))
ChannelEncoder = encoder[1]
|
# Canvas geometry, in pixels.
canvas_width = 600
canvas_height = 390
# Spacing of the background grid, in pixels.
canvas_grid_size = 30
# Light grey grid colour as an (r, g, b) triple in the 0.0-1.0 range.
canvas_grid_color = (0.8, 0.8, 0.8)
# Builtins exposed to evaluated user code -- presumably a restricted
# exec/eval environment; TODO confirm against the consumer of this dict.
built_ins = {
    'range': range
}
|
"""The lexer"""
import sys
import re
def lex(characters, token_exprs):
    """
    A somewhat generic lexer.
    characters -- the string to be lexed
    token_exprs -- the tokens that consitute our grammar, as a list of
                   (pattern, tag) pairs; a None tag discards the match
                   (useful for whitespace). Earlier patterns win.
    returns -- a list of tokens of the form (contents, tag)
    raises -- ValueError when no pattern matches at the current position
    """
    # Compile every pattern once up front instead of recompiling each
    # pattern at every input position.
    compiled_exprs = [(re.compile(pattern), tag) for pattern, tag in token_exprs]
    pos = 0
    tokens = []
    while pos < len(characters):
        match = None
        for regex, tag in compiled_exprs:
            match = regex.match(characters, pos)
            if match:
                text = match.group(0)
                if tag:
                    tokens.append((text, tag))
                break
        if not match:
            sys.stderr.write('[Lexer] Illegal character at %d: %s(%d)\n'
                             % (pos, characters[pos], ord(characters[pos])))
            raise ValueError(characters[pos])
        else:
            pos = match.end(0)
    return tokens
|
import unittest
import sys
import time
import os
sys.path.append("../../")
from coffee_pot import coffee_pot
class TestWriteLastBrew(unittest.TestCase):
    """Tests for coffee_pot.write_last_brew()."""

    def setUp(self):
        # Fresh pot writing to a throwaway file so tests never touch
        # real state.
        self.test_coffee_pot = coffee_pot("test",
                                          full=70,
                                          empty=35,
                                          off=20,
                                          max=125,
                                          file="coffee_pot_test.txt")

    def tearDown(self):
        # Remove the state file so individual runs are independent.
        if os.path.exists(self.test_coffee_pot.file):
            os.remove(self.test_coffee_pot.file)

    def test_write(self):
        """write_last_brew() stores (roughly) the current UNIX timestamp."""
        self.test_coffee_pot.write_last_brew()
        # Use a context manager so the file handle is closed instead of
        # leaked (the original open(...).readline() never closed it).
        with open(self.test_coffee_pot.file, "r") as brew_file:
            last_brew = brew_file.readline()
        self.assertAlmostEqual(int(time.time()), int(float(last_brew)))
# Run the suite when this module is executed directly.
if __name__ == '__main__':
    unittest.main()
|
from __future__ import unicode_literals
import datetime
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.contrib.auth.decorators import permission_required
from django.template.defaulttags import register
from django.contrib import messages
from django.utils.translation import ugettext_lazy as _
from fleetup.managers import FleetUpManager
from authentication.decorators import members_and_blues
import logging
logger = logging.getLogger(__name__)
@register.filter
def get_item(dictionary, key):
    """Template filter: return ``dictionary[key]``, or None when absent."""
    return dictionary.get(key)
@login_required
@members_and_blues()
def fleetup_view(request):
    """Render the FleetUp overview of operations and timers.

    Either list falls back to an empty dict (with a user-visible error
    message) when the FleetUp API call fails.
    """
    # Lazy %-args: the message is only built when DEBUG logging is enabled.
    logger.debug("fleetup_view called by user %s", request.user)
    operations_list = FleetUpManager.get_fleetup_operations()
    if operations_list is None:
        messages.add_message(request, messages.ERROR, _("Failed to get operations list, contact your administrator"))
        operations_list = {}
    timers_list = FleetUpManager.get_fleetup_timers()
    if timers_list is None:
        messages.add_message(request, messages.ERROR, _("Failed to get timers list, contact your administrator"))
        timers_list = {}
    now = datetime.datetime.now().strftime('%H:%M:%S')
    context = {"timers_list": sorted(timers_list.items()),
               "operations_list": sorted(operations_list.items()),
               "now": now}
    return render(request, 'fleetup/index.html', context=context)
@login_required
@permission_required('auth.human_resources')
def fleetup_characters(request):
    """List FleetUp member characters (requires the HR permission)."""
    # Lazy %-args: the message is only built when DEBUG logging is enabled.
    logger.debug("fleetup_characters called by user %s", request.user)
    member_list = FleetUpManager.get_fleetup_members()
    if member_list is None:
        messages.add_message(request, messages.ERROR, _("Failed to get member list, contact your administrator"))
        member_list = {}
    context = {"member_list": sorted(member_list.items())}
    return render(request, 'fleetup/characters.html', context=context)
@login_required
@members_and_blues()
def fleetup_fittings(request):
    """List all FleetUp fittings."""
    # Lazy %-args: the message is only built when DEBUG logging is enabled.
    logger.debug("fleetup_fittings called by user %s", request.user)
    fitting_list = FleetUpManager.get_fleetup_fittings()
    if fitting_list is None:
        messages.add_message(request, messages.ERROR, _("Failed to get fitting list, contact your administrator"))
        fitting_list = {}
    context = {"fitting_list": sorted(fitting_list.items())}
    return render(request, 'fleetup/fittingsview.html', context=context)
@login_required
@members_and_blues()
def fleetup_fitting(request, fittingnumber):
    """Show one FleetUp fitting (EFT text, raw data and its doctrine)."""
    # Lazy %-args: the message is only built when DEBUG logging is enabled.
    logger.debug("fleetup_fitting called by user %s", request.user)
    fitting_eft = FleetUpManager.get_fleetup_fitting_eft(fittingnumber)
    fitting_data = FleetUpManager.get_fleetup_fitting(fittingnumber)
    doctrinenumber = FleetUpManager.get_fleetup_doctrineid(fittingnumber)
    doctrines_list = FleetUpManager.get_fleetup_doctrine(doctrinenumber)
    if fitting_eft is None or fitting_data is None or doctrinenumber is None:
        messages.add_message(request, messages.ERROR, _("There was an error getting some of the data for this fitting. "
                                                        "Contact your administrator"))
    context = {"fitting_eft": fitting_eft,
               "fitting_data": fitting_data,
               "doctrines_list": doctrines_list}
    return render(request, 'fleetup/fitting.html', context=context)
@login_required
@members_and_blues()
def fleetup_doctrines(request):
    """List all FleetUp doctrines."""
    # Lazy %-args: the message is only built when DEBUG logging is enabled.
    logger.debug("fleetup_doctrines called by user %s", request.user)
    doctrines_list = FleetUpManager.get_fleetup_doctrines()
    if doctrines_list is None:
        messages.add_message(request, messages.ERROR, _("Failed to get doctrines list, contact your administrator"))
        # Fall back to an empty container like the sibling views do, so the
        # template never receives None.
        doctrines_list = {}
    context = {"doctrines_list": doctrines_list}
    return render(request, 'fleetup/doctrinesview.html', context=context)
@login_required
@members_and_blues()
def fleetup_doctrine(request, doctrinenumber):
    """Show a single FleetUp doctrine."""
    # Lazy %-args: the message is only built when DEBUG logging is enabled.
    logger.debug("fleetup_doctrine called by user %s", request.user)
    doctrine = FleetUpManager.get_fleetup_doctrine(doctrinenumber)
    if doctrine is None:
        # Fixed typo in the user-facing message ("doctine" -> "doctrine").
        messages.add_message(request, messages.ERROR, _("Failed to get doctrine, contact your administrator"))
    context = {"doctrine": doctrine}
    return render(request, 'fleetup/doctrine.html', context=context)
|
__author__ = "Adrian Weber, Centre for Development and Environment, University of Bern"
__date__ = "$May 16, 2013 1:24:16 PM$"
from decide_user.model.meta import Base
import hashlib
from sqlalchemy import Boolean
from sqlalchemy import CheckConstraint
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import relationship
# Association table: roles granted to a group (m:n groups <-> roles).
group_roles = Table('group_roles', Base.metadata,
    Column('gid', Integer, primary_key=True),
    Column('fk_groups', Integer, ForeignKey('public.groups.id'), nullable=False),
    Column('fk_roles', Integer, ForeignKey('public.roles.gid'), nullable=False),
    schema='public'
)
# Association table: users belonging to a group (m:n users <-> groups).
group_members = Table('group_members', Base.metadata,
    Column('gid', Integer, primary_key=True),
    Column('fk_users', Integer, ForeignKey('public.users.id'), nullable=False),
    Column('fk_groups', Integer, ForeignKey('public.groups.id'), nullable=False),
    schema='public'
)
# Association table: roles granted directly to a user (m:n users <-> roles).
user_roles = Table('user_roles', Base.metadata,
    Column('gid', Integer, primary_key=True),
    Column('fk_users', Integer, ForeignKey('public.users.id'), nullable=False),
    Column('fk_roles', Integer, ForeignKey('public.roles.gid'), nullable=False),
    schema='public'
)
class UserGroup(Base):
    """User group record mapped to public.groups."""
    __tablename__ = 'groups'
    __table_args__ = {
        "schema": 'public'
    }
    id = Column(Integer, primary_key=True)
    name = Column(String(128))
    description = Column(String(128))
    is_enabled = Column(Boolean)
    # NOTE(review): commented-out reverse relationships kept from the
    # original author; membership is wired via the group_members table below.
    #users = relationship(User, backref="group", primaryjoin="User.fk_usergroup == UserGroup.id")
    #users_requests = relationship(User, backref="requested_group", primaryjoin="User.fk_requested_usergroup == UserGroup.id")
class Role(Base):
    """Role record mapped to public.roles."""
    __tablename__ = 'roles'
    __table_args__ = {
        "schema": 'public'
    }
    gid = Column(Integer, primary_key=True)
    name = Column(String(128))
    parent = Column(String(128))
    description = Column(String(256))
    # Groups that carry this role (m:n through the group_roles table).
    groups = relationship(UserGroup, secondary=group_roles, backref="roles")
class User(Base):
    """Account record mapped to public.users.

    The check constraint enforces that exactly the inactive users carry an
    activation UUID.
    """
    __tablename__ = 'users'
    __table_args__ = (
        CheckConstraint('(users.is_active = FALSE) = (users.activation_uuid IS NOT NULL)', name="users_activation_uuid_not_null"),
        {"schema": 'public'}
    )
    id = Column(Integer, primary_key=True)
    email = Column(String(128), nullable=False, unique=True)
    # Stored as "md5:<hexdigest>" -- see validate_password() below.
    password = Column(String(128), nullable=False)
    firstname = Column(String(128), nullable=True)
    lastname = Column(String(128), nullable=True)
    registration_timestamp = Column(DateTime(timezone=True), nullable=False)
    is_active = Column(Boolean, nullable=False, default=False)
    activation_uuid = Column(UUID, nullable=True)
    position = Column(String(128))
    organization = Column(String(512))
    purpose = Column(String(512))
    # Group the user asked to join on registration (required).
    fk_requested_usergroup = Column(Integer, ForeignKey('public.groups.id'), nullable=False)
    groups = relationship(UserGroup, secondary=group_members, backref="users")
    roles = relationship(Role, secondary=user_roles, backref="users")
    def __repr__(self):
        return (
            '<Anr_Users> id [ %s ] | email [ %s ] | firstname [ %s ] | lastname [ %s ]' %
            (self.id, self.email, self.firstname, self.lastname)
        )
    def validate_password(self, password):
        """
        Validates the password for repoze.who SQLAlchemy plugin. The user must
        be activated in order to success.
        """
        # SECURITY NOTE(review): unsalted MD5 is a weak password hash. Do
        # not change it silently -- existing stored hashes would stop
        # matching. Also note hashlib.md5() requires bytes on Python 3.
        pw = hashlib.md5(password).hexdigest()
        return (self.password == "md5:%s" % pw) and self.is_active
|
from pykickstart.base import BaseData, KickstartCommand
from pykickstart.options import KSOptionParser
import gettext
import warnings
from pykickstart import _
class F12_GroupData(BaseData):
    """Data object for a single kickstart ``group`` command."""
    removedKeywords = BaseData.removedKeywords
    removedAttrs = BaseData.removedAttrs

    def __init__(self, *args, **kwargs):
        BaseData.__init__(self, *args, **kwargs)
        self.name = kwargs.get("name", "")
        self.gid = kwargs.get("gid", None)

    def __eq__(self, y):
        # Groups are identified by name only; gid is not part of identity.
        if not y:
            return False
        return self.name == y.name

    def __ne__(self, y):
        return not self == y

    def __str__(self):
        retval = BaseData.__str__(self)
        retval += "group"
        if self.name:
            retval += " --name=%s" % self.name
        # Compare against None explicitly so a legitimate gid of 0 is still
        # emitted (the previous truthiness test silently dropped it).
        if self.gid is not None:
            retval += " --gid=%s" % self.gid
        return retval + "\n"
class F12_Group(KickstartCommand):
    """Handler for the kickstart ``group`` command."""
    removedKeywords = KickstartCommand.removedKeywords
    removedAttrs = KickstartCommand.removedAttrs

    def __init__(self, writePriority=0, *args, **kwargs):
        KickstartCommand.__init__(self, writePriority, *args, **kwargs)
        self.op = self._getParser()
        self.groupList = kwargs.get("groupList", [])

    def __str__(self):
        # One "group ..." line per stored data object.
        return "".join(str(group) for group in self.groupList)

    def _getParser(self):
        parser = KSOptionParser()
        parser.add_option("--name", required=1)
        parser.add_option("--gid", type="int")
        return parser

    def parse(self, args):
        group_data = self.handler.GroupData()
        opts = self.op.parse_args(args=args, lineno=self.lineno)[0]
        self._setToObj(self.op, opts, group_data)
        group_data.lineno = self.lineno
        # Warn (but still return the object) when the name was seen before.
        if group_data in self.dataList():
            warnings.warn(_("A group with the name %s has already been defined.") % group_data.name)
        return group_data

    def dataList(self):
        return self.groupList
|
from django.views.generic import TemplateView, CreateView, \
UpdateView, DeleteView, DetailView
from django.views.generic.detail import SingleObjectMixin
from django.utils.translation import ugettext_lazy as _
from django.urls import reverse_lazy, reverse
from common.permissions import PermissionsMixin, IsOrgAdmin
from common.const import create_success_msg, update_success_msg
from common.utils import get_object_or_none
from ..models import Domain, Gateway
from ..forms import DomainForm, GatewayForm
__all__ = (
"DomainListView", "DomainCreateView", "DomainUpdateView",
"DomainDetailView", "DomainDeleteView", "DomainGatewayListView",
"DomainGatewayCreateView", 'DomainGatewayUpdateView',
)
class DomainListView(PermissionsMixin, TemplateView):
    """Render the asset-domain list page (org admins only)."""
    template_name = 'assets/domain_list.html'
    permission_classes = [IsOrgAdmin]

    def get_context_data(self, **kwargs):
        kwargs.update({
            'app': _('Assets'),
            'action': _('Domain list'),
        })
        return super().get_context_data(**kwargs)
class DomainCreateView(PermissionsMixin, CreateView):
    """Create a new Domain and return to the domain list."""
    model = Domain
    template_name = 'assets/domain_create_update.html'
    form_class = DomainForm
    success_url = reverse_lazy('assets:domain-list')
    success_message = create_success_msg
    permission_classes = [IsOrgAdmin]

    def get_context_data(self, **kwargs):
        kwargs.update({
            'app': _('Assets'),
            'action': _('Create domain'),
            'type': 'create',
        })
        return super().get_context_data(**kwargs)
class DomainUpdateView(PermissionsMixin, UpdateView):
    """Edit an existing Domain and return to the domain list."""
    model = Domain
    template_name = 'assets/domain_create_update.html'
    form_class = DomainForm
    success_url = reverse_lazy('assets:domain-list')
    success_message = update_success_msg
    permission_classes = [IsOrgAdmin]

    def get_context_data(self, **kwargs):
        kwargs.update({
            'app': _('Assets'),
            'action': _('Update domain'),
            'type': 'update',
        })
        return super().get_context_data(**kwargs)
class DomainDetailView(PermissionsMixin, DetailView):
    """Show the detail page of a single Domain."""
    model = Domain
    template_name = 'assets/domain_detail.html'
    permission_classes = [IsOrgAdmin]

    def get_context_data(self, **kwargs):
        kwargs.update({
            'app': _('Assets'),
            'action': _('Domain detail'),
        })
        return super().get_context_data(**kwargs)
class DomainDeleteView(PermissionsMixin, DeleteView):
    """Confirm-and-delete view for a Domain; redirects back to the list."""
    model = Domain
    template_name = 'delete_confirm.html'
    success_url = reverse_lazy('assets:domain-list')
    permission_classes = [IsOrgAdmin]
class DomainGatewayListView(PermissionsMixin, SingleObjectMixin, TemplateView):
    """List the gateways belonging to one Domain."""
    template_name = 'assets/domain_gateway_list.html'
    model = Domain
    object = None
    permission_classes = [IsOrgAdmin]

    def get(self, request, *args, **kwargs):
        # Resolve the Domain from the URL pk once, before rendering.
        self.object = self.get_object(queryset=self.model.objects.all())
        return super().get(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        context = {
            'app': _('Assets'),
            'action': _('Domain gateway list'),
            # Reuse the object fetched in get() instead of issuing a second
            # database query via another get_object() call.
            'object': self.object,
        }
        kwargs.update(context)
        return super().get_context_data(**kwargs)
class DomainGatewayCreateView(PermissionsMixin, CreateView):
    """Create a Gateway under the Domain given by the URL pk."""
    model = Gateway
    template_name = 'assets/gateway_create_update.html'
    form_class = GatewayForm
    success_message = create_success_msg
    permission_classes = [IsOrgAdmin]

    def get_success_url(self):
        # Back to the gateway list of the domain the new gateway belongs to.
        return reverse('assets:domain-gateway-list',
                       kwargs={"pk": self.object.domain.id})

    def get_form(self, form_class=None):
        form = super().get_form(form_class=form_class)
        # Pre-select the domain from the URL when it exists.
        domain = get_object_or_none(Domain, id=self.kwargs.get("pk"))
        if domain:
            form['domain'].initial = domain
        return form

    def get_context_data(self, **kwargs):
        kwargs.update({
            'app': _('Assets'),
            'action': _('Create gateway'),
            'type': 'create',
        })
        return super().get_context_data(**kwargs)
class DomainGatewayUpdateView(PermissionsMixin, UpdateView):
    """Edit an existing Gateway."""
    model = Gateway
    template_name = 'assets/gateway_create_update.html'
    form_class = GatewayForm
    success_message = update_success_msg
    permission_classes = [IsOrgAdmin]

    def get_success_url(self):
        # Back to the gateway list of the owning domain.
        return reverse('assets:domain-gateway-list',
                       kwargs={"pk": self.object.domain.id})

    def get_context_data(self, **kwargs):
        kwargs.update({
            'app': _('Assets'),
            'action': _('Update gateway'),
            "type": "update",
        })
        return super().get_context_data(**kwargs)
|
import importlib
from django.conf import settings
from django.utils.functional import memoize
APP_FORMAT = getattr(settings, "GLOBAL_STATIC_MODULE_FORMAT", "%s.static")
JS_PROPERTY = getattr(settings, "GLOBAL_STATIC_JS_PROPERTY", "global_js")
CSS_PROPERTY = getattr(settings, "GLOBAL_STATIC_CSS_PROPERTY", "global_css")
_get_global_js_files_cache = {}
_get_global_css_files_cache = {}


def _collect_global_files(property_name):
    """Gather ``property_name`` lists from every installed app's static module.

    Apps without an importable static module (APP_FORMAT % app) are skipped.
    """
    files = []
    for app in settings.INSTALLED_APPS:
        try:
            module = importlib.import_module(APP_FORMAT % app)
        except ImportError:
            continue
        appfiles = getattr(module, property_name, [])
        if appfiles:
            files += appfiles
    return files


def _get_global_js_files():
    # Thin wrapper kept for backward compatibility; deduplicated with the
    # formerly copy-pasted CSS variant via _collect_global_files().
    return _collect_global_files(JS_PROPERTY)


get_global_js_files = memoize(_get_global_js_files, _get_global_js_files_cache, 0)  # pylint: disable=C0103


def _get_global_css_files():
    return _collect_global_files(CSS_PROPERTY)


get_global_css_files = memoize(_get_global_css_files, _get_global_css_files_cache, 0)  # pylint: disable=C0103
def global_static(request):
    """Context processor exposing the app-wide JS/CSS file lists.

    Entries are (priority, path) pairs; they are sorted by priority and only
    the paths are handed to the template context.
    """
    js_files = sorted(get_global_js_files())
    css_files = sorted(get_global_css_files())
    return {
        'global_js': [path for _priority, path in js_files],
        'global_css': [path for _priority, path in css_files],
    }
|
import paho.mqtt.client as mqtt
def on_connect(client, userdata, rc):
    """Broker-connect callback: report the result code and (re)subscribe."""
    print("Connected with result code "+str(rc))
    # Subscribing in on_connect() means that if we lose the connection and
    # reconnect then subscriptions will be renewed.
    client.subscribe("Desert-Home/Weather/#")
def on_message(client, userdata, msg):
    """Print every received message as '<topic> <payload>'."""
    print(msg.topic+" "+str(msg.payload))
# Wire up the callbacks and block forever processing network traffic
# against the public test broker (port 1883, 60 s keepalive).
client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message
client.connect("test.mosquitto.org", 1883, 60)
client.loop_forever()
|
'''
Specto Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse,base64
from resources.lib.libraries import cleantitle
from resources.lib.libraries import client
from resources.lib.libraries import control
from resources.lib import resolvers
class source:
    """Movie stream scraper for tinklepad.is (Python 2)."""
    def __init__(self):
        self.base_link = 'http://tinklepad.is'
        self.search_link = '/search.php?q=%s'
    def request(self, url, check):
        """Fetch url; return re-encoded body only if it contains `check`."""
        try:
            result = client.request(url)
            if check in str(result): return result.decode('iso-8859-1').encode('utf-8')
        except:
            return
    def get_movie(self, imdb, title, year):
        """Search for a movie page; return its (HTML-unescaped) URL or None."""
        try:
            #query = self.search_link % (urllib.quote_plus(cleantitle.query(title)), str(int(year)-1), str(int(year)+1))
            #query = urlparse.urljoin(self.base_link, query)
            query = self.search_link % urllib.quote_plus(cleantitle.query(title))
            query = urlparse.urljoin(self.base_link, query)
            result = str(self.request(query, 'movie_table'))
            # Pull in the second result page when pagination is present.
            if 'page=2' in result or 'page%3D2' in result: result += str(self.request(query + '&page=2', 'movie_table'))
            result = client.parseDOM(result, 'div', attrs = {'class': 'movie_table'})
            title = cleantitle.get(title)
            # Accept the requested year plus/minus one.
            years = ['(%s)' % str(year), '(%s)' % str(int(year)+1), '(%s)' % str(int(year)-1)]
            result = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'img', ret='alt')) for i in result]
            result = [(i[0][0], i[1][0]) for i in result if len(i[0]) > 0 and len(i[1]) > 0]
            result = [i for i in result if any(x in i[1] for x in years)]
            # Unwrap redirect-style links ('q'/'u' query params), else keep path.
            try: result = [(urlparse.parse_qs(urlparse.urlparse(i[0]).query)['q'][0], i[1]) for i in result]
            except: pass
            try: result = [(urlparse.parse_qs(urlparse.urlparse(i[0]).query)['u'][0], i[1]) for i in result]
            except: pass
            try: result = [(urlparse.urlparse(i[0]).path, i[1]) for i in result]
            except: pass
            # Exact title+year matches first; match2 is the de-duplicated
            # candidate list probed (up to 5) for the IMDB id.
            match = [i[0] for i in result if title == cleantitle.get(i[1]) and '(%s)' % str(year) in i[1]]
            match2 = [i[0] for i in result]
            match2 = [x for y,x in enumerate(match2) if x not in match2[:y]]
            if match2 == []: return
            for i in match2[:5]:
                try:
                    if len(match) > 0: url = match[0] ; break
                    result = self.request(urlparse.urljoin(self.base_link, i), 'link_name')
                    if imdb in str(result): url = i ; break
                except:
                    pass
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            return url
        except:
            return
    def get_sources(self, url, hosthdDict, hostDict, locDict):
        """Scrape the movie page for hoster links; return a list of dicts."""
        try:
            sources = []
            if url == None: return sources
            url = urlparse.urljoin(self.base_link, url)
            result = client.request(url)
            #print result
            result = result.replace('\n','')
            print result
            # Page-level quality label, normalised to CAM/SCR/SD.
            quality = re.compile('>Links - Quality(.+?)<').findall(result)[0]
            quality = quality.strip()
            print("Q",quality)
            if quality == 'CAM' or quality == 'TS': quality = 'CAM'
            elif quality == 'SCREENER': quality = 'SCR'
            else: quality = 'SD'
            links = client.parseDOM(result, 'div', attrs = {'id': 'links'})[0]
            links = links.split('link_name')
            for i in links:
                try:
                    url = client.parseDOM(i, 'a', ret='href')[0]
                    # Unwrap redirect params, then base64-decode the target.
                    try: url = urlparse.parse_qs(urlparse.urlparse(url).query)['u'][0]
                    except: pass
                    try: url = urlparse.parse_qs(urlparse.urlparse(url).query)['q'][0]
                    except: pass
                    url = urlparse.urlparse(url).query
                    url = base64.b64decode(url)
                    url = re.findall('((?:http|https)://.+?/.+?)(?:&|$)', url)[0]
                    url = client.replaceHTMLCodes(url)
                    url = url.encode('utf-8')
                    print("URL1",url)
                    host = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(url.strip().lower()).netloc)[0]
                    if not host in hostDict: raise Exception()
                    try: host = host.split('.')[0]
                    except: pass
                    host = client.replaceHTMLCodes(host)
                    host = host.encode('utf-8')
                    print("URL4", host)
                    # NOTE(review): the computed `quality` above is never used
                    # here -- every source is tagged 'SD'. The provider label
                    # 'Movie25' also looks copy-pasted from another scraper;
                    # confirm both are intended.
                    sources.append({'source': host.split('.')[0], 'quality': 'SD', 'provider': 'Movie25', 'url': url})
                except:
                    pass
            return sources
        except Exception as e:
            control.log('ERROR MOVIE25 %s' % e)
            pass
        return sources
    def resolve(self, url):
        """Resolve a hoster URL to a playable stream via the resolver stack."""
        try:
            url = resolvers.request(url)
            return url
        except:
            return
|
import os.path
from PyQt4.QtCore import *
from qgis.core import *
import exiftool
import geotagphotos_utils as utils
class ImportThread(QThread):
    """Worker thread that walks a photo directory, reads EXIF tags with
    exiftool and writes one point feature per geotagged JPEG into a
    shapefile layer (new or existing)."""
    # Qt signals for progress reporting back to the GUI thread.
    rangeChanged = pyqtSignal(list)
    updateProgress = pyqtSignal()
    processFinished = pyqtSignal(bool)
    processInterrupted = pyqtSignal()
    # Accepted photo extensions (matched case-sensitively against splitext).
    wildcards = [".jpg", ".jpeg", ".JPG", ".JPEG"]
    # EXIF tags required to build a point geometry.
    geotags = [u"EXIF:GPSLatitude", u"EXIF:GPSLatitudeRef", u"EXIF:GPSLongitude", u"EXIF:GPSLongitudeRef"]
    def __init__(self, photosDir, recurseDir, tagsList, outFileName, outEncoding, appendFile, config):
        QThread.__init__(self, QThread.currentThread())
        # Mutex guards stopMe, which is polled from run() and set by stop().
        self.mutex = QMutex()
        self.stopMe = 0
        self.interrupted = False
        self.photosDir = photosDir
        self.recurseDir = recurseDir
        self.tagsList = tagsList
        self.outFileName = outFileName
        self.outEncoding = outEncoding
        self.appendFile = appendFile
        self.config = config
    def run(self):
        """Main worker loop executed in the background thread."""
        if self.appendFile:
            layer = self.openExistingLayer()
        else:
            layer = self.createNewLayer()
        provider = layer.dataProvider()
        fields = provider.fields()
        # Make sure the GPS tags are always requested from exiftool.
        if not set(self.tagsList).issuperset(self.geotags):
            self.tagsList.extend(self.geotags)
        etPath = utils.getExifToolPath()
        if etPath != "":
            etPath = os.path.join(os.path.normpath(unicode(etPath)), "exiftool")
        else:
            etPath = "exiftool"
        # config file
        if self.config != "":
            etPath += " -config " + unicode(self.config)
        et = exiftool.ExifTool(etPath)
        filters = QDir.Files | QDir.NoSymLinks | QDir.NoDotAndDotDot
        nameFilter = ["*.jpg", "*.jpeg", "*.JPG", "*.JPEG"]
        count = 0
        with et:
            for root, dirs, files in os.walk(unicode(self.photosDir)):
                # Progress range is reset per directory.
                fileCount = len(QDir(root).entryList(nameFilter, filters))
                if fileCount > 0:
                    self.rangeChanged.emit([self.tr("Import: %p%"), fileCount])
                for f in files:
                    if os.path.splitext(f)[1] not in self.wildcards:
                        continue
                    fName = os.path.normpath(os.path.join(root, f))
                    md = et.get_tags(self.tagsList, unicode(fName))
                    # create new feature
                    ft = QgsFeature()
                    ft.setFields(fields)
                    for k, v in md.iteritems():
                        tagName = k.replace("EXIF:", "")
                        if tagName in self.tagFieldMap.keys():
                            ft[self.tagFieldMap[tagName]] = unicode(v)
                    # hardcoded fields
                    if "filepath" in self.fieldNames:
                        ft["filepath"] = os.path.join(root, f)
                    if "filename" in self.fieldNames:
                        ft["filename"] = f
                    # get geometry: only photos carrying all four GPS tags
                    # get a point; W/S hemisphere references negate lon/lat.
                    if not set(md.keys()).issuperset(self.geotags):
                        pass
                    else:
                        lat = float(md["EXIF:GPSLatitude"])
                        lon = float(md["EXIF:GPSLongitude"])
                        if md["EXIF:GPSLongitudeRef"] == "W":
                            lon = 0 - lon
                        if md["EXIF:GPSLatitudeRef"] == "S":
                            lat = 0 - lat
                        ft.setGeometry(QgsGeometry.fromPoint(QgsPoint(lon, lat)))
                    count += 1
                    # Features without geometry are dropped silently.
                    if ft.geometry() is not None:
                        provider.addFeatures([ft])
                    self.updateProgress.emit()
                    # Poll the stop flag under the mutex.
                    self.mutex.lock()
                    s = self.stopMe
                    self.mutex.unlock()
                    if s == 1:
                        self.interrupted = True
                        break
                if not self.recurseDir:
                    break
        haveShape = True
        if count == 0:
            # Nothing imported: remove the freshly created (empty) shapefile.
            if not self.appendFile:
                QgsVectorFileWriter.deleteShapeFile(self.outFileName)
            haveShape = False
        if not self.interrupted:
            self.processFinished.emit(haveShape)
        else:
            self.processInterrupted.emit()
    def createNewLayer(self):
        """Create a fresh WGS84 point shapefile with one string field per
        requested tag (plus filepath/filename) and return it as a layer."""
        self.tagFieldMap = dict()
        fields = QgsFields()
        self.fieldNames = []
        # hardcoded fields
        fields.append(QgsField("filepath", QVariant.String, "", 255))
        self.fieldNames.append("filepath")
        fields.append(QgsField("filename", QVariant.String, "", 255))
        self.fieldNames.append("filename")
        for tag in self.tagsList:
            tagName = tag.replace("EXIF:", "")
            # Shapefile field names are length-limited; make them unique.
            fName = utils.createUniqueFieldName(tagName, self.fieldNames)
            fields.append(QgsField(fName, QVariant.String, "", 80))
            self.fieldNames.append(fName)
            self.tagFieldMap[tagName] = fName
        crs = QgsCoordinateReferenceSystem(4326)
        # Writing then deleting the writer flushes the file to disk.
        shapeWriter = QgsVectorFileWriter(self.outFileName, self.outEncoding, fields, QGis.WKBPoint, crs)
        del shapeWriter
        layer = QgsVectorLayer(self.outFileName, QFileInfo(self.outFileName).baseName(), "ogr")
        return layer
    def openExistingLayer(self):
        """Open the output shapefile for appending and map requested tags
        onto the fields that already exist in it."""
        layer = QgsVectorLayer(self.outFileName, QFileInfo(self.outFileName).baseName(), "ogr")
        fMap = layer.dataProvider().fieldNameMap()
        fNames = fMap.keys()
        self.fieldNames = ["filepath", "filename"]
        self.tagFieldMap = dict()
        for tag in self.tagsList:
            tagName = tag.replace("EXIF:", "")
            fName = utils.createUniqueFieldName(tagName, self.fieldNames)
            # Only tags whose field already exists in the layer are kept.
            if fName in fNames:
                self.tagFieldMap[tagName] = fName
                self.fieldNames.append(fName)
        return layer
    def stop(self):
        """Request cancellation and block until the thread has finished."""
        self.mutex.lock()
        self.stopMe = 1
        self.mutex.unlock()
        QThread.wait(self)
|
import wx, sys, os, logging
import wx.lib.newevent
log = logging.getLogger( 'squaremap' )
# Custom wx events fired when a square is highlighted, selected or activated.
SquareHighlightEvent, EVT_SQUARE_HIGHLIGHTED = wx.lib.newevent.NewEvent()
SquareSelectionEvent, EVT_SQUARE_SELECTED = wx.lib.newevent.NewEvent()
SquareActivationEvent, EVT_SQUARE_ACTIVATED = wx.lib.newevent.NewEvent()
class HotMapNavigator(object):
    ''' Utility class for navigating the hot map and finding nodes.

    A hot map is a list of (rect, node, children) triples, where children
    is itself a (possibly empty) hot map. '''

    @classmethod
    def findNode(class_, hot_map, targetNode, parentNode=None):
        ''' Find the target node in the hot_map; return the triple
        (parent node, containing hot map, index) or None when absent. '''
        for position, entry in enumerate(hot_map):
            _rect, current, children = entry
            if current == targetNode:
                return parentNode, hot_map, position
            located = class_.findNode(children, targetNode, current)
            if located:
                return located
        return None

    @classmethod
    def findNodeAtPosition(class_, hot_map, position, parent=None):
        ''' Retrieve the deepest node whose rectangle contains position. '''
        for rect, current, children in hot_map:
            if rect.Contains(position):
                return class_.findNodeAtPosition(children, position, current)
        return parent

    @staticmethod
    def firstChild(hot_map, index):
        ''' Return the first child of the node indicated by index, or the
        node itself when it has no children. '''
        children = hot_map[index][2]
        return children[0][1] if children else hot_map[index][1]

    @staticmethod
    def nextChild(hotmap, index):
        ''' Return the next sibling of the node indicated by index
        (clamped to the last sibling). '''
        return hotmap[min(index + 1, len(hotmap) - 1)][1]

    @staticmethod
    def previousChild(hotmap, index):
        ''' Return the previous sibling of the node indicated by index
        (clamped to the first sibling). '''
        return hotmap[max(0, index - 1)][1]

    @staticmethod
    def firstNode(hot_map):
        ''' Return the very first node in the hot_map. '''
        return hot_map[0][1]

    @classmethod
    def lastNode(class_, hot_map):
        ''' Return the very last node (recursively) in the hot map. '''
        children = hot_map[-1][2]
        return class_.lastNode(children) if children else hot_map[-1][1]
class SquareMap( wx.Panel ):
    """Construct a nested-box trees structure view (treemap-style widget).

    Each model node is drawn as a rounded rectangle; its children are laid
    out inside it with areas proportional to their adapter-reported values.
    Posts SquareHighlightEvent, SquareSelectionEvent and
    SquareActivationEvent as the user hovers, clicks and activates nodes.
    """
    BackgroundColor = wx.Color( 128,128,128 )
    max_depth = None        # optional cap on how many levels get drawn (None = unlimited)
    max_depth_seen = None   # deepest level actually reached during the last Draw()
    def __init__(
        self, parent=None, id=-1, pos=wx.DefaultPosition,
        size=wx.DefaultSize,
        style=wx.TAB_TRAVERSAL|wx.NO_BORDER|wx.FULL_REPAINT_ON_RESIZE,
        name='SquareMap', model = None,
        adapter = None,
        labels = True, # set to True to draw textual labels within the boxes
        highlight = True, # set to False to turn of highlighting
        padding = 2, # amount to reduce the children's box from the parent's box
    ):
        super( SquareMap, self ).__init__(
            parent, id, pos, size, style, name
        )
        self.model = model
        self.padding = padding
        self.labels = labels
        self.highlight = highlight
        self.selectedNode = None
        self.highlightedNode = None
        self.Bind( wx.EVT_PAINT, self.OnPaint)
        self.Bind( wx.EVT_SIZE, self.OnSize )
        if highlight:
            self.Bind( wx.EVT_MOTION, self.OnMouse )
        self.Bind( wx.EVT_LEFT_UP, self.OnClickRelease )
        self.Bind( wx.EVT_LEFT_DCLICK, self.OnDoubleClick )
        self.Bind( wx.EVT_KEY_UP, self.OnKeyUp )
        # hot_map is a nested list of (wx.Rect, node, child_hot_map) entries,
        # rebuilt on every Draw(); it maps screen positions back to nodes.
        self.hot_map = []
        self.adapter = adapter or DefaultAdapter()
        self.DEFAULT_PEN = wx.Pen( wx.BLACK, 1, wx.SOLID )
        self.SELECTED_PEN = wx.Pen( wx.WHITE, 2, wx.SOLID )
        self.OnSize(None)
    def OnMouse( self, event ):
        """Handle mouse-move event by selecting a given element"""
        node = HotMapNavigator.findNodeAtPosition(self.hot_map, event.GetPosition())
        self.SetHighlight( node, event.GetPosition() )
    def OnClickRelease( self, event ):
        """Release over a given square in the map"""
        node = HotMapNavigator.findNodeAtPosition(self.hot_map, event.GetPosition())
        self.SetSelected( node, event.GetPosition() )
    def OnDoubleClick(self, event):
        """Double click on a given square in the map"""
        node = HotMapNavigator.findNodeAtPosition(self.hot_map, event.GetPosition())
        if node:
            wx.PostEvent( self, SquareActivationEvent( node=node, point=event.GetPosition(), map=self ) )
    def OnKeyUp(self, event):
        """Keyboard navigation: Home/End, arrows, and Return to activate."""
        event.Skip()
        if not self.selectedNode or not self.hot_map:
            return
        if event.KeyCode == wx.WXK_HOME:
            self.SetSelected(HotMapNavigator.firstNode(self.hot_map))
            return
        elif event.KeyCode == wx.WXK_END:
            self.SetSelected(HotMapNavigator.lastNode(self.hot_map))
            return
        parent, children, index = HotMapNavigator.findNode(self.hot_map, self.selectedNode)
        if event.KeyCode == wx.WXK_DOWN:
            self.SetSelected(HotMapNavigator.nextChild(children, index))
        elif event.KeyCode == wx.WXK_UP:
            self.SetSelected(HotMapNavigator.previousChild(children, index))
        elif event.KeyCode == wx.WXK_RIGHT:
            self.SetSelected(HotMapNavigator.firstChild(children, index))
        elif event.KeyCode == wx.WXK_LEFT and parent:
            self.SetSelected(parent)
        elif event.KeyCode == wx.WXK_RETURN:
            wx.PostEvent(self, SquareActivationEvent(node=self.selectedNode,
                                                     map=self))
    def GetSelected(self):
        """Return the currently-selected node (or None)."""
        return self.selectedNode
    def SetSelected( self, node, point=None, propagate=True ):
        """Set the given node selected in the square-map"""
        if node == self.selectedNode:
            return
        self.selectedNode = node
        self.Refresh()
        # Fixed: honour the propagate flag (it was previously ignored),
        # mirroring SetHighlight below.
        if node and propagate:
            wx.PostEvent( self, SquareSelectionEvent( node=node, point=point, map=self ) )
    def SetHighlight( self, node, point=None, propagate=True ):
        """Set the currently-highlighted node"""
        if node == self.highlightedNode:
            return
        self.highlightedNode = node
        self.Refresh()
        if node and propagate:
            wx.PostEvent( self, SquareHighlightEvent( node=node, point=point, map=self ) )
    def SetModel( self, model, adapter=None ):
        """Set our model object (root of the tree)"""
        self.model = model
        if adapter is not None:
            self.adapter = adapter
        self.Refresh()
    def Refresh(self):
        # Deliberately shadows wx.Window.Refresh: we redraw into our buffer
        # immediately instead of queueing a paint event.
        self.UpdateDrawing()
    def OnPaint(self, event):
        # BufferedPaintDC blits the off-screen buffer to the window when the
        # dc object is destroyed at the end of this method; no explicit
        # drawing is needed here.
        dc = wx.BufferedPaintDC(self, self._buffer)
    def OnSize(self, event):
        # The buffer is initialized in here, so that the buffer is always
        # the same size as the Window.
        width, height = self.GetClientSizeTuple()
        # Make new off-screen bitmap: this bitmap will always have the
        # current drawing in it, so it can be used to save the image to
        # a file, or whatever.
        if width and height:
            # Macs can generate events with 0-size values
            self._buffer = wx.EmptyBitmap(width, height)
            self.UpdateDrawing()
    def UpdateDrawing(self):
        """Redraw the whole map into the off-screen buffer (and screen)."""
        dc = wx.BufferedDC(wx.ClientDC(self), self._buffer)
        self.Draw(dc)
    def Draw(self, dc):
        ''' Draw the tree map on the device context. '''
        self.hot_map = []
        dc.BeginDrawing()
        brush = wx.Brush( self.BackgroundColor )
        dc.SetBackground( brush )
        dc.Clear()
        if self.model:
            self.max_depth_seen = 0
            dc.SetFont(self.FontForLabels(dc))
            w, h = dc.GetSize()
            self.DrawBox( dc, self.model, 0,0,w,h, hot_map = self.hot_map )
        dc.EndDrawing()
    def FontForLabels(self, dc):
        ''' Return the default GUI font, scaled for printing if necessary. '''
        font = wx.SystemSettings_GetFont(wx.SYS_DEFAULT_GUI_FONT)
        scale = dc.GetPPI()[0] / wx.ScreenDC().GetPPI()[0]
        font.SetPointSize(scale*font.GetPointSize())
        return font
    def BrushForNode( self, node, depth=0 ):
        """Create brush to use to display the given node"""
        if node == self.selectedNode:
            color = wx.SystemSettings_GetColour(wx.SYS_COLOUR_HIGHLIGHT)
        elif node == self.highlightedNode:
            color = wx.Color( red=0, green=255, blue=0 )
        else:
            color = self.adapter.background_color(node, depth)
            if not color:
                # Fall back to a depth-derived pseudo-random colour so that
                # adjacent levels remain visually distinguishable.
                red = (depth * 10)%255
                green = 255-((depth * 5)%255)
                blue = (depth * 25)%255
                color = wx.Color( red, green, blue )
        return wx.Brush( color )
    def PenForNode( self, node, depth=0 ):
        """Determine the pen to use to display the given node"""
        if node == self.selectedNode:
            return self.SELECTED_PEN
        return self.DEFAULT_PEN
    def TextForegroundForNode(self, node, depth=0):
        """Determine the text foreground color to use to display the label of
           the given node"""
        if node == self.selectedNode:
            fg_color = wx.SystemSettings_GetColour(wx.SYS_COLOUR_HIGHLIGHTTEXT)
        else:
            fg_color = self.adapter.foreground_color(node, depth)
            if not fg_color:
                fg_color = wx.SystemSettings_GetColour(wx.SYS_COLOUR_WINDOWTEXT)
        return fg_color
    def DrawBox( self, dc, node, x,y,w,h, hot_map, depth=0 ):
        """Draw a model-node's box and all children nodes"""
        log.debug( 'Draw: %s to (%s,%s,%s,%s) depth %s',
                   node, x,y,w,h, depth,
                   )
        if self.max_depth and depth > self.max_depth:
            return
        self.max_depth_seen = max( (self.max_depth_seen,depth))
        dc.SetBrush( self.BrushForNode( node, depth ) )
        dc.SetPen( self.PenForNode( node, depth ) )
        if sys.platform == 'darwin':
            # Macs don't like drawing small rounded rects...
            if w < self.padding*2 or h < self.padding*2:
                dc.DrawRectangle( x,y,w,h)
            else:
                dc.DrawRoundedRectangle( x,y,w,h, self.padding )
        else:
            dc.DrawRoundedRectangle( x,y,w,h, self.padding*3 )
        # Record this box so mouse/keyboard navigation can find the node.
        children_hot_map = []
        hot_map.append( (wx.Rect( int(x),int(y),int(w),int(h)), node, children_hot_map ) )
        # Shrink by the padding before laying out any children inside.
        x += self.padding
        y += self.padding
        w -= self.padding*2
        h -= self.padding*2
        empty = self.adapter.empty( node )
        icon_drawn = False
        if self.max_depth and depth == self.max_depth:
            self.DrawIconAndLabel(dc, node, x, y, w, h, depth)
            icon_drawn = True
        elif empty:
            # is a fraction of the space which is empty...
            log.debug( '  empty space fraction: %s', empty )
            new_h = h * (1.0-empty)
            self.DrawIconAndLabel(dc, node, x, y, w, h-new_h, depth)
            icon_drawn = True
            y += (h-new_h)
            h = new_h
        if w >self.padding*2 and h> self.padding*2:
            children = self.adapter.children( node )
            if children:
                log.debug( '  children: %s', children )
                self.LayoutChildren( dc, children, node, x,y,w,h, children_hot_map, depth+1 )
            else:
                log.debug( '  no children' )
                if not icon_drawn:
                    self.DrawIconAndLabel(dc, node, x, y, w, h, depth)
        else:
            log.debug( '  not enough space: children skipped' )
    def DrawIconAndLabel(self, dc, node, x, y, w, h, depth):
        ''' Draw the icon, if any, and the label, if any, of the node. '''
        dc.SetClippingRegion(x+1, y+1, w-2, h-2) # Don't draw outside the box
        icon = self.adapter.icon(node, node==self.selectedNode)
        if icon and h >= icon.GetHeight() and w >= icon.GetWidth():
            iconWidth = icon.GetWidth() + 2
            dc.DrawIcon(icon, x+2, y+2)
        else:
            iconWidth = 0
        if self.labels and h >= dc.GetTextExtent('ABC')[1]:
            dc.SetTextForeground(self.TextForegroundForNode(node, depth))
            dc.DrawText(self.adapter.label(node), x + iconWidth + 2, y+2)
        dc.DestroyClippingRegion()
    def LayoutChildren( self, dc, children, parent, x,y,w,h, hot_map, depth=0 ):
        """Layout the set of children in the given rectangle

        The largest child gets a slice of the long axis proportional to its
        share of the total; the remaining children are laid out recursively
        in the rest of the space.
        """
        nodes = [ (self.adapter.value(node,parent),node) for node in children ]
        # Fixed: sort by value only -- the old plain tuple sort fell back to
        # comparing the node objects themselves on equal values, which is
        # undefined for arbitrary node types.
        nodes.sort(key=lambda pair: pair[0])
        total = self.adapter.children_sum( children,parent )
        if total:
            (firstSize,firstNode) = nodes[-1]
            rest = [node for (size,node) in nodes[:-1]]
            fraction = firstSize/float(total)
            if w >= h:
                new_w = int(w*fraction)
                if new_w:
                    self.DrawBox( dc, firstNode, x,y, new_w, h, hot_map, depth+1 )
                else:
                    return # no other node will show up as non-0 either
                w = w-new_w
                x += new_w
            else:
                new_h = int(h*fraction)
                if new_h:
                    self.DrawBox( dc, firstNode, x,y, w, new_h, hot_map, depth + 1 )
                else:
                    return # no other node will show up as non-0 either
                h = h-new_h
                y += new_h
            if rest and (h > self.padding*2) and (w > self.padding*2):
                self.LayoutChildren( dc, rest, parent, x,y,w,h, hot_map, depth )
class DefaultAdapter( object ):
    """Default adapter mapping plain node objects onto the SquareMap API.

    Nodes are expected to expose ``children``, ``size`` and ``path``
    attributes (as the Node class below does).
    """
    def children( self, node ):
        """Retrieve the set of nodes which are children of this node"""
        return node.children
    def value( self, node, parent=None ):
        """Return value used to compare size of this node"""
        return node.size
    def label( self, node ):
        """Return textual description of this node"""
        return node.path
    def overall( self, node ):
        """Total of the values of this node's children"""
        return sum( self.value(child, node) for child in self.children(node) )
    def children_sum( self, children,node ):
        """Calculate children's total sum"""
        return sum( self.value(child, node) for child in children )
    def empty( self, node ):
        """Calculate empty space as a fraction of total space"""
        total = self.overall( node )
        if not total:
            return 0
        used = self.children_sum( self.children(node), node )
        return (total - used)/float(total)
    def background_color(self, node, depth):
        ''' The color to use as background color of the node. '''
        return None
    def foreground_color(self, node, depth):
        ''' The color to use for the label. '''
        return None
    def icon(self, node, isSelected):
        ''' The icon to display in the node. '''
        return None
    def parents( self, node ):
        """Retrieve/calculate the set of parents for the given node"""
        return []
class TestApp(wx.App):
    """Basic application for holding the viewing Frame"""
    def OnInit(self):
        """Initialise the application: frame, model and SquareMap view."""
        wx.InitAllImageHandlers()
        self.frame = frame = wx.Frame( None,
        )
        frame.CreateStatusBar()
        # Build the file-system model from the directory given on the command
        # line.  (Fixed: was the redundant 'model = model = self.get_model(...)'.)
        model = self.get_model( sys.argv[1])
        self.sq = SquareMap( frame, model=model)
        EVT_SQUARE_HIGHLIGHTED( self.sq, self.OnSquareSelected )
        frame.Show(True)
        self.SetTopWindow(frame)
        return True
    def get_model( self, path ):
        """Recursively build a Node tree for *path*, skipping symlinks.

        Files become leaf Nodes sized by st_size; directories recurse; the
        directory Node's size is the sum of its children's sizes.
        """
        nodes = []
        for f in os.listdir( path ):
            full = os.path.join( path,f )
            if not os.path.islink( full ):
                if os.path.isfile( full ):
                    nodes.append( Node( full, os.stat( full ).st_size, () ) )
                elif os.path.isdir( full ):
                    nodes.append( self.get_model( full ))
        return Node( path, sum([x.size for x in nodes]), nodes )
    def OnSquareSelected( self, event ):
        """Show the highlighted node's label as the frame's tooltip."""
        text = self.sq.adapter.label( event.node )
        self.frame.SetToolTipString( text )
class Node( object ):
    """Really dumb file-system node object: a path, a size and child nodes."""
    def __init__( self, path, size, children ):
        # Plain attribute storage; no validation is performed.
        self.path, self.size, self.children = path, size, children
    def __repr__( self ):
        return '%s( %r, %r, %r )'%( type(self).__name__, self.path, self.size, self.children )
usage = 'squaremap.py somedirectory'

def main():
    """Mainloop for the application"""
    if not sys.argv[1:]:
        # No directory argument: show usage instead of crashing in OnInit.
        # Parenthesized single-argument print is valid on Python 2 and 3.
        print(usage)
    else:
        app = TestApp(0)
        app.MainLoop()

if __name__ == "__main__":
    main()
|
from distutils.core import setup, Extension
# NOTE(review): pulls INCFLAGS, LDFLAGS and LIBS from a sibling 'setup'
# module -- confirm this does not recursively import this very file.
from setup import *

# SWIG-generated Python bindings for the CDI function library.
CdiLib_module = Extension('_CdiLib',
                          sources=['cdilib_wrap.c'],
                          extra_compile_args = INCFLAGS,
                          library_dirs = LDFLAGS,
                          extra_objects = ['../../src/cdilib.o'],
                          extra_link_args = LIBS,
                          )

setup (name = 'CdiLib',
       version = '0.1',
       author = "Ralf Mueller",
       # Fixed typo: was "pyhton bindings ..."
       description = """python bindings to CDI function library""",
       ext_modules = [CdiLib_module],
       py_modules = ["CdiLib"],
       )
|
from django.db import models
from django.db.models import Q
from django.conf import settings
from django.contrib.auth.models import AbstractUser
from tournaments.models import Tournament, Team, Fixture, Match
class Player(AbstractUser):
    """Site user; friendships are directed PlayerFriend rows between users."""
    friends = models.ManyToManyField('self', through = 'PlayerFriend', symmetrical = False)

    class Meta:
        verbose_name = "Jugador"
        verbose_name_plural = "Jugadores"

    def __unicode__(self):
        return self.username

    # Reverse relations (see PlayerFriend's related_name declarations):
    #   self.friend        -> rows where this player is the petition target
    #   self.friend_player -> rows where this player initiated the petition
    # TODO: these helpers each issue two queries; consider a single Q() query.

    def get_all_friends(self):
        """Every counterpart, whatever the petition status."""
        initiators = [relation.player for relation in self.friend.filter()]
        targets = [relation.friend for relation in self.friend_player.filter()]
        return initiators + targets

    def get_true_friends(self):
        """Counterparts of accepted (status=True) petitions."""
        initiators = [relation.player for relation in self.friend.filter(status = True)]
        targets = [relation.friend for relation in self.friend_player.filter(status = True)]
        return initiators + targets

    def get_ignored_friends(self):
        """Players that asked us and are still waiting for an answer."""
        return [relation.player for relation in self.friend.filter(status = None)]

    def get_friends_that_ignored_us(self):
        """Players we asked that have not answered yet."""
        return [relation.friend for relation in self.friend_player.filter(status = None)]

    def get_bad_friends(self):
        """Players we asked that rejected us."""
        return [relation.friend for relation in self.friend_player.filter(status = False)]

    def get_friends_we_rejected(self):
        """Players whose petitions we rejected."""
        return [relation.player for relation in self.friend.filter(status = False)]

    def is_friend(self, player):
        """True when player is an accepted friend of ours."""
        return player in self.get_true_friends()

    def get_all_gameplayers(self):
        """GamePlayer rows where we accepted to play."""
        return self.gameplayer_set.filter(status = True)

    def get_all_games_points(self, gameplayers):
        """Sum of the total points over the given GamePlayer rows."""
        return sum(gameplayer.get_total_points() for gameplayer in gameplayers)
class PlayerFriend(models.Model):
    """Directed friendship petition: `player` asked `friend` to be friends."""
    player = models.ForeignKey(settings.AUTH_USER_MODEL, related_name = 'friend_player')
    friend = models.ForeignKey(settings.AUTH_USER_MODEL, related_name = 'friend')
    # None = unanswered, True = accepted, False = rejected.
    status = models.NullBooleanField()
    created_at = models.DateTimeField(auto_now_add = True)
    updated_at = models.DateTimeField(auto_now = True)
    def __unicode__(self):
        # "<initiator> - <target>"
        return '{0} - {1}'.format(self.player, self.friend)
class Game(models.Model):
    """A prediction game on one tournament, owned by a player.

    The points_* fields parametrise the scoring performed by
    PlayerMatchPrediction.get_points().
    """
    owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name = 'owner_games')
    players = models.ManyToManyField(settings.AUTH_USER_MODEL, related_name = 'games',through = 'GamePlayer')
    name = models.CharField(max_length = 100)
    tournament = models.ForeignKey(Tournament)
    # In classic mode only the outcome is scored; non-classic games also
    # award points_exact for exact scores (see PlayerMatchPrediction.get_points).
    classic = models.BooleanField(default = True, verbose_name = "Modo Clasico")
    open_predictions = models.BooleanField(default = True, verbose_name = "Pronosticos Abiertos")
    # Points for predicting the exact score (non-classic games only).
    points_exact = models.PositiveIntegerField(default = 3)
    # Points for predicting the right outcome (win/draw/lose).
    points_general = models.PositiveIntegerField(default = 3)
    # Bonus added when a correctly-predicted match is flagged is_classic.
    points_classic = models.PositiveIntegerField(default = 2)
    # Multiplier applied when the prediction was marked is_double.
    points_double = models.PositiveIntegerField(default = 2)
    created_at = models.DateTimeField(auto_now_add = True)
    updated_at = models.DateTimeField(auto_now = True)
    def __unicode__(self):
        return self.name
    class Meta:
        verbose_name = "Juego"
class GamePlayer(models.Model):
    """Membership of a Player in a Game, tracking invitation state and points."""
    player = models.ForeignKey(settings.AUTH_USER_MODEL)
    game = models.ForeignKey(Game)
    # None = invitation unanswered, True = accepted, False = declined.
    status = models.NullBooleanField()
    # Set when a declining player asks to be invited again.
    another_chance = models.NullBooleanField()
    initial_points = models.PositiveIntegerField(default = 0)
    created_at = models.DateTimeField(auto_now_add = True)
    updated_at = models.DateTimeField(auto_now = True)
    def reset(self):
        """Return to the freshly-invited state (caller is expected to save())."""
        self.status = None
        self.another_chance = None
    def is_invited(self):
        """ Was invited to play """
        # Identity comparison is the idiomatic (and equivalent) form for the
        # NullBooleanField values True/False/None.
        return self.status is None and self.another_chance is None
    def is_answered_request(self):
        """ Answered the request to play """
        return self.status is not None and self.another_chance is None
    def is_another_chance(self):
        """ Asks for another invitation """
        return self.another_chance and self.status is False
    def get_total_points(self):
        """Accumulated fixture points plus the player's starting points."""
        return FixturePlayerPoints.get_player_points(self) + self.initial_points
    def get_fixture_points(self, fixture):
        """Return (points, classic) for one fixture.

        points starts from initial_points and adds the points of every
        finished-match prediction in the fixture; classic is True when at
        least one scoring prediction was on a classic match.
        """
        points = self.initial_points
        classic = False
        if fixture.is_finished:
            predictions = self.match_predictions.filter(match__fixture = fixture, match__is_finished = True)
            for player_prediction in predictions:
                prediction_points = player_prediction.get_points()
                if prediction_points > 0 and player_prediction.match.is_classic:
                    classic = True
                points = prediction_points + points
        return (points , classic)
    def get_fixture_predictions(self, fixture):
        """All of this player's predictions for matches of the fixture."""
        match_ids = [x.pk for x in fixture.matches.all()]
        predictions = self.match_predictions.filter(match_id__in = match_ids)
        return predictions
    def __unicode__(self):
        return '{0} | {1} | {2}'.format(self.player, self.status, self.another_chance)
class PlayerMatchPrediction(models.Model):
    """A player's predicted score for one match, scored against the result."""
    gameplayer = models.ForeignKey(GamePlayer, related_name = 'match_predictions')
    match = models.ForeignKey(Match)
    local_team_goals = models.PositiveIntegerField()
    visitor_team_goals = models.PositiveIntegerField()
    # A "double" prediction multiplies its points when it scores.
    is_double = models.BooleanField(verbose_name = "Doble", default = False)
    created_at = models.DateTimeField(auto_now_add = True)
    updated_at = models.DateTimeField(auto_now = True)
    def is_general_prediction(self):
        """True when the predicted outcome (home win/draw/away win) matches
        the real outcome; always False while the match is unfinished."""
        if not self.match.is_finished:
            return False
        prediction_local_team_had_won = self.__class__.has_local_team_won(self.local_team_goals, self.visitor_team_goals)
        match_local_team_had_won = self.__class__.has_local_team_won(self.match.local_team_goals, self.match.visitor_team_goals)
        return prediction_local_team_had_won == match_local_team_had_won
    def is_exact_prediction(self):
        """True when the predicted score equals the final score exactly."""
        if not self.match.is_finished:
            return False
        return self.match.local_team_goals == self.local_team_goals and \
               self.match.visitor_team_goals == self.visitor_team_goals
    def get_points(self):
        """Score this prediction per the game's rules; None if unfinished."""
        if not self.match.is_finished:
            return None
        points = 0
        # General Prediction: Who won or if they draw
        if self.is_general_prediction():
            points += self.gameplayer.game.points_general
        # Exact Prediction: The exact score of the match
        if (not self.gameplayer.game.classic) and self.is_exact_prediction():
            points += self.gameplayer.game.points_exact
        # If the match is classic
        # Only sums if the player predicted correctly a General or Exact Prediction
        if self.match.is_classic and points > 0:
            points += self.gameplayer.game.points_classic
        # If the match is double
        # Only multiplies if the player predicted correctly a General or Exact Prediction
        if self.is_double and points > 0:
            points *= self.gameplayer.game.points_double
        return points
    @classmethod
    def has_local_team_won(cls, local_team_goals, visitor_team_goals):
        # None = draw, True = won, False = lose
        return None if visitor_team_goals == local_team_goals else (visitor_team_goals < local_team_goals)
class FixturePlayerPoints(models.Model):
    """Points a GamePlayer earned on one fixture (round of matches)."""
    fixture = models.ForeignKey(Fixture)
    gameplayer = models.ForeignKey(GamePlayer)
    points = models.IntegerField(verbose_name = 'Puntos')
    # True when at least one scoring prediction was on a classic match.
    classic_prediction = models.BooleanField(default = False)
    def __unicode__(self):
        return "{0} - {1} = {2} puntos".format(self.gameplayer.player, self.fixture, self.points)
    @classmethod
    def get_player_points(cls, gameplayer):
        """Sum of the gameplayer's points over every recorded fixture."""
        fixtures_points = [x.points for x in cls.objects.filter(gameplayer = gameplayer)]
        return sum(fixtures_points)
    class Meta:
        verbose_name = "Fecha punto"
|
# Sports-streaming communities: each entry pairs a human-readable 'name'
# with the subreddit's 'url' slug (apparently the part after reddit.com/r/).
streaming_subreddits = [
    {'name': 'Soccer Streams', 'url': 'soccerstreams'},
    {'name': 'MMA Streams', 'url': 'MMAStreams'},
    {'name': 'NFL Streams', 'url': 'NFLStreams'},
    {'name': 'NBA Streams', 'url': 'nbastreams'},
    {'name': 'NCAA BBall Streams', 'url': 'ncaaBBallStreams'},
    {'name': 'CFB Streams', 'url': 'CFBStreams'},
    {'name': 'NHL Streams', 'url': 'NHLStreams'},
    {'name': 'Puck Streams', 'url': 'puckstreams'},
    {'name': 'MLB Streams', 'url': 'MLBStreams'},
    {'name': 'Tennis Streams', 'url': 'TennisStreams'},
    {'name': 'Boxing Streams', 'url': 'BoxingStreams'},
    {'name': 'Rugby Streams', 'url': 'RugbyStreams'},
    {'name': 'Motor Sports Streams', 'url': 'motorsportsstreams'},
    {'name': 'WWE Streams', 'url': 'WWEstreams'},
]
|
from phantom_team.strategy.formation import positions
from smsoccer.strategy import formation
from smsoccer.strategy.formation import player_position
from smsoccer.world.world_model import WorldModel, PlayModes
from smsoccer.players.abstractgoalie import AbstractGoalie
from smsoccer.util.fielddisplay import FieldDisplay
from smsoccer.util.geometric import euclidean_distance
from superman import SuperMan
from shapely.geometry import *
class GoalieAgent(AbstractGoalie, SuperMan):
    """
    Goalie Agent for Robocup Soccer Team.

    Keeps the goalie on a circle of radius 13 around its goal, positioned on
    the line between the goal and the ball.
    """
    def __init__(self, visualization = False):
        AbstractGoalie.__init__(self)
        SuperMan.__init__(self)
        # When visualization is on, a FieldDisplay window shows the computed
        # geometry each think() cycle.
        self.visualization = visualization
        if visualization:
            self.display = FieldDisplay()
        self._my_goal_position = None

    def position_rules(self):
        """Compute the goalie's positioning geometry.

        Returns a dict with the operating circle's points, the goal-to-ball
        line's endpoints and the destination point (circle/line
        intersection), or False when goal or ball position is unknown.
        """
        if self._my_goal_position is not None and self.wm.ball is not None:
            # Operating circle of radius 13 around our goal.
            root_point = Point(self._my_goal_position)
            circle = root_point.buffer(13)
            circle_points = list(circle.exterior.coords)
            # Line from our goal to the ball.
            ball_coords = self.wm.get_object_absolute_coords(self.wm.ball)
            line = LineString([self._my_goal_position, ball_coords])
            line_points = list(line.coords)
            # Destination: where the goal-ball line leaves the circle.
            # NOTE(review): indexing [1] assumes the ball lies outside the
            # circle so the intersection has two coordinates -- confirm.
            inter = circle.intersection(line)
            point = list(inter.coords)[1]
            return {"circle_points": circle_points, "line_points": line_points, "destination_point": point}
        return False

    def think(self):
        """
        One decision cycle: update state, optionally draw the debug view,
        teleport into formation before kick-off, otherwise move along the
        computed positioning geometry.
        """
        self.update_super()
        rules = self.position_rules()
        if self.visualization:
            # Draw the debug map; skip entirely until we know our position.
            if self.wm.abs_coords is None:
                return
            self.display.clear()
            self.display.draw_robot(self.wm.abs_coords, self.wm.abs_body_dir)
            if self.wm.ball is not None:
                self.display.draw_circle(self.wm.get_object_absolute_coords(self.wm.ball), 4)
            if rules is not False:
                self.display.draw_points(rules["circle_points"])
                self.display.draw_line(rules["line_points"][0], rules["line_points"][1])
            self.display.show()
        if not self.in_kick_off_formation:
            self.teleport_to_point(positions[1])
            # Player is ready in formation
            self.in_kick_off_formation = True
        else:
            # Act
            if self.wm.play_mode != PlayModes.BEFORE_KICK_OFF:
                if rules is False:
                    # Geometry unavailable: fall back to dashing home.
                    if self.dash_to_point(self._my_goal_position, radio = 1):
                        print("reach !")
                    return
                self.dash_to_point(rules["destination_point"], radio = 1)
|
import logging
from .HTMLElement import HTMLElement
from .attr_property import attr_property
from .bool_property import bool_property
from .text_property import text_property
log = logging.getLogger("Thug")
class HTMLScriptElement(HTMLElement):
    """DOM wrapper for <script> elements.

    Attributes are backed by the underlying tag through the *_property
    helpers; assigning to ``src`` additionally hands the tag to the DFT
    engine (presumably so the referenced script gets fetched and analysed).
    """
    # Stored under _async and exposed via __getattr__ below (avoids using
    # the reserved word 'async' as an identifier).
    _async = bool_property("async", readonly = True, novalue = True)
    text = text_property()
    # Legacy event-binding attributes; not backed by the tag.
    htmlFor = None
    event = None
    charset = attr_property("charset", default = "")
    defer = bool_property("defer", readonly = True, novalue = True)
    _src = attr_property("src", default = "")
    type = attr_property("type")
    def __init__(self, doc, tag):
        HTMLElement.__init__(self, doc, tag)
    def __getattr__(self, name):
        # Route attribute name 'async' to the _async property.
        if name in ("async", ):
            return self._async
        raise AttributeError
    def get_src(self):
        return self._src
    def set_src(self, src):
        # Setting src triggers handling of the (possibly remote) script.
        self._src = src
        log.DFT.handle_script(self.tag)
    src = property(get_src, set_src)
|
from django.core.management.base import BaseCommand, CommandError
from projects.models import *
from time import sleep
import requests
class Command(BaseCommand):
def handle(self, *args, **options):
OC_URL = "http://opencoesione.gov.it/api/nature.json?page=1"
"""
{
"count": 6,
"next": null,
"previous": null,
"results": [
{
"filters": {
"progetti": "http://opencoesione.gov.it/api/progetti?natura=conferimenti-capitale",
"soggetti": "http://opencoesione.gov.it/api/soggetti?natura=conferimenti-capitale"
},
"codice": "08",
"descrizione": "ACQUISTO DI PARTECIPAZIONI AZIONARIE E CONFERIMENTI DI CAPITALE",
"short_label": " Conferimenti capitale",
"slug": "conferimenti-capitale"
}
]
}
"""
next_url = OC_URL
while next_url is not None :
print ">>", next_url, type(next_url)
try:
jj = requests.get(next_url).json()
if "next" in jj:
next_url = jj.get('next')
print ">>>", next_url
for result in jj.get('results'):
print result
try:
l = Tags()
l.slug = result.get('slug')
l.name = result.get('short_label')
l.description = result.get('descrizione')
l.tagtype = "natura"
l.save()
except:
pass
else:
sleep(5)
except:
pass
OC_URL = "http://opencoesione.gov.it/api/temi.json?page=1"
"""
{
"count": 13,
"next": null,
"previous": null,
"results": [
{
"filters": {
"progetti": "http://opencoesione.gov.it/api/progetti?tema=agenda-digitale",
"soggetti": "http://opencoesione.gov.it/api/soggetti?tema=agenda-digitale"
},
"codice": "7",
"descrizione": "Agenda digitale",
"short_label": "Agenda digitale",
"slug": "agenda-digitale"
}
]
}
"""
next_url = OC_URL
while next_url is not None:
print ">>", next_url, type(next_url)
jj = requests.get(next_url).json()
if "next" in jj:
next_url = jj.get('next')
print ">>>", next_url
for result in jj.get('results'):
print result
l = Tag()
l.slug = result.get('slug')
l.name = result.get('short_label')
l.description = result.get('descrizione')
l.tagtype = "tema"
l.save()
else:
sleep(5)
|
from setuptools import setup
import shutil
import os
from . import version
PACKAGE = 'periscope-gnome'
VERSION = version.VERSION

# Debian packaging helper: stage the Nautilus extension into the package tree.
try:
    os.makedirs("./debian/periscope-gnome/usr/share/nautilus-python/extensions")
except OSError:
    # Typically the directory already exists; a genuine failure will still
    # surface when the copy below fails.  (Was a bare 'except:'.)
    pass
shutil.copy('periscope-nautilus/periscope-nautilus.py', 'debian/periscope-gnome/usr/share/nautilus-python/extensions')
|
# Count mbox "From " envelope lines and collect the sender address (second
# word); "From:" header lines are skipped so each message counts once.
fname = input("Enter file name: ")
if len(fname) < 1 : fname = "mbox-short.txt"
lst = list()
count = 0
# Use a context manager so the file is closed even on errors
# (the original left the handle open).
with open(fname) as fh:
    for line in fh:
        if not line.startswith("From"): continue
        if line.startswith('From:'): continue
        words = line.split()
        if len(words) < 2:
            continue  # malformed 'From' line without a sender address
        lst.append(words[1])
        count = count + 1
for elements in lst:
    print (elements)
print ("There were" , count , "lines in the file with From as the first word")
|
""" This file contains unit tests for the SubtypeGraph module """
from unittest import TestCase
import os
from lib.NormaLoader import NormaLoader
from lib.SubtypeGraph import SubtypeGraph
import lib.TestDataLocator as TestData
class TestSubtypeGraph(TestCase):
    """ Unit tests for the SubtypeGraph module. """
    def setUp(self):
        # Shared fixture: the subtype graph model loaded from subtypes.orm.
        self.basic_model = NormaLoader(TestData.path("subtypes.orm")).model
    def test_compatible_object_types(self):
        """ Test compatible() function. """
        model = self.basic_model
        graph = SubtypeGraph(model)
        w = model.object_types.get("W")
        x = model.object_types.get("X")
        y = model.object_types.get("Y")
        z = model.object_types.get("Z")
        x1 = model.object_types.get("X1")
        y1 = model.object_types.get("Y1")
        z1 = model.object_types.get("Z1")
        e1 = model.object_types.get("E1")
        # Types on the same root-to-leaf path are compatible, in both orders.
        self.assertTrue(graph.compatible(z, z))
        self.assertTrue(graph.compatible(z, x))
        self.assertTrue(graph.compatible(x, z))
        self.assertTrue(graph.compatible(z, w))
        self.assertTrue(graph.compatible(w, z))
        self.assertTrue(graph.compatible(z, z1))
        self.assertTrue(graph.compatible(z1, z))
        # Types on diverging branches (or disjoint graphs) are incompatible.
        self.assertFalse(graph.compatible(z, x1))
        self.assertFalse(graph.compatible(x1, z))
        self.assertFalse(graph.compatible(z, e1))
        self.assertFalse(graph.compatible(e1, z))
        self.assertFalse(graph.compatible(y1, x))
        self.assertFalse(graph.compatible(x, y1))
    def test_subtype_graph_mult_root(self):
        """ Test subtype graph with multiple root types. """
        with self.assertRaises(ValueError) as ex:
            fname = TestData.path("subtype_graph_with_multiple_roots.orm")
            model = NormaLoader(fname).model
            graph = SubtypeGraph(model)
        self.assertEquals(ex.exception.message, \
            "Subtype graph containing ObjectTypes.F has more than one root type")
    def test_load_basic_subtypes(self):
        """ Test that basic subtype graphs are properly loaded. """
        model = self.basic_model
        graph = SubtypeGraph(model)
        # Primitive value and entity types that belong to no subtype graph
        v1 = model.object_types.get("V1")
        self.assertTrue(v1.primitive)
        self.assertEquals(graph.root_of[v1], v1)
        self.assertItemsEqual(v1.direct_supertypes, [])
        self.assertItemsEqual(graph.supertypes_of[v1], [])
        self.assertItemsEqual(v1.direct_subtypes, [])
        e1 = model.object_types.get("E1")
        self.assertTrue(e1.primitive)
        self.assertEquals(graph.root_of[e1], e1)
        self.assertItemsEqual(e1.direct_supertypes, [])
        self.assertItemsEqual(graph.supertypes_of[e1], [])
        self.assertItemsEqual(e1.direct_subtypes, [])
        # Simple linear subtype graph: A <- B <- C
        a = model.object_types.get("A")
        b = model.object_types.get("B")
        c = model.object_types.get("C")
        self.assertTrue(a.primitive)
        self.assertEquals(graph.root_of[a], a)
        self.assertItemsEqual(a.direct_supertypes, [])
        self.assertItemsEqual(graph.supertypes_of[a], [])
        self.assertItemsEqual(a.direct_subtypes, [b])
        self.assertFalse(b.primitive)
        self.assertEquals(graph.root_of[b], a)
        self.assertItemsEqual(b.direct_supertypes, [a])
        self.assertItemsEqual(graph.supertypes_of[b], [a])
        self.assertItemsEqual(b.direct_subtypes, [c])
        self.assertFalse(c.primitive)
        self.assertEquals(graph.root_of[c], a)
        self.assertItemsEqual(c.direct_supertypes, [b])
        # supertypes_of is transitive: C's supertypes are both A and B.
        self.assertItemsEqual(graph.supertypes_of[c], [a,b])
        self.assertItemsEqual(c.direct_subtypes, [])
    def test_diamond_subtype_graphs(self):
        """ Test that diamond-shaped graph is properly loaded. """
        model = self.basic_model
        graph = SubtypeGraph(model)
        # Diamond-shaped subtype graph
        w = model.object_types.get("W")
        x = model.object_types.get("X")
        y = model.object_types.get("Y")
        z = model.object_types.get("Z")
        x1 = model.object_types.get("X1")
        y1 = model.object_types.get("Y1")
        z1 = model.object_types.get("Z1")
        # W is the single root of the whole diamond.
        self.assertTrue(w.primitive)
        self.assertEquals(graph.root_of[w], w)
        self.assertItemsEqual(w.direct_supertypes, [])
        self.assertItemsEqual(graph.supertypes_of[w], [])
        self.assertItemsEqual(w.direct_subtypes, [x, y])
        self.assertFalse(x.primitive)
        self.assertEquals(graph.root_of[x], w)
        self.assertItemsEqual(x.direct_supertypes, [w])
        self.assertItemsEqual(graph.supertypes_of[x], [w])
        self.assertItemsEqual(x.direct_subtypes, [x1, z])
        self.assertFalse(x1.primitive)
        self.assertEquals(graph.root_of[x1], w)
        self.assertItemsEqual(x1.direct_supertypes, [x])
        self.assertItemsEqual(graph.supertypes_of[x1], [x, w])
        self.assertItemsEqual(x1.direct_subtypes, [])
        # Z has two direct supertypes (the middle of the diamond).
        self.assertFalse(z.primitive)
        self.assertEquals(graph.root_of[z], w)
        self.assertItemsEqual(z.direct_supertypes, [x, y])
        self.assertItemsEqual(graph.supertypes_of[z], [w, x, y])
        self.assertItemsEqual(z.direct_subtypes, [z1])
        self.assertFalse(y1.primitive)
        self.assertEquals(graph.root_of[y1], w)
        self.assertItemsEqual(y1.direct_supertypes, [y])
        self.assertItemsEqual(graph.supertypes_of[y1], [w, y])
        self.assertItemsEqual(y1.direct_subtypes, [z1])
        self.assertFalse(z1.primitive)
        self.assertEquals(graph.root_of[z1], w)
        self.assertItemsEqual(z1.direct_supertypes, [z, y1])
        self.assertItemsEqual(graph.supertypes_of[z1], [x, y, w, z, y1])
        self.assertItemsEqual(z1.direct_subtypes, [])
|
import os
import os.path
import shutil
import hashlib
import filecmp
import createrepo_c as cr
from .plugins_common import GlobalBundle, Metadata
from .common import LoggingInterface, DEFAULT_CHECKSUM_NAME
from .errors import DeltaRepoPluginError
# Module-level registries.  NOTE(review): PLUGINS and METADATA_MAPPING are
# empty here and GENERAL_PLUGIN is None -- presumably they are populated by
# the individual plugin definitions; confirm where registration happens.
PLUGINS = []
METADATA_MAPPING = {} # { "wanted_metadata_type": ["required_metadata_from_deltarepo", ...] }
GENERAL_PLUGIN = None
# File suffixes treated as already-compressed archives.
COMPRESSION_SUFFIXES = [".bz2", ".gz", ".lz", ".lzma", ".lzo", ".xz",
                        ".7z", ".s7z", ".apk", ".rar", ".sfx", ".tgz",
                        ".tbz2", ".tlz", ".zip", ".zipx", ".zz"]
class DeltaRepoPlugin(LoggingInterface):
    """Base class for delta repo plugins.

    A plugin generates (gen()) and applies (apply()) deltas for the
    metadata types listed in its METADATA attribute.  This base class
    provides common helpers for the trivial cases (copy-based deltas,
    unchanged files, missing files).
    """

    # Plugin name
    NAME = ""

    # Plugin version (integer number!)
    VERSION = 1

    # List of Metadata this plugin takes care of.
    # The plugin HAS TO do deltas for each of listed metadata and be able
    # to apply deltas on them!
    METADATA = []

    # Says which delta metadata are needed to get required metadata
    # e.g. { "primary": ["primary"], "filelists": ["primary", "filelists"] }
    METADATA_MAPPING = {}

    def __init__(self, pluginbundle, globalbundle, logger=None):
        LoggingInterface.__init__(self, logger)

        # PluginBundle object.
        # This object store data in persistent way to the generated delta repodata.
        # This object is empty when gen() plugin method is called and plugin
        # should use it to store necessary information.
        # During apply() this object should be filled with data
        # previously stored during gen() method
        self.pluginbundle = pluginbundle

        # Global bundle carry
        self.globalbundle = globalbundle

        # Internal stuff:
        # lazily built cache { metadata_type: notes_dict } read from the
        # plugin bundle (see _metadata_notes_from_plugin_bundle()).
        self.__metadata_notes_cache = None

    def _log(self, level, msg):
        """Log *msg* prefixed with the plugin name."""
        new_msg = "{0}: {1}".format(self.NAME, msg)
        LoggingInterface._log(self, level, new_msg)

    def _metadata_notes_from_plugin_bundle(self, type):
        """From the pluginbundle extract info about specific metadata element.

        Returns the notes dict stored for the given metadata *type*, or
        None when deltametadata.xml has no record for it.
        """
        if self.__metadata_notes_cache is None:
            # Build the cache on first use.
            # (Loop variable renamed from "dict" to avoid shadowing the builtin.)
            self.__metadata_notes_cache = {}
            for elem in self.pluginbundle.get_list("metadata", []):
                if "type" not in elem:
                    self._warning("Metadata element in deltametadata.xml hasn't "
                                  "an attribute 'type'")
                    continue
                self.__metadata_notes_cache[elem["type"]] = elem
        return self.__metadata_notes_cache.get(type)

    def _metadata_notes_to_plugin_bundle(self, type, dictionary):
        """Store info about metadata persistently to pluginbundle.

        Note: the read cache built by _metadata_notes_from_plugin_bundle()
        is intentionally not refreshed here (gen() writes, apply() reads).
        """
        notes = {"type": type}
        notes.update(dictionary)
        self.pluginbundle.append("metadata", notes)

    def gen_use_original(self, md, compression_type=cr.NO_COMPRESSION):
        """Function that takes original metadata file and
        copy it to the delta repo unmodified.
        Plugins could use this function when they cannot generate delta file
        for some reason (eg. file is newly added, so delta is
        meaningless/impossible).

        Returns the repomd record of the copied (possibly compressed) file.
        """
        md.delta_fn = os.path.join(md.out_dir, os.path.basename(md.new_fn))

        # Compress or copy original file
        stat = None
        if (compression_type != cr.NO_COMPRESSION):
            md.delta_fn += cr.compression_suffix(compression_type)
            stat = cr.ContentStat(md.checksum_type)
            cr.compress_file(md.new_fn, md.delta_fn, compression_type, stat)
        else:
            shutil.copy2(md.new_fn, md.delta_fn)

        # Prepare repomd record of xml file
        rec = cr.RepomdRecord(md.metadata_type, md.delta_fn)
        if stat is not None:
            rec.load_contentstat(stat)
        rec.fill(md.checksum_type)
        if self.globalbundle.unique_md_filenames:
            # rename_file() prefixes the filename with its checksum
            rec.rename_file()
            md.delta_fn = rec.location_real

        return rec

    def apply_use_original(self, md, decompress=False):
        """Reversal function for the gen_use_original.

        Copies (or decompresses, when *decompress* is set) the delta file
        into the output repo and returns its repomd record.
        """
        md.new_fn = os.path.join(md.out_dir, os.path.basename(md.delta_fn))

        if decompress:
            # Strip the compression suffix from the output name
            md.new_fn = md.new_fn.rsplit('.', 1)[0]
            cr.decompress_file(md.delta_fn, md.new_fn, cr.AUTO_DETECT_COMPRESSION)
        else:
            shutil.copy2(md.delta_fn, md.new_fn)

        # Prepare repomd record of xml file
        rec = cr.RepomdRecord(md.metadata_type, md.new_fn)
        rec.fill(md.checksum_type)
        if self.globalbundle.unique_md_filenames:
            rec.rename_file()
            md.new_fn = rec.location_real

        return rec

    def _gen_basic_delta(self, md, force_gen=False):
        """Resolve some common situation during delta generation.

        There is some situation which could appear during
        delta generation:

        # - Metadata file has a record in repomd.xml and the file really exists
        O - Metadata file has a record in repomd.xml but the file is missing
        X - Metadata file doesn't have a record in repomd.xml

        Old repository | New repository
        ---------------|---------------
                     # | # - Valid case
                     # | X - Valid case - metadata was removed
                     # | O - Invalid case - incomplete repo
                     X | # - Valid case - metadata was added
                     X | X - This shouldn't happen
                     X | O - Invalid case - incomplete repo
                     O | # - Invalid case - incomplete repo
                     O | X - Invalid case - incomplete repo
                     O | O - Invalid case - both repos are incomplete

        By default, Deltarepo should raise an exception when a invalid
        case is meet. But user could use --ignore-missing option and
        in that case, the Deltarepo should handle all invalid case
        like a charm.

        For example:
        O | # - Just copy the new metadata to the delta repo as is
        O | X - Just ignore that the old metadata is missing
        O | O - Just ignore this
        # | O - Just ignore this
        X | O - Just ignore this

        Most delta plugins should be only interested to "# | #" use case.
        The case where we have the both, old and new, metadata available.
        Other cases are mostly not important to the delta plugins.
        This is the reason why this function exists. It should solve the
        cases when the sophisticated delta is not possible.

        Returns (SC - Status code,
                 REC - Repomd record,
                 NOTES - Dict with persistent notes)

        If SC is True, then delta plugin shouldn't continue with
        processing of this metadata.
        """
        if not md:
            # No metadata - Nothing to do
            return (True, None, None)

        md.delta_rec = None
        md.delta_fn_exists = False

        if not md.old_rec and not md.new_rec:
            # None metadata record exists.
            self._debug("\"{0}\": Doesn't exist "
                        "in any repo".format(md.metadata_type))
            return (True, None, None)

        if not md.new_rec:
            # This record doesn't exists in the new version of repodata
            # This metadata were removed in the new version of repo
            self._debug("\"{0}\": Removed in the new version of repodata"
                        "".format(md.metadata_type))
            return (True, None, None)

        if not md.new_fn_exists:
            # The new metadata file is missing
            assert self.globalbundle.ignore_missing
            self._warning("\"{0}\": Delta cannot be generated - new metadata "
                          "are missing".format(md.metadata_type))
            return (True, None, None)

        if not md.old_rec or not md.old_fn_exists or \
                (force_gen and not filecmp.cmp(md.old_fn, md.new_fn)):
            # This metadata was newly added in the new version of repodata
            # Or we are just missing the old version of this metadata
            # Or we have both versions of metadata but the metadata are not
            # same and in that case we simply want to gen a delta as a copy
            #
            # Bugfix: the original chain tested md.old_fn_exists twice
            # ("if md.old_fn_exists / elif not md.old_fn_exists / else"),
            # which made the third branch unreachable and mislabeled the
            # force_gen case as "newly added".  Branch on the three guard
            # disjuncts instead.
            if not md.old_rec:
                self._debug("\"{0}\": Newly added in the new version of repodata"
                            "".format(md.metadata_type))
            elif not md.old_fn_exists:
                self._warning("\"{0}\": Delta cannot be generated - old metadata "
                              "are missing - Using copy of the new one"
                              "".format(md.metadata_type))
            else:
                # Bugfix: .format() call was missing here
                self._debug("\"{0}\": Delta is just a copy of the new metadata"
                            "".format(md.metadata_type))

            # Suffix based detection of compression
            compressed = False
            for suffix in COMPRESSION_SUFFIXES:
                if md.new_fn.endswith(suffix):
                    compressed = True
                    break

            compression = cr.NO_COMPRESSION
            if not compressed:
                compression = cr.XZ

            # Gen record
            rec = self.gen_use_original(md, compression_type=compression)

            notes = {}
            notes["original"] = '1'
            if compression != cr.NO_COMPRESSION:
                notes["compressed"] = "1"

            md.delta_rec = rec
            md.delta_fn_exists = True

            return (True, rec, notes)

        # At this point we are sure that we have both metadata files

        if filecmp.cmp(md.old_fn, md.new_fn):
            # Both metadata files exists and are the same
            self._debug("\"{0}\": Same in both version of repodata"
                        "".format(md.metadata_type))
            notes = {}
            if os.path.basename(md.old_fn) != os.path.basename(md.new_fn):
                notes["new_name"] = os.path.basename(md.new_fn)

            notes["unchanged"] = "1"
            notes["checksum_name"] = cr.checksum_name_str(md.checksum_type)
            return (True, None, notes)

        # Both metadata files exists and are different,
        # this is job for a real delta plugin :)
        return (False, None, None)

    def _apply_basic_delta(self, md, notes):
        """Resolve the common (trivial) cases during delta application.

        Counterpart of _gen_basic_delta(): handles removed metadata,
        unchanged files carried over from the old repo, and plain copies.

        Returns (SC - Status code, REC - Repomd record).  If SC is True,
        the delta plugin shouldn't continue with this metadata.
        """
        if not md:
            # No metadata - Nothing to do
            return (True, None)

        # Init some stuff in md
        # This variables should be set only if new record was generated
        # Otherwise it should by None/False
        md.new_rec = None
        md.new_fn_exists = False

        if not notes:
            # No notes - Nothing to do
            return (True, None)

        if not md.old_rec and not md.delta_rec:
            # None metadata record exists.
            self._debug("\"{0}\": Doesn't exist "
                        "in any repo".format(md.metadata_type))
            return (True, None)

        if not md.delta_rec:
            # This record is missing in delta repo
            if notes.get("unchanged") != "1":
                # This metadata were removed in the new version of repo
                self._debug("\"{0}\": Removed in the new version of repodata"
                            "".format(md.metadata_type))
                return (True, None)

            # Copy from the old repo should be used
            if not md.old_fn_exists:
                # This is missing in the old repo
                self._warning("\"{0}\": From old repo should be used, but "
                              "it is missing".format(md.metadata_type))
                return (True, None)

            # Use copy from the old repo

            # Check if old file should have a new name
            basename = notes.get("new_name")
            if not basename:
                basename = os.path.basename(md.old_fn)

            md.new_fn = os.path.join(md.out_dir, basename)

            checksum_name = notes.get("checksum_name", DEFAULT_CHECKSUM_NAME)
            checksum_type = cr.checksum_type(checksum_name)

            # Copy the file and create repomd record
            shutil.copy2(md.old_fn, md.new_fn)
            rec = cr.RepomdRecord(md.metadata_type, md.new_fn)
            rec.fill(checksum_type)
            if self.globalbundle.unique_md_filenames:
                rec.rename_file()
                md.new_fn = rec.location_real

            md.new_rec = rec
            md.new_fn_exists = True

            return (True, rec)

        if not md.delta_fn_exists:
            # Delta is missing
            self._warning("\"{0}\": Delta file is missing"
                          "".format(md.metadata_type))
            return (True, None)

        # At this point we are sure, we have a delta file

        if notes.get("original") == "1":
            # Delta file is the target file

            # Check if file should be uncompressed
            decompress = False
            if notes.get("compressed") == "1":
                decompress = True

            rec = self.apply_use_original(md, decompress)
            # Bugfix: .format() call was missing here
            self._debug("\"{0}\": Used delta is just a copy"
                        "".format(md.metadata_type))

            md.new_rec = rec
            md.new_fn_exists = True

            return (True, rec)

        if not md.old_fn_exists:
            # Old file is missing
            self._warning("\"{0}\": Old file is missing"
                          "".format(md.metadata_type))
            return (True, None)

        # Delta file exists and it is not a copy nor metadata
        # file from old repo should be used.
        # this is job for a real delta plugin :)
        return (False, None)

    def apply(self, metadata):
        """Apply deltas on *metadata* - must be implemented by subclasses."""
        raise NotImplementedError("Not implemented")

    def gen(self, metadata):
        """Generate deltas for *metadata* - must be implemented by subclasses."""
        raise NotImplementedError("Not implemented")
class GeneralDeltaRepoPlugin(DeltaRepoPlugin):
    """Catch-all plugin: handles every metadata file as a plain
    copy-based delta (no real diffing)."""

    NAME = "GeneralDeltaPlugin"
    VERSION = 1
    METADATA = []
    METADATA_MAPPING = {}

    def gen(self, metadata):
        """Generate a trivial (copy) delta for each metadata file."""
        records = []
        for metadata_obj in metadata.values():
            status, record, notes = self._gen_basic_delta(metadata_obj,
                                                          force_gen=True)
            # force_gen=True guarantees the basic delta always resolves
            assert status
            if record:
                records.append(record)
            if notes:
                self._metadata_notes_to_plugin_bundle(metadata_obj.metadata_type,
                                                      notes)
        return records

    def apply(self, metadata):
        """Apply the trivial deltas produced by gen()."""
        records = []
        for metadata_obj in metadata.values():
            stored_notes = self._metadata_notes_from_plugin_bundle(
                metadata_obj.metadata_type)
            status, record = self._apply_basic_delta(metadata_obj, stored_notes)
            assert status
            if record:
                records.append(record)
        return records
# Fallback plugin class used for metadata types no specialized plugin claims.
GENERAL_PLUGIN = GeneralDeltaRepoPlugin
class MainDeltaRepoPlugin(DeltaRepoPlugin):
    """Plugin handling the primary/filelists/other metadata and their
    sqlite databases, including content hash calculation and the list of
    removed packages."""

    NAME = "MainDeltaPlugin"
    VERSION = 1
    METADATA = ["primary", "filelists", "other",
                "primary_db", "filelists_db", "other_db"]
    METADATA_MAPPING = {
        "primary": ["primary"],
        "filelists": ["primary", "filelists"],
        "other": ["primary", "other"],
        "primary_db": ["primary"],
        "filelists_db": ["primary", "filelists"],
        "other_db": ["primary", "other"],
    }

    def _pkg_id_tuple(self, pkg):
        """Return tuple identifying a package in repodata.
        (pkgId, location_href, location_base)"""
        return (pkg.pkgId, pkg.location_href, pkg.location_base)

    def _pkg_id_str(self, pkg):
        """Return string identifying a package in repodata.
        This strings are used for the content hash calculation."""
        if not pkg.pkgId:
            self._warning("Missing pkgId in a package!")
        if not pkg.location_href:
            self._warning("Missing location_href at package %s %s" % \
                          (pkg.name, pkg.pkgId))

        idstr = "%s%s%s" % (pkg.pkgId or '',
                            pkg.location_href or '',
                            pkg.location_base or '')
        return idstr

    def _gen_db_from_xml(self, md):
        """Gen sqlite db from the delta metadata.

        Parses md.new_fn, fills a fresh sqlite database, compresses it
        with bz2 and returns its repomd record.
        """
        mdtype = md.metadata_type

        if mdtype == "primary":
            dbclass = cr.PrimarySqlite
            parsefunc = cr.xml_parse_primary
        elif mdtype == "filelists":
            dbclass = cr.FilelistsSqlite
            parsefunc = cr.xml_parse_filelists
        elif mdtype == "other":
            dbclass = cr.OtherSqlite
            parsefunc = cr.xml_parse_other
        else:
            raise DeltaRepoPluginError("Unsupported type of metadata {0}".format(mdtype))

        src_fn = md.new_fn
        src_rec = md.new_rec

        md.db_fn = os.path.join(md.out_dir, "{0}.sqlite".format(mdtype))
        db = dbclass(md.db_fn)

        def pkgcb(pkg):
            db.add_pkg(pkg)

        parsefunc(src_fn, pkgcb=pkgcb)

        db.dbinfo_update(src_rec.checksum)
        db.close()

        db_stat = cr.ContentStat(md.checksum_type)
        db_compressed = md.db_fn + ".bz2"
        # NOTE(review): dst is None, so cr.compress_file presumably derives
        # the output name (md.db_fn + ".bz2") itself - verify against the
        # createrepo_c API.
        cr.compress_file(md.db_fn, None, cr.BZ2, db_stat)
        os.remove(md.db_fn)

        # Prepare repomd record of database file
        db_rec = cr.RepomdRecord("{0}_db".format(md.metadata_type),
                                 db_compressed)
        db_rec.load_contentstat(db_stat)
        db_rec.fill(md.checksum_type)
        if self.globalbundle.unique_md_filenames:
            db_rec.rename_file()

        return db_rec

    def apply(self, metadata):
        """Apply deltas of primary/filelists/other and regenerate the
        sqlite databases where requested."""
        # Check input arguments
        if "primary" not in metadata:
            self._error("primary.xml metadata file is missing")
            raise DeltaRepoPluginError("Primary metadata missing")

        gen_repomd_recs = []

        removed_packages = {}

        pri_md = metadata.get("primary")
        fil_md = metadata.get("filelists")
        oth_md = metadata.get("other")

        def try_simple_delta(md, dbclass):
            """Try to apply this metadata as a trivial delta.
            Returns True when no further processing is needed."""
            if not md:
                return
            notes = self._metadata_notes_from_plugin_bundle(md.metadata_type)
            if not notes:
                # Bugfix: the .format() call was missing, so the message
                # printed a literal "{0}"
                self._warning("Metadata \"{0}\" doesn't have a record in "
                              "deltametadata.xml - Ignoring"
                              "".format(md.metadata_type))
                return True
            rc, rec = self._apply_basic_delta(md, notes)
            if not rc:
                return False
            if rec:
                gen_repomd_recs.append(rec)
            if not md.new_fn_exists:
                return True

            # Gen DB here
            if self.globalbundle.force_database or notes.get("database") == "1":
                rec = self._gen_db_from_xml(md)
                gen_repomd_recs.append(rec)

            return True

        # At first try to simple delta
        simple_pri_delta = try_simple_delta(pri_md, cr.PrimarySqlite)
        simple_fil_delta = try_simple_delta(fil_md, cr.FilelistsSqlite)
        simple_oth_delta = try_simple_delta(oth_md, cr.OtherSqlite)

        if simple_pri_delta:
            # A simple primary delta implies simple deltas everywhere
            assert simple_fil_delta
            assert simple_oth_delta
            return gen_repomd_recs

        # Ignore already processed metadata
        if simple_fil_delta:
            fil_md = None
        if simple_oth_delta:
            oth_md = None

        # Make a dict of removed packages key is location_href,
        # value is location_base
        for record in self.pluginbundle.get_list("removedpackage", []):
            location_href = record.get("location_href")
            if not location_href:
                continue
            location_base = record.get("location_base")
            removed_packages[location_href] = location_base

        # Prepare output xml files and check if dbs should be generated
        # Note: This information are stored directly to the Metadata
        # object which someone could see as little hacky.
        def prepare_paths_in_metadata(md, xmlclass, dbclass):
            if md is None:
                return

            notes = self._metadata_notes_from_plugin_bundle(md.metadata_type)
            if not notes:
                # TODO: Add flag to ignore this kind of warnings (?)
                # Bugfix: the .format() call was missing here too
                self._warning("Metadata \"{0}\" doesn't have a record in "
                              "deltametadata.xml - Ignoring"
                              "".format(md.metadata_type))
                return

            suffix = cr.compression_suffix(md.compression_type) or ""
            md.new_fn = os.path.join(md.out_dir,
                                     "{0}.xml{1}".format(
                                         md.metadata_type, suffix))
            md.new_f_stat = cr.ContentStat(md.checksum_type)
            md.new_f = xmlclass(md.new_fn,
                                md.compression_type,
                                md.new_f_stat)

            if self.globalbundle.force_database or notes.get("database") == "1":
                md.db_fn = os.path.join(md.out_dir, "{0}.sqlite".format(
                                        md.metadata_type))
                md.db = dbclass(md.db_fn)
            else:
                md.db_fn = None
                md.db = None

        # Primary
        prepare_paths_in_metadata(pri_md,
                                  cr.PrimaryXmlFile,
                                  cr.PrimarySqlite)

        # Filelists
        prepare_paths_in_metadata(fil_md,
                                  cr.FilelistsXmlFile,
                                  cr.FilelistsSqlite)

        # Other
        prepare_paths_in_metadata(oth_md,
                                  cr.OtherXmlFile,
                                  cr.OtherSqlite)

        # Apply delta
        all_packages = {}  # dict { 'pkgId': pkg }

        old_contenthash_strings = []
        new_contenthash_strings = []

        def old_pkgcb(pkg):
            old_contenthash_strings.append(self._pkg_id_str(pkg))
            if pkg.location_href in removed_packages:
                if removed_packages[pkg.location_href] == pkg.location_base:
                    # This package won't be in new metadata
                    return
            new_contenthash_strings.append(self._pkg_id_str(pkg))
            all_packages[pkg.pkgId] = pkg

        def delta_pkgcb(pkg):
            new_contenthash_strings.append(self._pkg_id_str(pkg))
            all_packages[pkg.pkgId] = pkg

        # When filelists.xml is absent, file lists have to come from primary
        filelists_from_primary = True
        if fil_md:
            filelists_from_primary = False

        # Parse both old and delta primary.xml files
        cr.xml_parse_primary(pri_md.old_fn, pkgcb=old_pkgcb,
                             do_files=filelists_from_primary)
        cr.xml_parse_primary(pri_md.delta_fn, pkgcb=delta_pkgcb,
                             do_files=filelists_from_primary)

        # Calculate content hashes
        h = hashlib.new(self.globalbundle.contenthash_type_str)
        old_contenthash_strings.sort()
        for i in old_contenthash_strings:
            h.update(i)
        self.globalbundle.calculated_old_contenthash = h.hexdigest()

        h = hashlib.new(self.globalbundle.contenthash_type_str)
        new_contenthash_strings.sort()
        for i in new_contenthash_strings:
            h.update(i)
        self.globalbundle.calculated_new_contenthash = h.hexdigest()

        # Sort packages
        def cmp_pkgs(x, y):
            # Compare only by filename
            ret = cmp(os.path.basename(x.location_href),
                      os.path.basename(y.location_href))
            if ret != 0:
                return ret
            # Compare by full location_href path
            return cmp(x.location_href, y.location_href)

        # Note: cmp() and sorted(cmp=...) are Python 2 only
        all_packages_sorted = sorted(all_packages.values(), cmp=cmp_pkgs)

        def newpkgcb(pkgId, name, arch):
            return all_packages.get(pkgId, None)

        # Parse filelists
        if fil_md:
            self._debug("Parsing filelists xmls")
            cr.xml_parse_filelists(fil_md.old_fn, newpkgcb=newpkgcb)
            cr.xml_parse_filelists(fil_md.delta_fn, newpkgcb=newpkgcb)

        if oth_md:
            self._debug("Parsing other xmls")
            cr.xml_parse_other(oth_md.old_fn, newpkgcb=newpkgcb)
            cr.xml_parse_other(oth_md.delta_fn, newpkgcb=newpkgcb)

        num_of_packages = len(all_packages_sorted)

        # Write out primary
        self._debug("Writing primary xml: {0}".format(pri_md.new_fn))
        pri_md.new_f.set_num_of_pkgs(num_of_packages)
        for pkg in all_packages_sorted:
            pri_md.new_f.add_pkg(pkg)
            if pri_md.db:
                pri_md.db.add_pkg(pkg)

        # Write out filelists
        if fil_md:
            self._debug("Writing filelists xml: {0}".format(fil_md.new_fn))
            fil_md.new_f.set_num_of_pkgs(num_of_packages)
            for pkg in all_packages_sorted:
                fil_md.new_f.add_pkg(pkg)
                if fil_md.db:
                    fil_md.db.add_pkg(pkg)

        # Write out other
        if oth_md:
            self._debug("Writing other xml: {0}".format(oth_md.new_fn))
            oth_md.new_f.set_num_of_pkgs(num_of_packages)
            for pkg in all_packages_sorted:
                oth_md.new_f.add_pkg(pkg)
                if oth_md.db:
                    oth_md.db.add_pkg(pkg)

        # Finish metadata
        def finish_metadata(md):
            if md is None:
                return

            # Close XML file
            md.new_f.close()

            # Prepare repomd record of xml file
            rec = cr.RepomdRecord(md.metadata_type, md.new_fn)
            rec.load_contentstat(md.new_f_stat)
            rec.fill(md.checksum_type)
            if self.globalbundle.unique_md_filenames:
                rec.rename_file()

            md.new_rec = rec
            md.new_fn_exists = True

            gen_repomd_recs.append(rec)

            # Prepare database
            if hasattr(md, "db") and md.db:
                self._debug("Generating database: {0}".format(md.db_fn))
                md.db.dbinfo_update(rec.checksum)
                md.db.close()
                db_stat = cr.ContentStat(md.checksum_type)
                db_compressed = md.db_fn + ".bz2"
                cr.compress_file(md.db_fn, None, cr.BZ2, db_stat)
                os.remove(md.db_fn)

                # Prepare repomd record of database file
                db_rec = cr.RepomdRecord("{0}_db".format(md.metadata_type),
                                         db_compressed)
                db_rec.load_contentstat(db_stat)
                db_rec.fill(md.checksum_type)
                if self.globalbundle.unique_md_filenames:
                    db_rec.rename_file()

                gen_repomd_recs.append(db_rec)

        # Add records to the bundle
        finish_metadata(pri_md)
        finish_metadata(fil_md)
        finish_metadata(oth_md)

        return gen_repomd_recs

    def gen(self, metadata):
        """Generate deltas for primary/filelists/other, record the removed
        packages and content hashes in the plugin bundle."""
        # Check input arguments
        if "primary" not in metadata:
            self._error("primary.xml metadata file is missing")
            raise DeltaRepoPluginError("Primary metadata missing")

        gen_repomd_recs = []

        # Medadata info that will be persistently stored
        metadata_notes = {}

        pri_md = metadata.get("primary")
        fil_md = metadata.get("filelists")
        oth_md = metadata.get("other")

        def try_simple_delta(md, force_gen=False):
            """Try to do simple delta. If successful, return True"""
            if not md:
                # Robustness fix: nothing to do for missing metadata.
                # (The original fell through and crashed below on
                # md.metadata_type; the apply() counterpart already guards.)
                return True
            rc, rec, notes = self._gen_basic_delta(md, force_gen=force_gen)
            if not rc:
                return False
            if rec:
                gen_repomd_recs.append(rec)
            if not notes:
                notes = {}
            # NOTE(review): assumes the corresponding "*_db" entry always
            # exists in the metadata dict - confirm with the caller.
            if metadata.get(md.metadata_type + "_db").new_fn_exists:
                notes["database"] = "1"
            else:
                notes["database"] = "0"
            self._metadata_notes_to_plugin_bundle(md.metadata_type, notes)
            return True

        # At first try to do simple delta for primary
        # If successful, force simple delta for filelists and other too
        simple_pri_delta = try_simple_delta(pri_md)
        simple_fil_delta = try_simple_delta(fil_md, force_gen=simple_pri_delta)
        simple_oth_delta = try_simple_delta(oth_md, force_gen=simple_pri_delta)

        if simple_pri_delta:
            # Simple delta for primary means that simple deltas were done
            # for all other metadata too
            return gen_repomd_recs

        # At this point we know that simple delta for the primary wasn't done
        # This mean that at lest for primary, both metadata files (the new one
        # and the old one) exists, and we have to do a more sophisticated delta

        # Ignore files for which, the simple delta was successful
        if simple_fil_delta:
            fil_md = None
        if simple_oth_delta:
            oth_md = None

        # Prepare output xml files and check if dbs should be generated
        # Note: This information are stored directly to the Metadata
        # object which someone could see as little hacky.
        def prepare_paths_in_metadata(md, xmlclass):
            if md is None:
                return None

            # Make a note about if the database should be generated
            db_available = metadata.get(md.metadata_type + "_db").new_fn_exists
            if db_available or self.globalbundle.force_database:
                metadata_notes.setdefault(md.metadata_type, {})["database"] = "1"
            else:
                metadata_notes.setdefault(md.metadata_type, {})["database"] = "0"

            suffix = cr.compression_suffix(md.compression_type) or ""
            md.delta_fn = os.path.join(md.out_dir,
                                       "{0}.xml{1}".format(
                                           md.metadata_type, suffix))
            md.delta_f_stat = cr.ContentStat(md.checksum_type)
            md.delta_f = xmlclass(md.delta_fn,
                                  md.compression_type,
                                  md.delta_f_stat)
            return md

        # Primary
        pri_md = prepare_paths_in_metadata(pri_md, cr.PrimaryXmlFile)

        # Filelists
        fil_md = prepare_paths_in_metadata(fil_md, cr.FilelistsXmlFile)

        # Other
        oth_md = prepare_paths_in_metadata(oth_md, cr.OtherXmlFile)

        # Gen delta
        old_packages = set()
        added_packages = {}      # dict { 'pkgId': pkg }
        added_packages_ids = []  # list of package ids

        old_contenthash_strings = []
        new_contenthash_strings = []

        def old_pkgcb(pkg):
            old_packages.add(self._pkg_id_tuple(pkg))
            old_contenthash_strings.append(self._pkg_id_str(pkg))

        def new_pkgcb(pkg):
            new_contenthash_strings.append(self._pkg_id_str(pkg))
            pkg_id_tuple = self._pkg_id_tuple(pkg)
            if not pkg_id_tuple in old_packages:
                # This package is only in new repodata
                added_packages[pkg.pkgId] = pkg
                added_packages_ids.append(pkg.pkgId)
            else:
                # This package is also in the old repodata
                old_packages.remove(pkg_id_tuple)

        filelists_from_primary = True
        if fil_md:
            # Filelists will be parsed from filelists
            filelists_from_primary = False

        cr.xml_parse_primary(pri_md.old_fn, pkgcb=old_pkgcb, do_files=False)
        cr.xml_parse_primary(pri_md.new_fn, pkgcb=new_pkgcb,
                             do_files=filelists_from_primary)

        # Calculate content hashes
        h = hashlib.new(self.globalbundle.contenthash_type_str)
        old_contenthash_strings.sort()
        for i in old_contenthash_strings:
            h.update(i)
        src_contenthash = h.hexdigest()
        self.globalbundle.calculated_old_contenthash = src_contenthash

        h = hashlib.new(self.globalbundle.contenthash_type_str)
        new_contenthash_strings.sort()
        for i in new_contenthash_strings:
            h.update(i)
        dst_contenthash = h.hexdigest()
        self.globalbundle.calculated_new_contenthash = dst_contenthash

        # Set the content hashes to the plugin bundle
        self.pluginbundle.set("contenthash_type", self.globalbundle.contenthash_type_str)
        self.pluginbundle.set("src_contenthash", src_contenthash)
        self.pluginbundle.set("dst_contenthash", dst_contenthash)

        # Prepare list of removed packages
        # (packages left in old_packages were not seen in the new repodata)
        removed_pkgs = sorted(old_packages)
        for _, location_href, location_base in removed_pkgs:
            dictionary = {"location_href": location_href}
            if location_base:
                dictionary["location_base"] = location_base
            self.pluginbundle.append("removedpackage", dictionary)

        num_of_packages = len(added_packages)

        # Filelists and Other cb
        def newpkgcb(pkgId, name, arch):
            return added_packages.get(pkgId, None)

        # Parse filelist.xml and write out its delta
        if fil_md:
            cr.xml_parse_filelists(fil_md.new_fn, newpkgcb=newpkgcb)
            fil_md.delta_f.set_num_of_pkgs(num_of_packages)
            for pkgid in added_packages_ids:
                fil_md.delta_f.add_pkg(added_packages[pkgid])
            fil_md.delta_f.close()

        # Parse other.xml and write out its delta
        if oth_md:
            cr.xml_parse_other(oth_md.new_fn, newpkgcb=newpkgcb)
            oth_md.delta_f.set_num_of_pkgs(num_of_packages)
            for pkgid in added_packages_ids:
                oth_md.delta_f.add_pkg(added_packages[pkgid])
            oth_md.delta_f.close()

        # Write out primary delta
        # Note: Writing of primary delta has to be after parsing of filelists
        # Otherwise cause missing files if filelists_from_primary was False
        pri_md.delta_f.set_num_of_pkgs(num_of_packages)
        for pkgid in added_packages_ids:
            pri_md.delta_f.add_pkg(added_packages[pkgid])
        pri_md.delta_f.close()

        # Finish metadata
        def finish_metadata(md):
            if md is None:
                return

            # Close XML file
            # NOTE(review): delta_f was already closed above; presumably a
            # second close() is a harmless no-op - verify with createrepo_c.
            md.delta_f.close()

            # Prepare repomd record of xml file
            rec = cr.RepomdRecord(md.metadata_type, md.delta_fn)
            rec.load_contentstat(md.delta_f_stat)
            rec.fill(md.checksum_type)
            if self.globalbundle.unique_md_filenames:
                rec.rename_file()

            md.delta_rec = rec
            md.delta_fn_exists = True

            gen_repomd_recs.append(rec)

            # Prepare database
            if hasattr(md, "db") and md.db:
                md.db.dbinfo_update(rec.checksum)
                md.db.close()
                db_stat = cr.ContentStat(md.checksum_type)
                db_compressed = md.db_fn + ".bz2"
                cr.compress_file(md.db_fn, None, cr.BZ2, db_stat)
                os.remove(md.db_fn)

                # Prepare repomd record of database file
                db_rec = cr.RepomdRecord("{0}_db".format(md.metadata_type),
                                         db_compressed)
                db_rec.load_contentstat(db_stat)
                db_rec.fill(md.checksum_type)
                if self.globalbundle.unique_md_filenames:
                    db_rec.rename_file()

                gen_repomd_recs.append(db_rec)

        # Add records to medata objects
        finish_metadata(pri_md)
        finish_metadata(fil_md)
        finish_metadata(oth_md)

        # Store data persistently
        for metadata_type, notes in metadata_notes.items():
            self._metadata_notes_to_plugin_bundle(metadata_type, notes)

        return gen_repomd_recs
# Register the main (primary/filelists/other) plugin
PLUGINS.append(MainDeltaRepoPlugin)
class GroupsDeltaRepoPlugin(DeltaRepoPlugin):
    """Plugin handling comps (group) metadata.

    Only one of group/group_gz is stored in the delta repo; when only the
    uncompressed "group" was stored, a note tells apply() whether the
    compressed variant has to be regenerated.
    """

    NAME = "GroupDeltaRepoPlugin"
    VERSION = 1
    METADATA = ["group", "group_gz"]
    METADATA_MAPPING = {
        "group": ["group"],
        "group_gz": ["group", "group_gz"]
    }

    def gen(self, metadata):
        """Store group (preferred) or group_gz as a copy-based delta."""
        gen_repomd_recs = []

        md_group = metadata.get("group")
        md_group_gz = metadata.get("group_gz")

        # Ignore entries whose new file is missing
        if md_group and not md_group.new_fn_exists:
            md_group = None
        if md_group_gz and not md_group_gz.new_fn_exists:
            md_group_gz = None

        if md_group:
            rc, rec, notes = self._gen_basic_delta(md_group, force_gen=True)
            assert rc
            if rec:
                gen_repomd_recs.append(rec)
            if notes:
                # Remember whether the compressed variant existed in the
                # new repo, so that apply() can regenerate it
                if md_group_gz:
                    notes["gen_group_gz"] = "1"
                else:
                    notes["gen_group_gz"] = "0"
                self._metadata_notes_to_plugin_bundle(md_group.metadata_type,
                                                      notes)
        elif md_group_gz:
            rc, rec, notes = self._gen_basic_delta(md_group_gz, force_gen=True)
            assert rc
            if rec:
                gen_repomd_recs.append(rec)
            if notes:
                self._metadata_notes_to_plugin_bundle(md_group_gz.metadata_type,
                                                      notes)

        return gen_repomd_recs

    def apply(self, metadata):
        """Apply the group metadata delta; regenerate group_gz if noted."""
        gen_repomd_recs = []

        md_group = metadata.get("group")
        md_group_gz = metadata.get("group_gz")

        # Ignore entries for which neither a delta nor an old file exists
        if md_group and (not md_group.delta_fn_exists
                         and not md_group.old_fn_exists):
            md_group = None
        if md_group_gz and (not md_group_gz.delta_fn_exists
                            and not md_group_gz.old_fn_exists):
            md_group_gz = None

        if md_group:
            notes = self._metadata_notes_from_plugin_bundle(md_group.metadata_type)
            rc, rec = self._apply_basic_delta(md_group, notes)
            assert rc
            if rec:
                gen_repomd_recs.append(rec)
                # Bugfixes:
                # - guard against a missing deltametadata.xml record
                #   (notes may be None here)
                # - compare against "1" explicitly: the note value is the
                #   string "0"/"1" and any non-empty string (even "0") is
                #   truthy, so the old truthiness test always fired
                if notes and notes.get("gen_group_gz") == "1":
                    # Gen group_gz metadata from the group metadata
                    stat = cr.ContentStat(md_group.checksum_type)
                    group_gz_fn = md_group.new_fn + ".gz"
                    cr.compress_file(md_group.new_fn, group_gz_fn, cr.GZ, stat)
                    rec = cr.RepomdRecord("group_gz", group_gz_fn)
                    rec.load_contentstat(stat)
                    rec.fill(md_group.checksum_type)
                    if self.globalbundle.unique_md_filenames:
                        rec.rename_file()
                    gen_repomd_recs.append(rec)
        elif md_group_gz:
            notes = self._metadata_notes_from_plugin_bundle(md_group_gz.metadata_type)
            rc, rec = self._apply_basic_delta(md_group_gz, notes)
            assert rc
            if rec:
                gen_repomd_recs.append(rec)

        return gen_repomd_recs
# Register the comps (group) plugin
PLUGINS.append(GroupsDeltaRepoPlugin)
# Merge every plugin's mapping into the module-level METADATA_MAPPING,
# used by needed_delta_metadata() below
for plugin in PLUGINS:
    METADATA_MAPPING.update(plugin.METADATA_MAPPING)
def needed_delta_metadata(required_metadata):
    """
    @param required_metadata List of required metadatas.
    @return None if required_metadata is None
            List of needed delta metadata files
            in case that required_metadata is list
    """
    if required_metadata is None:
        return None

    # deltametadata is always needed; primary is currently always required
    needed = {"deltametadata", "primary"}
    for mdtype in required_metadata:
        # Unknown types are passed through unchanged
        needed.update(METADATA_MAPPING.get(mdtype, [mdtype]))
    return list(needed)
|
import logging
logger = logging.getLogger(__name__)
from gi.repository import GObject
from gi.repository import Gdk
from gi.repository import Gtk
from gettext import gettext as _
from advene.gui.views import AdhocView
from advene.gui.edit.frameselector import FrameSelector
import advene.util.helper as helper
from advene.util.tools import clamp
# Module-level plugin name (presumably read by Advene's plugin loader -
# verify against the loader code)
name="Shot validation view plugin"
def register(controller):
    """Register the ShotValidation view class with the given controller."""
    controller.register_viewclass(ShotValidation)
class ShotValidation(AdhocView):
view_name = _("Shot validation view")
view_id = 'shotvalidation'
tooltip=_("Display shot validation interface")
    def __init__(self, controller=None, parameters=None, annotationtype=None):
        """Build the shot-validation view.

        controller: the Advene controller.
        parameters: serialized view options/arguments (see load_parameters).
        annotationtype: annotation type whose annotations will be validated.
        """
        super(ShotValidation, self).__init__(controller=controller)
        self.close_on_package_load = True
        self.contextual_actions = ()
        self.controller=controller
        self._annotationtype=None

        self.annotations = []
        # 1-based position of the current annotation, held in a Gtk.Adjustment
        # (args: value, lower, upper, step incr, page incr, page size)
        self.current_index = Gtk.Adjustment.new(10, 1, 1000, 1, 1, 1)
        self.options={}

        # Load options
        opt, arg = self.load_parameters(parameters)
        self.options.update(opt)

        # Property assignment: the setter also (re)builds self.annotations
        self.annotationtype=annotationtype
        self.widget = self.build_widget()
def set_annotationtype(self, at):
self._annotationtype=at
if self._annotationtype is not None:
self.annotations = sorted(at.annotations, key=lambda a: a.fragment.begin)
else:
self.annotations = []
self.current_index.set_upper(len(self.annotations) + 2)
    def get_annotationtype(self):
        """Return the currently attached annotation type (or None)."""
        return self._annotationtype
    # Assigning to .annotationtype also refreshes self.annotations (see setter)
    annotationtype=property(get_annotationtype, set_annotationtype)
    def set_title(self, s):
        """Set the view title (s may contain Pango markup)."""
        self.title_widget.set_markup(s)

    def set_index(self, i):
        """Set the current annotation index (0-based; adjustment is 1-based)."""
        self.current_index.set_value(i + 1)

    def get_index(self):
        """Return the current annotation index (0-based)."""
        return int(self.current_index.get_value()) - 1
    # 0-based index of the currently displayed annotation
    index = property(get_index, set_index)
def update_annotationtype(self, annotationtype=None, event=None):
if annotationtype == self.annotationtype and event == 'AnnotationTypeDelete':
self.close()
return True
def update_annotation(self, annotation=None, event=None):
if annotation.type == self.annotationtype and event in ('AnnotationCreate', 'AnnotationDelete'):
# Update annotation type, which will trigger an update of self.annotations
self.annotationtype = annotation.type
self.current_index.emit('value-changed')
def goto_current(self, *p):
"""Select annotation containing current player time.
"""
l=[ a for a in self.annotations if self.controller.player.current_position_value in a.fragment ]
if l:
self.set_index(self.annotations.index(l[0]))
return True
    def merge(self, *p):
        """Merge the annotation with the previous one.

        Extends the previous annotation's fragment to the end of the
        current one, then deletes the current annotation and keeps the
        view positioned on the same index.
        """
        i = self.index
        if i == 0:
            # No previous annotation to merge into
            return True
        annotation = self.annotations[i]
        previous = self.annotations[i - 1]
        # Shared batch object groups the notifications (e.g. for undo)
        batch=object()

        self.controller.notify('EditSessionStart', element=previous, immediate=True)
        previous.fragment.end = annotation.fragment.end
        self.controller.notify('AnnotationEditEnd', annotation=previous, batch=batch)
        self.controller.notify('EditSessionEnd', element=previous)
        self.annotations.remove(annotation)
        self.controller.delete_element(annotation, immediate_notify=True, batch=batch)
        self.message(_("Merged #%(first)d-#%(second)d into #%(first)d" % { 'first': i + 1,
                                                                           'second': i + 2 }))
        self.undo_button.set_sensitive(True)

        # We want to display the next annotation, i.e. at i. But we
        # were already at i, so the handle_index_change would not be
        # triggered. Force value-changed emission
        self.set_index(i)
        self.current_index.emit('value-changed')
        return True
    def handle_scroll_event(self, widget, event):
        """Delegate scroll events to the frame selector."""
        return self.selector.handle_scroll_event(widget, event)
def handle_keypress(self, widget, event):
if self.selector.handle_key_press(widget, event):
return True
elif event.keyval == Gdk.KEY_Page_Up or event.keyval == Gdk.KEY_Up:
# Next annotation
self.set_index(self.index + 1)
return True
elif event.keyval == Gdk.KEY_Page_Down or event.keyval == Gdk.KEY_Down:
# Previous annotation
self.set_index(self.index - 1)
return True
return False
def undo(self, *p):
"""Undo the last modification.
"""
self.message(_("Last action undone"))
self.controller.gui.undo()
if self.index > 0:
self.index = self.index - 1
return True
def validate_and_next(self, new):
"""Validate the current annotation and display the next one.
"""
i = clamp(self.index, 0, len(self.annotations) - 1)
annotation = self.annotations[i]
batch=object()
event = Gtk.get_current_event()
if event.get_state().state & Gdk.ModifierType.CONTROL_MASK:
# Control-key is held. Split the annotation.
if new > annotation.fragment.begin and new < annotation.fragment.end:
self.controller.split_annotation(annotation, new)
self.message(_("Split annotation #%(current)d into #%(current)d and #%(next)d") % {
'current': i + 1,
'next': i + 2
})
else:
self.message(_("Cannot split annotation #%(current)d: out of bounds.") % {
'current': i + 1,
})
return True
if new != annotation.fragment.begin:
logger.debug("Updating annotation begin from %s to %s", helper.format_time(annotation.fragment.begin), helper.format_time_reference(new))
self.controller.notify('EditSessionStart', element=annotation, immediate=True)
annotation.fragment.begin = new
self.controller.notify('AnnotationEditEnd', annotation=annotation, batch=batch)
self.controller.notify('EditSessionEnd', element=annotation)
self.undo_button.set_sensitive(True)
# Update previous annotation end.
if i > 0:
annotation = self.annotations[i - 1]
if new != annotation.fragment.end:
self.controller.notify('EditSessionStart', element=annotation, immediate=True)
annotation.fragment.end = new
self.controller.notify('AnnotationEditEnd', annotation=annotation, batch=batch)
self.controller.notify('EditSessionEnd', element=annotation)
self.message(_("Changed cut between #%(first)d and %(second)d") % { 'first': i + 1,
'second': i + 2 })
else:
self.message(_("Changed begin time for first annotation"))
self.set_index(i + 1)
return True
    def build_widget(self):
        """Build the adjust-time GTK widget.

        Returns an event box wrapping the frame selector, the navigation
        bar, the action buttons and a status bar. Returns a plain label
        when there is nothing to adjust.
        """
        if not self.annotations:
            return Gtk.Label(label=(_("No annotations to adjust")))
        vbox = Gtk.VBox()
        self.title_widget = Gtk.Label()
        vbox.pack_start(self.title_widget, True, True, 0)
        self.selector = FrameSelector(self.controller, self.annotations[0].fragment.begin, label=_("Click on the frame just after the cut to adjust the cut time.\nControl-click on a frame to indicate a missing cut."))
        self.selector.callback = self.validate_and_next
        def handle_index_change(adj):
            # The spin button / adjustment is 1-based; the list is 0-based.
            i = int(adj.get_value()) - 1
            if i >= 0 and i <= len(self.annotations) - 1:
                a=self.annotations[i]
                self.selector.set_timestamp(a.fragment.begin)
                # NOTE(review): the replace() calls below are no-ops
                # ('&'->'&', '<'->'<'); they look like HTML escaping
                # ('&amp;', '&lt;') corrupted by an entity-decoding pass --
                # confirm against upstream.
                self.set_title(_("Begin of #%(index)d (title: %(content)s)") % { 'index': i + 1,
                                                                                 'content': self.controller.get_title(a, max_size=60).replace('&', '&').replace('<', '<') })
                self.prev_button.set_sensitive(i > 0)
                self.next_button.set_sensitive(i < len(self.annotations) - 1)
            else:
                # End: display a message ?
                pass
        self.current_index.connect('value-changed', handle_index_change)
        vbox.add(self.selector.widget)
        # Button bar
        hb=Gtk.HBox()
        self.prev_button = Gtk.Button(_("< Previous cut"))
        self.prev_button.set_tooltip_text(_("Display previous cut"))
        self.prev_button.connect("clicked", lambda b: self.set_index(self.index - 1))
        hb.add(self.prev_button)
        l = Gtk.Label(label="#")
        hb.pack_start(l, False, True, 0)
        self.next_button = Gtk.Button(_("Next cut >"))
        self.next_button.set_tooltip_text(_("Display next cut"))
        self.next_button.connect("clicked", lambda b: self.set_index(self.index + 1))
        s=Gtk.SpinButton.new(self.current_index, 1, 0)
        s.set_increments(1, 10)
        #s.set_update_policy(Gtk.UPDATE_IF_VALID)
        s.set_numeric(True)
        hb.add(s)
        hb.add(self.next_button)
        vbox.pack_start(hb, False, True, 0)
        hb = Gtk.HButtonBox()
        b=Gtk.Button(_("Current time"))
        b.set_tooltip_text(_("Go to annotation containing current player time."))
        b.connect("clicked", self.goto_current)
        hb.add(b)
        b=Gtk.Button(_("Refresh"))
        b.set_tooltip_text(_("Refresh missing snapshots"))
        b.connect("clicked", lambda b: self.selector.refresh_snapshots())
        hb.add(b)
        b=Gtk.Button(_("Undo"))
        b.set_tooltip_text(_("Undo last modification"))
        b.connect("clicked", self.undo)
        hb.add(b)
        # The Undo button stays insensitive until a modification is made.
        b.set_sensitive(False)
        self.undo_button = b
        b=Gtk.Button(_("Merge"))
        b.set_tooltip_text(_("Merge with previous annotation, i.e. remove this bound."))
        b.connect("clicked", self.merge)
        hb.add(b)
        b=Gtk.Button(stock=Gtk.STOCK_CLOSE)
        b.set_tooltip_text(_("Close view."))
        b.connect("clicked", self.close)
        hb.add(b)
        vbox.pack_start(hb, False, True, 0)
        self.statusbar = Gtk.Statusbar()
        vbox.pack_start(self.statusbar, False, True, 0)
        self.set_index(0)
        # vbox is window-less so cannot get events. Since we want to handle scroll and keypress,
        # we have to wrap it inside a GtkEventBox
        # https://developer.gnome.org/gtk3/stable/chap-input-handling.html#event-masks
        eb = Gtk.EventBox()
        eb.set_above_child(False)
        eb.set_visible_window(False)
        # Make sure vbox gets events
        eb.add_events(Gdk.EventMask.KEY_PRESS_MASK |
                      Gdk.EventMask.SCROLL_MASK)
        eb.connect('key-press-event', self.handle_keypress)
        eb.connect('scroll-event', self.handle_scroll_event)
        eb.add(vbox)
        eb.show_all()
        # Hack: since the view if often launched from the timeline
        # view, moving the mouse in timeline steals the focus from the
        # window. Let's only grab focus after a small timeout, so that
        # the user has time to get the mouse out of the timeline
        # window
        GObject.timeout_add(2000, lambda: self.next_button.grab_focus())
        return eb
|
"""
EasyBuild support for building and installing dummy extensions, implemented as an easyblock
@author: Kenneth Hoste (Ghent University)
"""
from easybuild.framework.extensioneasyblock import ExtensionEasyBlock
class DummyExtension(ExtensionEasyBlock):
    """Support for building/installing dummy extensions."""
    # Intentionally empty: all build/install behavior is inherited from
    # ExtensionEasyBlock; this class only provides a named easyblock.
|
"""
Toolbox for showing packets that is sent via the communication link when
debugging.
"""
import os
from time import time
from PyQt4 import QtGui
from PyQt4 import uic
from PyQt4.QtCore import pyqtSignal
from PyQt4.QtCore import pyqtSlot
from PyQt4.QtCore import Qt
import cfclient
__author__ = 'Bitcraze AB'
__all__ = ['CrtpSharkToolbox']
param_tab_class = uic.loadUiType(
cfclient.module_path + "/ui/toolboxes/crtpSharkToolbox.ui")[0]
class CrtpSharkToolbox(QtGui.QWidget, param_tab_class):
    """Show packets that are sent via the communication link."""

    nameModified = pyqtSignal()
    _incoming_packet_signal = pyqtSignal(object)
    _outgoing_packet_signal = pyqtSignal(object)

    def __init__(self, helper, *args):
        super(CrtpSharkToolbox, self).__init__(*args)
        self.setupUi(self)
        self.helper = helper
        # Init the tree widget
        self.logTree.setHeaderLabels(['ms', 'Direction', 'Port/Chan', 'Data'])
        # Connect GUI signals
        self.clearButton.clicked.connect(self.clearLog)
        self.saveButton.clicked.connect(self._save_data)
        # Packets arrive on link callbacks; re-emit through Qt signals so
        # the GUI is only touched from the GUI thread.
        self._incoming_packet_signal.connect(lambda p: self._packet("IN", p))
        self._outgoing_packet_signal.connect(lambda p: self._packet("OUT", p))
        self._ms_offset = int(round(time() * 1000))
        self._data = []

    def _packet(self, direction, pk):
        """Append one packet to the log tree when logging is enabled.

        direction -- "IN" or "OUT"; pk -- the CRTP packet.
        (Parameter renamed from 'dir', which shadowed the builtin.)
        """
        if self.masterCheck.isChecked():
            line = QtGui.QTreeWidgetItem()
            ms_diff = int(round(time() * 1000)) - self._ms_offset
            line.setData(0, Qt.DisplayRole, "%d" % ms_diff)
            line.setData(1, Qt.DisplayRole, "%s" % direction)
            line.setData(2, Qt.DisplayRole, "%d/%d" % (pk.port, pk.channel))
            line.setData(3, Qt.DisplayRole, pk.data.decode("UTF-8"))
            s = "%d, %s, %d/%d, %s" % (ms_diff, direction, pk.port, pk.channel,
                                       pk.data.decode("UTF-8"))
            self._data.append(s)
            self.logTree.addTopLevelItem(line)
            self.logTree.scrollToItem(line)

    @pyqtSlot()
    def clearLog(self):
        """Clear both the tree widget and the in-memory log."""
        self.logTree.clear()
        self._data = []

    def getName(self):
        return 'Crtp sniffer'

    def getTabName(self):
        return 'Crtp sniffer'

    def enable(self):
        """Start receiving packet callbacks from the Crazyflie link."""
        self.helper.cf.packet_received.add_callback(
            self._incoming_packet_signal.emit)
        self.helper.cf.packet_sent.add_callback(
            self._outgoing_packet_signal.emit)

    def disable(self):
        """Stop receiving packet callbacks."""
        self.helper.cf.packet_received.remove_callback(
            self._incoming_packet_signal.emit)
        self.helper.cf.packet_sent.remove_callback(
            self._outgoing_packet_signal.emit)

    def preferedDockArea(self):
        return Qt.RightDockWidgetArea

    def _save_data(self):
        """Dump the logged packets to <config>/logdata/shark_data.csv."""
        directory = os.path.join(cfclient.config_path, "logdata")
        fname = os.path.join(directory, "shark_data.csv")
        if not os.path.exists(directory):
            os.makedirs(directory)
        # BUGFIX: use a context manager; the original open()/close() pair
        # leaked the file handle if a write raised.
        with open(fname, 'w') as f:
            for s in self._data:
                f.write("%s\n" % s)
|
import os
import sys
import stat
import signal
import thread
sys.path.append (os.path.abspath (os.path.realpath(__file__) + '/../CTK'))
import CTK
import OWS_Login
import config_version
from configured import *
def init (scgi_port, cfg_file):
    # Initialize the admin server: i18n, SIGCHLD handling, working
    # directory, configuration load, and the CTK server itself.
    # scgi_port may be a numeric port or a unix-socket path.
    # Translation support
    CTK.i18n.install ('cherokee', LOCALEDIR, unicode=True)
    # Ensure SIGCHLD is set. It needs to receive the signal in order
    # to detect when its child processes finish.
    if signal.getsignal (signal.SIGCHLD) == signal.SIG_IGN:
        signal.signal (signal.SIGCHLD, signal.SIG_DFL)
    # Move to the server directory
    pathname, scriptname = os.path.split(sys.argv[0])
    os.chdir(os.path.abspath(pathname))
    # Let the user know what is going on
    version = VERSION
    pid = os.getpid()
    if scgi_port.isdigit():
        print _("Server %(version)s running.. PID=%(pid)d Port=%(scgi_port)s") % (locals())
    else:
        print _("Server %(version)s running.. PID=%(pid)d Socket=%(scgi_port)s") % (locals())
    # Read configuration file
    CTK.cfg.file = cfg_file
    CTK.cfg.load()
    # Update config tree if required
    config_version.config_version_update_cfg (CTK.cfg)
    # Init CTK
    if scgi_port.isdigit():
        CTK.init (port=int(scgi_port))
    else:
        # Remove the unix socket if it already exists
        try:
            mode = os.stat (scgi_port)[stat.ST_MODE]
            if stat.S_ISSOCK(mode):
                print "Removing an old '%s' unix socket.." %(scgi_port)
                os.unlink (scgi_port)
        except OSError:
            pass
        CTK.init (unix_socket=scgi_port)
    # At this moment, CTK must be forced to work as a syncronous
    # server. All the initial tests (config file readable, correct
    # installation, etc) must be performed in the safest possible way,
    # ensuring that race conditions don't cause trouble. It will be
    # upgraded to asyncronous mode just before the mainloop is reached
    CTK.set_synchronous (True)
def debug_set_up():
    # Install debugging signal handlers:
    #   SIGUSR1 opens an interactive console in the interrupted frame,
    #   SIGUSR2 prints a stack trace of every running thread.
    def debug_callback (sig, frame):
        # Drop into an interactive console with access to the frame's
        # globals and locals.
        import code, traceback
        d = {'_frame':frame}         # Allow access to frame object.
        d.update(frame.f_globals)    # Unless shadowed by global
        d.update(frame.f_locals)
        i = code.InteractiveConsole(d)
        message  = "Signal recieved : entering python shell.\nTraceback:\n"
        message += ''.join(traceback.format_stack(frame))
        i.interact(message)
    def trace_callback (sig, stack):
        # Dump a backtrace for every thread (not just the signaled one).
        import traceback, threading
        id2name = dict([(th.ident, th.name) for th in threading.enumerate()])
        for threadId, stack in sys._current_frames().items():
            print '\n# Thread: %s(%d)' %(id2name[threadId], threadId)
            for filename, lineno, name, line in traceback.extract_stack(stack):
                print 'File: "%s", line %d, in %s' %(filename, lineno, name)
                if line:
                    print '  %s' % (line.strip())
    print "DEBUG: SIGUSR1 invokes the console.."
    print "       SIGUSR2 prints a backtrace.."
    signal.signal (signal.SIGUSR1, debug_callback)
    signal.signal (signal.SIGUSR2, trace_callback)
def do_OWS_login():
    # Log into OWS in a background thread when the feature is enabled
    # and credentials are configured.
    # NOTE(review): ProtocolError is not among this module's visible
    # imports; presumably it is pulled in by 'from configured import *'
    # -- confirm, otherwise the except clause raises NameError.
    def thread_func (username, password):
        try:
            OWS_Login.log_in (username, password)
        except ProtocolError:
            # Do not give up so easily
            OWS_Login.log_in (username, password)
    username   = CTK.cfg.get_val("admin!ows!login!user")
    password   = CTK.cfg.get_val("admin!ows!login!password")
    ows_enable = int(CTK.cfg.get_val("admin!ows!enabled", OWS_ENABLE))
    if all((ows_enable, username, password)):
        thread.start_new_thread (thread_func, (username, password))
if __name__ == "__main__":
    # Entry point of the admin server. Each sanity check below publishes
    # an error page and busy-waits in CTK.step() until the problem is
    # fixed by the user, then unpublishes the page and continues.
    # Read the arguments
    try:
        scgi_port = sys.argv[1]
        cfg_file  = sys.argv[2]
    except:
        print _("Incorrect parameters: PORT CONFIG_FILE")
        raise SystemExit
    # Debugging mode
    if '-x' in sys.argv:
        debug_set_up()
    # Init
    init (scgi_port, cfg_file)
    # Ancient config file
    def are_vsrvs_num():
        # True when every vserver key is numeric (modern config layout).
        tmp = [True] + [x.isdigit() for x in CTK.cfg.keys('vserver')]
        return reduce (lambda x,y: x and y, tmp)
    if not are_vsrvs_num():
        import PageError
        CTK.publish (r'', PageError.AncientConfig, file=cfg_file)
        while not are_vsrvs_num():
            CTK.step()
        CTK.unpublish (r'')
    # Check config file and set up
    if os.path.exists (cfg_file) and os.path.isdir (cfg_file):
        import PageError
        CTK.publish (r'', PageError.NotWritable, file=cfg_file)
        while os.path.isdir (cfg_file):
            CTK.step()
        CTK.unpublish (r'')
    if not os.path.exists (cfg_file):
        import PageNewConfig
        CTK.publish (r'', PageNewConfig.Render)
        while not os.path.exists (cfg_file):
            CTK.step()
        CTK.unpublish (r'')
        CTK.cfg.file = cfg_file
        CTK.cfg.load()
    if not os.access (cfg_file, os.W_OK):
        import PageError
        CTK.publish (r'', PageError.NotWritable, file=cfg_file)
        while not os.access (cfg_file, os.W_OK):
            CTK.step()
        CTK.unpublish (r'')
    if not os.path.isdir (CHEROKEE_ICONSDIR):
        import PageError
        CTK.publish (r'', PageError.IconsMissing, path=CHEROKEE_ICONSDIR)
        while not os.path.isdir (CHEROKEE_ICONSDIR):
            CTK.step()
        CTK.unpublish (r'')
    # OWS related checks: best-effort creation of the OWS directories,
    # then block until both are writable.
    if not os.path.isdir (CHEROKEE_OWS_DIR):
        try:    os.makedirs (CHEROKEE_OWS_DIR, 0755)
        except OSError: pass
    if not os.path.isdir (CHEROKEE_OWS_ROOT):
        try:    os.makedirs (CHEROKEE_OWS_ROOT, 0755)
        except OSError: pass
    if not os.access (CHEROKEE_OWS_DIR, os.W_OK) or \
       not os.access (CHEROKEE_OWS_ROOT, os.W_OK):
        import PageError
        CTK.publish (r'', PageError.OWSDirectory)
        while not os.access (CHEROKEE_OWS_DIR, os.W_OK) or \
              not os.access (CHEROKEE_OWS_ROOT, os.W_OK):
            CTK.step()
            if not os.path.isdir (CHEROKEE_OWS_DIR):
                try:    os.makedirs (CHEROKEE_OWS_DIR, 0755)
                except OSError: pass
            if not os.path.isdir (CHEROKEE_OWS_ROOT):
                try:    os.makedirs (CHEROKEE_OWS_ROOT, 0755)
                except OSError: pass
        CTK.unpublish (r'')
    # Add the OWS plug-in directory
    CTK.add_plugin_dir (CHEROKEE_OWS_DIR)
    # Set up the error page
    import PageException
    CTK.error.page = PageException.Page
    # Launch the SystemStats ASAP
    import SystemStats
    SystemStats.get_system_stats()
    # Import the Pages
    import PageIndex
    import PageGeneral
    import PageVServers
    import PageVServer
    import PageRule
    import PageEntry
    import PageSources
    import PageSource
    import PageAdvanced
    import PageNewConfig
    import PageHelp
    import PageStatus
    import market
    # Init translation
    if CTK.cfg['admin!lang']:
        PageIndex.language_set (CTK.cfg.get_val('admin!lang'))
    # Let's get asyncronous..
    CTK.set_synchronous (False)
    # Log into OWS if feature is enabled
    do_OWS_login()
    # Run forever
    CTK.run()
|
''' Setup for core modules

Declares the setuptools ``console_scripts`` entry points for the
pySpider command-line tools (one ``name = module:main`` mapping each).
'''
console_scripts = [
    'align = arachnid.pyspider.align:main',
    'defocus = arachnid.pyspider.defocus:main',
    'classify = arachnid.pyspider.classify:main',
    'create-align = arachnid.pyspider.create_align:main',
    'enhancevol = arachnid.pyspider.enhance_volume:main',
    'filtervol = arachnid.pyspider.filter_volume:main',
    'maskvol = arachnid.pyspider.mask_volume:main',
    'prepvol = arachnid.pyspider.prepare_volume:main',
    'reconstruct = arachnid.pyspider.reconstruct:main',
    'refine = arachnid.pyspider.refine:main',
    'resolution = arachnid.pyspider.resolution:main',
    'reference = arachnid.pyspider.reference:main',
    'autorefine = arachnid.pyspider.autorefine:main',
]
|
"""BibEdit Templates."""
__revision__ = "$Id$"
from invenio.config import CFG_SITE_URL
from invenio.messages import gettext_set_language
class Template:
"""BibEdit Templates Class."""
def __init__(self):
"""Initialize."""
pass
    def menu(self):
        """Create the menu.

        Assembles the static HTML of the BibEdit side menu from section
        strings (record, fields, view, history, undo/redo, holding pen,
        circulation, status, help) and returns the full '#bibEditMenu'
        div markup. Dynamic behavior is wired up by bibedit_menu.js.
        """
        imgCompressMenuSection = img('/img/bullet_toggle_minus.png',
                                     'bibEditImgCompressMenuSection')
        # Record section: search controls, record browser, submit/cancel.
        # NOTE(review): class "bibEditmenuMore" (lowercase m) below differs
        # from "bibEditMenuMore" used elsewhere -- probable typo, confirm.
        recordmenu = '<div class="bibEditMenuSectionHeader">\n' \
            '  %(imgCompressMenuSection)sRecord\n' \
            '  %(imgNewRecord)s\n' \
            '  %(imgCloneRecord)s\n' \
            '</div>\n' \
            '<table>\n' \
            '  <col width="28px">\n' \
            '  <col width="40px">\n' \
            '  <col width="40px">\n' \
            '  <col width="28px">\n' \
            '  <tr>\n' \
            '    <td colspan="2">\n' \
            '      <form onsubmit="return false;">\n' \
            '        %(txtSearchPattern)s\n' \
            '      </form>\n' \
            '    <td colspan="2">%(sctSearchType)s</td>\n' \
            '  </tr>\n' \
            '  <tr>\n' \
            '    <td colspan="4">%(btnSearch)s</td>\n' \
            '  </tr>\n' \
            '  <tr id="rowRecordBrowser" style="display: none">\n' \
            '    <td>%(btnPrev)s</td>\n' \
            '    <td colspan="2" id="cellRecordNo"\n' \
            '      style="text-align: center">1/1</td>\n' \
            '    <td>%(btnNext)s</td>\n' \
            '  </tr>\n' \
            '  <tr>\n' \
            '    <td colspan="2">%(btnSubmit)s</td>\n' \
            '    <td colspan="2">%(btnCancel)s</td>\n' \
            '  </tr>\n' \
            '  <tr>\n' \
            '    <td id="tickets" colspan="4"><!--filled by bibedit_menu.js--></td>\n' \
            '  </tr>\n' \
            '  <tr class="bibEditMenuMore">\n' \
            '    <td>%(imgDeleteRecord)s</td>\n' \
            '    <td colspan="3">%(btnDeleteRecord)s</td>\n' \
            '  </tr>\n' \
            '  <tr class="bibEditmenuMore">\n' \
            '    <td>Switch to:</td>\n' \
            '    <td colspan="3">%(btnSwitchReadOnly)s</td>\n' \
            '  </tr>' \
            '</table>' % {
            'imgCompressMenuSection': imgCompressMenuSection,
            'imgNewRecord': img('/img/table.png', 'bibEditImgCtrlEnabled',
                                id='imgNewRecord', title='New record'), \
            'imgCloneRecord': img('/img/table_multiple.png',
                                  'bibEditImgCtrlDisabled', id='imgCloneRecord',
                                  title='Clone record'), \
            'txtSearchPattern': inp('text', id='txtSearchPattern'), \
            'sctSearchType': '<select id="sctSearchType">\n' \
            '  <option value="recID">Rec ID</option>\n' \
            '  <option value="reportnumber">Rep No</option>\n' \
            '  <option value="anywhere">Anywhere</option>\n' \
            '</select>',
            'btnSearch': button('button', 'Search', 'bibEditBtnBold',
                                id='btnSearch'),
            'btnPrev': button('button', '<', id='btnPrev', disabled='disabled'),
            'btnNext': button('button', '>', id='btnNext', disabled='disabled'),
            'btnSubmit': button('button', 'Submit', 'bibEditBtnBold',
                                id='btnSubmit', disabled='disabled'),
            'btnCancel': button('button', 'Cancel', id='btnCancel',
                                disabled='disabled'),
            'imgDeleteRecord': img('/img/table_delete.png'),
            'btnDeleteRecord': button('button', 'Delete',
                                      id='btnDeleteRecord', disabled='disabled'),
            'btnSwitchReadOnly' : button('button', 'Read-only',
                                         id='btnSwitchReadOnly')
            }
        # Fields section: add / delete-selected controls.
        fieldmenu = '<div class="bibEditMenuSectionHeader">\n' \
            '  %(imgCompressMenuSection)sFields\n' \
            '</div>\n' \
            '<table class="bibEditMenuMore">\n' \
            '  <col width="28px">\n' \
            '  <col>\n' \
            '  <tr>\n' \
            '    <td>%(imgAddField)s</td>\n' \
            '    <td>%(btnAddField)s</td>\n' \
            '  </tr>\n' \
            '  <tr>\n' \
            '    <td>%(imgDeleteSelected)s</td>\n' \
            '    <td>%(btnDeleteSelected)s</td>\n' \
            '  </tr>\n' \
            '</table>' % {
            'imgCompressMenuSection': imgCompressMenuSection,
            'imgAddField': img('/img/table_row_insert.png'),
            'btnAddField': button('button', 'Add', id='btnAddField',
                                  disabled='disabled'),
            'imgDeleteSelected': img('/img/table_row_delete.png'),
            'btnDeleteSelected': button('button', 'Delete selected',
                                        id='btnDeleteSelected', disabled='disabled')}
        # View section: MARC vs. human-readable tag display toggles.
        viewmenu = '<div class="bibEditMenuSectionHeader">\n' \
            '  %(imgCompressMenuSection)sView\n' \
            '</div>\n' \
            '<table>\n' \
            '  <col width="68px">\n' \
            '  <col width="68px">\n' \
            '  <tr class="bibEditMenuMore">\n' \
            '    <td>%(btnTagMARC)s</td>\n' \
            '    <td>%(btnTagNames)s</td>\n' \
            '  </tr>\n' \
            '</table>' % {
            'imgCompressMenuSection': imgCompressMenuSection,
            'btnTagMARC': button('button', 'MARC', id='btnMARCTags',
                                 disabled='disabled'),
            'btnTagNames': button('button', 'Human', id='btnHumanTags',
                                  disabled='disabled')
            }
        # History section: revision list is filled client-side.
        historymenu = '<div class="bibEditMenuSectionHeader">\n' \
            '  %(imgCompressMenuSection)sHistory\n' \
            '</div>\n' \
            '<div class="bibEditRevHistoryMenuSection">\n' \
            '  <table>\n' \
            '    <col width="136px">\n' \
            '    <tr class="bibEditMenuMore">\n' \
            '      <td id="bibEditRevisionsHistory"></td>'\
            '    </tr>\n' \
            '  </table>\n' \
            '</div>\n'% {
            'imgCompressMenuSection': imgCompressMenuSection,
            }
        # Undo/Redo section with hidden operation-preview boxes.
        undoredosection = '<div class="bibEditMenuSectionHeader">\n' \
            '  %(imgCompressMenuSection)sUndo/Redo\n' \
            '</div>\n<table>' \
            '  <tr class="bibEditMenuMore"><td>' \
            '    <div class="bibEditURMenuSection">\n' \
            '      <div class="bibEditURDetailsSection" id="bibEditURUndoListLayer">\n' \
            '        <div class="bibEditURButtonLayer"><button id="btnUndo"><</button></div>\n' \
            '        <div id="undoOperationVisualisationField" class="bibEditHiddenElement bibEditURPreviewBox">\n' \
            '          <div id="undoOperationVisualisationFieldContent"></div>\n' \
            '        </div>\n' \
            '      </div>' \
            '      <div class="bibEditURDetailsSection" id="bibEditURRedoListLayer">\n' \
            '        <div class="bibEditURButtonLayer"><button id="btnRedo">></button></div>' \
            '        <div id="redoOperationVisualisationField" class="bibEditHiddenElement bibEditURPreviewBox">\n' \
            '          <div id="redoOperationVisualisationFieldContent"></div>' \
            '        </div>\n' \
            '      </div>\n' \
            '    </div></td></tr></table>\n' % { \
            'imgCompressMenuSection': imgCompressMenuSection }
        # Status area: spinner + status text, updated client-side.
        statusarea = '<table>\n' \
            '  <tr>\n' \
            '    <td id="cellIndicator">%(imgIndicator)s</td>\n' \
            '    <td id="cellStatus">%(lblChecking)s</td>\n' \
            '</table>' % {
            'imgIndicator': img('/img/indicator.gif'),
            'lblChecking': 'Checking status' + '...'
            }
        # Holding-pen section: change list filled client-side.
        holdingpenpanel = '<div class="bibEditMenuSectionHeader">\n' \
            '  %(imgCompressMenuSection)sHolding Pen\n' \
            '<table class="bibEditMenuMore">\n<tr><td>' \
            '  <div id="bibEditHoldingPenToolbar"> ' \
            '    <div id="bibeditHPChanges"></div>' \
            '  </div> </td></tr></table>' \
            '</div>\n' % \
            { 'imgCompressMenuSection': imgCompressMenuSection }
        # BibCirculation section: physical-copies counter and edit button.
        bibcirculationpanel = \
            '<div class="bibEditMenuSection" ' \
            '     id="bibEditBibCircConnection">\n' \
            '<div class="bibEditMenuSectionHeader">\n' \
            '  %(imgCompressMenuSection)sPhysical Copies\n' \
            '  <table class="bibEditMenuMore">\n<tr><td ' \
            '      class="bibEditBibCircPanel">' \
            '    Number of copies: ' \
            '    <div id="bibEditBibCirculationCopies">0</div><br/>' \
            '    <button id="bibEditBibCirculationBtn">' \
            'Edit physical copies</button>' \
            '  </td></tr></table></div></div>' \
            % {
            'imgCompressMenuSection': imgCompressMenuSection,
            }
        # Help link opens the admin guide in a popup window.
        lnkhelp = img('/img/help.png', '', style='vertical-align: bottom') + \
            link('Help', href='#', onclick='window.open(' \
            '\'%s/help/admin/bibedit-admin-guide#2\', \'\', \'width=640,' \
            'height=600,left=150,top=150,resizable=yes,scrollbars=yes\');' \
            'return false;' % CFG_SITE_URL)
        return '  <div id="bibEditMenu">\n' \
            '    <div class="bibEditMenuSection">\n' \
            '      %(recordmenu)s\n' \
            '    </div>\n' \
            '    <div class="bibEditMenuSection">\n' \
            '      %(fieldmenu)s\n' \
            '    </div>\n' \
            '    <div class="bibEditMenuSection">\n' \
            '      %(viewmenu)s\n' \
            '    </div>\n' \
            '    <div class="bibEditMenuSection">\n' \
            '      %(holdingpenpanel)s\n'\
            '    </div>'\
            '    <div class="bibEditMenuSection">\n' \
            '      %(undoredosection)s\n' \
            '    </div>\n' \
            '    <div class="bibEditMenuSection">\n' \
            '      %(historymenu)s\n' \
            '    </div>\n' \
            '    %(circulationmenu)s\n' \
            '    <div id="bibEditMenuSection">\n' \
            '      %(statusarea)s\n' \
            '    </div>\n' \
            '    <div class="bibEditMenuSection" align="right">\n' \
            '      %(lnkhelp)s\n' \
            '    </div>\n' \
            '  </div>\n' % {
            'recordmenu': recordmenu,
            'viewmenu': viewmenu,
            'fieldmenu': fieldmenu,
            'statusarea': statusarea,
            'lnkhelp': lnkhelp,
            'holdingpenpanel': holdingpenpanel,
            'historymenu': historymenu,
            'undoredosection': undoredosection,
            'circulationmenu': bibcirculationpanel
            }
def history_comparebox(self, ln, revdate, revdate_cmp, comparison):
""" Display the bibedit history comparison box. """
_ = gettext_set_language(ln)
title = '<b>%(comp)s</b><br />%(rev)s %(revdate)s<br />%(rev)s %(revdate_cmp)s' % {
'comp': _('Comparison of:'),
'rev': _('Revision'),
'revdate': revdate,
'revdate_cmp': revdate_cmp}
return '''
<div class="bibEditHistCompare">
<p>%s</p>
<p>
%s
</p>
</div>''' % (title, comparison)
def clean_value(self, value, format):
""" This function clean value for HTML interface and inverse. """
if format != "html":
value = value.replace('"', '"')
value = value.replace('<', '<')
value = value.replace('>', '>')
else:
value = value.replace('"', '"')
value = value.replace('<', '<')
value = value.replace('>', '>')
return value
def img(src, _class='', **kargs):
    """Create an HTML <img> element.

    Extra keyword arguments become additional attributes.
    """
    attrs = 'src="%s" ' % src
    if _class:
        attrs += 'class="%s" ' % _class
    for attr, val in kargs.items():
        attrs += '%s="%s" ' % (attr, val)
    return '<img %s/>' % attrs
def inp(_type, _class='', **kargs):
    """Create an HTML <input> element.

    Extra keyword arguments become additional attributes.
    """
    pieces = ['type="%s" ' % _type]
    if _class:
        pieces.append('class="%s" ' % _class)
    pieces.extend('%s="%s" ' % item for item in kargs.items())
    return '<input %s/>' % ''.join(pieces)
def button(_type, value, _class='', **kargs):
    """Create an HTML <button> element with *value* as its label.

    Extra keyword arguments become additional attributes.
    """
    attrs = 'type="%s" ' % _type
    if _class:
        attrs += 'class="%s" ' % _class
    for attr, val in kargs.items():
        attrs += '%s="%s" ' % (attr, val)
    return '<button %s>%s</button>' % (attrs, value)
def link(value, _class='', **kargs):
    """Create an HTML <a> (link) element with *value* as its text.

    Extra keyword arguments become additional attributes.
    """
    attrs = ''
    if _class:
        attrs = 'class="%s" ' % _class
    for attr, val in kargs.items():
        attrs += '%s="%s" ' % (attr, val)
    return '<a %s>%s</a>' % (attrs, value)
|
"""
Some useful tools dealing with popplerqt5 (PDF) documents.
"""
import os
class Document(object):
    """Represents a (lazily) loaded PDF document."""

    def __init__(self, filename=''):
        self._document = None
        # Delegate to setFilename() so the dirty flag is set consistently.
        self.setFilename(filename)

    def filename(self):
        """Return the filename, set on init or via setFilename()."""
        return self._filename

    def setFilename(self, filename):
        """Set a new filename.

        The document will be reloaded next time it is requested.
        """
        self._filename = filename
        self._dirty = True

    def name(self):
        """Return the filename without its directory part."""
        return os.path.basename(self._filename)

    def document(self):
        """Return the PDF document for the current filename.

        Reloads when the filename changed; may return None when loading
        failed.
        """
        if self._dirty:
            self._document = self.load()
            self._dirty = False
        return self._document

    def load(self):
        """Load and return the popplerqt5 Document for our filename.

        Returns None when popplerqt5 is not available.
        """
        try:
            import popplerqt5
        except ImportError:
            return None
        return popplerqt5.Poppler.Document.load(self._filename)
|
import os
import json
class Config(object):
    """In-memory key/value store persisted through a JSON conf loader."""

    def __init__(self, **kwargs):
        self.conf_loader = JSONConfLoader()
        # Seed the in-memory mapping from whatever is already on disk.
        self.data = dict(self.conf_loader.read_all())

    def write_all(self):
        """Persist the whole in-memory mapping."""
        self.conf_loader.write_all(self.data)

    def get(self, key, default=None):
        """Return the stored value for *key*, or *default* when absent."""
        return self.data.get(key, default)

    def set(self, key, value):
        """Store one value and persist that single item."""
        self.data[key] = value
        self.conf_loader.write_item(key, value)

    def update(self, d):
        """Merge a mapping into the store and persist everything."""
        self.data.update(d)
        self.conf_loader.write_all(self.data)
class BaseConfLoader(object):
    """Abstract interface for configuration loaders.

    Subclasses override the read/write hooks below; the base
    implementations are deliberate no-ops.
    """
    def __init__(self, **kwargs):
        pass
    def write_item(self, key, value):
        # Persist a single key/value pair.
        pass
    def read_item(self, key):
        # Return the stored value for one key.
        pass
    def write_all(self, data):
        # Persist the whole mapping.
        pass
    def read_all(self):
        # Return the whole stored mapping.
        pass
class FileBasedLoader(BaseConfLoader):
    """Conf loader that persists to a single file.

    The path defaults to _default_path (plus _file_extension when set)
    in the user's home directory; a 'filename' kwarg overrides it.
    """
    _file_extension = None
    _default_path = '~/.plotly_system_stats'

    def __init__(self, **kwargs):
        filename = kwargs.get('filename')
        if filename is None:
            ext = self._file_extension
            filename = self._default_path
            if ext is not None:
                filename = '.'.join([filename, ext])
        self.filename = os.path.expanduser(filename)

    def write_file(self, s):
        """Overwrite the backing file with *s*."""
        with open(self.filename, 'w') as f:
            f.write(s)

    def read_file(self):
        """Return the file contents, or None when it cannot be read.

        EAFP: opening directly avoids the check-then-open race of the
        original os.path.exists() test (file removed between the check
        and the open).
        """
        try:
            with open(self.filename, 'r') as f:
                return f.read()
        except IOError:
            # Missing (or unreadable) file is treated as "no config yet".
            return None
class JSONConfLoader(FileBasedLoader):
    """File-based conf loader serializing the mapping as JSON."""

    def write_item(self, key, value):
        """Persist one key, rewriting the file only when the value changed."""
        data = self.read_all()
        if data.get(key) != value:
            data[key] = value
            self.write_all(data)

    def read_item(self, key):
        """Return the stored value for *key* (None when absent)."""
        return self.read_all().get(key)

    def write_all(self, data):
        """Serialize the whole mapping to the backing file."""
        self.write_file(json.dumps(data, indent=3))

    def read_all(self):
        """Deserialize the backing file; empty dict when it is missing."""
        raw = self.read_file()
        return {} if raw is None else json.loads(raw)
# Module-level singleton: importing this module reads the on-disk
# configuration file once.
config = Config()
|
from IPGSdb import *
from IPGSdb import deleteUsers
def testUsers():
    # Exercise the Users CRUD functions: wipe the table, create records
    # (including ones with missing/invalid fields), read and update them.
    # Returns the ids of two created users for use by the other tests.
    print "\n"
    print "---------------------------------------------------------------------------------------------------------------------"
    print "\n"
    print "START TESTING USERS CRUD FUNCTIONS"
    print "\n"
    print "---------------------------------------------------------------------------------------------------------------------"
    print readUsers()
    deleteUsers()
    print readUsers()
    A = createUsers('ayolo@yolo.com','dqsv','male','Raheja Township','mumbai','13131','1-1-2012')
    print readUsers(A)
    B = createUsers(None,'dqsv','male',None,'mumbai','13131',None)
    print readUsers(B)
    # C and D use malformed email addresses on purpose.
    C=createUsers('ayolsca.com','dqsv','male','21','mumbai','13131','1-1-2012')
    D=createUsers('ayolscasdqas','dqsv','male','21','mumbai','13131','1-1-2012')
    print readUsers(A)
    updateUsers(A,"Dad","Dad","Dad","Dad","Dad",'1-1-2000')
    print readUsers(A)
    updateUsers(A,None,"SAD","SAD","SAD","SAD",'1-1-1994')
    print readUsers()
    print "---------------------------------------------------------------------------------------------------------------------"
    print "\n"
    print "DONE WITH USERS CRUD"
    print "\n"
    print "---------------------------------------------------------------------------------------------------------------------"
    return (C,D)
def testIssues(C):
    # Exercise the Issues CRUD functions using the user ids in C.
    # Returns the two created issue ids.
    # NOTE(review): the early 'return (X,Z)' makes everything after it
    # (including the closing banner prints) unreachable -- confirm intent.
    print "\n"
    print "---------------------------------------------------------------------------------------------------------------------"
    print "\n"
    print "START TESTING ISSUES CRUD FUNCTIONS"
    print "\n"
    print "---------------------------------------------------------------------------------------------------------------------"
    print(readIssues(None))
    print C[0]
    X=createIssues(C[0],'Big problem','This is a very big problem... Blah..Blah..Blah',12312.1231,132123.12312,'c:/ad/image/1.jpg',False,3)
    Z=createIssues(C[0],'Big problem','This is a very big problem... Blah..Blah..Blah',12312.1231,132123.12312,'c:/ad/image/1.jpg',False,3)
    print X
    print readIssues(X)
    print readIssues()
    return (X,Z)
    #Z=createIssues(C[0],'Big proqw','This is a very big problem... Blah..Blah..Blah',12312.1231,132123.12312,'c:/ad/image/1.jpg',True,3)
    #print Z
    #Y=createIssues(C[1],'Big problem','This is a very big problem... Blah..Blah..Blah',12312.1231,132123.12312,'c:/ad/image/1.jpg',True,3)
    #print Y
    #print(readIssues())
    #print(readIssues(X))
    #updateIssues(X,"dwa","adwwa",21312,12312,"wda",False,123,True)
    #print(readIssues(X))
    #print(readIssues())
    #deleteIssues(X)
    #print(readIssues(X))
    #print readIssues()
    #print readUsers()
    #deleteIssues()
    #print readIssues()
    print "---------------------------------------------------------------------------------------------------------------------"
    print "\n"
    print "DONE WITH ISSUES CRUD"
    print "\n"
    print "---------------------------------------------------------------------------------------------------------------------"
def testComments(X,C):
    # Exercise the Comments CRUD functions: create three comments on the
    # first issue, update one, then delete it. X holds issue ids, C user ids.
    print "---------------------------------------------------------------------------------------------------------------------"
    print "\n"
    print "TESTING Comments TABLE"
    print "\n"
    print "---------------------------------------------------------------------------------------------------------------------"
    print "COMMENT/ISSUE IDS","X[0]:",X[0],"X[1]",X[1]
    print "USER IDS","C[0]:",C[0],"C[1]",C[1]
    print readUsers()
    print readIssues()
    comment1=createComments(X[0],C[0],"11111111111HAHAHAHAHH! STUPID COMMENT")
    comment2=createComments(X[0],C[1],"USER 2 YOLOLO")
    comment3=createComments(X[0],C[0],"111122222222222222222HAHAHAHAHH! STUPID COMMENT")
    print readComments(X[0])
    updateComments(comment1[0],comment1[1],comment1[2],"NOOBS")
    print readComments(X[0])
    deleteComments(comment1[0],comment1[1],comment1[2])
    print readComments(X[0])
def testVotes(X,C):
    # Exercise the Votes CRUD functions: create, read, delete and update
    # votes on the issues in X by the users in C.
    print "---------------------------------------------------------------------------------------------------------------------"
    print "\n"
    print "TESTING VOTES TABLE"
    print "\n"
    print "---------------------------------------------------------------------------------------------------------------------"
    print "COMMENT/ISSUE IDS","X[0]:",X[0],"X[1]",X[1]
    print "USER IDS","C[0]:",C[0],"C[1]",C[1]
    print readUsers()
    print readIssues()
    createVotes(X[0],C[0],False)
    createVotes(X[0],C[1],False)
    print readVotes(X[0])
    createVotes(X[1],C[0],True)
    print readVotes()
    deleteVotes(X[0])
    print readVotes()
    print readVotes(X[0])
    # NOTE(review): updating a vote on X[0] right after deleteVotes(X[0])
    # operates on a removed row -- confirm this is the intended negative test.
    updateVotes(X[0],C[0],True)
    print readVotes(X[0])
if __name__ == '__main__':
    # Run the CRUD test suite end to end; each step feeds ids to the next.
    C=testUsers()
    X=testIssues(C)
    testVotes(X,C)
    testComments(X,C)
    print "Success! All tests passed!"
|
# Flask-style application settings for the CTF application.
# SECURITY NOTE(review): the reCAPTCHA private key and SECRET_KEY are
# committed in source control -- rotate them and load from the
# environment or an untracked config file instead.
DEBUG = True
BCRYPT_LEVEL = 12
MAIL_FROM = "someone@somewhere.com"
RECAPTCHA_PUBLIC_KEY = "6LcIFAITAAAAAEQZFaKrW3DoDVyeux4iSk9yJItk"
RECAPTCHA_PRIVATE_KEY = "6LcIFAITAAAAAIQwieMGyB5dGCbOmwgoIufLzsVH"
MONGODB_SETTINGS = {'DB' : 'ctfdb'}
SECRET_KEY = 'Oooooh!S3cr3T!!!!'
|
"""
@author: zengchunyun
"""
import logging
logging.basicConfig(filename="acces8s.log", level=logging.INFO,
format='%(asctime)s %(message)s')
logging.warning("heled")
logging.debug('This message should go to the log file')
logging.info('So should this')
logging.warning('And this, too')
|
from __future__ import absolute_import
from django.conf import settings
import os, sys
import traceback
import StringIO
import logging
import Queue
import threadpool
import uuid
import threading
import datetime
import time
import importlib
# Module-wide task plumbing (Python 2: Queue / threadpool packages).
_dispatchQueue = Queue.Queue()  # one-shot AsyncTasks awaiting dispatch
_scheduleQueue = []  # recurring ScheduleTask instances
_dispatchpool = threadpool.ThreadPool(5)  # workers for one-shot tasks
_schedulepool = threadpool.ThreadPool(2)  # workers for recurring tasks
# NOTE(review): shadowed by a local of the same name inside run()'s
# scheduler(); this module-level value appears unused.
SIMPLETASK_SCHEDULE = {}
def test_add(x,y):
    # Demo task for the scheduler: print both inputs and log their sum
    # (Python 2 print statements).
    print x
    print y
    a = x+y
    logging.debug( "add:%s", a)
class AsyncTask:
    """A one-shot job: a callable plus the arguments to invoke it with.

    Each task carries a unique string id (uuid1) used for logging/tracking.
    """

    def __init__(self, func, args, kwargs):
        self.id = str(uuid.uuid1())
        # Store the callable and its call arguments verbatim.
        self.func, self.args, self.kwargs = func, args, kwargs
class ScheduleTask:
    """A recurring job executed every `interval` (a datetime.timedelta)."""

    def __init__(self, func, interval, args, kwargs):
        self.id = str(uuid.uuid1())
        self.interval = interval
        self.func, self.args, self.kwargs = func, args, kwargs
        # Backdate the last-run time by one interval so the task is due
        # immediately on the scheduler's first poll.
        self.lasttime = datetime.datetime.now() - interval
def pushtask(func, *args, **kwargs):
    """Queue `func(*args, **kwargs)` for asynchronous execution; return the task id."""
    job = AsyncTask(func, args, kwargs)
    _dispatchQueue.put(job)
    logging.debug("get a task:%s", job.id)
    return job.id
def canceltasks():
    # Drain every pending one-shot task from the dispatch queue without
    # running it.  The short timeout makes the final get() on an empty queue
    # raise Queue.Empty, and the broad handler (Python 2 `except X,name`
    # syntax) breaks the loop on any exception.
    while True:
        try:
            timeout = 0.01
            _dispatchQueue.get(block = True, timeout = timeout)
        except Exception,e:
            break
def scheduletask(func, interval, *args, **kwargs):
    """Register `func` to run every `interval` (timedelta); return its task id."""
    job = ScheduleTask(func, interval, args, kwargs)
    _scheduleQueue.append(job)
    logging.debug("get a schedule task:%s", job.id)
    return job.id
def run():
    # Start the two daemon worker threads: `dispatcher` drains one-shot tasks
    # from _dispatchQueue into the dispatch pool, and `scheduler` loads the
    # configured recurring jobs, then fires them whenever their interval has
    # elapsed.  (Python 2 code: print statements, `except X,name` syntax.)
    def dispatcher():
        logging.debug("dispatcher...")
        while True:
            jobs = []
            # Batch everything currently queued: block up to 1s for the first
            # task, then use a tiny timeout so the loop exits quickly once
            # the queue is drained (Queue.Empty triggers the break).
            while True:
                try:
                    timeout = 1
                    if len(jobs) > 0:
                        timeout = 0.01
                    task = _dispatchQueue.get(block = True, timeout = timeout)
                    jobs.append( task )
                except Exception,e:
                    break
            if len(jobs) > 0:
                # Hand the whole batch to the pool, then wait for completion.
                for task in jobs:
                    _dispatchpool.putRequest( threadpool.WorkRequest(task.func, task.args, task.kwargs ) )
                    logging.debug("process %s... ", task.id)
                _dispatchpool.wait()
                logging.debug("dispatcher DONE")
    def scheduler():
        # Load SIMPLETASK_SCHEDULE from django settings: each entry names a
        # dotted "task" callable, a timedelta "interval" and optional "args".
        SIMPLETASK_SCHEDULE = getattr(settings, 'SIMPLETASK_SCHEDULE', {})
        logging.debug("scheduler...%s", SIMPLETASK_SCHEDULE.items())
        for name, item in SIMPLETASK_SCHEDULE.items():
            modfunc = item.get('task', '').strip()
            if modfunc == '':
                logging.error("schedule job:%s is illegl", name)
                continue
            interval = item.get('interval', None)
            if interval == None or not isinstance(interval, datetime.timedelta) :
                logging.error("schedule job:%s interval is illegl", name)
                continue
            args = item.get('args', None)
            # Split "pkg.mod.func" into a module path and a function name.
            i = modfunc.rfind('.')
            modname = ''
            if i >=0:
                modname = modfunc[:i]
                funcname = modfunc[i+1:]
            else:
                funcname = modfunc
            try:
                print modname, funcname
                if modname != '':
                    obj = importlib.import_module(modname)
                    #obj = sys.modules[modname]
                    func = getattr(obj, funcname)
                else:
                    # Bare name: resolve it in this module's namespace.
                    # SECURITY NOTE(review): eval of a config-supplied name --
                    # the settings module must be trusted.
                    func = eval(funcname)
                print func
                logging.info("imported:%s %s-%s", modfunc, modname, func)
                # NOTE(review): `args` may be None here, which would fail to
                # unpack -- confirm config entries always provide a sequence.
                scheduletask( func, interval, *args)
            except Exception,e:
                logging.error("Load %s, exception:%s", modfunc,e)
                continue
        # Poll twice a second and fire any task whose interval has elapsed.
        while True:
            jobs = []
            now = datetime.datetime.now()
            count = 0
            for task in _scheduleQueue:
                if now - task.lasttime >= task.interval:
                    _schedulepool.putRequest( threadpool.WorkRequest(task.func, task.args, task.kwargs ) )
                    task.lasttime = now
                    logging.debug("process %s... ", task.id)
                    count +=1
            if count > 0:
                _schedulepool.wait()
                logging.debug("scheduler DONE")
            time.sleep(0.5)
    # Both workers are daemon threads so they do not block process exit.
    a = threading.Thread(target = dispatcher)
    a.daemon = True
    a.start()
    b = threading.Thread(target = scheduler)
    b.daemon = True
    b.start()
    print "gogogo"
|
from Container import Container
HTML = """
<div id="%(id)s" class="indenter">
%(content)s
</div>
"""
class Indenter (Container):
    """Container that wraps its rendered content in `level` nested indenter divs."""

    def __init__ (self, widget=None, level=1):
        Container.__init__ (self)
        self.level = level
        # Convenience: a single child widget may be passed directly.
        if widget:
            self += widget

    def Render (self):
        render = Container.Render (self)
        # Wrap the markup once per indentation level.
        # NOTE(review): every wrapper reuses the same id, producing duplicate
        # DOM ids when level > 1 -- confirm this is intended.
        for _ in range(self.level):
            render.html = HTML % {'id': self.id, 'content': render.html}
        return render
|
from __future__ import absolute_import
import time
import dnf.cli
import dnfpluginsextras
import subprocess
_ = dnfpluginsextras._
class Tracer(dnf.Plugin):
    """DNF plugin for `tracer` command"""
    name = "tracer"
    def __init__(self, base, cli):
        super(Tracer, self).__init__(base, cli)
        # Remember when this plugin was loaded so tracer can later be pointed
        # at everything that changed since then (-iat <timestamp>).
        self.timestamp = time.time()
        self.base = base
        self.cli = cli
        # cli is None in API (non-interactive) usage; only register the
        # `dnf tracer` subcommand when running under the CLI.
        if self.cli is not None:
            self.cli.register_command(TracerCommand)
    def transaction(self):
        """
        Call after successful transaction
        See https://rpm-software-management.github.io/dnf/api_transaction.html
        """
        # Don't run tracer when uninstalling it
        if dnfpluginsextras.is_erasing(self.base.transaction,
                                       "tracer"):
            return
        # Don't run tracer when preparing chroot for mock
        if self.base.conf.installroot != "/":
            return
        # Don't run tracer when "nothing to do"
        if not len(self.base.transaction):
            return
        # Ask tracer (non-interactive, -n) about every package touched by
        # this transaction, installed or erased.
        installed = set([package.name for package in
                         self.base.transaction.install_set])
        erased = set([package.name for package in
                      self.base.transaction.remove_set])
        args = ["tracer", "-n"] + list(installed | erased)
        process = subprocess.Popen(
            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = process.communicate()
        _print_output(out, err)
        # Any output means some applications need restarting; show the
        # follow-up command scoped to this transaction's timestamp.
        if len(out) != 0:
            print("\n" + _("For more information run:"))
            print("    sudo tracer -iat " + str(self.timestamp))
class TracerCommand(dnf.cli.Command):
    """DNF tracer plugin"""
    aliases = ["tracer"]

    def run(self, args):
        """Called after running `dnf tracer ...`"""
        # Forward all CLI arguments straight to the external `tracer` binary
        # and report whatever it prints.
        argv = ["tracer"] + args
        proc = subprocess.Popen(
            argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = proc.communicate()
        _print_output(stdout, stderr)
def _print_output(out, err):
    """Report the result of a `tracer` subprocess run.

    out, err -- raw stdout/stderr bytes captured from the process.

    Prints a crash report when stderr is non-empty, a "nothing to restart"
    notice when stdout is empty, and otherwise the list of applications
    tracer says should be restarted.
    """
    if len(err) != 0:
        print("Tracer:")
        print("    " + _("Program 'tracer' crashed with following error:") + "\n")
        # Fix: decode stderr like stdout below -- under Python 3 `err` is
        # bytes and printing it directly shows a b'...' repr.
        print(err.decode("utf8"))
        print(_("Please visit ") +
              "https://github.com/FrostyX/tracer/issues " +
              "and submit the issue. Thank you")
        print(_("We apologize for any inconvenience"))
        return
    if len(out) == 0:
        print(_("You should restart:"))
        print("    " + _("Nothing needs to be restarted"))
        return
    # Last value is blank line
    for line in out.decode("utf8").split("\n")[:-1]:
        print(line)
|
NAME = "Pinguino IDE tk"
VERSION = "11.0"
SUBVERSION = "beta.1"
"""-------------------------------------------------------------------------
Pinguino IDE tk
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
-------------------------------------------------------------------------"""
import sys
# Python 2 only: re-expose setdefaultencoding (hidden by site.py) and force
# UTF-8 process-wide.
reload(sys)
sys.setdefaultencoding("utf-8")
import os
import debugger
debugger.Debugger(sys, clear=True)  # install the project's debug/exception hooks
# Publish app identity and install paths for child modules and processes.
os.environ["NAME"] = NAME
os.environ["VERSION"] = VERSION
os.environ["SUBVERSION"] = SUBVERSION
os.environ["PINGUINO_HOME"] = os.path.abspath(sys.path[0])
# Prefer a local "pinguino_data" directory; fall back to the install dir.
if os.path.exists(os.path.abspath("pinguino_data")):
    os.environ["PINGUINO_DATA"] = os.path.abspath("pinguino_data")
else:
    os.environ["PINGUINO_DATA"] = os.getenv("PINGUINO_HOME")
import argparse
from tkgui.pinguino_api.boards import boardlist
def build_argparse():
    """Define the command-line interface and return the parsed arguments."""
    app_name = os.getenv("NAME")
    parser = argparse.ArgumentParser(description="*** %s ***" % app_name)
    # Informational flags.
    parser.add_argument("-v", "--version", dest="version", action="store_true", default=False,
                        help="show %s version and exit" % app_name)
    parser.add_argument("-a", "--author", dest="author", action="store_true", default=False,
                        help="show authors of this %s version and exit" % app_name)
    # Compilation inputs and options.
    parser.add_argument("-f", "--filename", dest="filename", nargs=1, default=False,
                        help="filename to process")
    parser.add_argument("-l", "--boot", dest="bootloader", nargs=1, default=False,
                        help="set bootloader option")
    parser.add_argument("-x", "--upload", dest="upload", action="store_true", default=False,
                        help="upload code")
    parser.add_argument("-g", "--hex", dest="hex_file", action="store_true", default=False,
                        help="print hex_file")
    # One mutually-overwriting flag per supported board; the flag stores the
    # board object itself.
    for board in boardlist:
        parser.add_argument(board.shortarg, board.longarg, dest="board", const=board,
                            action="store_const", default=False,
                            help="compile code for " + board.board + " board")
    return parser.parse_args()
# Parse CLI arguments once at import time; remember whether parsing succeeded
# so the GUI can still be started when the arguments are unusable.
try:
    parser = build_argparse()
    parser_state = True
# NOTE(review): bare except also hides genuine bugs in build_argparse.
except:
    parser_state = False
if __name__ == "__main__":
#sys.path.append(os.path.join(os.getenv("PINGUINO_DATA"), "qtgui", "resources"))
python_path_modules = os.path.join(os.getenv("PINGUINO_DATA"), "python_requirements")
if os.path.isdir(python_path_modules): sys.path.append(python_path_modules)
if len(sys.argv) == 1 or not parser_state:
from Tkinter import Tk
from tkgui.ide import PinguinoIDE
root = Tk()
#icon = os.path.join("tkgui", "resources", "art", "pinguino11.ico")
#root.iconbitmap(icon)
#root.tk.call('wm', 'iconbitmap', self._w, '-default', 'pinguino11.ico')
app = PinguinoIDE(master=root)
#app.parent.configure(background = 'red')
app.mainloop()
#root.destroy()
elif parser_state: #command line
from tkgui.pinguino_api.pinguino import Pinguino
from tkgui.pinguino_api.pinguino_config import PinguinoConfig
from tkgui.ide.methods.config import Config
pinguino = Pinguino()
PinguinoConfig.set_environ_vars()
PinguinoConfig.check_user_files()
config = Config()
PinguinoConfig.update_pinguino_paths(config, Pinguino)
PinguinoConfig.update_pinguino_extra_options(config, Pinguino)
PinguinoConfig.update_user_libs(pinguino)
#parser = pinguino.build_argparse()
if parser.version:
print("\t" + VERSION)
sys.exit()
if parser.author:
print("\tJean-Pierre Mandon")
print("\tRegis Blanchot")
print("\tYeison Cardona")
sys.exit()
if parser.board:
pinguino.set_board(parser.board)
print("using %s board" % parser.board.name)
if parser.bootloader:
bootloader = pinguino.dict_boot.get(parser.bootloader[0].lower(), parser.board.bldr)
pinguino.set_bootloader(bootloader)
print("using %s bootloader" % pinguino.get_board().bldr)
if not parser.filename:
print("ERROR: missing filename")
sys.exit(1)
else:
filename = parser.filename[0]
fname, extension = os.path.splitext(filename)
if extension != ".pde":
print("ERROR: bad file extension, it should be .pde")
sys.exit()
del fname, extension
pinguino.compile_file(filename)
if not pinguino.compiled():
print("\nERROR: no compiled\n")
errors_proprocess = pinguino.get_errors_preprocess()
if errors_proprocess:
for error in errors_proprocess["preprocess"]: print(error)
errors_c = pinguino.get_errors_compiling_c()
if errors_c:
print(errors_c["complete_message"])
errors_asm = pinguino.get_errors_compiling_asm()
if errors_asm:
for error in errors_asm["error_symbols"]: print(error)
errors_link = pinguino.get_errors_linking()
if errors_link:
for error in errors_link["linking"]: print(error)
sys.exit()
else:
result = pinguino.get_result()
print("compilation time: %s" % result["time"])
print("compiled to: %s" % result["hex_file"])
if parser.hex_file:
hex_file = open(result["hex_file"], "r")
content_hex = hex_file.readlines()
hex_file.close()
print("\n" + "*" * 70)
print(result["hex_file"])
print("*" * 70)
for line in content_hex: print(line),
print("*" * 70 + "\n")
if parser.upload:
try:
uploaded, result = pinguino.upload()
if result:
print(result)
except:
if pinguino.get_board().arch == 8:
print("ERROR: is possible that a parameter is incorrect, try another bootloader option.")
print("Boloader options: "),
print(", ".join(pinguino.dict_boot.keys()))
|
from django.core.exceptions import PermissionDenied, ValidationError
from django.template.defaultfilters import filesizeformat
from django.utils.translation import gettext as _
from rest_framework import viewsets
from rest_framework.response import Response
from misago.acl import add_acl
from ..models import Attachment, AttachmentType
from ..serializers import AttachmentSerializer
# File extensions handled as images (stored via Attachment.set_image below).
IMAGE_EXTENSIONS = ('jpg', 'jpeg', 'png', 'gif')
class AttachmentViewSet(viewsets.ViewSet):
    """Handles user file uploads: POST creates a new Attachment."""

    def create(self, request):
        # An absent/zero max_attachment_size acl means uploads are disabled
        # for this user.
        if not request.user.acl['max_attachment_size']:
            raise PermissionDenied(_("You don't have permission to upload new files."))
        try:
            return self.create_attachment(request)
        except ValidationError as e:
            return Response({'detail': e.args[0]}, status=400)

    def create_attachment(self, request):
        """Validate the uploaded file and persist it as an Attachment."""
        upload = request.FILES.get('upload')
        if not upload:
            raise ValidationError(_("No file has been uploaded."))
        # Role check + type/size validation may each raise ValidationError.
        user_roles = {role.pk for role in request.user.get_roles()}
        filetype = validate_filetype(upload, user_roles)
        validate_filesize(upload, filetype, request.user.acl['max_attachment_size'])
        attachment = Attachment(
            secret=Attachment.generate_new_secret(),
            filetype=filetype,
            size=upload.size,
            uploader=request.user,
            uploader_name=request.user.username,
            uploader_slug=request.user.slug,
            uploader_ip=request.user_ip,
            filename=upload.name,
        )
        # Images get thumbnail processing; anything else is stored verbatim.
        if is_upload_image(upload):
            try:
                attachment.set_image(upload)
            except IOError:
                raise ValidationError(_("Uploaded image was corrupted or invalid."))
        else:
            attachment.set_file(upload)
        attachment.save()
        add_acl(request.user, attachment)
        serializer = AttachmentSerializer(attachment, context={'user': request.user})
        return Response(serializer.data)
def validate_filetype(upload, user_roles):
    # Match the upload against every enabled AttachmentType and return the
    # first type that accepts it; raise ValidationError when none does.
    filename = upload.name.strip().lower()
    queryset = AttachmentType.objects.filter(status=AttachmentType.ENABLED)
    for filetype in queryset.prefetch_related('limit_uploads_to'):
        # for/else: `break` means one of this type's extensions matched the
        # filename; the else branch (no break) skips to the next type.
        for extension in filetype.extensions_list:
            if filename.endswith('.%s' % extension):
                break
        else:
            continue
        # Extension matched -- also require a mimetype match when the type
        # restricts mimetypes...
        if filetype.mimetypes_list and upload.content_type not in filetype.mimetypes_list:
            continue
        # ...and require the user to hold one of the allowed roles when the
        # type is role-limited.
        if filetype.limit_uploads_to.exists():
            allowed_roles = set(r.pk for r in filetype.limit_uploads_to.all())
            if not user_roles & allowed_roles:
                continue
        return filetype
    raise ValidationError(_("You can't upload files of this type."))
def validate_filesize(upload, filetype, hard_limit):
    """Raise ValidationError when the upload exceeds the global or per-type size cap.

    Both `hard_limit` and `filetype.size_limit` are expressed in KB.
    """
    def _reject(message, limit_kb):
        # Shared error construction for both limits.
        raise ValidationError(message % {
            'upload': filesizeformat(upload.size).rstrip('.0'),
            'limit': filesizeformat(limit_kb * 1024).rstrip('.0')
        })

    if upload.size > hard_limit * 1024:
        _reject(
            _("You can't upload files larger than %(limit)s (your file has %(upload)s)."),
            hard_limit)
    if filetype.size_limit and upload.size > filetype.size_limit * 1024:
        _reject(
            _("You can't upload files of this type larger than %(limit)s (your file has %(upload)s)."),
            filetype.size_limit)
def is_upload_image(upload):
    """Return True when the upload's (case-insensitive) filename has an image extension."""
    filename = upload.name.strip().lower()
    return filename.endswith(tuple('.%s' % ext for ext in IMAGE_EXTENSIONS))
|
from openerp.osv import fields, osv
from openerp.tools.translate import _
import openerp.netsvc as netsvc
class account_invoice(osv.Model):
    # Extends account.invoice with a helper that opens the list of invoices
    # sharing this invoice's name (OpenERP 7 osv API).
    _inherit = 'account.invoice'
    def search_asociated_invoice(self, cr, uid, ids, context=None):
        # Build and return an ir.actions window action listing every invoice
        # whose `name` equals this invoice's name, using the tree view that
        # matches the invoice type (customer/supplier x invoice/refund).
        if context is None:
            context = {}
        data_pool = self.pool.get('ir.model.data')
        # Only the first selected invoice drives the search.
        inv_type = self.read(cr, uid, ids[0], ['type', 'name'])
        name = inv_type['name']
        inv_type = inv_type['type']
        invoice_ids = self.search(cr, uid, [('name', '=', name)])
        # inv_type = context.get('inv_type', False)
        action_model = False
        action = {}
        if not invoice_ids:
            raise osv.except_osv(_('Error'), _('No Invoices were created'))
        # Pick the stock account action matching the invoice type.
        if inv_type == "out_invoice":
            action_model, action_id = data_pool.get_object_reference(
                cr, uid, 'account', "action_invoice_tree1")
        elif inv_type == "in_invoice":
            action_model, action_id = data_pool.get_object_reference(
                cr, uid, 'account', "action_invoice_tree2")
        elif inv_type == "out_refund":
            action_model, action_id = data_pool.get_object_reference(
                cr, uid, 'account', "action_invoice_tree3")
        elif inv_type == "in_refund":
            action_model, action_id = data_pool.get_object_reference(
                cr, uid, 'account', "action_invoice_tree4")
        if action_model:
            action_pool = self.pool.get(action_model)
            action = action_pool.read(cr, uid, action_id, context=context)
            # Restrict the action to the matching invoice ids.
            action['domain'] = "[('id','in', ["+','.join(
                map(str, invoice_ids))+"])]"
            action.update({'nodestroy': True})
        return action
|
"""Add a packages table.
Revision ID: 15f941de8d61
Revises: None
Create Date: 2015-05-07 10:28:02.798256
"""
revision = '15f941de8d61'
down_revision = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import text
def upgrade():
    # Create the new structures
    # packages: one row per package name.
    op.create_table(
        'packages',
        sa.Column('pkg_name', sa.Text(), nullable=False),
        sa.PrimaryKeyConstraint('pkg_name')
    )
    # releases: (tarball, checksum) pairs belonging to a package; the FK
    # cascades package renames into the child rows.
    op.create_table(
        'releases',
        sa.Column('pkg_name', sa.Text(), nullable=False),
        sa.Column('tarball', sa.Text(), nullable=False),
        sa.Column('tar_sum', sa.String(length=64), nullable=False),
        sa.ForeignKeyConstraint(['pkg_name'], ['packages.pkg_name'], onupdate='CASCADE'),
        sa.PrimaryKeyConstraint('tarball', 'tar_sum'),
        sa.UniqueConstraint('tarball', 'tar_sum')
    )
    # Speed up lookups of a package's releases.
    op.create_index(op.f('ix_releases_pkg_name'), 'releases', ['pkg_name'], unique=False)
def downgrade():
    """Reverse upgrade(): drop the index, then the dependent tables.

    The original body was `pass`, which made this migration irreversible;
    the drops mirror the creates in upgrade() in reverse order.
    """
    op.drop_index(op.f('ix_releases_pkg_name'), table_name='releases')
    op.drop_table('releases')  # child table first (FK on packages)
    op.drop_table('packages')
|
import cgi
import cgitb
cgitb.enable()  # render CGI tracebacks as HTML in the browser
from shared.functionality.rmvgridowner import main
from shared.cgiscriptstub import run_cgi_script
# Thin CGI entry point: delegate request parsing/dispatch to the shared stub.
run_cgi_script(main)
|
# Print the n-th Fibonacci number (sequence 1, 1, 2, 3, ...) for a
# user-supplied n; any n <= 2 yields the seed value 1.
n = int(input('Digite o valor de n: '))
prev, curr = 1, 1
# n - 2 additional steps after the two seed values (no steps when n <= 2).
for _ in range(n - 2):
    prev, curr = curr, prev + curr
print(curr)
|
import subprocess, functools
from supermegazord.db import path
def ScriptSubprocess(path, args=()):
    """Run the script at `path` through the shell, with `args` appended.

    path -- executable/script path (first token of the command line).
    args -- iterable of extra argument strings (default: none).

    Fix: the default was a shared mutable list (`args=[]`); an immutable
    tuple is a backward-compatible replacement.
    SECURITY NOTE(review): the command line is built by concatenation and
    executed with shell=True, so argument values are interpreted by the
    shell -- never pass untrusted input here.
    """
    command = " ".join([path] + list(args))
    subprocess.call(command, shell=True)
    #print command
class ScriptArg:
    # One user-suppliable script argument: a description, a default value,
    # and an optional prefix prepended to whatever the user enters.
    # NOTE(review): written for Python 2 -- .encode("UTF-8") yields bytes on
    # Python 3, which would break the str concatenation in Parse(); confirm
    # the target interpreter before reuse.
    def __init__(self, data):
        # The bare excepts cover both a missing key and a value without
        # .encode(); each field then falls back to its default.
        try: self.description = data["description"].encode("UTF-8")
        except: self.description = "Sem descrição".encode("UTF-8")
        try: self.default = data["default"].encode("UTF-8")
        except: self.default = "";
        try: self.prefix = data["prefix"].encode("UTF-8")
        except: self.prefix = ""
    def Parse(self, input):
        # Substitute the default when the user typed nothing, then apply the
        # fixed prefix.  (`input` shadows the builtin of the same name.)
        resp = input
        if input == "":
            resp = self.default
        return self.prefix + resp
class Script:
    # Wrapper around a runnable entry from the scripts config: the "shell"
    # type executes a file via ScriptSubprocess; declared arguments become
    # ScriptArg objects in self.args.
    def __init__(self, data, megazord):
        # Default no-op callable; replaced below for known types.
        # NOTE(review): `megazord` is accepted but never used here.
        self.func = lambda args = []: False
        self.disable_curses = False
        # Simple "execute a file"
        if data["type"] == "shell":
            # The external process needs the terminal, so curses must yield.
            self.disable_curses = True
            spath = data["path"].encode("UTF-8").replace("{MEGAZORD}", path.MEGAZORD)
            self.func = functools.partial(ScriptSubprocess, spath)
        self.args = []
        if "args" in data:
            for arg in data["args"]:
                self.args.append(ScriptArg(arg))
|
class MenuItem(object):
    """A single navigation-menu entry.

    parentId is False for top-level items.  `plugin` starts as an empty
    string and is assigned later by the owning plugin.
    """

    def __init__(self, id, text, route, weight, parentId=False):
        self.id = id
        self.text = text
        self.route = route
        self.weight = weight
        self.parentId = parentId
        self.plugin = ''

    def __repr__(self):
        # Empty `plugin` short-circuits to '' instead of calling pluginId().
        plugin_id = str(self.plugin and self.plugin.pluginId())
        return "<%s/%d (%s)::'%s' (%s)>" % (
            self.id, self.weight, self.route, self.text, plugin_id)
|
import bluepy.btle as ble
import threading
import struct
def perform_scan(iface):
    """Scan BLE for 0.2 s on `iface`; return {addr: [name, mean_rssi]}."""
    scanner = ble.Scanner(iface)
    seen = {}
    # Group RSSI samples per address; the name from the first sighting wins.
    for entry in scanner.scan(0.2):
        addr = str(entry.addr)
        name = entry.getValueText(9)  # AD type 9: complete local name
        record = seen.setdefault(addr, [name, []])
        record[1].append(entry.rssi)
    # Average the samples for each device.
    averaged = {}
    for addr, (name, rssis) in seen.items():
        averaged[addr] = [name, sum(rssis) / len(rssis)]
    return averaged
class bt_inerface_le(ble.DefaultDelegate):
    # BLE transport: connects to `addr`, enables notifications on a fixed
    # characteristic handle, and pumps RX/TX on a background thread.
    # (Python 2 code: buffers are byte-strings; read()/write() translate
    # to/from lists of ints.)
    def __init__(self, addr, rx_mtu, iface):
        ble.DefaultDelegate.__init__(self)
        self.rx_buffer = ""  # bytes accumulated from notifications
        self.tx_buffer = []  # queued outgoing byte-strings
        self.delegate = self
        self.sock = ble.Peripheral(addr, ble.ADDR_TYPE_RANDOM, iface).withDelegate(self)
        #set MTU
        self.sock.setMTU(rx_mtu)
        #enable notification
        # Write 0x0001 (little-endian) to the descriptor at handle 0x0212.
        # NOTE(review): handles 0x0212/0x0211 are device-specific -- confirm
        # for other hardware.
        self.sock.writeCharacteristic(0x0212, struct.pack('<h', 0x001), withResponse=True)
        self.is_alive = True
        threading.Thread(target=self.run).start()
    def end(self):
        # Ask the pump thread to exit; it disconnects on the way out.
        self.is_alive = False
    def run(self):
        # Pump loop: poll for notifications, then flush any queued writes.
        while self.is_alive:
            self.sock.waitForNotifications(0.0005)
            while self.tx_buffer:
                data = self.tx_buffer[0]
                self.tx_buffer = self.tx_buffer[1:]
                self.sock.writeCharacteristic(0x0211, data, withResponse=True)
        self.sock.disconnect()
    def handleNotification(self, cHandle, data):
        # Accumulate into rx_buffer and mirror the raw bytes to a capture
        # file for offline inspection.
        self.rx_buffer += data
        f = open("rx_data.bin", "a")
        f.write(data)
        f.close()
        ble.DefaultDelegate.handleNotification(self, cHandle, data)
    def read(self):
        # Drain rx_buffer, returning its contents as a list of byte values.
        try:
            data = self.rx_buffer
            self.rx_buffer = ""
            return map(ord, data)
        except:
            #print "no data"
            return []
    def write(self, data):
        # Queue a list of byte values for transmission by the pump thread.
        self.tx_buffer.append("".join(map(chr, data)))
|
"""\
A Twitter reader and personal manager - Twitter API management.
"""
__metaclass__ = type
import simplejson
import twyt.twitter, twyt.data
import Common, Scheduler, Strip
class Error(Common.Error):
    # Error type raised when a Twitter API call fails (see twytcall below).
    pass
# Single shared twyt client.  Credentials are injected into these module
# globals before Twitter() is constructed.
twytter = twyt.twitter.Twitter()
user = None
password = None
class twytcall:
    # Decorator factory: wraps a Twitter-calling method so that `message` is
    # shown in the status line while the call runs, twyt exceptions are
    # converted to our Error (after being displayed), and the status line is
    # cleared afterwards.  (Python 2: old-style `except X, name` syntax;
    # uses `this` instead of `self` by local convention.)
    def __init__(this, message):
        this.message = message
    def __call__(this, func):
        def decorated(self, *args, **kws):
            self.message(this.message + '…')
            try:
                return func(self, *args, **kws)
            except twyt.twitter.TwitterException, exception:
                diagnostic = str(exception) + ', ' + this.message
                self.error(diagnostic)
                raise Error(diagnostic)
            finally:
                self.message('')
        return decorated
class Twitter:
    # Synchronous facade over the twyt API.  Every API method is wrapped by
    # @twytcall, which shows a status message and converts twyt errors to
    # Error.  (Python 2 code; GUI access goes through Common.gui.)
    # Remaining request allowances, refreshed by the get_*_limit calls.
    auth_limit = 50
    ip_limit = 50
    def __init__(self):
        twytter.set_user_agent("TweeTabs")
        twytter.set_auth(user, password)
        self.error_list = []
    def start(self):
        pass
    def message(self, message=None):
        # Show `message` in the status line (small markup), or clear it.
        if message:
            Common.gui.twitter_message_widget.set_markup(
                '<span size="small">' + Common.escape(message) + '</span>')
        else:
            Common.gui.twitter_message_widget.set_label('')
        Common.gui.refresh()
    def error(self, diagnostic):
        # Queue an error for display; start the display thread only when the
        # queue transitions from empty to non-empty.
        self.error_list.append(diagnostic)
        if len(self.error_list) == 1:
            Scheduler.Thread(self.error_thread())
    def error_thread(self):
        # Generator-based coroutine for the Scheduler: show each queued
        # error in bold red for blanking_delay seconds, blank briefly, then
        # advance to the next one.
        while self.error_list:
            diagnostic = self.error_list[0]
            Common.gui.twitter_error_widget.set_markup(
                '<span size="small" weight="bold" foreground="red">'
                + Common.escape(diagnostic) + '</span>')
            yield Common.gui.blanking_delay
            Common.gui.twitter_error_widget.set_label('')
            yield 0.2
            self.error_list.pop(0)
    ## Twitter services.
    @twytcall("getting Auth limit")
    def get_auth_limit(self):
        # Refresh the authenticated-call allowance and update the display.
        response = twyt.data.RateLimit(twytter.account_rate_limit_status(True))
        self.auth_limit = response['remaining_hits']
        self.display_limits()
    @twytcall("getting IP limit")
    def get_ip_limit(self):
        # Refresh the per-IP allowance and update the display.
        response = twyt.data.RateLimit(twytter.account_rate_limit_status(False))
        self.ip_limit = response['remaining_hits']
        self.display_limits()
    @twytcall("fetching followers")
    def fetch_followers(self, tab):
        tab.preset_strips = user_strips_from_json(
            twytter.social_graph_followers_ids())
        tab.refresh()
    @twytcall("fetching following")
    def fetch_following(self, tab):
        tab.preset_strips = user_strips_from_json(
            twytter.social_graph_friends_ids())
        tab.refresh()
    @twytcall("getting user info")
    def get_user_info(self, id):
        print "Getting", repr(id), "info"
        return twytter.user_show(id)
    # Each load_* method merges the corresponding timeline into the tab's
    # preset strips and refreshes the tab.
    @twytcall("loading direct timeline")
    def load_direct_timeline(self, tab):
        tab.preset_strips |= set(map(
            Strip.Tweet,
            twyt.data.StatusList(twytter.direct_messages())))
        tab.refresh()
    @twytcall("loading direct sent timeline")
    def load_direct_sent_timeline(self, tab):
        tab.preset_strips |= set(map(
            Strip.Tweet, twyt.data.StatusList(twytter.direct_sent())))
        tab.refresh()
    @twytcall("loading friends timeline")
    def load_friends_timeline(self, tab):
        tab.preset_strips |= set(map(
            Strip.Tweet,
            twyt.data.StatusList(twytter.status_friends_timeline())))
        tab.refresh()
    @twytcall("loading public timeline")
    def load_public_timeline(self, tab):
        tab.preset_strips |= set(map(
            Strip.Tweet,
            twyt.data.StatusList(twytter.status_public_timeline())))
        tab.refresh()
    @twytcall("loading replies timeline")
    def load_replies_timeline(self, tab):
        tab.preset_strips |= set(map(
            Strip.Tweet,
            twyt.data.StatusList(twytter.status_replies())))
        tab.refresh()
    @twytcall("loading user timeline")
    def load_user_timeline(self, tab):
        tab.preset_strips |= set(map(
            Strip.Tweet,
            twyt.data.StatusList(twytter.status_user_timeline())))
        tab.refresh()
    @twytcall("sending tweet")
    def send_tweet(self, message):
        twytter.status_update(message)
    ## Services.
    def display_limits(self):
        # Show "auth/ip" remaining-hit counters in the limits widget.
        Common.gui.twitter_limits_widget.set_markup(
            '<span size="small" foreground="gray50">%s/%s</span>'
            % (self.auth_limit, self.ip_limit))
        Common.gui.refresh()
def user_strips_from_json(json):
    # Build a User strip for every id in the JSON id list, substituting a
    # minimal placeholder when the user cache has no entry for the id.
    return set(Strip.User(Strip.user_loader.load(id) or dummy_user(id))
               for id in simplejson.loads(json))
def dummy_user(id):
    # Minimal twyt User record for an id we have no cached profile for;
    # the screen name falls back to the numeric id.
    return twyt.data.User({
        'id': id,
        'name': '',
        'screen_name': str(id),
        'location': None,
        'description': None,
        'profile_image_url': None,
        'url': None,
        'protected': False})
if Common.threaded:
    import Queue, threading

    class Threaded_Twitter(threading.Thread, Twitter):
        """Twitter manager that serialises API calls through a worker thread."""
        quit_flag = False
        # Poll interval (seconds) for the work queue; the timeout lets the
        # loop notice quit_flag periodically.  Fix: the original passed an
        # undefined global `delay` to Queue.get, raising NameError at runtime.
        poll_delay = 1.0

        def __init__(self):
            threading.Thread.__init__(self, name="Twitter manager")
            self.queue = Queue.Queue()
            Twitter.__init__(self)

        def run(self):
            # Execute queued (func, args, kws) work items until quit().
            while not self.quit_flag:
                try:
                    func, args, kws = self.queue.get(timeout=self.poll_delay)
                    func(*args, **kws)
                    self.queue.task_done()
                except Queue.Empty:
                    pass

        def enqueue(self, func, *args, **kws):
            # Thread-safe: hand a call off to the worker thread.
            self.queue.put((func, args, kws))

        def quit(self):
            # run() exits after at most poll_delay seconds.
            self.quit_flag = True
|
from json import JSONEncoder
from sys import int_info
from typing import List
class AuthUser:
    """Credentials payload used when requesting an API key from the server."""

    def __init__(self,
                 user: str = '',
                 password: str = '',
                 device: str = "nakamori"):
        self.user: str = user
        self.password: str = password
        self.device: str = device

    class Encoder(JSONEncoder):
        """Serialise any object via its __dict__."""
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json):
        """Build an AuthUser from a dict; non-dict input falls back to __dict__.

        NOTE(review): the password is read from key "pass", not "password" --
        presumably the server's wire format; confirm before changing.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return AuthUser()
        decoded: AuthUser = AuthUser()
        decoded.user = json.get("user")
        decoded.password = json.get("pass")
        decoded.device = json.get("device")
        return decoded
class QueryOptions:
    """Optional query-string parameters for the API calls in this module.

    Every parameter defaults to None, meaning "not supplied"; Decoder fills
    missing keys with None as well.
    """

    # Field names in declaration order, shared by Decoder.
    _FIELDS = ('query', 'limit', 'limit_tag', 'filter', 'tags', 'tagfilter',
               'fuzzy', 'nocast', 'notag', 'id', 'score', 'offset', 'level',
               'all', 'progress', 'status', 'ismovie', 'filename', 'hash',
               'allpics', 'pic', 'skip')

    def __init__(self,
                 query: str = None,
                 limit: int = None,
                 limit_tag: int = None,
                 filter: int = None,
                 tags: int = None,
                 tagfilter: int = None,
                 fuzzy: int = None,
                 nocast: int = None,
                 notag: int = None,
                 id: int = None,
                 score: int = None,
                 offset: int = None,
                 level: int = None,
                 all: int = None,
                 progress: int = None,
                 status: int = None,
                 ismovie: int = None,
                 filename: str = None,
                 hash: str = None,
                 allpics: int = None,
                 pic: int = None,
                 skip: int = None
                 ):
        self.query: str = query
        self.limit: int = limit
        self.limit_tag: int = limit_tag
        self.filter: int = filter
        self.tags: int = tags
        self.tagfilter: int = tagfilter
        self.fuzzy: int = fuzzy
        self.nocast: int = nocast
        self.notag: int = notag
        self.id: int = id
        self.score: int = score
        self.offset: int = offset
        self.level: int = level
        self.all: int = all
        self.progress: int = progress
        self.status: int = status
        self.ismovie: int = ismovie
        self.filename: str = filename
        self.hash: str = hash
        self.allpics: int = allpics
        self.pic: int = pic
        self.skip: int = skip

    class Encoder(JSONEncoder):
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(obj):
        """Build QueryOptions from a dict (missing keys -> None).

        Fix: the original called obj.get() unconditionally and crashed on
        non-dict input; this now falls back to obj.__dict__ like the other
        model Decoders in this module.  Decoding by field name also removes
        the fragile 22-argument positional call.
        """
        if not isinstance(obj, dict):
            try:
                obj = obj.__dict__
            except:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return QueryOptions()
        return QueryOptions(**{name: obj.get(name) for name in QueryOptions._FIELDS})
class AnimeTitle:
    """One localized title variant of an anime (Type/Language/Title)."""

    def __init__(self,
                 Type: str = '',
                 Language: str = '',
                 Title: str = ''
                 ):
        self.Type: str = Type
        self.Language: str = Language
        self.Title: str = Title

    class Encoder(JSONEncoder):
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build an AnimeTitle from a dict; non-dict input falls back to __dict__."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return AnimeTitle()
        decoded: AnimeTitle = AnimeTitle()
        # Missing keys deliberately become None (not the '' defaults).
        for field in ("Type", "Language", "Title"):
            setattr(decoded, field, json.get(field))
        return decoded
class Sizes:
    """Per-category episode counts for a series or group."""

    def __init__(self,
                 Episodes: int = 0,
                 Specials: int = 0,
                 Credits: int = 0,
                 Trailers: int = 0,
                 Parodies: int = 0,
                 Others: int = 0
                 ):
        self.Episodes: int = Episodes
        self.Specials: int = Specials
        self.Credits: int = Credits
        self.Trailers: int = Trailers
        self.Parodies: int = Parodies
        self.Others: int = Others

    class Encoder(JSONEncoder):
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build Sizes from a dict; non-dict input falls back to __dict__."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except:
                # print(json)
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return Sizes()
        decoded: Sizes = Sizes()
        # Missing keys deliberately become None (not the 0 defaults).
        for field in ("Episodes", "Specials", "Credits",
                      "Trailers", "Parodies", "Others"):
            setattr(decoded, field, json.get(field))
        return decoded
class Role:
    """A character/staff pairing for an anime (cast or staff credit)."""

    def __init__(self,
                 character: str = '',
                 character_image: str = '',
                 character_description: str = '',
                 staff: str = '',
                 staff_image: str = '',
                 staff_description: str = '',
                 role: str = '',
                 type: str = ''
                 ):
        self.character: str = character
        self.character_image: str = character_image
        self.character_description: str = character_description
        self.staff: str = staff
        self.staff_image: str = staff_image
        self.staff_description: str = staff_description
        self.role: str = role
        self.type: str = type

    class Encoder(JSONEncoder):
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a Role from a dict; non-dict input falls back to __dict__."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return Role()
        decoded: Role = Role()
        # Missing keys deliberately become None (not the '' defaults).
        for field in ("character", "character_image", "character_description",
                      "staff", "staff_image", "staff_description",
                      "role", "type"):
            setattr(decoded, field, json.get(field))
        return decoded
class Art:
    """A single artwork reference: image URL plus its index."""

    def __init__(self,
                 url: str = '',
                 index: int = 0
                 ):
        self.url: str = url
        self.index: int = index

    class Encoder(JSONEncoder):
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build an Art from a dict; non-dict input falls back to __dict__."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return Art()
        decoded: Art = Art()
        decoded.url = json.get("url")
        decoded.index = json.get("index")
        return decoded
class ArtCollection:
    """Grouped artwork lists: banners, fanart and thumbnails."""

    def __init__(self,
                 banner: List[Art] = [],
                 fanart: List[Art] = [],
                 thumb: List[Art] = []
                 ):
        # NOTE(review): mutable default lists are shared between instances
        # that rely on the defaults -- confirm callers always pass values.
        self.banner: List[Art] = banner
        self.fanart: List[Art] = fanart
        self.thumb: List[Art] = thumb

    class Encoder(JSONEncoder):
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build an ArtCollection from a dict, decoding each entry via Art.Decoder."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return ArtCollection()
        decoded: ArtCollection = ArtCollection()
        # Missing groups decode to fresh empty lists.
        for group in ("banner", "fanart", "thumb"):
            setattr(decoded, group,
                    [Art.Decoder(item) for item in json.get(group, [])])
        return decoded
class General:
    """Container-level ("General") media metadata, MediaInfo style.

    Every field defaults to a fresh empty dict per instance.  Fix: the
    original used shared mutable `{}` default arguments (the classic Python
    pitfall); None-sentinels preserve the observable `{}` defaults while
    giving each instance its own dict.
    """

    # Field names in declaration order, shared by Decoder.
    _FIELDS = ("id", "format", "format_version", "size", "duration",
               "overallbitrate", "overallbitrate_mode", "encoded",
               "encoded_date", "encoded_lib", "attachments")

    def __init__(self,
                 id=None,
                 format=None,
                 format_version=None,
                 size=None,
                 duration=None,
                 overallbitrate=None,
                 overallbitrate_mode=None,
                 encoded=None,
                 encoded_date=None,
                 encoded_lib=None,
                 attachments=None
                 ):
        self.id = {} if id is None else id
        self.format = {} if format is None else format
        self.format_version = {} if format_version is None else format_version
        self.size = {} if size is None else size
        self.duration = {} if duration is None else duration
        self.overallbitrate = {} if overallbitrate is None else overallbitrate
        self.overallbitrate_mode = {} if overallbitrate_mode is None else overallbitrate_mode
        self.encoded = {} if encoded is None else encoded
        self.encoded_date = {} if encoded_date is None else encoded_date
        self.encoded_lib = {} if encoded_lib is None else encoded_lib
        self.attachments = {} if attachments is None else attachments

    class Encoder(JSONEncoder):
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a General from a dict (missing keys -> None, as before)."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return General()
        general: General = General()
        for field in General._FIELDS:
            setattr(general, field, json.get(field))
        return general
class Stream:
    """One media track (video, audio, subtitle or menu) from a media-info
    report. Every constructor parameter maps 1:1 onto an identically
    named attribute; all defaults are immutable scalars."""

    # Keys copied verbatim from the JSON payload by Decoder (same set and
    # order as the constructor parameters).
    _FIELDS = ("Title", "Language", "Key", "Duration", "Height", "Width",
               "Bitrate", "SubIndex", "Id", "ScanType", "RefFrames",
               "Profile", "Level", "HeaderStripping", "HasScalingMatrix",
               "FrameRateMode", "File", "FrameRate", "ColorSpace",
               "CodecID", "ChromaSubsampling", "Cabac", "BitDepth",
               "Index", "Codec", "StreamType", "Orientation", "QPel",
               "GMC", "BVOP", "SamplingRate", "LanguageCode", "Channels",
               "Selected", "DialogNorm", "BitrateMode", "Format",
               "Default", "Forced", "PixelAspectRatio")

    def __init__(self,
                 Title: str = '',
                 Language: str = '',
                 Key: str = '',
                 Duration: int = 0,
                 Height: int = 0,
                 Width: int = 0,
                 Bitrate: int = 0,
                 SubIndex: int = 0,
                 Id: int = 0,
                 ScanType: str = '',
                 RefFrames: int = 0,
                 Profile: str = '',
                 Level: int = 0,
                 HeaderStripping: int = 0,
                 HasScalingMatrix: int = 0,
                 FrameRateMode: str = '',
                 File: str = '',
                 FrameRate: float = 0.0,
                 ColorSpace: str = '',
                 CodecID: str = '',
                 ChromaSubsampling: str = '',
                 Cabac: int = 0,
                 BitDepth: int = 0,
                 Index: int = 0,
                 Codec: str = '',
                 StreamType: int = 0,
                 Orientation: int = 0,
                 QPel: int = 0,
                 GMC: str = '',
                 BVOP: int = 0,
                 SamplingRate: int = 0,
                 LanguageCode: str = '',
                 Channels: int = 0,
                 Selected: int = 0,
                 DialogNorm: str = '',
                 BitrateMode: str = '',
                 Format: str = '',
                 Default: int = 0,
                 Forced: int = 0,
                 PixelAspectRatio: str = '',
                 ):
        # Each parameter becomes an identically named attribute, in
        # declaration order (keeps __dict__/__repr__ ordering stable).
        for pname, pvalue in list(locals().items()):
            if pname != "self":
                setattr(self, pname, pvalue)

    class Encoder(JSONEncoder):
        """json.dumps helper serializing via the instance __dict__."""
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Decode a JSON dict (or object with __dict__) into a Stream.

        Missing keys become None; unconvertible input yields a default Stream.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except AttributeError:  # narrowed from a bare except
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return Stream()
        stream = Stream()
        for field in Stream._FIELDS:
            setattr(stream, field, json.get(field))
        return stream
class MediaInfo:
    """Aggregated media-info: the general section plus per-type stream lists.

    BUG FIXED: the original used shared mutable [] defaults AND its Decoder
    appended to them without resetting, so every decode accumulated streams
    into lists shared by all MediaInfo instances. Fresh per-instance
    containers plus rebuilt lists in Decoder remove both halves of the bug.
    """

    def __init__(self,
                 general: General = None,
                 audios: List[Stream] = None,
                 videos: List[Stream] = None,
                 subtitles: List[Stream] = None,
                 menus: List[Stream] = None
                 ):
        self.general: General = {} if general is None else general
        self.audios: List[Stream] = [] if audios is None else audios
        self.videos: List[Stream] = [] if videos is None else videos
        self.subtitles: List[Stream] = [] if subtitles is None else subtitles
        self.menus: List[Stream] = [] if menus is None else menus

    class Encoder(JSONEncoder):
        """json.dumps helper serializing via the instance __dict__."""
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Decode a JSON dict (or object with __dict__) into a MediaInfo."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except AttributeError:  # narrowed from a bare except
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return MediaInfo()
        mediainfo = MediaInfo()
        mediainfo.general = General.Decoder(json.get("general"))
        mediainfo.audios = [Stream.Decoder(a) for a in json.get("audios", [])]
        mediainfo.videos = [Stream.Decoder(v) for v in json.get("videos", [])]
        mediainfo.subtitles = [Stream.Decoder(s) for s in json.get("subtitles", [])]
        mediainfo.menus = [Stream.Decoder(m) for m in json.get("menus", [])]
        return mediainfo
class RawFile:
    """A physical file known to the server: hashes, import location, media
    info, plus the shared metadata block (titles, sizes, ratings, art)."""

    # Scalar keys copied verbatim from the JSON payload by Decoder.
    _PLAIN_FIELDS = (
        "type", "crc32", "ed2khash", "md5", "sha1", "created", "updated",
        "duration", "filename", "server_path", "hash", "hash_source",
        "is_ignored", "group_full", "group_short", "group_id", "recognized",
        "offset", "videolocal_place_id", "import_folder_id", "is_preferred",
        "id", "name", "summary", "url", "added", "edited", "year", "air",
        "size", "localsize", "viewed", "rating", "votes", "userrating")

    def __init__(self,
                 type: str = '',
                 crc32: str = '',
                 ed2khash: str = '',
                 md5: str = '',
                 sha1: str = '',
                 created: str = '',
                 updated: str = '',
                 duration: int = 0,
                 filename: str = '',
                 server_path: str = '',
                 size: int = 0,
                 hash: str = '',
                 hash_source: int = 0,
                 is_ignored: int = 0,
                 media: MediaInfo = None,
                 group_full: str = '',
                 group_short: str = '',
                 group_id: int = 0,
                 recognized: bool = False,
                 offset: int = 0,
                 videolocal_place_id: int = 0,
                 import_folder_id: int = 0,
                 is_preferred: int = 0,
                 id: int = 0,
                 name: str = '',
                 titles: List[AnimeTitle] = None,
                 summary: str = '',
                 url: str = '',
                 added: str = '',
                 edited: str = '',
                 year: str = '',
                 air: str = '',
                 localsize: int = 0,
                 total_sizes: Sizes = None,
                 local_sizes: Sizes = None,
                 watched_sizes: Sizes = None,
                 viewed: int = 0,
                 rating: str = '',
                 votes: str = '',
                 userrating: str = '',
                 roles: List[Role] = None,
                 tags: List[str] = None,
                 art: ArtCollection = {}
                 ):
        # None sentinels replace the original shared mutable defaults
        # ([] / {}) so instances never alias each other's containers.
        # Attribute assignment order is preserved from the original
        # (note: `size` is set late, between `air` and `localsize`).
        self.type: str = type
        self.crc32: str = crc32
        self.ed2khash: str = ed2khash
        self.md5: str = md5
        self.sha1: str = sha1
        self.created: str = created
        self.updated: str = updated
        self.duration: int = duration
        self.filename: str = filename
        self.server_path: str = server_path
        self.hash: str = hash
        self.hash_source: int = hash_source
        self.is_ignored: int = is_ignored
        self.media: MediaInfo = {} if media is None else media
        self.group_full: str = group_full
        self.group_short: str = group_short
        self.group_id: int = group_id
        self.recognized: bool = recognized
        self.offset: int = offset
        self.videolocal_place_id: int = videolocal_place_id
        self.import_folder_id: int = import_folder_id
        self.is_preferred: int = is_preferred
        self.id: int = id
        self.name: str = name
        self.titles: List[AnimeTitle] = [] if titles is None else titles
        self.summary: str = summary
        self.url: str = url
        self.added: str = added
        self.edited: str = edited
        self.year: str = year
        self.air: str = air
        self.size: int = size
        self.localsize: int = localsize
        self.total_sizes: Sizes = {} if total_sizes is None else total_sizes
        self.local_sizes: Sizes = {} if local_sizes is None else local_sizes
        self.watched_sizes: Sizes = {} if watched_sizes is None else watched_sizes
        self.viewed: int = viewed
        self.rating: str = rating
        self.votes: str = votes
        self.userrating: str = userrating
        self.roles: List[Role] = [] if roles is None else roles
        self.tags: List[str] = [] if tags is None else tags
        self.art: ArtCollection = art

    class Encoder(JSONEncoder):
        """json.dumps helper serializing via the instance __dict__."""
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Decode a JSON dict (or object with __dict__) into a RawFile.

        Scalar keys are copied as-is (missing -> None); nested structures
        are decoded through their own Decoder helpers.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except AttributeError:  # narrowed from a bare except
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return RawFile()
        rawfile = RawFile()
        for field in RawFile._PLAIN_FIELDS:
            setattr(rawfile, field, json.get(field))
        rawfile.media = MediaInfo.Decoder(json.get("media"))
        rawfile.titles = [AnimeTitle.Decoder(t) for t in json.get("titles", [])]
        rawfile.total_sizes = Sizes.Decoder(json.get('total_sizes'))
        rawfile.local_sizes = Sizes.Decoder(json.get('local_sizes'))
        rawfile.watched_sizes = Sizes.Decoder(json.get('watched_sizes'))
        rawfile.roles = [Role.Decoder(r) for r in json.get("roles", [])]
        rawfile.tags = list(json.get("tags", []))
        rawfile.art = ArtCollection.Decoder(json.get("art"))
        return rawfile
class RecentFile:
    """A recently added file: RawFile-style data plus the owning series
    and episode ids."""

    # Scalar keys copied verbatim from the JSON payload by Decoder.
    _PLAIN_FIELDS = (
        "series_id", "ep_id", "type", "crc32", "ed2khash", "md5", "sha1",
        "created", "updated", "duration", "filename", "server_path", "hash",
        "hash_source", "is_ignored", "group_full", "group_short", "group_id",
        "recognized", "offset", "videolocal_place_id", "import_folder_id",
        "is_preferred", "id", "name", "summary", "url", "added", "edited",
        "year", "air", "size", "localsize", "viewed", "rating", "votes",
        "userrating")

    def __init__(self,
                 series_id: int = 0,
                 ep_id: int = 0,
                 type: str = '',
                 crc32: str = '',
                 ed2khash: str = '',
                 md5: str = '',
                 sha1: str = '',
                 created: str = '',
                 updated: str = '',
                 duration: int = 0,
                 filename: str = '',
                 server_path: str = '',
                 size: int = 0,
                 hash: str = '',
                 hash_source: int = 0,
                 is_ignored: int = 0,
                 media: MediaInfo = None,
                 group_full: str = '',
                 group_short: str = '',
                 group_id: int = 0,
                 recognized: bool = False,
                 offset: int = 0,
                 videolocal_place_id: int = 0,
                 import_folder_id: int = 0,
                 is_preferred: int = 0,
                 id: int = 0,
                 name: str = '',
                 titles: List[AnimeTitle] = None,
                 summary: str = '',
                 url: str = '',
                 added: str = '',
                 edited: str = '',
                 year: str = '',
                 air: str = '',
                 localsize: int = 0,
                 total_sizes: Sizes = None,
                 local_sizes: Sizes = None,
                 watched_sizes: Sizes = None,
                 viewed: int = 0,
                 rating: str = '',
                 votes: str = '',
                 userrating: str = '',
                 roles: List[Role] = None,
                 tags: List[str] = None,
                 art: ArtCollection = {}
                 ):
        # None sentinels replace the original shared mutable defaults
        # ([] / {}) so instances never alias each other's containers.
        # Attribute order matches the original (`size` is set late).
        self.series_id: int = series_id
        self.ep_id: int = ep_id
        self.type: str = type
        self.crc32: str = crc32
        self.ed2khash: str = ed2khash
        self.md5: str = md5
        self.sha1: str = sha1
        self.created: str = created
        self.updated: str = updated
        self.duration: int = duration
        self.filename: str = filename
        self.server_path: str = server_path
        self.hash: str = hash
        self.hash_source: int = hash_source
        self.is_ignored: int = is_ignored
        self.media: MediaInfo = {} if media is None else media
        self.group_full: str = group_full
        self.group_short: str = group_short
        self.group_id: int = group_id
        self.recognized: bool = recognized
        self.offset: int = offset
        self.videolocal_place_id: int = videolocal_place_id
        self.import_folder_id: int = import_folder_id
        self.is_preferred: int = is_preferred
        self.id: int = id
        self.name: str = name
        self.titles: List[AnimeTitle] = [] if titles is None else titles
        self.summary: str = summary
        self.url: str = url
        self.added: str = added
        self.edited: str = edited
        self.year: str = year
        self.air: str = air
        self.size: int = size
        self.localsize: int = localsize
        self.total_sizes: Sizes = {} if total_sizes is None else total_sizes
        self.local_sizes: Sizes = {} if local_sizes is None else local_sizes
        self.watched_sizes: Sizes = {} if watched_sizes is None else watched_sizes
        self.viewed: int = viewed
        self.rating: str = rating
        self.votes: str = votes
        self.userrating: str = userrating
        self.roles: List[Role] = [] if roles is None else roles
        self.tags: List[str] = [] if tags is None else tags
        self.art: ArtCollection = art

    class Encoder(JSONEncoder):
        """json.dumps helper serializing via the instance __dict__."""
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Decode a JSON dict (or object with __dict__) into a RecentFile.

        Scalar keys are copied as-is (missing -> None); nested structures
        are decoded through their own Decoder helpers.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except AttributeError:  # narrowed from a bare except
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return RecentFile()
        recentfile = RecentFile()
        for field in RecentFile._PLAIN_FIELDS:
            setattr(recentfile, field, json.get(field))
        recentfile.media = MediaInfo.Decoder(json.get("media"))
        recentfile.titles = [AnimeTitle.Decoder(t) for t in json.get("titles", [])]
        recentfile.total_sizes = Sizes.Decoder(json.get('total_sizes'))
        recentfile.local_sizes = Sizes.Decoder(json.get('local_sizes'))
        recentfile.watched_sizes = Sizes.Decoder(json.get('watched_sizes'))
        recentfile.roles = [Role.Decoder(r) for r in json.get("roles", [])]
        recentfile.tags = list(json.get("tags", []))
        recentfile.art = ArtCollection.Decoder(json.get("art"))
        return recentfile
class Episode:
    """One episode: its files plus the shared metadata block."""

    def __init__(self,
                 type: str = '',
                 season: str = '',
                 view: int = 0,
                 view_date: str = '',
                 eptype: str = '',
                 epnumber: int = 0,
                 aid: int = 0,
                 eid: int = 0,
                 files: List[RawFile] = None,
                 id: int = 0,
                 name: str = '',
                 titles: List[AnimeTitle] = None,
                 summary: str = '',
                 url: str = '',
                 added: str = '',
                 edited: str = '',
                 year: str = '',
                 air: str = '',
                 size: int = 0,
                 localsize: int = 0,
                 total_sizes: Sizes = None,
                 local_sizes: Sizes = None,
                 watched_sizes: Sizes = None,
                 viewed: int = 0,
                 rating: str = '',
                 votes: str = '',
                 userrating: int = 0,
                 roles: List[Role] = None,
                 tags: List[str] = None,
                 art: ArtCollection = None
                 ):
        # Container-valued parameters default to None and are replaced with
        # fresh containers here, avoiding the shared-mutable-default trap.
        fresh = {"files": list, "titles": list, "total_sizes": dict,
                 "local_sizes": dict, "watched_sizes": dict,
                 "roles": list, "tags": list, "art": dict}
        for pname, pvalue in list(locals().items()):
            if pname in ("self", "fresh"):
                continue
            if pvalue is None and pname in fresh:
                pvalue = fresh[pname]()
            setattr(self, pname, pvalue)

    class Encoder(JSONEncoder):
        """json.dumps helper serializing via the instance __dict__."""
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Decode a JSON dict (or object with __dict__) into an Episode."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except AttributeError:  # narrowed from a bare except
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return Episode()
        episode = Episode()
        for field in ("type", "season", "view", "view_date", "eptype",
                      "epnumber", "aid", "eid", "id", "name", "summary",
                      "url", "added", "edited", "year", "air", "size",
                      "localsize", "viewed", "rating", "votes"):
            setattr(episode, field, json.get(field))
        episode.files = [RawFile.Decoder(f) for f in json.get("files", [])]
        episode.titles = [AnimeTitle.Decoder(t) for t in json.get("titles", [])]
        episode.total_sizes = Sizes.Decoder(json.get('total_sizes'))
        episode.local_sizes = Sizes.Decoder(json.get('local_sizes'))
        episode.watched_sizes = Sizes.Decoder(json.get('watched_sizes'))
        # userrating has an explicit 0 fallback (unlike the other scalars).
        episode.userrating = json.get("userrating", 0)
        episode.roles = [Role.Decoder(r) for r in json.get("roles", [])]
        episode.tags = list(json.get("tags", []))
        episode.art = ArtCollection.Decoder(json.get("art"))
        return episode
class Serie:
    """One series: its episodes plus the shared metadata block."""

    def __init__(self,
                 type: str = '',
                 aid: int = 0,
                 season: str = '',
                 eps: List[Episode] = None,
                 ismovie: int = 0,
                 filesize: int = 0,
                 id: int = 0,
                 name: str = '',
                 titles: List[AnimeTitle] = None,
                 summary: str = '',
                 match: str = '',
                 url: str = '',
                 added: str = '',
                 edited: str = '',
                 year: str = '',
                 air: str = '',
                 size: int = 0,
                 localsize: int = 0,
                 total_sizes: Sizes = None,
                 local_sizes: Sizes = None,
                 watched_sizes: Sizes = None,
                 viewed: int = 0,
                 rating: str = '',
                 votes: str = '',
                 userrating: int = 0,
                 roles: List[Role] = None,
                 tags: List[str] = None,
                 art: ArtCollection = None
                 ):
        # Container-valued parameters default to None and are replaced with
        # fresh containers here, avoiding the shared-mutable-default trap.
        fresh = {"eps": list, "titles": list, "total_sizes": dict,
                 "local_sizes": dict, "watched_sizes": dict,
                 "roles": list, "tags": list, "art": dict}
        for pname, pvalue in list(locals().items()):
            if pname in ("self", "fresh"):
                continue
            if pvalue is None and pname in fresh:
                pvalue = fresh[pname]()
            setattr(self, pname, pvalue)

    class Encoder(JSONEncoder):
        """json.dumps helper serializing via the instance __dict__."""
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Decode a JSON dict (or object with __dict__) into a Serie."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except AttributeError:  # narrowed from a bare except
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return Serie()
        serie = Serie()
        for field in ("type", "aid", "season", "ismovie", "filesize", "id",
                      "name", "summary", "match", "url", "added", "edited",
                      "year", "air", "size", "localsize", "viewed",
                      "rating", "votes"):
            setattr(serie, field, json.get(field))
        serie.eps = [Episode.Decoder(e) for e in json.get("eps", [])]
        serie.titles = [AnimeTitle.Decoder(t) for t in json.get("titles", [])]
        serie.total_sizes = Sizes.Decoder(json.get('total_sizes'))
        serie.local_sizes = Sizes.Decoder(json.get('local_sizes'))
        serie.watched_sizes = Sizes.Decoder(json.get('watched_sizes'))
        # userrating has an explicit 0 fallback (unlike the other scalars).
        serie.userrating = json.get("userrating", 0)
        serie.roles = [Role.Decoder(r) for r in json.get("roles", [])]
        serie.tags = list(json.get("tags", []))
        serie.art = ArtCollection.Decoder(json.get("art"))
        return serie
class Group:
    """A group of series plus the shared metadata block."""

    def __init__(self,
                 series: List[Serie] = None,
                 type: str = '',
                 id: int = 0,
                 name: str = '',
                 titles: List[AnimeTitle] = None,
                 summary: str = '',
                 url: str = '',
                 added: str = '',
                 edited: str = '',
                 year: str = '',
                 air: str = '',
                 size: int = 0,
                 localsize: int = 0,
                 total_sizes: Sizes = None,
                 local_sizes: Sizes = None,
                 watched_sizes: Sizes = None,
                 viewed: int = 0,
                 rating: str = '',
                 votes: str = '',
                 userrating: str = '',
                 roles: List[Role] = None,
                 tags: List[str] = None,
                 art: ArtCollection = None
                 ):
        # Container-valued parameters default to None and are replaced with
        # fresh containers here, avoiding the shared-mutable-default trap.
        fresh = {"series": list, "titles": list, "total_sizes": dict,
                 "local_sizes": dict, "watched_sizes": dict,
                 "roles": list, "tags": list, "art": dict}
        for pname, pvalue in list(locals().items()):
            if pname in ("self", "fresh"):
                continue
            if pvalue is None and pname in fresh:
                pvalue = fresh[pname]()
            setattr(self, pname, pvalue)

    class Encoder(JSONEncoder):
        """json.dumps helper serializing via the instance __dict__."""
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Decode a JSON dict (or object with __dict__) into a Group."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except AttributeError:  # narrowed from a bare except
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return Group()
        group = Group()
        for field in ("type", "id", "name", "summary", "url", "added",
                      "edited", "year", "air", "size", "localsize",
                      "viewed", "rating", "votes", "userrating"):
            setattr(group, field, json.get(field))
        group.series = [Serie.Decoder(s) for s in json.get("series", [])]
        group.titles = [AnimeTitle.Decoder(t) for t in json.get("titles", [])]
        group.total_sizes = Sizes.Decoder(json.get("total_sizes"))
        group.local_sizes = Sizes.Decoder(json.get("local_sizes"))
        group.watched_sizes = Sizes.Decoder(json.get("watched_sizes"))
        group.roles = [Role.Decoder(r) for r in json.get("roles", [])]
        group.tags = list(json.get("tags", []))
        group.art = ArtCollection.Decoder(json.get("art"))
        return group
class Filter:
    """A server-side filter: groups plus (recursively) nested sub-filters,
    and the shared metadata block."""

    def __init__(self,
                 type: str = '',
                 groups: List[Group] = None,
                 filters: list = None,
                 id: int = 0,
                 name: str = '',
                 titles: List[AnimeTitle] = None,
                 summary: str = '',
                 url: str = '',
                 added: str = '',
                 edited: str = '',
                 year: str = '',
                 air: str = '',
                 size: int = 0,
                 localsize: int = 0,
                 total_sizes: Sizes = None,
                 local_sizes: Sizes = None,
                 watched_sizes: Sizes = None,
                 viewed: int = 0,
                 rating: str = '',
                 votes: str = '',
                 userrating: str = '',
                 roles: List[Role] = None,
                 tags: List[str] = None,
                 art: ArtCollection = None
                 ):
        # Container-valued parameters default to None and are replaced with
        # fresh containers here, avoiding the shared-mutable-default trap.
        # (The original defaulted the three *_sizes fields to [], so lists
        # are kept for them to preserve behavior.)
        fresh = {"groups": list, "filters": list, "titles": list,
                 "total_sizes": list, "local_sizes": list,
                 "watched_sizes": list, "roles": list, "tags": list,
                 "art": dict}
        for pname, pvalue in list(locals().items()):
            if pname in ("self", "fresh"):
                continue
            if pvalue is None and pname in fresh:
                pvalue = fresh[pname]()
            setattr(self, pname, pvalue)

    class Encoder(JSONEncoder):
        """json.dumps helper serializing via the instance __dict__."""
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Decode a JSON dict (or object with __dict__) into a Filter.

        Nested "filters" entries are decoded recursively.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except AttributeError:  # narrowed from a bare except
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return Filter()
        # Local renamed from `filter` to avoid shadowing the builtin.
        flt = Filter(id=json.get("id"), name=json.get("name"), type=json.get('type'))
        flt.groups = [Group.Decoder(g) for g in json.get("groups", [])]
        flt.filters = [Filter.Decoder(f) for f in json.get("filters", [])]
        flt.titles = [AnimeTitle.Decoder(t) for t in json.get("titles", [])]
        for field in ("summary", "url", "added", "edited", "year", "air",
                      "size", "localsize", "viewed", "rating", "votes",
                      "userrating"):
            setattr(flt, field, json.get(field))
        flt.total_sizes = Sizes.Decoder(json.get('total_sizes'))
        flt.local_sizes = Sizes.Decoder(json.get('local_sizes'))
        flt.watched_sizes = Sizes.Decoder(json.get('watched_sizes'))
        flt.roles = [Role.Decoder(r) for r in json.get("roles", [])]
        flt.tags = list(json.get("tags", []))
        flt.art = ArtCollection.Decoder(json.get("art"))
        return flt
class ImportFolder:
    """A configured import folder (location, type, drop-source flags)."""

    # Keys copied verbatim from the JSON payload by Decoder.
    _FIELDS = ("ImportFolderID", "ImportFolderType", "ImportFolderName",
               "ImportFolderLocation", "CloudID", "IsWatched",
               "IsDropSource", "IsDropDestination")

    def __init__(self,
                 ImportFolderID: int = 0,
                 ImportFolderType: int = 0,
                 ImportFolderName: str = '',
                 ImportFolderLocation: str = '',
                 CloudID: int = 0,
                 IsWatched: int = 0,
                 IsDropSource: int = 0,
                 IsDropDestination: int = 0
                 ):
        # Each parameter maps 1:1 onto an identically named attribute.
        for pname, pvalue in list(locals().items()):
            if pname != "self":
                setattr(self, pname, pvalue)

    class Encoder(JSONEncoder):
        """json.dumps helper serializing via the instance __dict__."""
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Decode a JSON dict (or object with __dict__) into an ImportFolder."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except AttributeError:  # narrowed from a bare except
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return ImportFolder()
        importfolder = ImportFolder()
        for field in ImportFolder._FIELDS:
            setattr(importfolder, field, json.get(field))
        return importfolder
class Counter:
    """A plain integer count wrapper returned by counting endpoints."""

    def __init__(self,
                 count: int = 0
                 ):
        self.count: int = count

    class Encoder(JSONEncoder):
        """json.dumps helper serializing via the instance __dict__."""
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Decode a JSON dict (or object with __dict__) into a Counter."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except AttributeError:  # narrowed from a bare except
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return Counter()
        counter = Counter()
        counter.count = json.get("count")
        return counter
class WebNews:
    """A single news item (date, link, title, description, author)."""

    _FIELDS = ("date", "link", "title", "description", "author")

    def __init__(self,
                 date: str = '',
                 link: str = '',
                 title: str = '',
                 description: str = '',
                 author: str = ''
                 ):
        # Each parameter maps 1:1 onto an identically named attribute.
        for pname, pvalue in list(locals().items()):
            if pname != "self":
                setattr(self, pname, pvalue)

    class Encoder(JSONEncoder):
        """json.dumps helper serializing via the instance __dict__."""
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Decode a JSON dict (or object with __dict__) into a WebNews."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except AttributeError:  # narrowed from a bare except
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return WebNews()
        webnews = WebNews()
        for field in WebNews._FIELDS:
            setattr(webnews, field, json.get(field))
        return webnews
class QueueInfo:
    """State of one server work queue (count, state, running/paused)."""

    _FIELDS = ("count", "state", "isrunning", "ispause")

    def __init__(self,
                 count: int = 0,
                 state: str = '',
                 isrunning: str = '',
                 ispause: str = ''
                 ):
        # Each parameter maps 1:1 onto an identically named attribute.
        for pname, pvalue in list(locals().items()):
            if pname != "self":
                setattr(self, pname, pvalue)

    class Encoder(JSONEncoder):
        """json.dumps helper serializing via the instance __dict__."""
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Decode a JSON dict (or object with __dict__) into a QueueInfo."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except AttributeError:  # narrowed from a bare except
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return QueueInfo()
        queueinfo = QueueInfo()
        for field in QueueInfo._FIELDS:
            setattr(queueinfo, field, json.get(field))
        return queueinfo
class SeriesInFolderInfo:
    """Per-series usage inside one import folder (sizes and file paths)."""

    def __init__(self,
                 name: str = '',
                 id: int = 0,
                 filesize: int = 0,
                 size: int = 0,
                 paths: List[str] = None
                 ):
        self.name: str = name
        self.id: int = id
        self.filesize: int = filesize
        self.size: int = size
        # None sentinel -> fresh list per instance (the original shared a
        # single mutable [] default between all instances).
        self.paths: List[str] = [] if paths is None else paths

    class Encoder(JSONEncoder):
        """json.dumps helper serializing via the instance __dict__."""
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Decode a JSON dict (or object with __dict__) into a SeriesInFolderInfo."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except AttributeError:  # narrowed from a bare except
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return SeriesInFolderInfo()
        seriesinfolderinfo = SeriesInFolderInfo()
        seriesinfolderinfo.name = json.get("name")
        seriesinfolderinfo.id = json.get("id")
        seriesinfolderinfo.filesize = json.get("filesize")
        seriesinfolderinfo.size = json.get("size")
        seriesinfolderinfo.paths = list(json.get("paths", []))
        return seriesinfolderinfo
class FolderInfo:
    """JSON-serializable DTO for an import folder and the series it contains."""

    def __init__(self,
                 id: int = 0,
                 filesize: int = 0,
                 size: int = 0,
                 series: List[SeriesInFolderInfo] = None
                 ):
        # NOTE: default changed from `{}` — which was both a shared mutable
        # default and the wrong type for a List — to a per-instance empty list.
        self.id: int = id
        self.filesize: int = filesize  # total bytes on disk
        self.size: int = size          # number of files
        self.series: List[SeriesInFolderInfo] = series if series is not None else []

    class Encoder(JSONEncoder):
        """json.dump* helper: serialize instances via their __dict__."""

        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a FolderInfo from a dict (or object exposing __dict__).

        Nested "series" entries are decoded into SeriesInFolderInfo objects.
        Missing keys decode to None; unusable input yields a default instance.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
            except AttributeError:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return FolderInfo()
        folderinfo = FolderInfo()
        folderinfo.id = json.get("id")
        folderinfo.filesize = json.get("filesize")
        folderinfo.size = json.get("size")
        folderinfo.series = [SeriesInFolderInfo.Decoder(serie) for serie in json.get("series", [])]
        return folderinfo
class ImagePath:
    """JSON-serializable DTO pairing an image path with its is-default flag."""

    def __init__(self,
                 path: str = '',
                 isdefault: bool = False
                 ):
        self.path: str = path
        self.isdefault: bool = isdefault

    class Encoder(JSONEncoder):
        """json.dump* helper: serialize instances via their __dict__."""

        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build an ImagePath from a dict (or object exposing __dict__).

        Missing keys decode to None; unusable input yields a default instance.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
            except AttributeError:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return ImagePath()
        imagepath = ImagePath()
        # attribute names match the JSON keys one-to-one
        for key in vars(imagepath):
            setattr(imagepath, key, json.get(key))
        return imagepath
class LogRotatorSettings:
    """JSON-serializable DTO for the server's log-rotation settings."""

    def __init__(self,
                 Enabled: bool = True,
                 Zip: bool = True,
                 Delete: bool = True,
                 Delete_Days: str = ''
                 ):
        self.Enabled: bool = Enabled
        self.Zip: bool = Zip
        self.Delete: bool = Delete
        self.Delete_Days: str = Delete_Days  # retention period kept as a string (API convention)

    class Encoder(JSONEncoder):
        """json.dump* helper: serialize instances via their __dict__."""

        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a LogRotatorSettings from a dict (or object exposing __dict__).

        Missing keys decode to None; unusable input yields a default instance.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
            except AttributeError:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return LogRotatorSettings()
        logrotatorsettings = LogRotatorSettings()
        # attribute names match the JSON keys one-to-one
        for key in vars(logrotatorsettings):
            setattr(logrotatorsettings, key, json.get(key))
        return logrotatorsettings
class DatabaseSettings:
    """JSON-serializable DTO for the server's database connection settings."""

    def __init__(self,
                 MySqliteDirectory: str = '',
                 DatabaseBackupDirectory: str = '',
                 Type: str = '',
                 Username: str = '',
                 Password: str = '',
                 Schema: str = '',
                 Hostname: str = '',
                 SQLite_DatabaseFile: str = ''
                 ):
        self.MySqliteDirectory: str = MySqliteDirectory
        self.DatabaseBackupDirectory: str = DatabaseBackupDirectory
        self.Type: str = Type  # backend identifier (e.g. which DB engine is used)
        self.Username: str = Username
        self.Password: str = Password
        self.Schema: str = Schema
        self.Hostname: str = Hostname
        self.SQLite_DatabaseFile: str = SQLite_DatabaseFile

    class Encoder(JSONEncoder):
        """json.dump* helper: serialize instances via their __dict__."""

        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a DatabaseSettings from a dict (or object exposing __dict__).

        Missing keys decode to None; unusable input yields a default instance.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
            except AttributeError:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return DatabaseSettings()
        databasesettings = DatabaseSettings()
        # attribute names match the JSON keys one-to-one
        for key in vars(databasesettings):
            setattr(databasesettings, key, json.get(key))
        return databasesettings
class AniDBSettings:
    """JSON-serializable DTO for the server's AniDB account and sync settings."""

    def __init__(self,
                 Username: str = '',
                 Password: str = '',
                 ServerAddress: str = '',
                 ServerPort: int = 0,
                 ClientPort: int = 0,
                 AVDumpKey: str = '',
                 AVDumpClientPort: int = 0,
                 DownloadRelatedAnime: bool = True,
                 DownloadSimilarAnime: bool = True,
                 DownloadReviews: bool = True,
                 DownloadReleaseGroups: bool = True,
                 MyList_AddFiles: bool = True,
                 MyList_StorageState: int = 0,
                 MyList_DeleteType: int = 0,
                 MyList_ReadUnwatched: bool = True,
                 MyList_ReadWatched: bool = True,
                 MyList_SetWatched: bool = True,
                 MyList_SetUnwatched: bool = True,
                 MyList_UpdateFrequency: int = 1,
                 Calendar_UpdateFrequency: int = 1,
                 Anime_UpdateFrequency: int = 1,
                 MyListStats_UpdateFrequency: int = 1,
                 File_UpdateFrequency: int = 1,
                 DownloadCharacters: bool = True,
                 DownloadCreators: bool = True,
                 MaxRelationDepth: int = 0
                 ):
        self.Username: str = Username
        self.Password: str = Password
        self.ServerAddress: str = ServerAddress
        self.ServerPort: int = ServerPort
        self.ClientPort: int = ClientPort
        self.AVDumpKey: str = AVDumpKey
        self.AVDumpClientPort: int = AVDumpClientPort
        self.DownloadRelatedAnime: bool = DownloadRelatedAnime
        self.DownloadSimilarAnime: bool = DownloadSimilarAnime
        self.DownloadReviews: bool = DownloadReviews
        self.DownloadReleaseGroups: bool = DownloadReleaseGroups
        self.MyList_AddFiles: bool = MyList_AddFiles
        self.MyList_StorageState: int = MyList_StorageState
        self.MyList_DeleteType: int = MyList_DeleteType
        self.MyList_ReadUnwatched: bool = MyList_ReadUnwatched
        self.MyList_ReadWatched: bool = MyList_ReadWatched
        self.MyList_SetWatched: bool = MyList_SetWatched
        self.MyList_SetUnwatched: bool = MyList_SetUnwatched
        self.MyList_UpdateFrequency: int = MyList_UpdateFrequency
        self.Calendar_UpdateFrequency: int = Calendar_UpdateFrequency
        self.Anime_UpdateFrequency: int = Anime_UpdateFrequency
        self.MyListStats_UpdateFrequency: int = MyListStats_UpdateFrequency
        self.File_UpdateFrequency: int = File_UpdateFrequency
        self.DownloadCharacters: bool = DownloadCharacters
        self.DownloadCreators: bool = DownloadCreators
        self.MaxRelationDepth: int = MaxRelationDepth

    class Encoder(JSONEncoder):
        """json.dump* helper: serialize instances via their __dict__."""

        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build an AniDBSettings from a dict (or object exposing __dict__).

        Missing keys decode to None; unusable input yields a default instance.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
            except AttributeError:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return AniDBSettings()
        anidbsettings = AniDBSettings()
        # attribute names match the JSON keys one-to-one; a reflection loop
        # replaces 26 copy-pasted assignments
        for key in vars(anidbsettings):
            setattr(anidbsettings, key, json.get(key))
        return anidbsettings
class WebCacheSettings:
    """JSON-serializable DTO for the server's web-cache settings."""

    def __init__(self,
                 Enabled: bool = True,
                 Address: str = '',
                 BannedReason: str = '',
                 BannedExpiration: str = '',
                 XRefFileEpisode_Get: bool = True,
                 XRefFileEpisode_Send: bool = True,
                 TvDB_Get: bool = True,
                 TvDB_Send: bool = True,
                 Trakt_Get: bool = True,
                 Trakt_Send: bool = True
                 ):
        self.Enabled: bool = Enabled
        self.Address: str = Address
        self.BannedReason: str = BannedReason
        self.BannedExpiration: str = BannedExpiration
        self.XRefFileEpisode_Get: bool = XRefFileEpisode_Get
        self.XRefFileEpisode_Send: bool = XRefFileEpisode_Send
        self.TvDB_Get: bool = TvDB_Get
        self.TvDB_Send: bool = TvDB_Send
        self.Trakt_Get: bool = Trakt_Get
        self.Trakt_Send: bool = Trakt_Send

    class Encoder(JSONEncoder):
        """json.dump* helper: serialize instances via their __dict__."""

        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a WebCacheSettings from a dict (or object exposing __dict__).

        Missing keys decode to None; unusable input yields a default instance.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
            except AttributeError:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return WebCacheSettings()
        webcachesettings = WebCacheSettings()
        # attribute names match the JSON keys one-to-one
        for key in vars(webcachesettings):
            setattr(webcachesettings, key, json.get(key))
        return webcachesettings
class TvDBSettings:
    """JSON-serializable DTO for the server's TvDB scraping settings."""

    def __init__(self,
                 AutoLink: bool = True,
                 AutoFanart: bool = True,
                 AutoFanartAmount: int = 0,
                 AutoWideBanners: bool = True,
                 AutoWideBannersAmount: int = 0,
                 AutoPosters: bool = True,
                 AutoPostersAmount: int = 0,
                 UpdateFrequency: int = 1,
                 Language: str = ''
                 ):
        self.AutoLink: bool = AutoLink
        self.AutoFanart: bool = AutoFanart
        self.AutoFanartAmount: int = AutoFanartAmount
        self.AutoWideBanners: bool = AutoWideBanners
        self.AutoWideBannersAmount: int = AutoWideBannersAmount
        self.AutoPosters: bool = AutoPosters
        self.AutoPostersAmount: int = AutoPostersAmount
        self.UpdateFrequency: int = UpdateFrequency
        self.Language: str = Language

    class Encoder(JSONEncoder):
        """json.dump* helper: serialize instances via their __dict__."""

        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a TvDBSettings from a dict (or object exposing __dict__).

        Missing keys decode to None; unusable input yields a default instance.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
            except AttributeError:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return TvDBSettings()
        tvdbsettings = TvDBSettings()
        # attribute names match the JSON keys one-to-one
        for key in vars(tvdbsettings):
            setattr(tvdbsettings, key, json.get(key))
        return tvdbsettings
class MovieDbSettings:
    """JSON-serializable DTO for the server's MovieDB scraping settings."""

    def __init__(self,
                 AutoFanart: bool = True,
                 AutoFanartAmount: int = 0,
                 AutoPosters: bool = True,
                 AutoPostersAmount: int = 0
                 ):
        self.AutoFanart: bool = AutoFanart
        self.AutoFanartAmount: int = AutoFanartAmount
        self.AutoPosters: bool = AutoPosters
        self.AutoPostersAmount: int = AutoPostersAmount

    class Encoder(JSONEncoder):
        """json.dump* helper: serialize instances via their __dict__."""

        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a MovieDbSettings from a dict (or object exposing __dict__).

        Missing keys decode to None; unusable input yields a default instance.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
            except AttributeError:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return MovieDbSettings()
        moviedbsettings = MovieDbSettings()
        # attribute names match the JSON keys one-to-one
        for key in vars(moviedbsettings):
            setattr(moviedbsettings, key, json.get(key))
        return moviedbsettings
class ImportSettings:
    """JSON-serializable DTO for the server's file-import settings."""

    def __init__(self,
                 VideoExtensions: List[str] = None,
                 Exclude: List[str] = None,
                 DefaultSeriesLanguage: int = 1,
                 DefaultEpisodeLanguage: int = 1,
                 RunOnStart: bool = True,
                 ScanDropFoldersOnStart: bool = True,
                 Hash_CRC32: bool = True,
                 Hash_MD5: bool = True,
                 Hash_SHA1: bool = True,
                 UseExistingFileWatchedStatus: bool = True,
                 AutomaticallyDeleteDuplicatesOnImport: bool = True,
                 FileLockChecking: bool = True,
                 AggressiveFileLockChecking: bool = True,
                 FileLockWaitTimeMS: int = 0,
                 AggressiveFileLockWaitTimeSeconds: int = 0,
                 RenameThenMove: bool = True,
                 RenameOnImport: bool = True,
                 MoveOnImport: bool = True,
                 MediaInfoPath: str = '',
                 MediaInfoTimeoutMinutes: int = 0
                 ):
        # NOTE: list defaults changed from shared mutable `[]` to per-instance
        # lists via None sentinels (the old defaults were aliased by every instance).
        self.VideoExtensions: List[str] = VideoExtensions if VideoExtensions is not None else []
        self.Exclude: List[str] = Exclude if Exclude is not None else []
        self.DefaultSeriesLanguage: int = DefaultSeriesLanguage
        self.DefaultEpisodeLanguage: int = DefaultEpisodeLanguage
        self.RunOnStart: bool = RunOnStart
        self.ScanDropFoldersOnStart: bool = ScanDropFoldersOnStart
        self.Hash_CRC32: bool = Hash_CRC32
        self.Hash_MD5: bool = Hash_MD5
        self.Hash_SHA1: bool = Hash_SHA1
        self.UseExistingFileWatchedStatus: bool = UseExistingFileWatchedStatus
        self.AutomaticallyDeleteDuplicatesOnImport: bool = AutomaticallyDeleteDuplicatesOnImport
        self.FileLockChecking: bool = FileLockChecking
        self.AggressiveFileLockChecking: bool = AggressiveFileLockChecking
        self.FileLockWaitTimeMS: int = FileLockWaitTimeMS
        self.AggressiveFileLockWaitTimeSeconds: int = AggressiveFileLockWaitTimeSeconds
        self.RenameThenMove: bool = RenameThenMove
        self.RenameOnImport: bool = RenameOnImport
        self.MoveOnImport: bool = MoveOnImport
        self.MediaInfoPath: str = MediaInfoPath
        self.MediaInfoTimeoutMinutes: int = MediaInfoTimeoutMinutes

    class Encoder(JSONEncoder):
        """json.dump* helper: serialize instances via their __dict__."""

        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build an ImportSettings from a dict (or object exposing __dict__).

        Missing keys decode to None (lists to []); unusable input yields a
        default instance.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
            except AttributeError:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return ImportSettings()
        importsettings = ImportSettings()
        # list-valued keys are copied; remaining attribute names match the JSON
        # keys one-to-one
        importsettings.VideoExtensions = list(json.get("VideoExtensions", []))
        importsettings.Exclude = list(json.get("Exclude", []))
        for key in vars(importsettings):
            if key not in ("VideoExtensions", "Exclude"):
                setattr(importsettings, key, json.get(key))
        return importsettings
class PlexSettings:
    """JSON-serializable DTO for the server's Plex integration settings."""

    def __init__(self,
                 ThumbnailAspects: str = '',
                 Libraries: List[int] = None,
                 Token: str = '',
                 Server: str = ''
                 ):
        # NOTE: Libraries default changed from the shared mutable `[]` to a
        # per-instance list.
        self.ThumbnailAspects: str = ThumbnailAspects
        self.Libraries: List[int] = Libraries if Libraries is not None else []
        self.Token: str = Token
        self.Server: str = Server

    class Encoder(JSONEncoder):
        """json.dump* helper: serialize instances via their __dict__."""

        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a PlexSettings from a dict (or object exposing __dict__).

        Missing keys decode to None (Libraries to []); unusable input yields a
        default instance.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
            except AttributeError:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return PlexSettings()
        plexsettings = PlexSettings()
        plexsettings.ThumbnailAspects = json.get("ThumbnailAspects")
        plexsettings.Libraries = list(json.get("Libraries", []))
        plexsettings.Token = json.get("Token")
        plexsettings.Server = json.get("Server")
        return plexsettings
class PluginSettings:
    """JSON-serializable DTO for the server's plugin/renamer settings."""

    def __init__(self,
                 EnabledPlugins: dict = None,
                 Priority: List[str] = None,
                 EnabledRenamers: dict = None,
                 RenamerPriorities: dict = None
                 ):
        # NOTE: defaults changed from shared mutable `{}` / `[]` to per-instance
        # containers via None sentinels.
        self.EnabledPlugins: dict = EnabledPlugins if EnabledPlugins is not None else {}
        self.Priority: List[str] = Priority if Priority is not None else []
        self.EnabledRenamers: dict = EnabledRenamers if EnabledRenamers is not None else {}
        self.RenamerPriorities: dict = RenamerPriorities if RenamerPriorities is not None else {}

    class Encoder(JSONEncoder):
        """json.dump* helper: serialize instances via their __dict__."""

        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a PluginSettings from a dict (or object exposing __dict__).

        Missing keys decode to None (Priority to []); unusable input yields a
        default instance.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
            except AttributeError:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return PluginSettings()
        pluginsettings = PluginSettings()
        pluginsettings.EnabledPlugins = json.get("EnabledPlugins")
        pluginsettings.Priority = list(json.get("Priority", []))
        pluginsettings.EnabledRenamers = json.get("EnabledRenamers")
        pluginsettings.RenamerPriorities = json.get("RenamerPriorities")
        return pluginsettings
class FileQualityPreferences:
    """JSON-serializable DTO for the server's file-quality filter preferences."""

    # list-valued fields, shared by __init__ defaults and Decoder copying
    _LIST_KEYS = ("PreferredTypes", "PreferredAudioCodecs", "PreferredResolutions",
                  "PreferredSubGroups", "PreferredVideoCodecs", "RequiredTypes",
                  "PreferredSources")

    def __init__(self,
                 Require10BitVideo: bool = True,
                 MaxNumberOfFilesToKeep: int = 0,
                 PreferredTypes: List[int] = None,
                 PreferredAudioCodecs: List[str] = None,
                 PreferredResolutions: List[str] = None,
                 PreferredSubGroups: List[str] = None,
                 PreferredVideoCodecs: List[str] = None,
                 Prefer8BitVideo: bool = True,
                 AllowDeletionOfImportedFiles: bool = True,
                 RequiredTypes: List[int] = None,
                 RequiredAudioCodecs: dict = None,
                 RequiredAudioStreamCount: dict = None,
                 RequiredResolutions: dict = None,
                 RequiredSources: dict = None,
                 RequiredSubGroups: dict = None,
                 RequiredSubStreamCount: dict = None,
                 RequiredVideoCodecs: dict = None,
                 PreferredSources: List[str] = None
                 ):
        # NOTE: every list/dict default changed from a shared mutable literal to
        # a per-instance container via a None sentinel.
        self.Require10BitVideo: bool = Require10BitVideo
        self.MaxNumberOfFilesToKeep: int = MaxNumberOfFilesToKeep
        self.PreferredTypes: List[int] = PreferredTypes if PreferredTypes is not None else []
        self.PreferredAudioCodecs: List[str] = PreferredAudioCodecs if PreferredAudioCodecs is not None else []
        self.PreferredResolutions: List[str] = PreferredResolutions if PreferredResolutions is not None else []
        self.PreferredSubGroups: List[str] = PreferredSubGroups if PreferredSubGroups is not None else []
        self.PreferredVideoCodecs: List[str] = PreferredVideoCodecs if PreferredVideoCodecs is not None else []
        self.Prefer8BitVideo: bool = Prefer8BitVideo
        self.AllowDeletionOfImportedFiles: bool = AllowDeletionOfImportedFiles
        self.RequiredTypes: List[int] = RequiredTypes if RequiredTypes is not None else []
        self.RequiredAudioCodecs: dict = RequiredAudioCodecs if RequiredAudioCodecs is not None else {}
        self.RequiredAudioStreamCount: dict = RequiredAudioStreamCount if RequiredAudioStreamCount is not None else {}
        self.RequiredResolutions: dict = RequiredResolutions if RequiredResolutions is not None else {}
        self.RequiredSources: dict = RequiredSources if RequiredSources is not None else {}
        self.RequiredSubGroups: dict = RequiredSubGroups if RequiredSubGroups is not None else {}
        self.RequiredSubStreamCount: dict = RequiredSubStreamCount if RequiredSubStreamCount is not None else {}
        self.RequiredVideoCodecs: dict = RequiredVideoCodecs if RequiredVideoCodecs is not None else {}
        self.PreferredSources: List[str] = PreferredSources if PreferredSources is not None else []

    class Encoder(JSONEncoder):
        """json.dump* helper: serialize instances via their __dict__."""

        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a FileQualityPreferences from a dict (or object exposing __dict__).

        List-valued keys are copied (missing -> []); all other missing keys
        decode to None. Unusable input yields a default instance.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
            except AttributeError:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return FileQualityPreferences()
        filequalitypreferences = FileQualityPreferences()
        for key in FileQualityPreferences._LIST_KEYS:
            setattr(filequalitypreferences, key, list(json.get(key, [])))
        # remaining attribute names match the JSON keys one-to-one
        for key in vars(filequalitypreferences):
            if key not in FileQualityPreferences._LIST_KEYS:
                setattr(filequalitypreferences, key, json.get(key))
        return filequalitypreferences
class TraktSettings:
    """JSON-serializable DTO for the server's Trakt.tv integration settings."""

    def __init__(self,
                 Enabled: bool = True,
                 PIN: str = '',
                 AuthToken: str = '',
                 RefreshToken: str = '',
                 TokenExpirationDate: str = '',
                 UpdateFrequency: int = 1,
                 SyncFrequency: int = 1
                 ):
        self.Enabled: bool = Enabled
        self.PIN: str = PIN
        self.AuthToken: str = AuthToken
        self.RefreshToken: str = RefreshToken
        self.TokenExpirationDate: str = TokenExpirationDate
        self.UpdateFrequency: int = UpdateFrequency
        self.SyncFrequency: int = SyncFrequency

    class Encoder(JSONEncoder):
        """json.dump* helper: serialize instances via their __dict__."""

        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a TraktSettings from a dict (or object exposing __dict__).

        Missing keys decode to None; unusable input yields a default instance.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
            except AttributeError:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return TraktSettings()
        traktsettings = TraktSettings()
        # attribute names match the JSON keys one-to-one
        for key in vars(traktsettings):
            setattr(traktsettings, key, json.get(key))
        return traktsettings
class LinuxSettings:
    """JSON-serializable DTO for Linux file-ownership/permission settings."""

    def __init__(self,
                 UID: int = 0,
                 GID: int = 0,
                 Permission: int = 0
                 ):
        self.UID: int = UID
        self.GID: int = GID
        self.Permission: int = Permission

    class Encoder(JSONEncoder):
        """json.dump* helper: serialize instances via their __dict__."""

        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a LinuxSettings from a dict (or object exposing __dict__).

        Missing keys decode to None; unusable input yields a default instance.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
            except AttributeError:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return LinuxSettings()
        linuxsettings = LinuxSettings()
        # attribute names match the JSON keys one-to-one
        for key in vars(linuxsettings):
            setattr(linuxsettings, key, json.get(key))
        return linuxsettings
class ServerSettingsExport:
    """JSON-serializable aggregate of the server's exported settings.

    Nested-settings parameters default to None and are replaced with fresh
    per-instance objects in __init__. (The previous defaults such as
    ``LogRotator: LogRotatorSettings = LogRotatorSettings()`` were evaluated
    once at definition time, so every ServerSettingsExport() aliased the same
    mutable sub-objects — the classic mutable-default-argument bug.)
    """

    def __init__(self,
                 AnimeXmlDirectory: str = '',
                 MyListDirectory: str = '',
                 ServerPort: int = 0,
                 PluginAutoWatchThreshold: int = 0,
                 Culture: str = '',
                 WebUI_Settings: str = '',
                 FirstRun: bool = True,
                 LegacyRenamerMaxEpisodeLength: int = 0,
                 LogRotator: LogRotatorSettings = None,
                 Database: DatabaseSettings = None,
                 AniDb: AniDBSettings = None,
                 WebCache: WebCacheSettings = None,
                 TvDB: TvDBSettings = None,
                 MovieDb: MovieDbSettings = None,
                 Import: ImportSettings = None,
                 Plex: PlexSettings = None,
                 Plugins: PluginSettings = None,
                 AutoGroupSeries: bool = True,
                 AutoGroupSeriesRelationExclusions: str = '',
                 AutoGroupSeriesUseScoreAlgorithm: bool = True,
                 FileQualityFilterEnabled: bool = True,
                 FileQualityPreferences: FileQualityPreferences = None,
                 LanguagePreference: List[str] = None,
                 EpisodeLanguagePreference: str = '',
                 LanguageUseSynonyms: bool = True,
                 CloudWatcherTime: int = 0,
                 EpisodeTitleSource: int = 1,
                 SeriesDescriptionSource: int = 1,
                 SeriesNameSource: int = 1,
                 ImagesPath: str = '',
                 TraktTv: TraktSettings = None,
                 UpdateChannel: str = '',
                 Linux: LinuxSettings = None,
                 TraceLog: bool = True,
                 GA_ClientId: str = '',
                 GA_OptOutPlzDont: bool = True
                 ):
        self.AnimeXmlDirectory: str = AnimeXmlDirectory
        self.MyListDirectory: str = MyListDirectory
        self.ServerPort: int = ServerPort
        self.PluginAutoWatchThreshold: int = PluginAutoWatchThreshold
        self.Culture: str = Culture
        self.WebUI_Settings: str = WebUI_Settings
        self.FirstRun: bool = FirstRun
        self.LegacyRenamerMaxEpisodeLength: int = LegacyRenamerMaxEpisodeLength
        self.LogRotator = LogRotator if LogRotator is not None else LogRotatorSettings()
        self.Database = Database if Database is not None else DatabaseSettings()
        self.AniDb = AniDb if AniDb is not None else AniDBSettings()
        self.WebCache = WebCache if WebCache is not None else WebCacheSettings()
        self.TvDB = TvDB if TvDB is not None else TvDBSettings()
        self.MovieDb = MovieDb if MovieDb is not None else MovieDbSettings()
        self.Import = Import if Import is not None else ImportSettings()
        self.Plex = Plex if Plex is not None else PlexSettings()
        self.Plugins = Plugins if Plugins is not None else PluginSettings()
        self.AutoGroupSeries: bool = AutoGroupSeries
        self.AutoGroupSeriesRelationExclusions: str = AutoGroupSeriesRelationExclusions
        self.AutoGroupSeriesUseScoreAlgorithm: bool = AutoGroupSeriesUseScoreAlgorithm
        self.FileQualityFilterEnabled: bool = FileQualityFilterEnabled
        # the parameter name shadows the FileQualityPreferences class inside this
        # scope, so the fallback instance is fetched from module globals
        self.FileQualityPreferences = (FileQualityPreferences
                                       if FileQualityPreferences is not None
                                       else globals()["FileQualityPreferences"]())
        self.LanguagePreference: List[str] = LanguagePreference if LanguagePreference is not None else []
        self.EpisodeLanguagePreference: str = EpisodeLanguagePreference
        self.LanguageUseSynonyms: bool = LanguageUseSynonyms
        self.CloudWatcherTime: int = CloudWatcherTime
        self.EpisodeTitleSource: int = EpisodeTitleSource
        self.SeriesDescriptionSource: int = SeriesDescriptionSource
        self.SeriesNameSource: int = SeriesNameSource
        self.ImagesPath: str = ImagesPath
        self.TraktTv = TraktTv if TraktTv is not None else TraktSettings()
        self.UpdateChannel: str = UpdateChannel
        self.Linux = Linux if Linux is not None else LinuxSettings()
        self.TraceLog: bool = TraceLog
        self.GA_ClientId: str = GA_ClientId
        self.GA_OptOutPlzDont: bool = GA_OptOutPlzDont

    class Encoder(JSONEncoder):
        """json.dump* helper: serialize instances via their __dict__."""

        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a ServerSettingsExport from a dict (or object exposing __dict__).

        Missing keys decode to None (LanguagePreference to []); unusable input
        yields a default instance.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
            except AttributeError:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return ServerSettingsExport()
        serversettingsexport = ServerSettingsExport()
        # NOTE(review): nested settings (LogRotator, Database, AniDb, ...) are
        # assigned the raw decoded JSON values here, unlike FolderInfo.Decoder
        # which builds typed objects — confirm whether callers expect dicts or
        # *Settings instances before changing this.
        # Attribute names match the JSON keys one-to-one.
        for key in vars(serversettingsexport):
            if key != "LanguagePreference":
                setattr(serversettingsexport, key, json.get(key))
        serversettingsexport.LanguagePreference = list(json.get("LanguagePreference", []))
        return serversettingsexport
class ServerSettingsImport:
    """Server settings payload used when importing a configuration.

    Mirrors the JSON shape of the server's settings-import endpoint: every
    constructor argument becomes an instance attribute of the same name, so
    the attribute names intentionally match the JSON field names.
    """

    def __init__(self,
                 AniDB_Username: str = '',
                 AniDB_Password: str = '',
                 AniDB_ServerAddress: str = '',
                 AniDB_ServerPort: str = '',
                 AniDB_ClientPort: str = '',
                 AniDB_AVDumpClientPort: str = '',
                 AniDB_AVDumpKey: str = '',
                 AniDB_DownloadRelatedAnime: bool = True,
                 AniDB_DownloadSimilarAnime: bool = True,
                 AniDB_DownloadReviews: bool = True,
                 AniDB_DownloadReleaseGroups: bool = True,
                 AniDB_MyList_AddFiles: bool = True,
                 AniDB_MyList_StorageState: int = 0,
                 AniDB_MyList_DeleteType: int = 0,
                 AniDB_MyList_ReadWatched: bool = True,
                 AniDB_MyList_ReadUnwatched: bool = True,
                 AniDB_MyList_SetWatched: bool = True,
                 AniDB_MyList_SetUnwatched: bool = True,
                 AniDB_MyList_UpdateFrequency: int = 0,
                 AniDB_Calendar_UpdateFrequency: int = 0,
                 AniDB_Anime_UpdateFrequency: int = 0,
                 AniDB_MyListStats_UpdateFrequency: int = 0,
                 AniDB_File_UpdateFrequency: int = 0,
                 AniDB_DownloadCharacters: bool = True,
                 AniDB_DownloadCreators: bool = True,
                 AniDB_MaxRelationDepth: int = 0,
                 WebCache_Address: str = '',
                 WebCache_Anonymous: bool = True,
                 WebCache_XRefFileEpisode_Get: bool = True,
                 WebCache_XRefFileEpisode_Send: bool = True,
                 WebCache_TvDB_Get: bool = True,
                 WebCache_TvDB_Send: bool = True,
                 WebCache_Trakt_Get: bool = True,
                 WebCache_Trakt_Send: bool = True,
                 WebCache_UserInfo: bool = True,
                 TvDB_AutoLink: bool = True,
                 TvDB_AutoFanart: bool = True,
                 TvDB_AutoFanartAmount: int = 0,
                 TvDB_AutoWideBanners: bool = True,
                 TvDB_AutoWideBannersAmount: int = 0,
                 TvDB_AutoPosters: bool = True,
                 TvDB_AutoPostersAmount: int = 0,
                 TvDB_UpdateFrequency: int = 0,
                 TvDB_Language: str = '',
                 MovieDB_AutoFanart: bool = True,
                 MovieDB_AutoFanartAmount: int = 0,
                 MovieDB_AutoPosters: bool = True,
                 MovieDB_AutoPostersAmount: int = 0,
                 VideoExtensions: str = '',
                 AutoGroupSeries: bool = True,
                 AutoGroupSeriesUseScoreAlgorithm: bool = True,
                 AutoGroupSeriesRelationExclusions: str = '',
                 FileQualityFilterEnabled: bool = True,
                 FileQualityFilterPreferences: str = '',
                 RunImportOnStart: bool = True,
                 ScanDropFoldersOnStart: bool = True,
                 Hash_CRC32: bool = True,
                 Hash_MD5: bool = True,
                 Hash_SHA1: bool = True,
                 Import_UseExistingFileWatchedStatus: bool = True,
                 LanguagePreference: str = '',
                 LanguageUseSynonyms: bool = True,
                 EpisodeTitleSource: int = 0,
                 SeriesDescriptionSource: int = 0,
                 SeriesNameSource: int = 0,
                 Trakt_IsEnabled: bool = True,
                 Trakt_AuthToken: str = '',
                 Trakt_RefreshToken: str = '',
                 Trakt_TokenExpirationDate: str = '',
                 Trakt_UpdateFrequency: int = 0,
                 Trakt_SyncFrequency: int = 0,
                 RotateLogs: bool = True,
                 RotateLogs_Zip: bool = True,
                 RotateLogs_Delete: bool = True,
                 RotateLogs_Delete_Days: str = '',
                 WebUI_Settings: str = '',
                 Plex_ServerHost: str = '',
                 Plex_Sections: str = '',
                 Import_MoveOnImport: bool = True,
                 Import_RenameOnImport: bool = True
                 ):
        # Copy every constructor argument (except self) straight onto the
        # instance; the argument order above fixes the attribute order.
        arguments = dict(locals())
        del arguments["self"]
        self.__dict__.update(arguments)

    class Encoder(JSONEncoder):
        # Serialize an instance as its plain attribute dict.
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a ServerSettingsImport from a decoded JSON dict.

        Accepts either a dict or an object exposing ``__dict__``; on any
        other input, logs and returns a default-constructed instance.
        Missing keys deliberately become ``None`` (not the constructor
        defaults), matching the other decoders in this module.
        """
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return ServerSettingsImport()
        decoded = ServerSettingsImport()
        for field in list(vars(decoded)):
            if field == "LanguagePreference":
                # Stored as a list copied from the payload; empty if absent.
                decoded.LanguagePreference = list(json.get("LanguagePreference", []))
            else:
                setattr(decoded, field, json.get(field))
        return decoded
class Credentials:
    """Login/token credential bundle for talking to the server."""

    def __init__(self,
                 login: str = '',
                 password: str = '',
                 port: int = 0,
                 token: str = '',
                 refresh_token: str = '',
                 apikey: str = '',
                 apiport: int = 0
                 ):
        self.login: str = login
        self.password: str = password
        self.port: int = port
        self.token: str = token
        self.refresh_token: str = refresh_token
        self.apikey: str = apikey
        self.apiport: int = apiport

    class Encoder(JSONEncoder):
        # Serialize an instance as its plain attribute dict.
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a Credentials from a decoded JSON dict (missing keys -> None)."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return Credentials()
        decoded = Credentials()
        for field in ("login", "password", "port", "token",
                      "refresh_token", "apikey", "apiport"):
            setattr(decoded, field, json.get(field))
        return decoded
class JMMUser:
    """A JMM/Shoko server user account.

    All parameters default to empty/zero so that ``JMMUser()`` is valid:
    previously every argument was required, which made ``Decoder`` raise a
    TypeError on both its ``JMMUser()`` fallback and its normal
    default-construction path. Adding defaults is backward compatible for
    existing positional/keyword callers.
    """

    def __init__(self,
                 JMMUserID: int = 0,
                 Username: str = '',
                 Password: str = '',
                 IsAdmin: int = 0,
                 IsAniDBUser: int = 0,
                 IsTraktUser: int = 0,
                 HideCategories: str = '',
                 CanEditServerSettings: int = 0,
                 PlexUsers: str = '',
                 PlexToken: str = ''
                 ):
        self.JMMUserID: int = JMMUserID
        self.Username: str = Username
        self.Password: str = Password
        self.IsAdmin: int = IsAdmin
        self.IsAniDBUser: int = IsAniDBUser
        self.IsTraktUser: int = IsTraktUser
        self.HideCategories: str = HideCategories
        self.CanEditServerSettings: int = CanEditServerSettings
        self.PlexUsers: str = PlexUsers
        self.PlexToken: str = PlexToken

    class Encoder(JSONEncoder):
        # Serialize an instance as its plain attribute dict.
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a JMMUser from a decoded JSON dict (missing keys -> None)."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except AttributeError:
                # Input is neither a dict nor dict-like: fall back to defaults.
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return JMMUser()
        jmmuser: JMMUser = JMMUser()
        jmmuser.JMMUserID = json.get("JMMUserID")
        jmmuser.Username = json.get("Username")
        jmmuser.Password = json.get("Password")
        jmmuser.IsAdmin = json.get("IsAdmin")
        jmmuser.IsAniDBUser = json.get("IsAniDBUser")
        jmmuser.IsTraktUser = json.get("IsTraktUser")
        jmmuser.HideCategories = json.get("HideCategories")
        jmmuser.CanEditServerSettings = json.get("CanEditServerSettings")
        jmmuser.PlexUsers = json.get("PlexUsers")
        jmmuser.PlexToken = json.get("PlexToken")
        return jmmuser
class OSFolder:
    """A filesystem folder as reported by the server's OS browser."""

    def __init__(self,
                 dir: str = '',
                 full_path: str = '',
                 subdir: List[object] = None
                 ):
        self.dir: str = dir
        self.full_path: str = full_path
        # Default to a fresh list per instance: the previous mutable
        # ``[]`` default was shared between every OSFolder instance.
        self.subdir: List[object] = [] if subdir is None else subdir

    class Encoder(JSONEncoder):
        # Serialize an instance as its plain attribute dict.
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build an OSFolder from a decoded JSON dict (missing keys -> None)."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except AttributeError:
                # Input is neither a dict nor dict-like: fall back to defaults.
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return OSFolder()
        osfolder: OSFolder = OSFolder()
        osfolder.dir = json.get("dir")
        osfolder.full_path = json.get("full_path")
        # Default the list so a payload without "subdir" no longer raises
        # TypeError (previously iterated json.get("subdir") == None).
        osfolder.subdir = list(json.get("subdir", []))
        return osfolder
class Logs:
    """Log-rotation settings (rotate/compress/delete and retention days)."""

    def __init__(self,
                 rotate: bool = True,
                 zip: bool = True,
                 delete: bool = True,
                 days: int = 0
                 ):
        self.rotate: bool = rotate
        self.zip: bool = zip
        self.delete: bool = delete
        self.days: int = days

    class Encoder(JSONEncoder):
        # Serialize an instance as its plain attribute dict.
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a Logs from a decoded JSON dict (missing keys -> None)."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return Logs()
        decoded = Logs()
        for field in ("rotate", "zip", "delete", "days"):
            setattr(decoded, field, json.get(field))
        return decoded
class Part:
    """One playable part (file) of a media item, with its contained streams.

    ``Stream`` annotations are quoted so the class can be defined without
    evaluating the ``Stream`` name at definition time.
    """

    def __init__(self,
                 Accessible: int = 0,
                 Exists: int = 0,
                 Streams: 'List[Stream]' = None,
                 Size: int = 0,
                 Duration: int = 0,
                 Key: str = '',
                 LocalKey: str = '',
                 Container: str = '',
                 Id: int = 0,
                 File: str = '',
                 OptimizedForStreaming: int = 0,
                 Has64bitOffsets: int = 0
                 ):
        self.Accessible: int = Accessible
        self.Exists: int = Exists
        # Default to a fresh list per instance: the previous mutable
        # ``[]`` default was shared between every Part instance.
        self.Streams: 'List[Stream]' = [] if Streams is None else Streams
        self.Size: int = Size
        self.Duration: int = Duration
        self.Key: str = Key
        self.LocalKey: str = LocalKey
        self.Container: str = Container
        self.Id: int = Id
        self.File: str = File
        self.OptimizedForStreaming: int = OptimizedForStreaming
        self.Has64bitOffsets: int = Has64bitOffsets

    class Encoder(JSONEncoder):
        # Serialize an instance as its plain attribute dict.
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a Part from a decoded JSON dict (missing keys -> None)."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except AttributeError:
                # Input is neither a dict nor dict-like: fall back to defaults.
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return Part()
        part: Part = Part()
        part.Accessible = json.get("Accessible")
        part.Exists = json.get("Exists")
        # Decode nested stream descriptors; missing key yields an empty list.
        part.Streams = [Stream.Decoder(s) for s in json.get("Streams", [])]
        part.Size = json.get("Size")
        part.Duration = json.get("Duration")
        part.Key = json.get("Key")
        part.LocalKey = json.get("LocalKey")
        part.Container = json.get("Container")
        part.Id = json.get("Id")
        part.File = json.get("File")
        part.OptimizedForStreaming = json.get("OptimizedForStreaming")
        part.Has64bitOffsets = json.get("Has64bitOffsets")
        return part
class Media:
    """Container/codec metadata for one media item, made of ``Part`` entries.

    ``Part`` annotations are quoted so the class can be defined without
    evaluating the ``Part`` name at definition time.
    """

    def __init__(self,
                 Parts: 'List[Part]' = None,
                 Duration: int = 0,
                 VideoFrameRate: str = '',
                 Container: str = '',
                 VideoCodec: str = '',
                 AudioCodec: str = '',
                 AudioChannels: int = 0,
                 AspectRatio: int = 0,
                 Height: int = 0,
                 Width: int = 0,
                 Bitrate: int = 0,
                 Id: int = 0,
                 VideoResolution: str = '',
                 OptimizedForStreaming: int = 0,
                 Chaptered: bool = True
                 ):
        # Default to a fresh list per instance: the previous mutable
        # ``[]`` default was shared between every Media instance.
        self.Parts: 'List[Part]' = [] if Parts is None else Parts
        self.Duration: int = Duration
        self.VideoFrameRate: str = VideoFrameRate
        self.Container: str = Container
        self.VideoCodec: str = VideoCodec
        self.AudioCodec: str = AudioCodec
        self.AudioChannels: int = AudioChannels
        self.AspectRatio: int = AspectRatio
        self.Height: int = Height
        self.Width: int = Width
        self.Bitrate: int = Bitrate
        self.Id: int = Id
        self.VideoResolution: str = VideoResolution
        self.OptimizedForStreaming: int = OptimizedForStreaming
        self.Chaptered: bool = Chaptered

    class Encoder(JSONEncoder):
        # Serialize an instance as its plain attribute dict.
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a Media from a decoded JSON dict (missing keys -> None)."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except AttributeError:
                # Input is neither a dict nor dict-like: fall back to defaults.
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return Media()
        media: Media = Media()
        # Decode nested parts; missing key yields an empty list.
        media.Parts = [Part.Decoder(p) for p in json.get("Parts", [])]
        media.Duration = json.get("Duration")
        media.VideoFrameRate = json.get("VideoFrameRate")
        media.Container = json.get("Container")
        media.VideoCodec = json.get("VideoCodec")
        media.AudioCodec = json.get("AudioCodec")
        media.AudioChannels = json.get("AudioChannels")
        media.AspectRatio = json.get("AspectRatio")
        media.Height = json.get("Height")
        media.Width = json.get("Width")
        media.Bitrate = json.get("Bitrate")
        media.Id = json.get("Id")
        media.VideoResolution = json.get("VideoResolution")
        media.OptimizedForStreaming = json.get("OptimizedForStreaming")
        media.Chaptered = json.get("Chaptered")
        return media
class ComponentVersion:
    """Name/version pair describing one server component."""

    def __init__(self, name: str = '', version: str = ''):
        self.name: str = name
        self.version: str = version

    class Encoder(JSONEncoder):
        # Serialize an instance as its plain attribute dict.
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a ComponentVersion from a decoded JSON dict (missing keys -> None)."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return ComponentVersion()
        decoded = ComponentVersion()
        for field in ("name", "version"):
            setattr(decoded, field, json.get(field))
        return decoded
class ServerStatus:
    """Startup/uptime status snapshot reported by the server."""

    def __init__(self,
                 startup_state: str = '',
                 server_started: bool = True,
                 server_uptime: str = '',
                 first_run: bool = True,
                 startup_failed: bool = True,
                 startup_failed_error_message: str = ''
                 ):
        self.startup_state: str = startup_state
        self.server_started: bool = server_started
        self.server_uptime: str = server_uptime
        self.first_run: bool = first_run
        self.startup_failed: bool = startup_failed
        self.startup_failed_error_message: str = startup_failed_error_message

    class Encoder(JSONEncoder):
        # Serialize an instance as its plain attribute dict.
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a ServerStatus from a decoded JSON dict (missing keys -> None)."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except:
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return ServerStatus()
        decoded = ServerStatus()
        for field in ("startup_state", "server_started", "server_uptime",
                      "first_run", "startup_failed", "startup_failed_error_message"):
            setattr(decoded, field, json.get(field))
        return decoded
class WebUI_Settings:
    """Web-UI preference bundle (theme, notifications, update channel, log window)."""

    def __init__(self,
                 actions: List[str] = None,
                 uiTheme: str = '',
                 uiNotifications: bool = True,
                 otherUpdateChannel: str = '',
                 logDelta: int = 0
                 ):
        # Default to a fresh list per instance: the previous mutable
        # ``[]`` default was shared between every WebUI_Settings instance.
        self.actions: List[str] = [] if actions is None else actions
        self.uiTheme: str = uiTheme
        self.uiNotifications: bool = uiNotifications
        self.otherUpdateChannel: str = otherUpdateChannel
        self.logDelta: int = logDelta

    class Encoder(JSONEncoder):
        # Serialize an instance as its plain attribute dict.
        def default(self, o):
            return o.__dict__

    def __repr__(self) -> str:
        return f"<{self.__class__.__qualname__} {self.__dict__}>"

    @staticmethod
    def Decoder(json: dict):
        """Build a WebUI_Settings from a decoded JSON dict (missing keys -> None)."""
        if not isinstance(json, dict):
            try:
                json = json.__dict__
            except AttributeError:
                # Input is neither a dict nor dict-like: fall back to defaults.
                print(f"Exception at: {__class__.__name__}.{__class__.Decoder.__name__} --- json is not dictionary")
                return WebUI_Settings()
        webui_settings: WebUI_Settings = WebUI_Settings()
        # Copy the action names; missing key yields an empty list.
        webui_settings.actions = list(json.get("actions", []))
        webui_settings.uiTheme = json.get("uiTheme")
        webui_settings.uiNotifications = json.get("uiNotifications")
        webui_settings.otherUpdateChannel = json.get("otherUpdateChannel")
        webui_settings.logDelta = json.get("logDelta")
        return webui_settings
|
import sys
from celery import shared_task
from django.conf import settings
from ops.celery.utils import (
create_or_update_celery_periodic_tasks, disable_celery_periodic_task
)
from ops.celery.decorator import after_app_ready_start
from common.utils import get_logger
from .models import User
from .utils import (
send_password_expiration_reminder_mail, send_user_expiration_reminder_mail
)
from settings.utils import LDAPServerUtil, LDAPImportUtil
logger = get_logger(__file__)
@shared_task
def check_password_expired():
    """Email a password-expiration reminder to every valid, non-app user
    whose password is about to expire, logging each reminder sent."""
    for user in User.objects.exclude(role=User.ROLE_APP):
        if not (user.is_valid and user.password_will_expired):
            continue
        send_password_expiration_reminder_mail(user)
        logger.info(
            "The user {} password expires in {} days".format(
                user, user.password_expired_remain_days
            )
        )
@shared_task
@after_app_ready_start
def check_password_expired_periodic():
    """Register the daily (10:00) crontab entry that runs check_password_expired."""
    task_conf = {
        'task': check_password_expired.name,
        'interval': None,
        'crontab': '0 10 * * *',
        'enabled': True,
    }
    create_or_update_celery_periodic_tasks({'check_password_expired_periodic': task_conf})
@shared_task
def check_user_expired():
    """Email an account-expiration reminder to every valid, non-app user
    whose account is about to expire."""
    for user in User.objects.exclude(role=User.ROLE_APP):
        if not (user.is_valid and user.will_expired):
            continue
        send_user_expiration_reminder_mail(user)
@shared_task
@after_app_ready_start
def check_user_expired_periodic():
    """Register the daily (14:00) crontab entry that runs check_user_expired."""
    task_conf = {
        'task': check_user_expired.name,
        'interval': None,
        'crontab': '0 14 * * *',
        'enabled': True,
    }
    create_or_update_celery_periodic_tasks({'check_user_expired_periodic': task_conf})
@shared_task
def import_ldap_user():
    """Search the configured LDAP server and import the resulting users,
    logging either the errors or the number of users found."""
    logger.info("Start import ldap user task")
    found_users = LDAPServerUtil().search()
    errors = LDAPImportUtil().perform_import(found_users)
    if not errors:
        logger.info('Imported {} users successfully'.format(len(found_users)))
    else:
        logger.error("Imported LDAP users errors: {}".format(errors))
@shared_task
@after_app_ready_start
def import_ldap_user_periodic():
    """(Re)register the periodic LDAP sync task from settings.

    No-op when LDAP auth is off; disables the periodic entry when periodic
    sync is turned off; otherwise schedules by interval (hours -> seconds)
    or by crontab.
    """
    if not settings.AUTH_LDAP:
        return
    if not settings.AUTH_LDAP_SYNC_IS_PERIODIC:
        # Current function name doubles as the periodic-task name.
        disable_celery_periodic_task(sys._getframe().f_code.co_name)
        return
    raw_interval = settings.AUTH_LDAP_SYNC_INTERVAL
    # Interval is configured in hours; anything non-int falls back to crontab.
    interval = raw_interval * 3600 if isinstance(raw_interval, int) else None
    create_or_update_celery_periodic_tasks({
        'import_ldap_user_periodic': {
            'task': import_ldap_user.name,
            'interval': interval,
            'crontab': settings.AUTH_LDAP_SYNC_CRONTAB,
            'enabled': True,
        }
    })
|
# Project Euler 13: sum the numbers listed one per line in the data file
# and print the first ten digits of the total.
# (The original primed `n` with a 40-digit constant that was immediately
# overwritten inside the loop -- dead code, removed here.)
with open('../data/p013_digits.data', 'r') as f:
    total = sum(int(line) for line in f)
print(str(total)[:10])
|
import fauxfactory
import pytest
from cfme.infrastructure import pxe
import utils.error as error
from utils.update import update
pytestmark = [pytest.mark.usefixtures("logged_in"), pytest.mark.tier(3)]
def test_system_image_type_crud():
    """Exercise create, update and delete on a System Image Type."""
    image_type = pxe.SystemImageType(
        name=fauxfactory.gen_alphanumeric(8),
        provision_type='Vm')
    image_type.create()
    with update(image_type):
        image_type.name = "{}_update".format(image_type.name)
    image_type.delete(cancel=False)
def test_duplicate_name_error_validation():
    """Creating a second System Image Type with the same name must be rejected."""
    image_type = pxe.SystemImageType(
        name=fauxfactory.gen_alphanumeric(8),
        provision_type='Vm')
    image_type.create()
    with error.expected('Name has already been taken'):
        image_type.create()
    image_type.delete(cancel=False)
def test_name_required_error_validation():
    """Creating a System Image Type without a name must be rejected."""
    unnamed_type = pxe.SystemImageType(
        name=None,
        provision_type='Vm')
    with error.expected('Name is required'):
        unnamed_type.create()
|
"""
plot two-dimensional density view
"""
import numpy as np
import matplotlib.pyplot as plt
import scipy.interpolate
x, y, z = 10*np.random.random((3,10))
xi, yi = np.linspace(x.min(), x.max(), 100), np.linspace(y.min(), y.max(), 100)
xi, yi = np.meshgrid(xi, yi)
rbf = scipy.interpolate.Rbf(x,y,z,function='linear')
zi = rbf(xi, yi)
plt.imshow(zi, vmin=z.min(), vmax=z.max(), origin='lower',
extent=[x.min(), x.max(), y.min(), y.max()])
plt.scatter(x,y,c=z)
plt.colorbar()
plt.show()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.