repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
BT-fgarbely/odoo | addons/sale/__openerp__.py | 221 | 3467 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    # OpenERP/Odoo addon manifest for the `sale` module.
    'name': 'Sales Management',
    'version': '1.0',
    'category': 'Sales Management',
    # Ordering hint for the apps list in the UI.
    'sequence': 14,
    'summary': 'Quotations, Sales Orders, Invoicing',
    # reStructuredText shown on the module's description page.
    'description': """
Manage sales quotations and orders
==================================
This application allows you to manage your sales goals in an effective and efficient manner by keeping track of all sales orders and history.
It handles the full sales workflow:
* **Quotation** -> **Sales order** -> **Invoice**
Preferences (only with Warehouse Management installed)
------------------------------------------------------
If you also installed the Warehouse Management, you can deal with the following preferences:
* Shipping: Choice of delivery at once or partial delivery
* Invoicing: choose how invoices will be paid
* Incoterms: International Commercial terms
You can choose flexible invoicing methods:
* *On Demand*: Invoices are created manually from Sales Orders when needed
* *On Delivery Order*: Invoices are generated from picking (delivery)
* *Before Delivery*: A Draft invoice is created and must be paid before delivery
The Dashboard for the Sales Manager will include
------------------------------------------------
* My Quotations
* Monthly Turnover (Graph)
""",
    'author': 'OpenERP SA',
    'website': 'https://www.odoo.com/page/crm',
    # Modules that must be installed/loaded before this one.
    'depends': ['sales_team','account_voucher', 'procurement', 'report'],
    # Data files loaded on install/update; load order matters (security
    # rules and workflows before the views that rely on them).
    'data': [
        'wizard/sale_make_invoice_advance.xml',
        'wizard/sale_line_invoice.xml',
        'wizard/sale_make_invoice.xml',
        'security/sale_security.xml',
        'security/ir.model.access.csv',
        'sale_workflow.xml',
        'sale_sequence.xml',
        'sale_report.xml',
        'sale_data.xml',
        'sale_view.xml',
        'sales_team_view.xml',
        'res_partner_view.xml',
        'report/sale_report_view.xml',
        'report/invoice_report_view.xml',
        'edi/sale_order_action_data.xml',
        'res_config_view.xml',
        'views/report_saleorder.xml',
    ],
    'demo': ['sale_demo.xml'],
    # YAML regression tests executed by the OpenERP test runner.
    'test': [
        'test/create_sale_users.yml',
        'test/sale_order_demo.yml',
        'test/manual_order_policy.yml',
        'test/cancel_order.yml',
        'test/delete_order.yml',
        'test/edi_sale_order.yml',
        'test/canceled_lines_order.yml',
    ],
    'installable': True,
    'auto_install': False,
    # Shown as a full application in the apps menu.
    'application': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
linktlh/Toontown-journey | toontown/racing/DistributedStartingBlock.py | 1 | 30674 | from direct.distributed.ClockDelta import *
from direct.interval.IntervalGlobal import *
from toontown.building.ElevatorConstants import *
from toontown.building.ElevatorUtils import *
from toontown.building import DistributedElevatorExt
from toontown.building import DistributedElevator
from toontown.toonbase import ToontownGlobals
from direct.fsm import ClassicFSM
from direct.fsm import State
from direct.gui import DirectGui
from toontown.hood import ZoneUtil
from toontown.toonbase import TTLocalizer
from toontown.toontowngui import TTDialog
from direct.distributed import DistributedObject
from direct.distributed import DistributedSmoothNode
from direct.actor import Actor
from direct.fsm.FSM import FSM
from direct.showbase import PythonUtil
from toontown.toonbase.ToontownTimer import ToontownTimer
from toontown.racing.Kart import Kart
from toontown.racing.KartShopGlobals import KartGlobals
from toontown.racing import RaceGlobals
from toontown.toontowngui.TTDialog import TTGlobalDialog
from toontown.toontowngui.TeaserPanel import TeaserPanel
if (__debug__):
import pdb
class DistributedStartingBlock(DistributedObject.DistributedObject, FSM):
    """Client-side starting block on a kart-racing pad.

    A toon that walks into the block's collision sphere is offered entry to
    a race via a dialog; on acceptance the block plays the kart-appear and
    toon-jump movie and reports progress back to the server with
    ``sendUpdate``.  Behaviour is driven both by distributed fields
    (``setOccupied`` / ``setMovie``) and by a local FSM with the states
    Off / EnterMovie / Waiting / ExitMovie.
    """
    notify = DirectNotifyGlobal.directNotify.newCategory('DistributedStartingBlock')
    # Radius of the trigger sphere a toon walks into to request entry.
    sphereRadius = 1.5
    # Class-level counter used to give each block instance a unique FSM name.
    id = 0
    # Camera placement (relative to the block) used during the enter movie.
    cameraPos = Point3(0, -23, 10)
    cameraHpr = Point3(0, -10, 0)
    SFX_BaseDir = 'phase_6/audio/sfx/'
    SFX_KartAppear = SFX_BaseDir + 'KART_Appear.ogg'
    # Legal FSM transitions; see the enter*/exit* handlers below.
    defaultTransitions = {'Off': ['EnterMovie'],
                          'EnterMovie': ['Off', 'Waiting', 'ExitMovie'],
                          'Waiting': ['ExitMovie', 'Off'],
                          'ExitMovie': ['Off', 'ExitMovie']}

    def __init__(self, cr):
        DistributedObject.DistributedObject.__init__(self, cr)
        # NOTE(review): 'staringBlock' looks like a typo for 'startingBlock';
        # the FSM name is cosmetic only, so it is left untouched here.
        FSM.__init__(self, 'staringBlock_%s_FSM' % DistributedStartingBlock.id)
        self.avId = 0              # doId of the occupying toon (0 == free)
        self.av = None             # resolved avatar object once occupied
        self.lastAvId = 0          # used to debounce duplicate sphere entries
        self.avatar = None
        self.kartPad = None        # owning kart pad, set via setPadDoId()
        self.collNode = None
        self.movieNode = None      # node the toon walks to during the movie
        self.movieTrack = None     # currently playing enter/exit interval
        self.collSphere = None
        self.collNodePath = None
        self.localToonKarting = 0  # nonzero while the local toon occupies us
        self.kartNode = None       # scene-graph anchor for the generated kart
        self.kart = None
        self.holeActor = None      # portal actor for the kart-disappear movie
        self.exitRequested = False
        if (__debug__):
            self.testLOD = False
        self.id = DistributedStartingBlock.id
        DistributedStartingBlock.id += 1
        return

    def disable(self):
        """Tear down event hooks and visuals when the object leaves scope."""
        FSM.cleanup(self)
        self.ignore(self.uniqueName('enterStartingBlockSphere'))
        self.ignore('stoppedAsleep')
        self.setOccupied(0)
        self.avId = 0
        self.nodePath.detachNode()
        self.kartPad = None
        if self.holeActor:
            self.holeActor.cleanup()
            self.holeActor = None
        DistributedObject.DistributedObject.disable(self)
        return

    def delete(self):
        """Destroy GUI, dialog and scene-graph resources permanently."""
        if hasattr(self, 'dialog'):
            if not self.dialog.removed():
                self.dialog.ignoreAll()
                if not self.dialog.isEmpty():
                    self.dialog.cleanup()
                del self.dialog
        self.finishMovie()
        if hasattr(self, 'cancelButton'):
            self.cancelButton.destroy()
            del self.cancelButton
        del self.kartPad
        if self.nodePath:
            self.nodePath.removeNode()
            del self.nodePath
        DistributedObject.DistributedObject.delete(self)

    def generateInit(self):
        """Build the (intangible) trigger sphere before generate completes."""
        self.notify.debugStateCall(self)
        DistributedObject.DistributedObject.generateInit(self)
        self.nodePath = NodePath(self.uniqueName('StartingBlock'))
        self.collSphere = CollisionSphere(0, 0, 0, self.sphereRadius)
        self.collSphere.setTangible(0)
        self.collNode = CollisionNode(self.uniqueName('StartingBlockSphere'))
        self.collNode.setCollideMask(ToontownGlobals.WallBitmask)
        self.collNode.addSolid(self.collSphere)
        self.collNodePath = self.nodePath.attachNewNode(self.collNode)

    def announceGenerate(self):
        """Place the block in the scene and start listening for toons."""
        self.notify.debugStateCall(self)
        DistributedObject.DistributedObject.announceGenerate(self)
        self.nodePath.reparentTo(render)
        self.accept(self.uniqueName('enterStartingBlockSphere'), self.__handleEnterSphere)
        if (__debug__):
            if self.testLOD:
                # NOTE(review): name mangling makes this look for
                # _DistributedStartingBlock__generateKartAppearTrack, but the
                # method is named generateKartAppearTrack — this debug-only
                # path would raise AttributeError if testLOD were enabled.
                self.__generateKartAppearTrack()

    def setPadDoId(self, padDoId):
        # The pad may not have been generated yet; defer hookup if needed.
        if padDoId in self.cr.doId2do:
            self.setPad(self.cr.doId2do[padDoId])
        else:
            self.acceptOnce('generate-%d' % padDoId, self.setPad)

    def setPad(self, pad):
        self.kartPad = pad
        self.kartPad.addStartingBlock(self)

    def setPosHpr(self, x, y, z, h, p, r):
        self.notify.debugStateCall(self)
        # The block model faces away from the pad; flip heading, ignore p/r.
        self.nodePath.setPosHpr(x, y, z, h + 180, 0, 0)

    def setPadLocationId(self, padLocationId):
        """Position the movie node left or right of the block by pad slot."""
        self.notify.debugStateCall(self)
        self.movieNode = self.nodePath.attachNewNode(self.uniqueName('MovieNode'))
        self.exitMovieNode = self.movieNode
        if padLocationId % 2:
            self.movieNode.setPosHpr(3.0, 0, 0, 90.0, 0, 0)
        else:
            self.movieNode.setPosHpr(-3.0, 0, 0, -90.0, 0, 0)

    def setActive(self, isTangible):
        self.collSphere.setTangible(isTangible)

    def __handleEnterSphere(self, collEntry):
        """Offer the local toon entry into the race via a confirm dialog."""
        # Debounce: ignore a re-trigger on the very next frame by the same toon.
        if base.localAvatar.doId == self.lastAvId and globalClock.getFrameCount() <= self.lastFrame + 1:
            self.notify.debug('Ignoring duplicate entry for avatar.')
            return
        if base.localAvatar.hp > 0:

            def handleEnterRequest(self = self):
                self.ignore('stoppedAsleep')
                if hasattr(self.dialog, 'doneStatus') and self.dialog.doneStatus == 'ok':
                    self.d_requestEnter(base.cr.isPaid())
                elif self.cr and not self.isDisabled():
                    self.cr.playGame.getPlace().setState('walk')
                else:
                    self.notify.warning('Warning! Object has already been disabled.')
                self.dialog.ignoreAll()
                self.dialog.cleanup()
                del self.dialog

            # Freeze the toon while the dialog is up.
            self.cr.playGame.getPlace().fsm.request('stopped')
            self.accept('stoppedAsleep', handleEnterRequest)
            doneEvent = 'enterRequest|dialog'
            if self.kartPad.isPractice():
                msg = TTLocalizer.StartingBlock_EnterPractice
            else:
                raceName = TTLocalizer.KartRace_RaceNames[self.kartPad.trackType]
                numTickets = RaceGlobals.getEntryFee(self.kartPad.trackId, self.kartPad.trackType)
                msg = TTLocalizer.StartingBlock_EnterNonPractice % (raceName, numTickets)
            self.dialog = TTGlobalDialog(msg, doneEvent, 4)
            self.dialog.accept(doneEvent, handleEnterRequest)

    def d_movieFinished(self):
        # Tell the AI the client-side movie has completed.
        self.notify.debugStateCall(self)
        self.sendUpdate('movieFinished', [])

    def d_requestEnter(self, paid):
        self.notify.debugStateCall(self)
        self.sendUpdate('requestEnter', [paid])

    def d_requestExit(self):
        self.notify.debugStateCall(self)
        self.exitRequested = True
        self.hideGui()
        self.sendUpdate('requestExit', [])

    def rejectEnter(self, errCode):
        """Server refused entry; show the matching error dialog (if any)."""
        self.notify.debugStateCall(self)

        def handleTicketError(self = self):
            self.ignore('stoppedAsleep')
            self.dialog.ignoreAll()
            self.dialog.cleanup()
            del self.dialog
            self.cr.playGame.getPlace().setState('walk')

        doneEvent = 'errorCode|dialog'
        if errCode == KartGlobals.ERROR_CODE.eTickets:
            msg = TTLocalizer.StartingBlock_NotEnoughTickets
            self.dialog = TTGlobalDialog(msg, doneEvent, 2)
            self.dialog.accept(doneEvent, handleTicketError)
            self.accept('stoppedAsleep', handleTicketError)
        elif errCode == KartGlobals.ERROR_CODE.eBoardOver:
            msg = TTLocalizer.StartingBlock_NoBoard
            self.dialog = TTGlobalDialog(msg, doneEvent, 2)
            self.dialog.accept(doneEvent, handleTicketError)
            self.accept('stoppedAsleep', handleTicketError)
        elif errCode == KartGlobals.ERROR_CODE.eNoKart:
            msg = TTLocalizer.StartingBlock_NoKart
            self.dialog = TTGlobalDialog(msg, doneEvent, 2)
            self.dialog.accept(doneEvent, handleTicketError)
            self.accept('stoppedAsleep', handleTicketError)
        elif errCode == KartGlobals.ERROR_CODE.eOccupied:
            msg = TTLocalizer.StartingBlock_Occupied
            self.dialog = TTGlobalDialog(msg, doneEvent, 2)
            self.dialog.accept(doneEvent, handleTicketError)
            self.accept('stoppedAsleep', handleTicketError)
        elif errCode == KartGlobals.ERROR_CODE.eTrackClosed:
            msg = TTLocalizer.StartingBlock_TrackClosed
            self.dialog = TTGlobalDialog(msg, doneEvent, 2)
            self.dialog.accept(doneEvent, handleTicketError)
            self.accept('stoppedAsleep', handleTicketError)
        elif errCode == KartGlobals.ERROR_CODE.eUnpaid:
            # Unpaid accounts get the karting teaser instead of an error box.
            self.dialog = TeaserPanel(pageName='karting', doneFunc=handleTicketError)
        else:
            self.cr.playGame.getPlace().setState('walk')

    def finishMovie(self):
        # Force any in-flight enter/exit movie to its end state.
        if self.movieTrack:
            self.movieTrack.finish()
            self.movieTrack = None
        return

    def setOccupied(self, avId):
        """Distributed field: avId now occupies (or, if 0, vacates) the block."""
        self.notify.debug('%d setOccupied: %d' % (self.doId, avId))
        if self.av != None:
            # Clean up the previous occupant and their kart.
            self.finishMovie()
            if not self.av.isEmpty() and not self.av.isDisabled():
                self.av.loop('neutral')
                self.av.setParent(ToontownGlobals.SPRender)
                self.av.startSmooth()
            self.finishMovie()
            if self.kart:
                self.kart.delete()
                self.kart = None
            if self.kartNode:
                self.kartNode.removeNode()
                self.kartNode = None
            self.placedAvatar = 0
            self.ignore(self.av.uniqueName('disable'))
            self.av = None
        wasLocalToon = self.localToonKarting
        self.lastAvId = self.avId
        self.lastFrame = globalClock.getFrameCount()
        self.avId = avId
        self.localToonKarting = 0
        if self.avId == 0:
            # Block is now free; make the trigger intangible again.
            self.collSphere.setTangible(0)
            self.request('Off')
        else:
            self.collSphere.setTangible(1)
            av = self.cr.doId2do.get(self.avId)
            self.placedAvatar = 0
            if self.avId == base.localAvatar.doId:
                self.localToonKarting = 1
            if av != None:
                self.av = av
                self.av.stopSmooth()
                self.placedAvatar = 0
                self.acceptOnce(self.av.uniqueName('disable'), self.__avatarGone)
                # Anchor node for the kart, matching the block's placement.
                self.kartNode = render.attachNewNode(self.av.uniqueName('KartNode'))
                self.kartNode.setPosHpr(self.nodePath.getPos(render), self.nodePath.getHpr(render))
                self.kart = Kart()
                self.kart.baseScale = 1.6
                self.kart.setDNA(self.av.getKartDNA())
                self.kart.generateKart()
                self.kart.resetGeomPos()
                self.av.wrtReparentTo(self.nodePath)
                self.av.setAnimState('neutral', 1.0)
                if not self.localToonKarting:
                    av.stopSmooth()
                    self.__placeAvatar()
                self.avParent = av.getParent()
            else:
                self.notify.warning('Unknown avatar %d in kart block %d ' % (self.avId, self.doId))
                self.avId = 0
        if wasLocalToon and not self.localToonKarting:
            # The local toon was kicked off the block (e.g. solo race denied).
            place = base.cr.playGame.getPlace()
            if place:
                if self.exitRequested:
                    place.setState('walk')
                else:

                    def handleDialogOK(self = self):
                        self.ignore('stoppedAsleep')
                        place.setState('walk')
                        self.dialog.ignoreAll()
                        self.dialog.cleanup()
                        del self.dialog

                    doneEvent = 'kickedOutDialog'
                    msg = TTLocalizer.StartingBlock_KickSoloRacer
                    self.dialog = TTGlobalDialog(msg, doneEvent, style=1)
                    self.dialog.accept(doneEvent, handleDialogOK)
                    self.accept('stoppedAsleep', handleDialogOK)
        return

    def __avatarGone(self):
        # Occupant's distributed object disappeared; free the block.
        self.notify.debugStateCall(self)
        self.setOccupied(0)

    def __placeAvatar(self):
        # Snap the (remote) occupant onto the block exactly once.
        self.notify.debugStateCall(self)
        if not self.placedAvatar:
            self.placedAvatar = 1
            self.av.setPosHpr(0, 0, 0, 0, 0, 0)

    def setMovie(self, mode):
        """Distributed field: server requests the enter or exit movie."""
        self.notify.debugStateCall(self)
        if self.avId == 0:
            return
        self.finishMovie()
        if mode == 0:
            pass
        elif mode == KartGlobals.ENTER_MOVIE:
            self.request('EnterMovie')
        elif mode == KartGlobals.EXIT_MOVIE:
            self.request('ExitMovie')

    def makeGui(self):
        """Create the (hidden) exit/cancel button; idempotent."""
        self.notify.debugStateCall(self)
        if hasattr(self, 'cancelButton'):
            return
        fishGui = loader.loadModel('phase_4/models/gui/fishingGui')
        self.cancelButton = DirectGui.DirectButton(relief=None, scale=0.67, pos=(1.16, 0, -0.9), text=('', TTLocalizer.FishingExit, TTLocalizer.FishingExit), text_align=TextNode.ACenter, text_fg=Vec4(1, 1, 1, 1), text_shadow=Vec4(0, 0, 0, 1), text_pos=(0.0, -0.12), textMayChange=0, text_scale=0.1, image=(fishGui.find('**/exit_buttonUp'), fishGui.find('**/exit_buttonDown'), fishGui.find('**/exit_buttonRollover')), text_font=ToontownGlobals.getInterfaceFont(), command=self.d_requestExit)
        self.cancelButton.hide()
        return

    def showGui(self):
        self.notify.debugStateCall(self)
        # Only show the cancel button while the pad is counting down.
        if hasattr(self.kartPad, 'state'):
            if not self.kartPad.state == 'WaitCountdown':
                return
        self.cancelButton.show()

    def hideGui(self):
        self.notify.debugStateCall(self)
        if not hasattr(self, 'cancelButton'):
            return
        self.cancelButton.hide()

    def generateToonMoveTrack(self):
        """Interval: walk the toon from wherever it is to the movie node."""
        hpr = self.movieNode.getHpr(render)
        heading = PythonUtil.fitDestAngle2Src(self.av.getH(render), hpr[0])
        hpr.setX(heading)
        self.av.setAnimState('run', 1.0)
        toonTrack = Sequence(Wait(0.5), Parallel(LerpPosInterval(self.av, 1.0, Point3(self.movieNode.getX(self.avParent), self.movieNode.getY(self.avParent), 0)), LerpHprInterval(self.av, 1.0, hpr=hpr, other=render)), Func(self.av.loop, 'neutral'))
        return toonTrack

    def generateKartAppearTrack(self):
        """Interval: toon 'summons' the kart, which pops out and bounces."""
        if not self.av:
            # Debug/LOD path: no avatar, just drop the kart on the block.
            if not self.kartNode:
                self.kartNode = render.attachNewNode(str(self) + 'kartNode')
                self.kartNode.setPosHpr(self.nodePath.getPos(render), self.nodePath.getHpr(render))
            self.kart.setScale(0.85)
            self.kart.reparentTo(self.kartNode)
            return Parallel()
        self.kart.setScale(0.1)
        kartTrack = Parallel(
            Sequence(
                ActorInterval(self.av, 'feedPet'),
                Func(self.av.loop, 'neutral')),
            Sequence(
                Func(self.kart.setActiveShadow, False),
                Func(self.kart.reparentTo, self.av.rightHand),
                Wait(2.1),
                Func(self.kart.wrtReparentTo, render),
                Func(self.kart.setShear, 0, 0, 0),
                Parallel(
                    LerpHprInterval(self.kart, hpr=self.kartNode.getHpr(render), duration=1.2),
                    ProjectileInterval(self.kart, endPos=self.kartNode.getPos(render), duration=1.2, gravityMult=0.45)),
                Wait(0.2),
                Func(self.kart.setActiveShadow, True),
                Sequence(
                    # Squash-and-stretch landing bounce.
                    LerpScaleInterval(self.kart, scale=Point3(1.1, 1.1, 0.1), duration=0.2),
                    LerpScaleInterval(self.kart, scale=Point3(0.9, 0.9, 0.1), duration=0.1),
                    LerpScaleInterval(self.kart, scale=Point3(1.0, 1.0, 0.1), duration=0.1),
                    LerpScaleInterval(self.kart, scale=Point3(1.0, 1.0, 1.1), duration=0.2),
                    LerpScaleInterval(self.kart, scale=Point3(1.0, 1.0, 0.9), duration=0.1),
                    LerpScaleInterval(self.kart, scale=Point3(1.0, 1.0, 1.0), duration=0.1),
                    Func(self.kart.wrtReparentTo, self.kartNode))))
        return kartTrack

    def generateToonJumpTrack(self):
        """Interval: toon jumps into the kart seat and sits down."""
        base.sb = self

        def getToonJumpTrack(av, kart):

            # Destination/heading are evaluated lazily (callables) so the
            # interval tracks the kart's live position when it starts.
            def getJumpDest(av = av, node = kart.toonNode[0]):
                dest = node.getPos(av.getParent())
                return dest

            def getJumpHpr(av = av, node = kart.toonNode[0]):
                hpr = node.getHpr(av.getParent())
                return hpr

            toonJumpTrack = Parallel(
                ActorInterval(av, 'jump'),
                Sequence(Wait(0.43),
                    Parallel(
                        LerpHprInterval(av, hpr=getJumpHpr, duration=0.9),
                        ProjectileInterval(av, endPos=getJumpDest, duration=0.9))))
            return toonJumpTrack

        def getToonSitTrack(av):
            toonSitTrack = Sequence(ActorInterval(av, 'sit-start'), Func(av.loop, 'sit'))
            return toonSitTrack

        toonJumpTrack = getToonJumpTrack(self.av, self.kart)
        toonSitTrack = getToonSitTrack(self.av)
        jumpTrack = Sequence(
            Parallel(
                toonJumpTrack,
                Sequence(
                    Wait(1),
                    toonSitTrack)),
            Func(self.av.setPosHpr, 0, 0.45, -.25, 0, 0, 0),
            Func(self.av.reparentTo, self.kart.toonSeat))
        return jumpTrack

    def generateToonReverseJumpTrack(self):
        """Interval: toon jumps back out of the kart to the exit node."""

        def getToonJumpTrack(av, destNode):

            def getJumpDest(av = av, node = destNode):
                dest = node.getPos(av.getParent())
                return dest

            def getJumpHpr(av = av, node = destNode):
                hpr = node.getHpr(av.getParent())
                return hpr

            toonJumpTrack = Parallel(ActorInterval(av, 'jump'), Sequence(Wait(0.1), Parallel(LerpHprInterval(av, hpr=getJumpHpr, duration=0.9), ProjectileInterval(av, endPos=getJumpDest, duration=0.9))))
            return toonJumpTrack

        toonJumpTrack = getToonJumpTrack(self.av, self.exitMovieNode)
        jumpTrack = Sequence(toonJumpTrack, Func(self.av.loop, 'neutral'), Func(self.av.reparentTo, render), Func(self.av.setPosHpr, self.exitMovieNode, 0, 0, 0, 0, 0, 0))
        return jumpTrack

    def generateCameraMoveTrack(self):
        """Interval: swing the camera behind the block; remembers the old view."""
        self.cPos = base.camera.getPos(self.av)
        self.cHpr = base.camera.getHpr(self.av)
        base.camera.wrtReparentTo(self.nodePath)
        cameraTrack = LerpPosHprInterval(base.camera, 1.5, self.cameraPos, self.cameraHpr)
        return cameraTrack

    def generateCameraReturnMoveTrack(self):
        # Restore the camera view captured by generateCameraMoveTrack().
        cameraTrack = Sequence(Func(base.camera.wrtReparentTo, self.av), LerpPosHprInterval(base.camera, 1.5, self.cPos, self.cHpr))
        return cameraTrack

    def generateKartDisappearTrack(self):
        """Interval: kart squashes down and drops into an animated portal."""

        def getHoleTrack(hole, holeParent):
            holeTrack = Sequence(
                Wait(0.2),
                Func(hole.setBin, 'shadow', 0),
                Func(hole.setDepthTest, 0),
                Func(hole.setDepthWrite, 0),
                Func(hole.reparentTo, holeParent),
                Func(hole.setPos, holeParent, Point3(0, 0.0, -.6)),
                ActorInterval(hole, 'hole', startTime=3.4, endTime=3.1),
                Wait(0.4),
                ActorInterval(hole, 'hole', startTime=3.1, endTime=3.4))
            return holeTrack

        def getKartShrinkTrack(kart):
            pos = kart.getPos()
            pos.addZ(-1.0)
            kartTrack = Sequence(LerpScaleInterval(kart, scale=Point3(1.0, 1.0, 0.9), duration=0.1), LerpScaleInterval(kart, scale=Point3(1.0, 1.0, 1.1), duration=0.1), LerpScaleInterval(kart, scale=Point3(1.0, 1.0, 0.1), duration=0.2), LerpScaleInterval(kart, scale=Point3(0.9, 0.9, 0.1), duration=0.1), LerpScaleInterval(kart, scale=Point3(1.1, 1.1, 0.1), duration=0.1), LerpScaleInterval(kart, scale=Point3(0.1, 0.1, 0.1), duration=0.2), Wait(0.2), LerpPosInterval(kart, pos=pos, duration=0.2), Func(kart.hide))
            return kartTrack

        if not self.holeActor:
            self.holeActor = Actor.Actor('phase_3.5/models/props/portal-mod', {'hole': 'phase_3.5/models/props/portal-chan'})
        holeTrack = getHoleTrack(self.holeActor, self.kartNode)
        shrinkTrack = getKartShrinkTrack(self.kart)
        kartTrack = Parallel(shrinkTrack, holeTrack)
        return kartTrack

    def enterOff(self):
        self.notify.debug('%d enterOff: Entering the Off State.' % self.doId)
        self.hideGui()

    def exitOff(self):
        self.notify.debug('%d exitOff: Exiting the Off State.' % self.doId)

    def enterEnterMovie(self):
        """FSM state: play the full board-the-kart movie."""
        self.notify.debug('%d enterEnterMovie: Entering the Enter Movie State.' % self.doId)
        if base.config.GetBool('want-qa-regression', 0):
            raceName = TTLocalizer.KartRace_RaceNames[self.kartPad.trackType]
            self.notify.info('QA-REGRESSION: KARTING: %s' % raceName)
        toonTrack = self.generateToonMoveTrack()
        kartTrack = self.generateKartAppearTrack()
        jumpTrack = self.generateToonJumpTrack()
        name = self.av.uniqueName('EnterRaceTrack')
        if self.av is not None and self.localToonKarting:
            # Local toon: add camera move, sfx, GUI and the server callback.
            kartAppearSfx = base.loadSfx(self.SFX_KartAppear)
            cameraTrack = self.generateCameraMoveTrack()
            engineStartTrack = self.kart.generateEngineStartTrack()
            self.finishMovie()
            self.movieTrack = Sequence(Parallel(cameraTrack, toonTrack), Parallel(SoundInterval(kartAppearSfx), Sequence(kartTrack, jumpTrack, engineStartTrack, Func(self.makeGui), Func(self.showGui), Func(self.request, 'Waiting'), Func(self.d_movieFinished))), name=name, autoFinish=1)
            self.exitRequested = False
        else:
            self.finishMovie()
            self.movieTrack = Sequence(toonTrack, kartTrack, jumpTrack, name=name, autoFinish=1)
        self.movieTrack.start()
        return

    def exitEnterMovie(self):
        self.notify.debug('%d exitEnterMovie: Exiting the Enter Movie State.' % self.doId)

    def enterWaiting(self):
        self.notify.debug('%d enterWaiting: Entering the Waiting State.' % self.doId)

    def exitWaiting(self):
        self.notify.debug('%d exitWaiting: Exiting the Waiting State.' % self.doId)

    def enterExitMovie(self):
        """FSM state: play the leave-the-kart movie."""
        self.notify.debug('%d enterExitMovie: Entering the Exit Movie State.' % self.doId)
        self.hideGui()
        jumpTrack = self.generateToonReverseJumpTrack()
        kartTrack = self.generateKartDisappearTrack()
        self.finishMovie()
        self.movieTrack = Sequence(Func(self.kart.kartLoopSfx.stop), jumpTrack, kartTrack, name=self.av.uniqueName('ExitRaceTrack'), autoFinish=1)
        if self.av is not None and self.localToonKarting:
            # Only the local client restores its camera and reports back.
            cameraTrack = self.generateCameraReturnMoveTrack()
            self.movieTrack.append(cameraTrack)
            self.movieTrack.append(Func(self.d_movieFinished))
        self.movieTrack.start()
        return

    def exitExitMovie(self):
        self.notify.debug('%d exitExitMovie: Exiting the Exit Movie State.' % self.doId)

    def doExitToRaceTrack(self):
        """Drive the kart off into the distance and start the race bulk load."""
        self.hideGui()
        self.finishMovie()
        oldBlockPos = self.kartNode.getPos(render)
        # Project 40 units ahead of the block to get the drive-away target.
        self.kartNode.setPos(self.kartNode, 0, 40, 0)
        newBlockPos = self.kartNode.getPos(render)
        oldBlockScale = self.kartNode.getScale()
        self.kart.LODnode.setSwitch(0, 60, 0)
        self.kartNode.setPos(render, oldBlockPos)
        blockLerpIval = LerpPosInterval(self.kartNode, pos=newBlockPos, duration=2.0)
        scaleLerpIval = LerpScaleInterval(self.kartNode, scale=oldBlockScale * 0.2, duration=2.0)
        engineStopTrack = self.kart.generateEngineStopTrack(2)
        self.finishMovie()
        self.movieTrack = Parallel()
        if self.av == base.localAvatar:
            self.movieTrack.insert(0, Func(base.transitions.irisOut, 1.5, 0))
            (self.movieTrack.append(engineStopTrack),)
            taskMgr.doMethodLater(1.6, self.bulkLoad, 'loadIt', extraArgs=[])
        self.movieTrack.append(Sequence(Parallel(blockLerpIval, scaleLerpIval), Func(self.kartNode.hide), Func(self.kartNode.setPos, render, oldBlockPos), Func(self.kartNode.setScale, oldBlockScale)))
        self.movieTrack.start()

    def bulkLoad(self):
        """Kick off the loading screen for the race zone."""
        zoneId = 0 #TODO: get zoneId for certain tracks
        base.loader.beginBulkLoad('atRace', TTLocalizer.StartingBlock_Loading, 60, 1, TTLocalizer.TIP_KARTING, zoneId)
class DistributedViewingBlock(DistributedStartingBlock):
    """Starting block variant for viewing/show pads.

    Differs from the parent mainly in collision geometry, movie-node
    placement, a countdown timer GUI, and a simplified entry dialog.
    """
    notify = DirectNotifyGlobal.directNotify.newCategory('DistributedViewingBlock')
    # Much larger trigger sphere than a race starting block.
    sphereRadius = 6

    def __init__(self, cr):
        DistributedStartingBlock.__init__(self, cr)
        self.timer = None  # ToontownTimer shown during the countdown
        return

    def delete(self):
        if self.timer is not None:
            self.timer.destroy()
            del self.timer
        DistributedStartingBlock.delete(self)
        return

    def generateInit(self):
        """Same as the parent, but with an offset, larger trigger sphere."""
        self.notify.debugStateCall(self)
        DistributedObject.DistributedObject.generateInit(self)
        self.nodePath = NodePath(self.uniqueName('StartingBlock'))
        self.collSphere = CollisionSphere(-1, 6.75, -1, self.sphereRadius)
        self.collSphere.setTangible(0)
        self.collNode = CollisionNode(self.uniqueName('StartingBlockSphere'))
        self.collNode.setCollideMask(ToontownGlobals.WallBitmask)
        self.collNode.addSolid(self.collSphere)
        self.collNodePath = self.nodePath.attachNewNode(self.collNode)

    def announceGenerate(self):
        self.notify.debugStateCall(self)
        DistributedObject.DistributedObject.announceGenerate(self)
        self.nodePath.reparentTo(render)
        self.accept(self.uniqueName('enterStartingBlockSphere'), self.__handleEnterSphere)
        if (__debug__):
            if self.testLOD:
                # NOTE(review): due to name mangling this resolves to
                # _DistributedViewingBlock__generateKartAppearTrack, which is
                # not defined — debug-only path, would raise if reached.
                self.__generateKartAppearTrack()

    def setPadLocationId(self, padLocationId):
        """Place enter/exit movie nodes for a viewing pad slot."""
        self.notify.debugStateCall(self)
        self.movieNode = self.nodePath.attachNewNode(self.uniqueName('MovieNode'))
        self.exitMovieNode = self.nodePath.attachNewNode(self.uniqueName('ExitMovieNode'))
        if padLocationId % 2:
            self.movieNode.setPosHpr(0, 6.5, 0, 0, 0, 0)
        else:
            self.movieNode.setPosHpr(0, -6.5, 0, 0, 0, 0)
        self.exitMovieNode.setPosHpr(3, 6.5, 0, 270, 0, 0)
        self.collNodePath.reparentTo(self.movieNode)

    def __handleEnterSphere(self, collEntry):
        """Offer the local toon a seat on the viewing pad."""
        if base.localAvatar.doId == self.lastAvId and globalClock.getFrameCount() <= self.lastFrame + 1:
            self.notify.debug('Ignoring duplicate entry for avatar.')
            return
        if base.localAvatar.hp > 0:

            def handleEnterRequest(self = self):
                self.ignore('stoppedAsleep')
                if hasattr(self.dialog, 'doneStatus') and self.dialog.doneStatus == 'ok':
                    self.d_requestEnter(base.cr.isPaid())
                else:
                    self.cr.playGame.getPlace().setState('walk')
                self.dialog.ignoreAll()
                self.dialog.cleanup()
                del self.dialog

            self.cr.playGame.getPlace().fsm.request('stopped')
            self.accept('stoppedAsleep', handleEnterRequest)
            doneEvent = 'enterRequest|dialog'
            msg = TTLocalizer.StartingBlock_EnterShowPad
            self.dialog = TTGlobalDialog(msg, doneEvent, 4)
            self.dialog.accept(doneEvent, handleEnterRequest)

    def generateCameraMoveTrack(self):
        # Viewing pads use a side-on camera rather than the class defaults.
        self.cPos = base.camera.getPos(self.av)
        self.cHpr = base.camera.getHpr(self.av)
        cameraPos = Point3(23, -10, 7)
        cameraHpr = Point3(65, -10, 0)
        base.camera.wrtReparentTo(self.nodePath)
        cameraTrack = LerpPosHprInterval(base.camera, 1.5, cameraPos, cameraHpr)
        return cameraTrack

    def makeGui(self):
        """Create the countdown timer plus the parent's cancel button."""
        self.notify.debugStateCall(self)
        if self.timer is not None:
            return
        self.timer = ToontownTimer()
        self.timer.setScale(0.3)
        self.timer.setPos(1.16, 0, -.73)
        self.timer.hide()
        DistributedStartingBlock.makeGui(self)
        return

    def showGui(self):
        self.notify.debugStateCall(self)
        self.timer.show()
        DistributedStartingBlock.showGui(self)

    def hideGui(self):
        self.notify.debugStateCall(self)
        if not hasattr(self, 'timer') or self.timer is None:
            return
        self.timer.reset()
        self.timer.hide()
        DistributedStartingBlock.hideGui(self)
        return

    def countdown(self):
        # Remaining time is derived from the server timestamp for this avId.
        countdownTime = KartGlobals.COUNTDOWN_TIME - globalClockDelta.localElapsedTime(self.kartPad.getTimestamp(self.avId))
        self.timer.countdown(countdownTime)

    def enterEnterMovie(self):
        """FSM state: board movie for the viewing pad (kart raised/rotated)."""
        self.notify.debug('%d enterEnterMovie: Entering the Enter Movie State.' % self.doId)
        if base.config.GetBool('want-qa-regression', 0):
            raceName = TTLocalizer.KartRace_RaceNames[self.kartPad.trackType]
            self.notify.info('QA-REGRESSION: KARTING: %s' % raceName)
        pos = self.nodePath.getPos(render)
        hpr = self.nodePath.getHpr(render)
        pos.addZ(1.7)
        hpr.addX(270)
        self.kartNode.setPosHpr(pos, hpr)
        toonTrack = self.generateToonMoveTrack()
        kartTrack = self.generateKartAppearTrack()
        jumpTrack = self.generateToonJumpTrack()
        name = self.av.uniqueName('EnterRaceTrack')
        if self.av is not None and self.localToonKarting:
            cameraTrack = self.generateCameraMoveTrack()
            self.finishMovie()
            self.movieTrack = Sequence(Parallel(cameraTrack, Sequence()), kartTrack, jumpTrack, Func(self.makeGui), Func(self.showGui), Func(self.countdown), Func(self.request, 'Waiting'), Func(self.d_movieFinished), name=name, autoFinish=1)
        else:
            self.finishMovie()
            self.movieTrack = Sequence(toonTrack, kartTrack, jumpTrack, name=name, autoFinish=1)
        self.movieTrack.start()
        self.exitRequested = True
        return
| apache-2.0 |
dgoodwin/origin | vendor/github.com/google/cadvisor/build/boilerplate/boilerplate.py | 73 | 4780 | #!/usr/bin/env python
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import argparse
import glob
import json
import mmap
import os
import re
import sys
# Command line: an optional explicit list of files to check; with no
# arguments every candidate file under the repository root is checked.
parser = argparse.ArgumentParser()
parser.add_argument("filenames", help="list of files to check, all files if unspecified", nargs='*')
args = parser.parse_args()
# Repository root, assuming this script lives in <root>/build/boilerplate/.
rootdir = os.path.dirname(__file__) + "/../../"
rootdir = os.path.abspath(rootdir)
def get_refs():
    """Load the reference boilerplate headers, keyed by file extension.

    Reads every build/boilerplate/boilerplate.<ext>.txt under the repo
    root and returns {ext: list-of-header-lines}.
    """
    pattern = os.path.join(rootdir, "build/boilerplate/boilerplate.*.txt")
    refs = {}
    for ref_path in glob.glob(pattern):
        # "boilerplate.<ext>.txt" -> key on "<ext>"
        ext = os.path.basename(ref_path).split(".")[1]
        with open(ref_path, 'r') as ref_file:
            refs[ext] = ref_file.read().splitlines()
    return refs
def file_passes(filename, refs, regexs):
    """Return True if *filename* begins with the expected license header.

    The reference header for the file's extension (from *refs*) is compared
    line-for-line against the top of the file, after stripping Go build
    tags / shell shebangs and normalizing the copyright year to "YEAR".
    """
    try:
        # Unreadable files (permissions, dangling symlinks, ...) are reported
        # as failing rather than crashing the whole check.  Using a context
        # manager also fixes the original's leaked file handle when read()
        # raised, and the bare `except:` that swallowed even KeyboardInterrupt.
        with open(filename, 'r') as f:
            data = f.read()
    except OSError:
        return False

    extension = file_extension(filename)
    ref = refs[extension]

    # remove build tags from the top of Go files
    if extension == "go":
        p = regexs["go_build_constraints"]
        (data, found) = p.subn("", data, 1)

    # remove shebang from the top of shell files
    if extension == "sh":
        p = regexs["shebang"]
        (data, found) = p.subn("", data, 1)

    data = data.splitlines()

    # if our test file is smaller than the reference it surely fails!
    if len(ref) > len(data):
        return False

    # trim our file to the same number of lines as the reference file
    data = data[:len(ref)]

    # a literal "YEAR" in the header means the template leaked in unexpanded
    p = regexs["year"]
    for d in data:
        if p.search(d):
            return False

    # Replace the first concrete copyright year with "YEAR" so the
    # comparison against the reference template can succeed.
    p = regexs["date"]
    for i, d in enumerate(data):
        (data[i], found) = p.subn('YEAR', d)
        if found != 0:
            break

    # the normalized header must now match the reference exactly
    return ref == data
def file_extension(filename):
    """Return the lowercased final extension of *filename*, '' if none."""
    ext = os.path.splitext(filename)[1]
    return ext.rpartition(".")[2].lower()
# Directory names whose contents are exempt from the boilerplate check.
skipped_dirs = ['Godeps', 'vendor', 'third_party', '_gopath', '_output', '.git']

def normalize_files(files):
    """Drop paths inside skipped directories and absolutize the rest.

    Note: the skip test is a plain substring match against the whole path,
    mirroring the pruning done in get_files().
    """
    kept = [path for path in files
            if not any(d in path for d in skipped_dirs)]
    return [path if os.path.isabs(path) else os.path.join(rootdir, path)
            for path in kept]
def get_files(extensions):
    """Collect candidate files whose extension has a reference header.

    Uses the explicit command-line file list when one was given; otherwise
    walks the whole repository tree rooted at *rootdir*.
    """
    if args.filenames:
        candidates = args.filenames
    else:
        candidates = []
        for root, dirs, walkfiles in os.walk(rootdir):
            # Pruning skipped dirs here is purely a performance win — we
            # would drop these paths in normalize_files() anyway, but this
            # avoids walking (potentially huge) vendored trees at all.
            dirs[:] = [d for d in dirs if d not in skipped_dirs]
            for name in walkfiles:
                candidates.append(os.path.join(root, name))
    candidates = normalize_files(candidates)
    return [path for path in candidates
            if file_extension(path) in extensions]
def get_regexs():
    """Compile the regexes used to normalize files before header comparison."""
    regexs = {}
    # Search for "YEAR" which exists in the boilerplate, but shouldn't in the real thing
    regexs["year"] = re.compile( 'YEAR' )
    # Any concrete copyright year (2000-2099); company holder names can be
    # anything.  The previous hard-coded list (2014|...|2020) silently
    # stopped matching files stamped with newer years.
    regexs["date"] = re.compile( r'(20\d\d)' )
    # strip // +build \n\n build constraints
    regexs["go_build_constraints"] = re.compile(r"^(// \+build.*\n)+\n", re.MULTILINE)
    # strip #!.* from shell scripts
    regexs["shebang"] = re.compile(r"^(#!.*\n)\n*", re.MULTILINE)
    return regexs
def main():
    """Print the path of every file whose license header fails the check."""
    regexs = get_regexs()
    refs = get_refs()
    for path in get_files(refs.keys()):
        if not file_passes(path, refs, regexs):
            print(path, file=sys.stdout)
if __name__ == "__main__":
    sys.exit(main())
| apache-2.0 |
zhujzhuo/Sahara | sahara/service/volumes.py | 4 | 8747 | # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from oslo_log import log as logging
from sahara import conductor as c
from sahara import context
from sahara import exceptions as ex
from sahara.i18n import _
from sahara.i18n import _LE
from sahara.utils import cluster_progress_ops as cpo
from sahara.utils.openstack import cinder
from sahara.utils.openstack import nova
from sahara.utils import poll_utils
conductor = c.API
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
CONF.import_opt('api_version', 'sahara.utils.openstack.cinder',
group='cinder')
def _count_instances_to_attach(instances):
result = 0
for instance in instances:
if instance.node_group.volumes_per_node > 0:
result += 1
return result
def _count_volumes_to_mount(instances):
return sum([inst.node_group.volumes_per_node for inst in instances])
def attach_to_instances(instances):
    """Attach the configured volumes to each instance, in parallel.

    Registers a single provisioning step sized by the number of instances
    that actually need volumes; instances whose node group requests no
    volumes are skipped entirely.
    """
    to_attach = _count_instances_to_attach(instances)
    if not to_attach:
        return
    cpo.add_provisioning_step(
        instances[0].cluster_id, _("Attach volumes to instances"),
        to_attach)
    with context.ThreadGroup() as tg:
        for inst in instances:
            if inst.node_group.volumes_per_node <= 0:
                continue
            # One worker thread per instance that needs volumes.
            tg.spawn(
                'attach-volumes-for-instance-%s' % inst.instance_name,
                _attach_volumes_to_node, inst.node_group, inst)
@poll_utils.poll_status(
    'await_attach_volumes', _("Await for attaching volumes to instances"),
    sleep=2)
def _await_attach_volumes(instance, devices):
    # Polled (every 2s, bounded by the 'await_attach_volumes' timeout option)
    # until all requested device paths show up in the instance's
    # /proc/partitions -- see _count_attached_devices.
    return _count_attached_devices(instance, devices) == len(devices)
@cpo.event_wrapper(mark_successful_on_exit=True)
def _attach_volumes_to_node(node_group, instance):
    """Create, attach, await, and mount all of one instance's volumes.

    Creates node_group.volumes_per_node volumes, attaches each to the
    instance, waits until every device is visible on the guest, then
    formats and mounts each device at the node group's storage paths.
    """
    ctx = context.ctx()
    size = node_group.volumes_size
    volume_type = node_group.volume_type
    devices = []
    # Volume display names are 1-based: volume_<instance>_1, _2, ...
    for idx in range(1, node_group.volumes_per_node + 1):
        display_name = "volume_" + instance.instance_name + "_" + str(idx)
        device = _create_attach_volume(
            ctx, instance, size, volume_type,
            node_group.volume_local_to_instance, display_name,
            node_group.volumes_availability_zone)
        devices.append(device)
        LOG.debug("Attached volume {device} to instance {uuid}".format(
            device=device, uuid=instance.instance_id))
    # Block until every attached device is visible on the guest before
    # trying to format/mount any of them.
    _await_attach_volumes(instance, devices)
    paths = instance.node_group.storage_paths()
    for idx in range(0, instance.node_group.volumes_per_node):
        LOG.debug("Mounting volume {volume} to instance {id}"
                  .format(volume=devices[idx], id=instance.instance_id))
        _mount_volume(instance, devices[idx], paths[idx])
        LOG.debug("Mounted volume to instance {id}"
                  .format(id=instance.instance_id))
@poll_utils.poll_status(
    'volume_available_timeout', _("Await for volume become available"),
    sleep=1)
def _await_available(volume):
    """Poll predicate: True once the newly created volume is 'available'."""
    # Re-fetch to observe the volume's current status.
    volume = cinder.get_volume(volume.id)
    if volume.status == 'error':
        # The volume can never become available; fail fast instead of
        # polling until the timeout expires.
        raise ex.SystemError(_("Volume %s has error status") % volume.id)
    return volume.status == 'available'
def _create_attach_volume(ctx, instance, size, volume_type,
                          volume_local_to_instance, name=None,
                          availability_zone=None):
    """Create one Cinder volume and attach it to *instance* via Nova.

    Args:
        ctx: current request context.
        instance: instance object the volume is attached to.
        size: volume size, passed straight to Cinder.
        volume_type: Cinder volume type.
        volume_local_to_instance: if true, hint the Cinder scheduler to
            place the volume on the same host as the instance.
        name: display name for the new volume.
        availability_zone: optional Cinder availability zone.

    Returns:
        The device path reported by Nova for the attachment.
    """
    # Cinder API v1 calls the name field 'display_name'; later versions 'name'.
    if CONF.cinder.api_version == 1:
        kwargs = {'size': size, 'display_name': name}
    else:
        kwargs = {'size': size, 'name': name}
    kwargs['volume_type'] = volume_type
    if availability_zone is not None:
        kwargs['availability_zone'] = availability_zone
    if volume_local_to_instance:
        kwargs['scheduler_hints'] = {'local_to_instance': instance.instance_id}
    volume = cinder.client().volumes.create(**kwargs)
    # Record the new volume id on the instance before waiting on it.
    conductor.append_volume(ctx, instance, volume.id)
    # The volume must be 'available' before Nova can attach it.
    _await_available(volume)
    resp = nova.client().volumes.create_server_volume(
        instance.instance_id, volume.id, None)
    return resp.device
def _count_attached_devices(instance, devices):
code, part_info = instance.remote().execute_command('cat /proc/partitions')
count = 0
for line in part_info.split('\n')[1:]:
tokens = line.split()
if len(tokens) > 3:
dev = '/dev/' + tokens[3]
if dev in devices:
count += 1
return count
def mount_to_instances(instances):
    """Format and mount every already-attached volume on every instance.

    One provisioning-step event is registered per volume.  Formatting can
    take several minutes for large disks and can run in parallel, so each
    volume gets its own worker thread.
    """
    if not instances:
        return
    cpo.add_provisioning_step(
        instances[0].cluster_id,
        _("Mount volumes to instances"), _count_volumes_to_mount(instances))
    with context.ThreadGroup() as tg:
        for inst in instances:
            devices = _find_instance_volume_devices(inst)
            for idx in range(inst.node_group.volumes_per_node):
                tg.spawn('mount-volume-%d-to-node-%s' %
                         (idx, inst.instance_name),
                         _mount_volume_to_node, inst, idx, devices[idx])
def _find_instance_volume_devices(instance):
    """Return the device paths of every volume Nova has attached to *instance*."""
    attachments = nova.client().volumes.get_server_volumes(instance.instance_id)
    return [attachment.device for attachment in attachments]
@cpo.event_wrapper(mark_successful_on_exit=True)
def _mount_volume_to_node(instance, idx, device):
    """Mount *device* at the instance's idx-th storage path (one event each)."""
    LOG.debug("Mounting volume {device} to instance {id}".format(
        device=device, id=instance.instance_id))
    _mount_volume(instance, device, instance.node_group.storage_paths()[idx])
    LOG.debug("Mounted volume to instance {id}".format(
        id=instance.instance_id))
def _mount_volume(instance, device_path, mount_point):
    """Format *device_path* as ext4 on the guest and mount it at *mount_point*.

    Runs mkdir/mkfs/mount over the instance's remote; any existing data on
    the device is destroyed by mkfs.
    """
    with instance.remote() as r:
        try:
            # Mount volumes with better performance options:
            # - reduce number of blocks reserved for root to 1%
            # - use 'dir_index' for faster directory listings
            # - use 'extents' to work faster with large files
            # - disable journaling
            # - enable write-back
            # - do not store access time
            fs_opts = '-m 1 -O dir_index,extents,^has_journal'
            mount_opts = '-o data=writeback,noatime,nodiratime'
            r.execute_command('sudo mkdir -p %s' % mount_point)
            r.execute_command('sudo mkfs.ext4 %s %s' % (fs_opts, device_path))
            r.execute_command('sudo mount %s %s %s' %
                              (mount_opts, device_path, mount_point))
        except Exception:
            # Log with the instance id, then propagate to the caller.
            LOG.error(_LE("Error mounting volume to instance {id}")
                      .format(id=instance.instance_id))
            raise
def detach_from_instance(instance):
    """Detach and then delete every volume recorded for *instance*."""
    for volume_id in instance.volumes:
        _detach_volume(instance, volume_id)
        _delete_volume(volume_id)
@poll_utils.poll_status(
    'detach_volume_timeout', _("Await for volume become detached"), sleep=2)
def _await_detach(volume_id):
    """Poll predicate: True once the volume has finished detaching."""
    # A detached volume ends up either 'available' or 'error'.
    return cinder.get_volume(volume_id).status in ['available', 'error']
def _detach_volume(instance, volume_id):
    """Ask Nova to detach *volume_id* from *instance* and wait for it."""
    volume = cinder.get_volume(volume_id)
    try:
        LOG.debug("Detaching volume {id} from instance {instance}".format(
            id=volume_id, instance=instance.instance_name))
        nova.client().volumes.delete_server_volume(instance.instance_id,
                                                   volume_id)
    except Exception:
        # Best effort: log the failure and still wait below, so deletion
        # can be attempted by the caller regardless.
        LOG.error(_LE("Can't detach volume {id}").format(id=volume.id))
    # detach_timeout is only used for the log message; the actual timeout
    # is enforced by the poll_status decorator on _await_detach.
    detach_timeout = CONF.timeouts.detach_volume_timeout
    LOG.debug("Waiting {timeout} seconds to detach {id} volume".format(
        timeout=detach_timeout, id=volume_id))
    _await_detach(volume_id)
def _delete_volume(volume_id):
    """Delete the Cinder volume *volume_id*; failures are only logged."""
    LOG.debug("Deleting volume {volume}".format(volume=volume_id))
    volume = cinder.get_volume(volume_id)
    try:
        volume.delete()
    except Exception:
        # Swallow the error so the caller can keep processing other volumes.
        LOG.error(_LE("Can't delete volume {volume}").format(
            volume=volume.id))
| apache-2.0 |
AOSPU/external_chromium_org | tools/valgrind/test_suppressions.py | 44 | 6750 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
from collections import defaultdict
import json
import os
import re
import subprocess
import sys
import suppressions
def ReadReportsFromFile(filename):
  """ Returns a list of (report_hash, report) and the URL of the report on the
  waterfall.
  """
  # NOTE(review): file() is the Python-2-only builtin and the handle is
  # never closed; open() + a with-block would be the portable form.
  input_file = file(filename, 'r')
  # reports is a list of (error hash, report) pairs.
  reports = []
  in_suppression = False
  cur_supp = []
  # This stores the last error hash found while reading the file.
  last_hash = ""
  for line in input_file:
    line = line.strip()
    # Strip the waterfall's HTML span markup from log lines.
    line = line.replace("</span><span class=\"stdout\">", "")
    line = line.replace("</span><span class=\"stderr\">", "")
    # NOTE(review): the next two replaces are no-ops as written; they look
    # like HTML-entity decoding ("&lt;" -> "<", "&gt;" -> ">") whose escaped
    # source text was lost in transit -- confirm against upstream.
    line = line.replace("<", "<")
    line = line.replace(">", ">")
    if in_suppression:
      if line == "}":
        # End of a suppression block: store it with the hash seen just
        # before the block opened.
        cur_supp += ["}"]
        reports += [[last_hash, "\n".join(cur_supp)]]
        in_suppression = False
        cur_supp = []
        last_hash = ""
      else:
        # Re-indent body lines by three spaces, suppression-file style.
        cur_supp += [" "*3 + line]
    elif line == "{":
      in_suppression = True
      cur_supp = ["{"]
    elif line.find("Suppression (error hash=#") == 0:
      # Extract the 16-character hash between the '#' markers.
      last_hash = line[25:41]
  # The line at the end of the file is assumed to store the URL of the report.
  return reports,line
def Demangle(names):
  """ Demangle a list of C++ symbols, return a list of human-readable symbols.
  """
  # -n is not the default on Mac.
  args = ['c++filt', '-n']
  # Pipe all names through a single c++filt process, one per line.
  pipe = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
  stdout, _ = pipe.communicate(input='\n'.join(names))
  demangled = stdout.split("\n")
  # Each line ends with a newline, so the final entry of the split output
  # will always be ''.
  # NOTE(review): if the final entry really is always '', this split yields
  # len(names) + 1 entries and the assert below would fire -- confirm which
  # of the comment or the assert matches c++filt's actual output.
  assert len(demangled) == len(names)
  return demangled
def GetSymbolsFromReport(report):
  """Extract all symbols from a suppression report.

  A symbol is whatever follows a "fun:" marker on a report line; the
  marker itself is stripped.
  """
  marker = "fun:"
  found = []
  for report_line in report.splitlines():
    pos = report_line.find(marker)
    if pos != -1:
      found.append(report_line[pos + len(marker):])
  return found
def PrintTopSymbols(symbol_reports, top_count):
  """Print the |top_count| symbols with the most occurrences."""
  # Allocator entry points and the gtest harness appear in nearly every
  # report, so they carry no signal.
  boring_symbols=['malloc', '_Znw*', 'TestBody']
  # Sort by number of reports per symbol, most frequent first.
  sorted_reports = sorted(filter(lambda x:x[0] not in boring_symbols,
                                 symbol_reports.iteritems()),
                          key=lambda x:len(x[1]), reverse=True)
  symbols = symbol_reports.keys()
  demangled = Demangle(symbols)
  assert len(demangled) == len(symbols)
  # Map mangled name -> demangled name for display.
  symboltable = dict(zip(symbols, demangled))
  print "\n"
  print "Top %d symbols" % top_count
  # NOTE(review): the loop variable shadows the module-level 'suppressions'
  # import; harmless here, but a rename would be clearer.
  for (symbol, suppressions) in sorted_reports[:top_count]:
    print "%4d occurrences : %s" % (len(suppressions), symboltable[symbol])
def ReadHashExclusions(exclusions):
  """Return the list of error hashes stored in a JSON exclusions file.

  The file must contain an object with a 'hashes' key, e.g.
  {"hashes": ["0123456789abcdef", ...]}.
  """
  # Use open() instead of the Python-2-only file() builtin, and close the
  # handle deterministically instead of leaking it.
  with open(exclusions, 'r') as input_file:
    contents = json.load(input_file)
  return contents['hashes']
def main(argv):
supp = suppressions.GetSuppressions()
# all_reports is a map {report: list of urls containing this report}
all_reports = defaultdict(list)
report_hashes = {}
symbol_reports = defaultdict(list)
# Create argument parser.
parser = argparse.ArgumentParser()
parser.add_argument('--top-symbols', type=int, default=0,
help='Print a list of the top <n> symbols')
parser.add_argument('--symbol-filter', action='append',
help='Filter out all suppressions not containing the specified symbol(s). '
'Matches against the mangled names.')
parser.add_argument('--exclude-symbol', action='append',
help='Filter out all suppressions containing the specified symbol(s). '
'Matches against the mangled names.')
parser.add_argument('--exclude-hashes', action='append',
help='Specify a .json file with a list of hashes to exclude.')
parser.add_argument('reports', metavar='report file', nargs='+',
help='List of report files')
args = parser.parse_args(argv)
# exclude_hashes is a list of strings, each string an error hash.
exclude_hashes = []
exclude_hashes = []
if args.exclude_hashes:
for excl in args.exclude_hashes:
print "reading exclusion", excl
exclude_hashes += ReadHashExclusions(excl)
for f in args.reports:
f_reports, url = ReadReportsFromFile(f)
for (hash, report) in f_reports:
if hash in exclude_hashes:
continue
all_reports[report] += [url]
report_hashes[report] = hash
reports_count = 0
for r in all_reports:
cur_supp = supp['common_suppressions']
if all([re.search("%20Mac%20|mac_valgrind", url)
for url in all_reports[r]]):
# Include mac suppressions if the report is only present on Mac
cur_supp += supp['mac_suppressions']
elif all([re.search("Windows%20", url) for url in all_reports[r]]):
# Include win32 suppressions if the report is only present on Windows
cur_supp += supp['win_suppressions']
elif all([re.search("Linux%20", url) for url in all_reports[r]]):
cur_supp += supp['linux_suppressions']
if all(["DrMemory" in url for url in all_reports[r]]):
cur_supp += supp['drmem_suppressions']
if all(["DrMemory%20full" in url for url in all_reports[r]]):
cur_supp += supp['drmem_full_suppressions']
# Test if this report is already suppressed
skip = False
for s in cur_supp:
if s.Match(r.split("\n")):
skip = True
break
# Skip reports if none of the symbols are in the report.
if args.symbol_filter and all(not s in r for s in args.symbol_filter):
skip = True
if args.exclude_symbol and any(s in r for s in args.exclude_symbol):
skip = True
if not skip:
reports_count += 1
print "==================================="
print "This report observed at"
for url in all_reports[r]:
print " %s" % url
print "didn't match any suppressions:"
print "Suppression (error hash=#%s#):" % (report_hashes[r])
print r
print "==================================="
if args.top_symbols > 0:
symbols = GetSymbolsFromReport(r)
for symbol in symbols:
symbol_reports[symbol].append(report_hashes[r])
if reports_count > 0:
print ("%d unique reports don't match any of the suppressions" %
reports_count)
if args.top_symbols > 0:
PrintTopSymbols(symbol_reports, args.top_symbols)
else:
print "Congratulations! All reports are suppressed!"
# TODO(timurrrr): also make sure none of the old suppressions
# were narrowed too much.
if __name__ == "__main__":
main(sys.argv[1:])
| bsd-3-clause |
tndatacommons/tndata_backend | tndata_backend/userprofile/migrations/0009_auto_20150908_1727.py | 2 | 2208 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    # Auto-generated migration: adds the Place and UserPlace models and
    # orders UserPlace records per user.  Do not hand-edit the operations.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('userprofile', '0008_auto_20150730_1554'),
    ]
    operations = [
        migrations.CreateModel(
            name='Place',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False, verbose_name='ID', auto_created=True)),
                ('name', models.CharField(db_index=True, max_length=32, unique=True)),
                ('slug', models.SlugField(max_length=32, unique=True)),
                ('primary', models.BooleanField(default=False, help_text='Use this place as a suggestion for users.')),
                ('updated_on', models.DateTimeField(auto_now=True)),
                ('created_on', models.DateTimeField(auto_now_add=True)),
            ],
            options={
                'ordering': ['name'],
                'verbose_name': 'Place',
                'verbose_name_plural': 'Places',
            },
        ),
        # UserPlace links a user (and their profile) to a Place with
        # coordinates stored at 4-decimal precision.
        migrations.CreateModel(
            name='UserPlace',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False, verbose_name='ID', auto_created=True)),
                ('latitude', models.DecimalField(db_index=True, decimal_places=4, max_digits=8)),
                ('longitude', models.DecimalField(db_index=True, decimal_places=4, max_digits=8)),
                ('updated_on', models.DateTimeField(auto_now=True)),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                ('place', models.ForeignKey(to='userprofile.Place')),
                ('profile', models.ForeignKey(to='userprofile.UserProfile')),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'User Place',
                'verbose_name_plural': 'User Places',
            },
        ),
        migrations.AlterOrderWithRespectTo(
            name='userplace',
            order_with_respect_to='user',
        ),
    ]
| mit |
frlen/simian | src/simian/mac/munki/handlers/manifests.py | 2 | 1999 | #!/usr/bin/env python
#
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Manifest URL handlers."""
import httplib
import logging
from simian.mac.common import auth
from simian.mac.munki import common
from simian.mac.munki import handlers
class Manifests(handlers.AuthenticationHandler):
  """Handler for /manifests/"""
  # Python 2 except-clause syntax ("except X, e") is used throughout; this
  # module targets a Python 2 runtime (httplib is the py2 stdlib name).
  def get(self, client_id_str=''):
    """Manifest get handler.
    Args:
      client_id_str: optional str client_id; only needed for user requests.
    Returns:
      A webapp.Response() response.
    """
    session = auth.DoAnyAuth()
    client_id = handlers.GetClientIdForRequest(
        self.request, session=session, client_id_str=client_id_str)
    try:
      plist_xml = common.GetComputerManifest(
          client_id=client_id, packagemap=False)
    except common.ManifestNotFoundError, e:
      # Unknown manifest -> 404.
      logging.warning('Invalid manifest requested: %s', str(e))
      self.response.set_status(httplib.NOT_FOUND)
      return
    except common.ManifestDisabledError, e:
      # Manifest exists but is disabled -> 503.
      logging.info('Disabled manifest requested: %s', str(e))
      self.response.set_status(httplib.SERVICE_UNAVAILABLE)
      return
    except common.Error, e:
      # Any other known error -> 503, with full traceback in the logs.
      logging.exception(
          '%s, client_id_str=%s', str(e.__class__.__name__), client_id_str)
      self.response.set_status(httplib.SERVICE_UNAVAILABLE)
      return
    self.response.headers['Content-Type'] = 'text/xml; charset=utf-8'
    self.response.out.write(plist_xml)
| apache-2.0 |
beacloudgenius/edx-platform | lms/djangoapps/certificates/views.py | 10 | 22082 | """URL handlers related to certificate handling by LMS"""
from datetime import datetime
import dogstats_wrapper as dog_stats_api
import json
import logging
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.http import HttpResponse, Http404, HttpResponseForbidden
from django.utils.translation import ugettext as _
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from capa.xqueue_interface import XQUEUE_METRIC_NAME
from certificates.models import (
certificate_status_for_student,
CertificateStatuses,
GeneratedCertificate,
ExampleCertificate,
CertificateHtmlViewConfiguration
)
from certificates.queue import XQueueCertInterface
from edxmako.shortcuts import render_to_response
from xmodule.modulestore.django import modulestore
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from util.json_request import JsonResponse, JsonResponseBadRequest
from util.bad_request_rate_limiter import BadRequestRateLimiter
logger = logging.getLogger(__name__)
@csrf_exempt
def request_certificate(request):
    """Request the on-demand creation of a certificate for some user, course.
    A request doesn't imply a guarantee that such a creation will take place.
    We intentionally use the same machinery as is used for doing certification
    at the end of a course run, so that we can be sure users get graded and
    then if and only if they pass, do they get a certificate issued.
    """
    # NOTE(review): non-POST requests fall through and return None, which is
    # not a valid view response -- confirm whether that path is reachable.
    if request.method == "POST":
        if request.user.is_authenticated():
            xqci = XQueueCertInterface()
            username = request.user.username
            student = User.objects.get(username=username)
            course_key = SlashSeparatedCourseKey.from_deprecated_string(request.POST.get('course_id'))
            course = modulestore().get_course(course_key, depth=2)
            # Only enqueue grading/cert generation when no usable certificate
            # exists yet (unavailable/notpassing/error states).
            status = certificate_status_for_student(student, course_key)['status']
            if status in [CertificateStatuses.unavailable, CertificateStatuses.notpassing, CertificateStatuses.error]:
                log_msg = u'Grading and certification requested for user %s in course %s via /request_certificate call'
                logger.info(log_msg, username, course_key)
                status = xqci.add_cert(student, course_key, course=course)
            # NOTE(review): the mimetype= keyword suggests a pre-1.7 Django
            # HttpResponse API -- confirm before upgrading Django.
            return HttpResponse(json.dumps({'add_status': status}), mimetype='application/json')
        return HttpResponse(json.dumps({'add_status': 'ERRORANONYMOUSUSER'}), mimetype='application/json')
@csrf_exempt
def update_certificate(request):
    """
    Will update GeneratedCertificate for a new certificate or
    modify an existing certificate entry.
    See models.py for a state diagram of certificate states
    This view should only ever be accessed by the xqueue server
    """
    status = CertificateStatuses
    # NOTE(review): non-POST requests fall through and return None -- confirm
    # this endpoint is only ever hit with POST by the xqueue server.
    if request.method == "POST":
        xqueue_body = json.loads(request.POST.get('xqueue_body'))
        xqueue_header = json.loads(request.POST.get('xqueue_header'))
        try:
            course_key = SlashSeparatedCourseKey.from_deprecated_string(xqueue_body['course_id'])
            # The xqueue echoes back the lms_key we submitted with the job;
            # use it together with user/course to find the matching cert row.
            cert = GeneratedCertificate.objects.get(
                user__username=xqueue_body['username'],
                course_id=course_key,
                key=xqueue_header['lms_key'])
        except GeneratedCertificate.DoesNotExist:
            logger.critical('Unable to lookup certificate\n'
                            'xqueue_body: {0}\n'
                            'xqueue_header: {1}'.format(
                                xqueue_body, xqueue_header))
            return HttpResponse(json.dumps({
                'return_code': 1,
                'content': 'unable to lookup key'}),
                mimetype='application/json')
        if 'error' in xqueue_body:
            cert.status = status.error
            if 'error_reason' in xqueue_body:
                # Hopefully we will record a meaningful error
                # here if something bad happened during the
                # certificate generation process
                #
                # example:
                # (aamorm BerkeleyX/CS169.1x/2012_Fall)
                # <class 'simples3.bucket.S3Error'>:
                # HTTP error (reason=error(32, 'Broken pipe'), filename=None) :
                # certificate_agent.py:175
                cert.error_reason = xqueue_body['error_reason']
        else:
            # Success path: advance the cert along its state machine.
            if cert.status in [status.generating, status.regenerating]:
                cert.download_uuid = xqueue_body['download_uuid']
                cert.verify_uuid = xqueue_body['verify_uuid']
                cert.download_url = xqueue_body['url']
                cert.status = status.downloadable
            elif cert.status in [status.deleting]:
                cert.status = status.deleted
            else:
                # Any other starting state is unexpected for an xqueue reply.
                logger.critical('Invalid state for cert update: {0}'.format(
                    cert.status))
                return HttpResponse(
                    json.dumps({
                        'return_code': 1,
                        'content': 'invalid cert status'
                    }),
                    mimetype='application/json'
                )
        # Record the update in metrics before persisting.
        dog_stats_api.increment(XQUEUE_METRIC_NAME, tags=[
            u'action:update_certificate',
            u'course_id:{}'.format(cert.course_id)
        ])
        cert.save()
        return HttpResponse(json.dumps({'return_code': 0}),
                            mimetype='application/json')
@csrf_exempt
@require_POST
def update_example_certificate(request):
    """Callback from the XQueue that updates example certificates.
    Example certificates are used to verify that certificate
    generation is configured correctly for a course.
    Unlike other certificates, example certificates
    are not associated with a particular user or displayed
    to students.
    For this reason, we need a different end-point to update
    the status of generated example certificates.
    Arguments:
        request (HttpRequest)
    Returns:
        HttpResponse (200): Status was updated successfully.
        HttpResponse (400): Invalid parameters.
        HttpResponse (403): Rate limit exceeded for bad requests.
        HttpResponse (404): Invalid certificate identifier or access key.
    """
    logger.info(u"Received response for example certificate from XQueue.")
    rate_limiter = BadRequestRateLimiter()
    # Check the parameters and rate limits
    # If these are invalid, return an error response.
    if rate_limiter.is_rate_limit_exceeded(request):
        logger.info(u"Bad request rate limit exceeded for update example certificate end-point.")
        return HttpResponseForbidden("Rate limit exceeded")
    if 'xqueue_body' not in request.POST:
        logger.info(u"Missing parameter 'xqueue_body' for update example certificate end-point")
        rate_limiter.tick_bad_request_counter(request)
        return JsonResponseBadRequest("Parameter 'xqueue_body' is required.")
    if 'xqueue_header' not in request.POST:
        logger.info(u"Missing parameter 'xqueue_header' for update example certificate end-point")
        rate_limiter.tick_bad_request_counter(request)
        return JsonResponseBadRequest("Parameter 'xqueue_header' is required.")
    try:
        xqueue_body = json.loads(request.POST['xqueue_body'])
        xqueue_header = json.loads(request.POST['xqueue_header'])
    except (ValueError, TypeError):
        logger.info(u"Could not decode params to example certificate end-point as JSON.")
        rate_limiter.tick_bad_request_counter(request)
        return JsonResponseBadRequest("Parameters must be JSON-serialized.")
    # Attempt to retrieve the example certificate record
    # so we can update the status.
    try:
        # The 'username' field is repurposed to carry the example cert's uuid,
        # and the lms_key carries its access key.
        uuid = xqueue_body.get('username')
        access_key = xqueue_header.get('lms_key')
        cert = ExampleCertificate.objects.get(uuid=uuid, access_key=access_key)
    except ExampleCertificate.DoesNotExist:
        # If we are unable to retrieve the record, it means the uuid or access key
        # were not valid. This most likely means that the request is NOT coming
        # from the XQueue. Return a 404 and increase the bad request counter
        # to protect against a DDOS attack.
        logger.info(u"Could not find example certificate with uuid '%s' and access key '%s'", uuid, access_key)
        rate_limiter.tick_bad_request_counter(request)
        raise Http404
    if 'error' in xqueue_body:
        # If an error occurs, save the error message so we can fix the issue.
        error_reason = xqueue_body.get('error_reason')
        cert.update_status(ExampleCertificate.STATUS_ERROR, error_reason=error_reason)
        logger.warning(
            (
                u"Error occurred during example certificate generation for uuid '%s'. "
                u"The error response was '%s'."
            ), uuid, error_reason
        )
    else:
        # If the certificate generated successfully, save the download URL
        # so we can display the example certificate.
        download_url = xqueue_body.get('url')
        if download_url is None:
            rate_limiter.tick_bad_request_counter(request)
            logger.warning(u"No download URL provided for example certificate with uuid '%s'.", uuid)
            return JsonResponseBadRequest(
                "Parameter 'download_url' is required for successfully generated certificates."
            )
        else:
            cert.update_status(ExampleCertificate.STATUS_SUCCESS, download_url=download_url)
            logger.info("Successfully updated example certificate with uuid '%s'.", uuid)
    # Let the XQueue know that we handled the response
    return JsonResponse({'return_code': 0})
# pylint: disable=too-many-statements, bad-continuation
@login_required
def render_html_view(request):
"""
This view generates an HTML representation of the specified student's certificate
If a certificate is not available, we display a "Sorry!" screen instead
"""
# Initialize the template context and bootstrap with default values from configuration
context = {}
configuration = CertificateHtmlViewConfiguration.get_config()
context = configuration.get('default', {})
invalid_template_path = 'certificates/invalid.html'
# Translators: This text is bound to the HTML 'title' element of the page and appears
# in the browser title bar when a requested certificate is not found or recognized
context['document_title'] = _("Invalid Certificate")
# Feature Flag check
if not settings.FEATURES.get('CERTIFICATES_HTML_VIEW', False):
return render_to_response(invalid_template_path, context)
course_id = request.GET.get('course', None)
context['course'] = course_id
if not course_id:
return render_to_response(invalid_template_path, context)
# Course Lookup
try:
course_key = CourseKey.from_string(course_id)
except InvalidKeyError:
return render_to_response(invalid_template_path, context)
course = modulestore().get_course(course_key)
if not course:
return render_to_response(invalid_template_path, context)
# Certificate Lookup
try:
certificate = GeneratedCertificate.objects.get(
user=request.user,
course_id=course_key
)
except GeneratedCertificate.DoesNotExist:
return render_to_response(invalid_template_path, context)
# Override the defaults with any mode-specific static values
context.update(configuration.get(certificate.mode, {}))
# Override further with any course-specific static values
context.update(course.cert_html_view_overrides)
# Populate dynamic output values using the course/certificate data loaded above
user_fullname = request.user.profile.name
platform_name = context.get('platform_name')
context['accomplishment_copy_name'] = user_fullname
context['accomplishment_copy_course_org'] = course.org
context['accomplishment_copy_course_name'] = course.display_name
context['certificate_id_number'] = certificate.verify_uuid
context['certificate_verify_url'] = "{prefix}{uuid}{suffix}".format(
prefix=context.get('certificate_verify_url_prefix'),
uuid=certificate.verify_uuid,
suffix=context.get('certificate_verify_url_suffix')
)
context['logo_alt'] = platform_name
accd_course_org_html = '<span class="detail--xuniversity">{partner_name}</span>'.format(partner_name=course.org)
accd_platform_name_html = '<span class="detail--company">{platform_name}</span>'.format(platform_name=platform_name)
# Translators: This line appears on the certificate after the name of a course, and provides more
# information about the organizations providing the course material to platform users
context['accomplishment_copy_course_description'] = _('a course of study offered by {partner_name}, '
'through {platform_name}.').format(
partner_name=accd_course_org_html,
platform_name=accd_platform_name_html
)
context['accomplishment_more_title'] = _("More Information About {user_name}'s Certificate:").format(
user_name=user_fullname
)
# Translators: This line appears on the page just before the generation date for the certificate
context['certificate_date_issued_title'] = _("Issued On:")
# Translators: The format of the date includes the full name of the month
context['certificate_date_issued'] = _('{month} {day}, {year}').format(
month=certificate.modified_date.strftime("%B"),
day=certificate.modified_date.day,
year=certificate.modified_date.year
)
# Translators: The Certificate ID Number is an alphanumeric value unique to each individual certificate
context['certificate_id_number_title'] = _('Certificate ID Number')
context['certificate_info_title'] = _('About {platform_name} Certificates').format(
platform_name=platform_name
)
# Translators: This text describes the purpose (and therefore, value) of a course certificate
# 'verifying your identity' refers to the process for establishing the authenticity of the student
context['certificate_info_description'] = _("{platform_name} acknowledges achievements through certificates, which "
"are awarded for various activities {platform_name} students complete "
"under the <a href='{tos_url}'>{platform_name} Honor Code</a>. Some "
"certificates require completing additional steps, such as "
"<a href='{verified_cert_url}'> verifying your identity</a>.").format(
platform_name=platform_name,
tos_url=context.get('company_tos_url'),
verified_cert_url=context.get('company_verified_certificate_url')
)
# Translators: Certificate Types correspond to the different enrollment options available for a given course
context['certificate_type_title'] = _('{certificate_type} Certificate').format(
certificate_type=context.get('certificate_type')
)
context['certificate_verify_title'] = _("How {platform_name} Validates Student Certificates").format(
platform_name=platform_name
)
# Translators: This text describes the validation mechanism for a certificate file (known as GPG security)
context['certificate_verify_description'] = _('Certificates issued by {platform_name} are signed by a gpg key so '
'that they can be validated independently by anyone with the '
'{platform_name} public key. For independent verification, '
'{platform_name} uses what is called a '
'"detached signature""".').format(platform_name=platform_name)
context['certificate_verify_urltext'] = _("Validate this certificate for yourself")
# Translators: This text describes (at a high level) the mission and charter the edX platform and organization
context['company_about_description'] = _("{platform_name} offers interactive online classes and MOOCs from the "
"world's best universities, including MIT, Harvard, Berkeley, University "
"of Texas, and many others. {platform_name} is a non-profit online "
"initiative created by founding partners Harvard and MIT.").format(
platform_name=platform_name
)
context['company_about_title'] = _("About {platform_name}").format(platform_name=platform_name)
context['company_about_urltext'] = _("Learn more about {platform_name}").format(platform_name=platform_name)
context['company_courselist_urltext'] = _("Learn with {platform_name}").format(platform_name=platform_name)
context['company_careers_urltext'] = _("Work at {platform_name}").format(platform_name=platform_name)
context['company_contact_urltext'] = _("Contact {platform_name}").format(platform_name=platform_name)
context['company_privacy_urltext'] = _("Privacy Policy")
context['company_tos_urltext'] = _("Terms of Service & Honor Code")
# Translators: This text appears near the top of the certificate and describes the guarantee provided by edX
context['document_banner'] = _("{platform_name} acknowledges the following student accomplishment").format(
platform_name=platform_name
)
context['logo_subtitle'] = _("Certificate Validation")
if certificate.mode == 'honor':
# Translators: This text describes the 'Honor' course certificate type.
context['certificate_type_description'] = _("An {cert_type} Certificate signifies that an {platform_name} "
"learner has agreed to abide by {platform_name}'s honor code and "
"completed all of the required tasks for this course under its "
"guidelines.").format(
cert_type=context.get('certificate_type'),
platform_name=platform_name
)
elif certificate.mode == 'verified':
# Translators: This text describes the 'ID Verified' course certificate type, which is a higher level of
# verification offered by edX. This type of verification is useful for professional education/certifications
context['certificate_type_description'] = _("An {cert_type} Certificate signifies that an {platform_name} "
"learner has agreed to abide by {platform_name}'s honor code and "
"completed all of the required tasks for this course under its "
"guidelines, as well as having their photo ID checked to verify "
"their identity.").format(
cert_type=context.get('certificate_type'),
platform_name=platform_name
)
elif certificate.mode == 'xseries':
# Translators: This text describes the 'XSeries' course certificate type. An XSeries is a collection of
# courses related to each other in a meaningful way, such as a specific topic or theme, or even an organization
context['certificate_type_description'] = _("An {cert_type} Certificate demonstrates a high level of "
"achievement in a program of study, and includes verification of "
"the student's identity.").format(
cert_type=context.get('certificate_type')
)
# Translators: This is the copyright line which appears at the bottom of the certificate page/screen
context['copyright_text'] = _('© {year} {platform_name}. All rights reserved.').format(
year=datetime.now().year,
platform_name=platform_name
)
# Translators: This text represents the verification of the certificate
context['document_meta_description'] = _('This is a valid {platform_name} certificate for {user_name}, '
'who participated in {partner_name} {course_number}').format(
platform_name=platform_name,
user_name=user_fullname,
partner_name=course.org,
course_number=course.number
)
# Translators: This text is bound to the HTML 'title' element of the page and appears in the browser title bar
context['document_title'] = _("Valid {partner_name} {course_number} Certificate | {platform_name}").format(
partner_name=course.org,
course_number=course.number,
platform_name=platform_name
)
# Translators: This text fragment appears after the student's name (displayed in a large font) on the certificate
# screen. The text describes the accomplishment represented by the certificate information displayed to the user
context['accomplishment_copy_description_full'] = _("successfully completed, received a passing grade, and was "
"awarded a {platform_name} {certificate_type} "
"Certificate of Completion in ").format(
platform_name=platform_name,
certificate_type=context.get("certificate_type")
)
return render_to_response("certificates/valid.html", context)
| agpl-3.0 |
nikhil93uf/Qemu | scripts/tracetool/backend/ftrace.py | 102 | 1351 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Ftrace built-in backend.
"""
__author__ = "Eiichi Tsukata <eiichi.tsukata.xh@hitachi.com>"
__copyright__ = "Copyright (C) 2013 Hitachi, Ltd."
__license__ = "GPL version 2 or (at your option) any later version"
__maintainer__ = "Stefan Hajnoczi"
__email__ = "stefanha@redhat.com"
from tracetool import out
PUBLIC = True
def generate_h_begin(events):
    """Emit the header includes required by all ftrace backend stubs.

    Called once at the top of the generated trace header, before the
    per-event bodies produced by generate_h().
    """
    out('#include "trace/ftrace.h"',
        '#include "trace/control.h"',
        '')
def generate_h(event):
    """Emit the C body of one trace-event stub for the ftrace backend.

    The generated code formats the event name and its arguments into a
    bounded buffer and writes it to the ftrace trace_marker file
    descriptor, but only when the event is enabled at run time.
    """
    argnames = ", ".join(event.args.names())
    if len(event.args) > 0:
        # Leading comma so the argument names append cleanly after the
        # format string inside the generated snprintf() call.
        argnames = ", " + argnames
    # The write() return value is captured into an unused variable to
    # silence warn_unused_result; trlen is clamped to the buffer size.
    out(' {',
        ' char ftrace_buf[MAX_TRACE_STRLEN];',
        ' int unused __attribute__ ((unused));',
        ' int trlen;',
        ' if (trace_event_get_state(%(event_id)s)) {',
        ' trlen = snprintf(ftrace_buf, MAX_TRACE_STRLEN,',
        ' "%(name)s " %(fmt)s "\\n" %(argnames)s);',
        ' trlen = MIN(trlen, MAX_TRACE_STRLEN - 1);',
        ' unused = write(trace_marker_fd, ftrace_buf, trlen);',
        ' }',
        ' }',
        name=event.name,
        args=event.args,
        event_id="TRACE_" + event.name.upper(),
        fmt=event.fmt.rstrip("\n"),
        argnames=argnames)
| gpl-2.0 |
gurneyalex/purchase-workflow | __unported__/purchase_landed_costs/product.py | 8 | 3004 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-2013 Camptocamp (<http://www.camptocamp.com>)
# Authors: Ferdinand Gasauer, Joel Grand-Guillaume
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
from openerp.tools.translate import _
class product_template(orm.Model):
    """Add the landed-costs distribution type to product templates.

    'landed_cost_type' says how a landed-cost product spreads its amount
    over the lines of a purchase order or picking.
    """
    _inherit = "product.template"

    _columns = {
        'landed_cost_type': fields.selection(
            [('value', 'Value'),
             ('per_unit', 'Quantity'),
             ('none', 'None')],
            'Distribution Type',
            help="Used if this product is landed costs: "
                 "If landed costs are defined for purchase orders or pickings, "
                 "this indicates how the costs are distributed to the lines"),
    }

    _defaults = {
        # Guard against a None context: the ORM may evaluate defaults
        # without a context dict, and None.get(...) would raise
        # AttributeError.
        'landed_cost_type':
            lambda self, cr, uid, context: (context or {}).get('landed_cost_type'),
    }
class product_product(orm.Model):
    _inherit = "product.product"

    def _choose_exp_account_from(self, cr, uid, product, fiscal_position=False,
                                 context=None):
        """ Method to compute the expense account to chose based on product and
        fiscal position.

        Used in invoice creation and on_change of landed costs.
        Taken from method : _choose_account_from_po_line of purchase.py in
        purchase module.

        :param product: browse record of the product (or falsy when no
            product is set on the line)
        :param fiscal_position: browse record (or False) used to remap
            the account, e.g. for foreign trade
        :return: id of the expense account to use
        """
        fiscal_obj = self.pool.get('account.fiscal.position')
        property_obj = self.pool.get('ir.property')
        if product:
            # Product-level expense account takes precedence...
            acc_id = product.property_account_expense.id
            if not acc_id:
                # ...then fall back to the product category's account.
                acc_id = product.categ_id.property_account_expense_categ.id
            if not acc_id:
                raise orm.except_orm(
                    _('Error!'),
                    _('Define expense account for this company: "%s" (id:%d).')
                    % (product.name, product.id,))
        else:
            # No product on the line: use the company-wide default
            # expense account property for product categories.
            acc_id = property_obj.get(cr, uid,
                                      'property_account_expense_categ',
                                      'product.category').id
        # Remap through the fiscal position, if any.
        return fiscal_obj.map_account(cr, uid, fiscal_position, acc_id)
| agpl-3.0 |
AlbertoPeon/invenio | modules/bibcirculation/lib/bibcirculation.py | 7 | 31144 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2008, 2009, 2010, 2011, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
Invenio Bibcirculation User.
When applicable, methods should be renamed, refactored and
appropriate documentation added.
"""
__revision__ = "$Id$"
import datetime, time
# Invenio imports
from invenio.config import \
CFG_SITE_LANG, \
CFG_CERN_SITE, \
CFG_SITE_URL
from invenio.webuser import collect_user_info
from invenio.mailutils import send_email
from invenio.messages import gettext_set_language
from invenio.bibrecord import record_get_field_value
from invenio.search_engine import get_record
# Bibcirculation imports
import invenio.bibcirculation_dblayer as db
from invenio.bibcirculationadminlib import load_template
from invenio.bibcirculation_utils import book_title_from_MARC, \
book_information_from_MARC, \
create_ill_record, \
tag_all_requests_as_done, \
generate_tmp_barcode, \
generate_new_due_date, \
update_requests_statuses, \
search_user
from invenio.bibcirculation_cern_ldap import get_user_info_from_ldap
from invenio.bibcirculation_config import CFG_BIBCIRCULATION_LIBRARIAN_EMAIL, \
CFG_BIBCIRCULATION_LOANS_EMAIL, \
CFG_BIBCIRCULATION_ITEM_STATUS_UNDER_REVIEW, \
CFG_BIBCIRCULATION_REQUEST_STATUS_PENDING, \
CFG_BIBCIRCULATION_REQUEST_STATUS_WAITING, \
CFG_BIBCIRCULATION_REQUEST_STATUS_PROPOSED, \
CFG_BIBCIRCULATION_ILL_STATUS_NEW, \
CFG_BIBCIRCULATION_PROPOSAL_STATUS_NEW, \
AMZ_BOOK_PUBLICATION_DATE_TAG, \
CFG_BIBCIRCULATION_DEFAULT_LIBRARY_ID
import invenio.template
bc_templates = invenio.template.load('bibcirculation')
def perform_borrower_loans(uid, barcode, borrower_id,
                           request_id, action, ln=CFG_SITE_LANG):
    """
    Display all the loans and the requests of a given borrower, optionally
    performing an action first (renew one loan, renew all, cancel request).

    @param uid: invenio user id of the authenticated user
    @param barcode: identify the item. Primary key of crcITEM.
    @type barcode: string
    @param borrower_id: identify the borrower. Primary key of crcBORROWER.
    @type borrower_id: int
    @param request_id: identify the request: Primary key of crcLOANREQUEST
    @type request_id: int
    @param action: one of 'renew', 'cancel', 'renew_all' (anything else
        just displays the page)
    @return body(html)
    """
    _ = gettext_set_language(ln)
    infos = []

    # The borrower_id argument is deliberately overridden: always resolve
    # the borrower from the authenticated user, so one user cannot act on
    # another borrower's loans.
    borrower_id = db.get_borrower_id_by_email(db.get_invenio_user_email(uid))
    new_due_date = generate_new_due_date(30)

    # Renew a single loan.
    if action == 'renew':
        recid = db.get_id_bibrec(barcode)
        item_description = db.get_item_description(barcode)
        queue = db.get_queue_request(recid, item_description)

        # Refuse the renewal when somebody else is first in the queue.
        if len(queue) != 0 and queue[0][0] != borrower_id:
            message = "It is not possible to renew your loan for %(x_strong_tag_open)s%(x_title)s%(x_strong_tag_close)s" % {'x_title': book_title_from_MARC(recid), 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'}
            message += ' ' + _("Another user is waiting for this book.")
            infos.append(message)
        else:
            loan_id = db.get_current_loan_id(barcode)
            db.renew_loan(loan_id, new_due_date)
            #update_status_if_expired(loan_id)
            tag_all_requests_as_done(barcode, borrower_id)
            infos.append(_("Your loan has been renewed with success."))

    # Cancel a hold request.
    elif action == 'cancel':
        db.cancel_request(request_id)
        barcode_requested = db.get_requested_barcode(request_id)
        update_requests_statuses(barcode_requested)

    # Renew every current loan of the borrower.
    elif action == 'renew_all':
        list_of_barcodes = db.get_borrower_loans_barcodes(borrower_id)
        for bc in list_of_barcodes:
            bc_recid = db.get_id_bibrec(bc)
            item_description = db.get_item_description(bc)
            queue = db.get_queue_request(bc_recid, item_description)

            # Check if another borrower is waiting for this copy.
            if len(queue) != 0 and queue[0][0] != borrower_id:
                message = "It is not possible to renew your loan for %(x_strong_tag_open)s%(x_title)s%(x_strong_tag_close)s" % {'x_title': book_title_from_MARC(bc_recid), 'x_strong_tag_open': '<strong>', 'x_strong_tag_close': '</strong>'}
                message += ' ' + _("Another user is waiting for this book.")
                infos.append(message)
            else:
                loan_id = db.get_current_loan_id(bc)
                db.renew_loan(loan_id, new_due_date)
                #update_status_if_expired(loan_id)
                # BUGFIX: tag the requests of the barcode being renewed in
                # this iteration ('bc'), not the unrelated 'barcode'
                # function argument.
                tag_all_requests_as_done(bc, borrower_id)
        if infos == []:
            infos.append(_("All loans have been renewed with success."))

    loans = db.get_borrower_loans(borrower_id)
    requests = db.get_borrower_requests(borrower_id)
    proposals = db.get_borrower_proposals(borrower_id)

    body = bc_templates.tmpl_yourloans(loans=loans, requests=requests,
                                       proposals=proposals,
                                       borrower_id=borrower_id,
                                       infos=infos, ln=ln)
    return body
def perform_loanshistoricaloverview(uid, ln=CFG_SITE_LANG):
    """
    Render the historical overview of all past loans of a user.

    @param uid: invenio user id
    @param ln: language of the page
    @return body(html)
    """
    email = db.get_invenio_user_email(uid)
    past_loans = db.get_historical_overview(db.get_borrower_id_by_email(email))
    return bc_templates.tmpl_loanshistoricaloverview(result=past_loans, ln=ln)
def perform_get_holdings_information(recid, req, action="borrowal", ln=CFG_SITE_LANG):
    """
    Display all the copies of an item. If the parameter action is 'proposal',
    display the acquisition-proposal information to the user instead.

    @param recid: identify the record. Primary key of bibrec.
    @type recid: int
    @param req: Apache request object, forwarded to the holdings template.
    @param action: Specifies whether the current record is put up to solicit
        acquisition proposals (if "proposal") or not ("borrowal").
    @type action: string
    @return body(html)
    """
    _ = gettext_set_language(ln)

    if action == "proposal":
        # Publication date is read from the MARC field configured for
        # Amazon imports.
        tag = AMZ_BOOK_PUBLICATION_DATE_TAG
        publication_date = record_get_field_value(get_record(recid), tag[:3],
                                                  ind1=tag[3], ind2=tag[4],
                                                  code=tag[5])
        msg = ''
        if publication_date:
            cur_date = datetime.date.today()
            try:
                pub_date = time.strptime(publication_date, '%d %b %Y')
                pub_date = datetime.date(pub_date[0], pub_date[1], pub_date[2])
                if cur_date < pub_date:
                    # Not published yet: show the expected date instead.
                    msg += _("The publication date of this book is %s.") % (publication_date)
                    msg += "<br /><br />"
                else:
                    msg += _("This book has no copies in the library. ")
            except (ValueError, TypeError):
                # Was a bare 'except:': only date-parsing failures of the
                # MARC value are expected here.
                msg += _("This book has no copies in the library. ")
        msg += _("If you think this book is interesting, suggest it and tell us why you consider this "
                 "book is important. The library will consider your opinion and if we decide to buy the "
                 "book, we will issue a loan for you as soon as it arrives and send it by internal mail.")
        # BUGFIX: was '<br \>' which renders a literal backslash.
        msg += "<br /><br />"
        msg += _("In case we decide not to buy the book, we will offer you an interlibrary loan")
        body = bc_templates.tmpl_book_proposal_information(recid, msg, ln=ln)
    else:
        holdings_information = db.get_holdings_information(recid, False)
        body = bc_templates.tmpl_holdings_information(recid=recid,
                                                      req=req,
                                                      holdings_info=holdings_information,
                                                      ln=ln)
    return body
def perform_new_request(recid, barcode, action="borrowal", ln=CFG_SITE_LANG):
    """
    Render the hold-request form to be filled in by the user.

    @param recid: identify the record. Primary key of bibrec.
    @type recid: int
    @param barcode: identify the item. Primary key of crcITEM.
    @type barcode: string
    @param action: "borrowal" or "proposal".
    @return request form (html)
    """
    return bc_templates.tmpl_new_request(recid=recid, barcode=barcode,
                                         action=action, ln=ln)
def perform_book_proposal_send(uid, recid, period_from, period_to,
                               remarks, ln=CFG_SITE_LANG):
    """
    The subfield containing the information about the source of importation
    of the record acts as the marker for the records put up for acquisition
    proposals.

    Register the user's book proposal, his period of interest and his remarks
    in the 'ILLREQUEST' table. Add a new 'dummy' copy for the proposed book.
    Create a loan(hold) request on behalf of the user for that copy and send
    a confirmation e-mail to her/him.

    @param uid: invenio user id of the proposer
    @param recid: identify the record. Primary key of bibrec.
    @param period_from, period_to: the user's period of interest (dates)
    @param remarks: free-text justification entered by the user
    @return confirmation/failure page (html), or a plain message string
        when the item already has copies
    """
    _ = gettext_set_language(ln)
    user = collect_user_info(uid)

    # Resolve the borrower: at CERN by CCID, elsewhere by e-mail.
    if CFG_CERN_SITE:
        try:
            borrower = search_user('ccid', user['external_personid'])
        except:
            # Best-effort lookup: any failure is treated as "not found".
            borrower = ()
    else:
        borrower = search_user('email', user['email'])

    if borrower != ():
        if not db.has_copies(recid):
            # Record the proposal as an ILL request and attach a dummy
            # copy so a hold request can be placed against it.
            tmp_barcode = generate_tmp_barcode()
            ill_register_request_with_recid(recid, uid, period_from, period_to, remarks,
                                            conditions='register_acquisition_suggestion',
                                            only_edition='False', barcode=tmp_barcode, ln=CFG_SITE_LANG)
            db.add_new_copy(tmp_barcode, recid, library_id=CFG_BIBCIRCULATION_DEFAULT_LIBRARY_ID,
                            collection='', location='',
                            description=_("This book was suggested for acquisition"), loan_period='',
                            status=CFG_BIBCIRCULATION_ITEM_STATUS_UNDER_REVIEW, expected_arrival_date='')
            # Invalidate the cached brief format so the new copy shows up.
            db.delete_brief_format_cache(recid)
            return perform_new_request_send_message(uid, recid, period_from, period_to, tmp_barcode,
                                                    status=CFG_BIBCIRCULATION_REQUEST_STATUS_PROPOSED,
                                                    mail_subject='Acquisition Suggestion',
                                                    mail_template='proposal_notification',
                                                    mail_remarks=remarks, ln=CFG_SITE_LANG)
        # Proposals only make sense for items without copies.
        return _("This item already has copies.")
    else:
        if CFG_CERN_SITE:
            message = bc_templates.tmpl_message_request_send_fail_cern("Borrower ID not found.")
        else:
            message = bc_templates.tmpl_message_request_send_fail_other("Borrower ID not found.")
        body = bc_templates.tmpl_new_request_send(message=message, ln=ln)
        return body
def perform_new_request_send(uid, recid, period_from, period_to,
                             barcode, ln=CFG_SITE_LANG):
    """
    Register a new hold request for one copy of a record and notify the
    user (and, when pending, the librarians).

    The request is created as PENDING only when no other request exists
    for any sibling copy and at least one copy is not on loan; otherwise
    it is created as WAITING.

    @param recid: recID - Invenio record identifier
    @param barcode: barcode of the requested copy
    @param ln: language of the page
    """
    description = db.get_item_description(barcode)
    sibling_barcodes = db.get_barcodes(recid, description)

    # Inspect every sibling copy: how many requests exist, and whether
    # any copy is currently available (not on loan).
    total_requests = 0
    available_copies = []
    for sibling in sibling_barcodes:
        total_requests += db.get_number_requests_per_copy(sibling)
        if db.is_item_on_loan(sibling) is None:
            available_copies.append(sibling)

    if total_requests == 0 and available_copies:
        status = CFG_BIBCIRCULATION_REQUEST_STATUS_PENDING
    else:
        status = CFG_BIBCIRCULATION_REQUEST_STATUS_WAITING

    return perform_new_request_send_message(uid, recid, period_from, period_to,
                                            barcode, status,
                                            mail_subject='New request',
                                            mail_template='notification',
                                            mail_remarks='', ln=ln)
def perform_new_request_send_message(uid, recid, period_from, period_to, barcode,
                                     status, mail_subject, mail_template,
                                     mail_remarks='', ln=CFG_SITE_LANG):
    """
    Create a hold request (or proposal) row, e-mail the user a summary,
    notify the librarians when the request is pending, and render the
    confirmation page.

    @param status: one of the CFG_BIBCIRCULATION_REQUEST_STATUS_* values;
        PROPOSED requests use the shorter proposal mail template.
    @param mail_subject: subject line of the notification e-mail
    @param mail_template: name of the mail template to load
    @return body(html)
    """
    user = collect_user_info(uid)

    # Resolve the borrower: at CERN by CCID, elsewhere by e-mail.
    if CFG_CERN_SITE:
        try:
            borrower = search_user('ccid', user['external_personid'])
        except:
            # Best-effort lookup: any failure is treated as "not found".
            borrower = ()
    else:
        borrower = search_user('email', user['email'])

    if borrower != ():
        borrower_id = borrower[0][0]

        # Refuse duplicate requests for the same document by the same user.
        if db.is_doc_already_requested(recid, barcode, borrower_id):
            message = bc_templates.tmpl_message_send_already_requested()
            return bc_templates.tmpl_new_request_send(message=message, ln=ln)

        borrower_details = db.get_borrower_details(borrower_id)
        (_id, ccid, name, email, _phone, address, mailbox) = borrower_details

        (title, year, author,
         isbn, publisher) = book_information_from_MARC(recid)

        req_id = db.new_hold_request(borrower_id, recid, barcode,
                                     period_from, period_to, status)

        location = '-'
        library = ''
        request_date = ''

        # Proposals have no physical copy yet, so no location details.
        if status != CFG_BIBCIRCULATION_REQUEST_STATUS_PROPOSED:
            details = db.get_loan_request_details(req_id)
            if details:
                library = details[3]
                location = details[4]
                request_date = details[7]

        message_template = load_template(mail_template)

        # A message to be sent to the user detailing his loan request
        # or his new book proposal.
        if status == CFG_BIBCIRCULATION_REQUEST_STATUS_PROPOSED:
            message_for_user = message_template % (title)
        else:
            link_to_holdings_details = CFG_SITE_URL + \
                '/record/%s/holdings' % str(recid)
            message_for_user = message_template % (name, ccid, email, address,
                                                   mailbox, title, author, publisher,
                                                   year, isbn, location, library,
                                                   link_to_holdings_details, request_date)

        send_email(fromaddr=CFG_BIBCIRCULATION_LOANS_EMAIL,
                   toaddr=email,
                   subject=mail_subject,
                   content=message_for_user,
                   header='',
                   footer='',
                   attempt_times=1,
                   attempt_sleeptime=10
                   )

        if status == CFG_BIBCIRCULATION_REQUEST_STATUS_PENDING:
            # A message to be sent to the librarian about the pending status.
            link_to_item_request_details = CFG_SITE_URL + \
                "/admin2/bibcirculation/get_item_requests_details?ln=%s&recid=%s" \
                % (ln, str(recid))
            message_for_librarian = message_template % (name, ccid, email, address,
                                                        mailbox, title, author, publisher,
                                                        year, isbn, location, library,
                                                        link_to_item_request_details,
                                                        request_date)
            send_email(fromaddr=CFG_BIBCIRCULATION_LIBRARIAN_EMAIL,
                       toaddr=CFG_BIBCIRCULATION_LOANS_EMAIL,
                       subject=mail_subject,
                       content=message_for_librarian,
                       header='',
                       footer='',
                       attempt_times=1,
                       attempt_sleeptime=10
                       )

        # Pick the confirmation page variant (CERN vs generic,
        # proposal vs plain request).
        if CFG_CERN_SITE:
            if status == CFG_BIBCIRCULATION_REQUEST_STATUS_PROPOSED:
                message = bc_templates.tmpl_message_proposal_send_ok_cern()
            else:
                message = bc_templates.tmpl_message_request_send_ok_cern()
        else:
            if status == CFG_BIBCIRCULATION_REQUEST_STATUS_PROPOSED:
                message = bc_templates.tmpl_message_proposal_send_ok_other()
            else:
                message = bc_templates.tmpl_message_request_send_ok_other()
    else:
        if CFG_CERN_SITE:
            message = bc_templates.tmpl_message_request_send_fail_cern("Borrower ID not found")
        else:
            message = bc_templates.tmpl_message_request_send_fail_other("Borrower ID not found")

    body = bc_templates.tmpl_new_request_send(message=message, ln=ln)
    return body
def display_ill_form(ln=CFG_SITE_LANG):
    """
    Render the blank inter-library-loan (ILL) request form.

    @param ln: language of the page
    @return body(html)
    """
    return bc_templates.tmpl_display_ill_form(infos=[], ln=ln)
def ill_request_with_recid(recid, ln=CFG_SITE_LANG):
    """
    Render the ILL request form pre-filled for an existing record.

    @param recid: identify the record. Primary key of bibrec.
    @type recid: int
    @param ln: language of the page
    @return body(html)
    """
    return bc_templates.tmpl_ill_request_with_recid(recid=recid,
                                                    infos=[],
                                                    ln=ln)
def ill_register_request_with_recid(recid, uid, period_of_interest_from,
                                    period_of_interest_to, additional_comments,
                                    conditions, only_edition, barcode='',
                                    ln=CFG_SITE_LANG):
    """
    Register a new ILL request (or a book acquisition proposal) for an
    existing record.

    @param recid: identify the record. Primary key of bibrec.
    @type recid: int
    @param uid: user id
    @type uid: int
    @param period_of_interest_from: period of interest - from(date)
    @type period_of_interest_from: string
    @param period_of_interest_to: period of interest - to(date)
    @type period_of_interest_to: string
    @param additional_comments: free-text remarks entered by the user
    @param conditions: truthy when the user accepted the ILL conditions;
        the special value 'register_acquisition_suggestion' records the
        entry as a book proposal instead of a plain ILL request
    @param only_edition: user wants only the given edition
    @param barcode: barcode of the (dummy) copy, used for proposals
    @return confirmation/failure page (html)
    """
    _ = gettext_set_language(ln)

    # Serialized dictionary stored as-is in the ILL request's item_info.
    book_info = "{'recid': " + str(recid) + "}"

    user = collect_user_info(uid)
    borrower_id = db.get_borrower_id_by_email(user['email'])

    if borrower_id is None:
        # Unknown borrower: at CERN, try to auto-register from LDAP.
        if CFG_CERN_SITE == 1:
            result = get_user_info_from_ldap(email=user['email'])
            try:
                name = result['cn'][0]
            except KeyError:
                name = None
            try:
                email = result['mail'][0]
            except KeyError:
                email = None
            try:
                phone = result['telephoneNumber'][0]
            except KeyError:
                phone = None
            try:
                address = result['physicalDeliveryOfficeName'][0]
            except KeyError:
                address = None
            try:
                mailbox = result['postOfficeBox'][0]
            except KeyError:
                mailbox = None
            try:
                ccid = result['employeeID'][0]
            except KeyError:
                ccid = ''
            if address is not None:
                db.new_borrower(ccid, name, email, phone, address, mailbox, '')
            else:
                # NOTE(review): this failure message is built but not
                # returned here; execution falls through to the address
                # check below — confirm this is intended.
                message = bc_templates.tmpl_message_request_send_fail_cern("Office address not available.")
        else:
            message = bc_templates.tmpl_message_request_send_fail_other("Office address not available.")
            return bc_templates.tmpl_ill_register_request_with_recid(
                message=message,
                ln=ln)

    address = db.get_borrower_address(user['email'])
    if not address:
        # Known borrower without an address on file; at CERN, try LDAP.
        if CFG_CERN_SITE == 1:
            email = user['email']
            result = get_user_info_from_ldap(email)
            try:
                address = result['physicalDeliveryOfficeName'][0]
            except KeyError:
                address = None
            if address is not None:
                db.add_borrower_address(address, email)
            else:
                # NOTE(review): as above, 'message' is assigned but the
                # function continues — confirm a 'return' is not missing.
                message = bc_templates.tmpl_message_request_send_fail_cern("Office address not available.")
        else:
            message = bc_templates.tmpl_message_request_send_fail_other("Office address not available.")
            return bc_templates.tmpl_ill_register_request_with_recid(
                message=message,
                ln=ln)

    if not conditions:
        # The user did not tick the ILL conditions checkbox.
        infos = []
        infos.append(_("You didn't accept the ILL conditions."))
        return bc_templates.tmpl_ill_request_with_recid(recid,
                                                        infos=infos,
                                                        ln=ln)
    elif conditions == 'register_acquisition_suggestion':
        # This ILL request entry is a book proposal.
        db.ill_register_request(book_info, borrower_id,
                                period_of_interest_from, period_of_interest_to,
                                CFG_BIBCIRCULATION_PROPOSAL_STATUS_NEW,
                                additional_comments,
                                only_edition or 'False', 'proposal-book', barcode=barcode)
    else:
        # Plain ILL book request.
        db.ill_register_request(book_info, borrower_id,
                                period_of_interest_from, period_of_interest_to,
                                CFG_BIBCIRCULATION_ILL_STATUS_NEW,
                                additional_comments,
                                only_edition or 'False', 'book', barcode=barcode)

    if CFG_CERN_SITE == 1:
        message = bc_templates.tmpl_message_request_send_ok_cern()
    else:
        message = bc_templates.tmpl_message_request_send_ok_other()

    #Notify librarian about new ILL request.
    send_email(fromaddr=CFG_BIBCIRCULATION_LIBRARIAN_EMAIL,
               toaddr=CFG_BIBCIRCULATION_LOANS_EMAIL,
               subject='ILL request for books confirmation',
               content='',
               #hold_request_mail(recid=recid, borrower_id=borrower_id),
               attempt_times=1,
               attempt_sleeptime=10)

    return bc_templates.tmpl_ill_register_request_with_recid(
        message=message,
        ln=ln)
def ill_register_request(uid, title, authors, place, publisher, year, edition,
                         isbn, period_of_interest_from, period_of_interest_to,
                         additional_comments, conditions, only_edition, request_type,
                         barcode='', ln=CFG_SITE_LANG):
    """
    Register new ILL request. Create new record (collection: ILL Books)

    @param uid: user id
    @type uid: int
    @param authors: book's authors
    @type authors: string
    @param place: place of publication
    @type place: string
    @param publisher: book's publisher
    @type publisher: string
    @param year: year of publication
    @type year: string
    @param edition: book's edition
    @type edition: string
    @param isbn: book's isbn
    @type isbn: string
    @param period_of_interest_from: period of interest - from(date)
    @type period_of_interest_from: string
    @param period_of_interest_to: period of interest - to(date)
    @type period_of_interest_to: string
    @param additional_comments: comments given by the user
    @type additional_comments: string
    @param conditions: ILL conditions
    @type conditions: boolean
    @param only_edition: borrower wants only the given edition
    @type only_edition: boolean
    @param request_type: kind of ILL request (e.g. 'book')
    @return rendered page (html)
    """
    _ = gettext_set_language(ln)

    # Create a bibliographic record for the (external) requested item.
    item_info = (title, authors, place, publisher, year, edition, isbn)
    create_ill_record(item_info)

    book_info = {'title': title, 'authors': authors, 'place': place,
                 'publisher': publisher, 'year': year, 'edition': edition,
                 'isbn': isbn}

    user = collect_user_info(uid)
    borrower_id = db.get_borrower_id_by_email(user['email'])

    #Check if borrower is on DB.
    if borrower_id != 0:
        address = db.get_borrower_address(user['email'])
        #Check if borrower has an address.
        if address != 0:
            #Check if borrower has accepted ILL conditions.
            if conditions:
                #Register ILL request on crcILLREQUEST.
                db.ill_register_request(book_info, borrower_id,
                                        period_of_interest_from,
                                        period_of_interest_to,
                                        CFG_BIBCIRCULATION_ILL_STATUS_NEW,
                                        additional_comments,
                                        only_edition or 'False', request_type,
                                        budget_code='', barcode=barcode)
                #Display confirmation message.
                message = _("Your ILL request has been registered and the " \
                            "document will be sent to you via internal mail.")
                #Notify librarian about new ILL request.
                send_email(fromaddr=CFG_BIBCIRCULATION_LIBRARIAN_EMAIL,
                           toaddr=CFG_BIBCIRCULATION_LOANS_EMAIL,
                           subject=_('ILL request for books confirmation'),
                           content="",
                           attempt_times=1,
                           attempt_sleeptime=10
                           )
            #Borrower did not accept ILL conditions.
            else:
                infos = []
                infos.append(_("You didn't accept the ILL conditions."))
                # NOTE(review): 'body' is overwritten by the final
                # tmpl__with_recid call below, and 'message' is unbound on
                # this path — confirm an early 'return body' is not missing.
                body = bc_templates.tmpl_display_ill_form(infos=infos, ln=ln)
        #Borrower doesn't have an address.
        else:
            #If BibCirculation at CERN, use LDAP.
            if CFG_CERN_SITE == 1:
                email = user['email']
                result = get_user_info_from_ldap(email)
                try:
                    ldap_address = result['physicalDeliveryOfficeName'][0]
                except KeyError:
                    ldap_address = None
                # verify address
                if ldap_address is not None:
                    db.add_borrower_address(ldap_address, email)
                    db.ill_register_request(book_info, borrower_id,
                                            period_of_interest_from,
                                            period_of_interest_to,
                                            CFG_BIBCIRCULATION_ILL_STATUS_NEW,
                                            additional_comments,
                                            only_edition or 'False',
                                            request_type, budget_code='', barcode=barcode)
                    message = _("Your ILL request has been registered and" \
                                " the document will be sent to you via" \
                                " internal mail.")
                    send_email(fromaddr=CFG_BIBCIRCULATION_LIBRARIAN_EMAIL,
                               toaddr=CFG_BIBCIRCULATION_LOANS_EMAIL,
                               subject=_('ILL request for books confirmation'),
                               content="",
                               attempt_times=1,
                               attempt_sleeptime=10
                               )
                else:
                    message = _("It is not possible to validate your request.")
                    message += ' ' + _("Your office address is not available.")
                    message += ' ' + _("Please contact %(contact_email)s") % \
                        {'contact_email': CFG_BIBCIRCULATION_LIBRARIAN_EMAIL}
            # NOTE(review): there is no non-CERN branch here, so 'message'
            # stays unbound for non-CERN sites on this path.
    else:
        # Borrower unknown: get information from CERN LDAP
        if CFG_CERN_SITE == 1:
            result = get_user_info_from_ldap(email=user['email'])
            try:
                name = result['cn'][0]
            except KeyError:
                name = None
            try:
                email = result['mail'][0]
            except KeyError:
                email = None
            try:
                phone = result['telephoneNumber'][0]
            except KeyError:
                phone = None
            try:
                address = result['physicalDeliveryOfficeName'][0]
            except KeyError:
                address = None
            try:
                mailbox = result['postOfficeBox'][0]
            except KeyError:
                mailbox = None
            try:
                ccid = result['employeeID'][0]
            except KeyError:
                ccid = ''
            # verify address
            if address is not None:
                db.new_borrower(ccid, name, email, phone, address, mailbox, '')
                borrower_id = db.get_borrower_id_by_email(email)
                db.ill_register_request(book_info, borrower_id,
                                        period_of_interest_from,
                                        period_of_interest_to,
                                        CFG_BIBCIRCULATION_ILL_STATUS_NEW,
                                        additional_comments,
                                        only_edition or 'False',
                                        request_type, budget_code='', barcode=barcode)
                message = _("Your ILL request has been registered and" \
                            " the document will be sent to you via" \
                            " internal mail.")
                send_email(fromaddr=CFG_BIBCIRCULATION_LIBRARIAN_EMAIL,
                           toaddr=CFG_BIBCIRCULATION_LOANS_EMAIL,
                           subject='ILL request for books confirmation',
                           content="",
                           attempt_times=1,
                           attempt_sleeptime=10
                           )
            else:
                message = _("It is not possible to validate your request.")
                message += ' ' + _("Your office address is not available.")
                message += ' ' + _("Please contact %(contact_email)s") % \
                    {'contact_email': CFG_BIBCIRCULATION_LIBRARIAN_EMAIL}

    # NOTE(review): 'tmpl__with_recid' looks like a truncated template name
    # (cf. tmpl_ill_register_request_with_recid) — verify it exists on the
    # template class.
    body = bc_templates.tmpl__with_recid(message=message,
                                         ln=ln)
    return body
| gpl-2.0 |
slevenhagen/odoo-npg | addons/hr_evaluation/report/hr_evaluation_report.py | 313 | 4181 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import tools
from openerp.osv import fields, osv
class hr_evaluation_report(osv.Model):
    """Appraisal statistics: a read-only reporting model backed by a SQL view.

    Each row aggregates the interview requests of one evaluation (grouped on
    the evaluation's fields); the delay columns are computed in SQL.
    """
    _name = "hr.evaluation.report"
    _description = "Evaluations Statistics"
    # The ORM must not create a table for this model: init() builds the
    # backing SQL view instead.
    _auto = False
    _columns = {
        'create_date': fields.datetime('Create Date', readonly=True),
        # Average days between evaluation creation and today.
        'delay_date': fields.float('Delay to Start', digits=(16, 2), readonly=True),
        # Average days between the evaluation deadline and today.
        'overpass_delay': fields.float('Overpassed Deadline', digits=(16, 2), readonly=True),
        'deadline': fields.date("Deadline", readonly=True),
        'request_id': fields.many2one('survey.user_input', 'Request ID', readonly=True),
        'closed': fields.date("Close Date", readonly=True), # TDE FIXME master: rename into date_close
        'plan_id': fields.many2one('hr_evaluation.plan', 'Plan', readonly=True),
        'employee_id': fields.many2one('hr.employee', "Employee", readonly=True),
        'rating': fields.selection([
            ('0', 'Significantly bellow expectations'),
            ('1', 'Did not meet expectations'),
            ('2', 'Meet expectations'),
            ('3', 'Exceeds expectations'),
            ('4', 'Significantly exceeds expectations'),
        ], "Overall Rating", readonly=True),
        # Count of interview rows grouped into this report line.
        'nbr': fields.integer('# of Requests', readonly=True), # TDE FIXME master: rename into nbr_requests
        'state': fields.selection([
            ('draft', 'Draft'),
            ('wait', 'Plan In Progress'),
            ('progress', 'Final Validation'),
            ('done', 'Done'),
            ('cancel', 'Cancelled'),
        ], 'Status', readonly=True),
    }
    _order = 'create_date desc'
    # Source models/fields read by the view; lets the framework know which
    # modules must be loaded before (re)building it.
    _depends = {
        'hr.evaluation.interview': ['evaluation_id', 'id', 'request_id'],
        'hr_evaluation.evaluation': [
            'create_date', 'date', 'date_close', 'employee_id', 'plan_id',
            'rating', 'state',
        ],
    }
    def init(self, cr):
        """(Re)create the hr_evaluation_report view at module install/update.

        :param cr: database cursor
        """
        tools.drop_view_if_exists(cr, 'hr_evaluation_report')
        # Interviews (l) are LEFT JOINed to their evaluation (s) and grouped
        # by the evaluation columns; delays are expressed in days.
        cr.execute("""
            create or replace view hr_evaluation_report as (
                select
                    min(l.id) as id,
                    s.create_date as create_date,
                    s.employee_id,
                    l.request_id,
                    s.plan_id,
                    s.rating,
                    s.date as deadline,
                    s.date_close as closed,
                    count(l.*) as nbr,
                    s.state,
                    avg(extract('epoch' from age(s.create_date,CURRENT_DATE)))/(3600*24) as delay_date,
                    avg(extract('epoch' from age(s.date,CURRENT_DATE)))/(3600*24) as overpass_delay
                from
                    hr_evaluation_interview l
                LEFT JOIN
                    hr_evaluation_evaluation s on (s.id=l.evaluation_id)
                GROUP BY
                    s.create_date,
                    s.state,
                    s.employee_id,
                    s.date,
                    s.date_close,
                    l.request_id,
                    s.rating,
                    s.plan_id
            )
            """)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
redhat-openstack/neutron | neutron/tests/unit/cisco/n1kv/test_n1kv_db.py | 19 | 43201 | # Copyright 2013 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from six import moves
from sqlalchemy.orm import exc as s_exc
from testtools import matchers
from neutron.common import exceptions as n_exc
from neutron import context
from neutron.db import api as db
from neutron.db import common_db_mixin
from neutron.plugins.cisco.common import cisco_constants as c_const
from neutron.plugins.cisco.common import cisco_exceptions as c_exc
from neutron.plugins.cisco.db import n1kv_db_v2
from neutron.plugins.cisco.db import n1kv_models_v2
from neutron.tests.unit import test_db_plugin as test_plugin
from neutron.tests.unit import testlib_api
# Physical network names referenced by the VLAN test profiles.
PHYS_NET = 'physnet1'
PHYS_NET_2 = 'physnet2'
# Inclusive segment-id bounds matching the canned profiles below.
VLAN_MIN = 10
VLAN_MAX = 19
VXLAN_MIN = 5000
VXLAN_MAX = 5009
# Segment ranges used to exercise overlap detection on profile creation:
# one overlapping at the low end, one at the high end, one spanning both.
SEGMENT_RANGE = '200-220'
SEGMENT_RANGE_MIN_OVERLAP = '210-230'
SEGMENT_RANGE_MAX_OVERLAP = '190-209'
SEGMENT_RANGE_OVERLAP = '190-230'
# Fake network identifiers (26 characters, stand-ins for UUIDs).
TEST_NETWORK_ID = 'abcdefghijklmnopqrstuvwxyz'
TEST_NETWORK_ID2 = 'abcdefghijklmnopqrstuvwxy2'
TEST_NETWORK_ID3 = 'abcdefghijklmnopqrstuvwxy3'
# Canned network/policy profile payloads for each segment type under test.
TEST_NETWORK_PROFILE = {'name': 'test_profile',
                        'segment_type': c_const.NETWORK_TYPE_VLAN,
                        'physical_network': 'physnet1',
                        'segment_range': '10-19'}
TEST_NETWORK_PROFILE_2 = {'name': 'test_profile_2',
                          'segment_type': c_const.NETWORK_TYPE_VLAN,
                          'physical_network': 'physnet1',
                          'segment_range': SEGMENT_RANGE}
TEST_NETWORK_PROFILE_VXLAN = {'name': 'test_profile',
                              'segment_type': c_const.NETWORK_TYPE_OVERLAY,
                              'sub_type': c_const.NETWORK_SUBTYPE_NATIVE_VXLAN,
                              'segment_range': '5000-5009',
                              'multicast_ip_range': '239.0.0.70-239.0.0.80'}
TEST_POLICY_PROFILE = {'id': '4a417990-76fb-11e2-bcfd-0800200c9a66',
                       'name': 'test_policy_profile'}
TEST_NETWORK_PROFILE_MULTI_SEGMENT = {'name': 'test_profile',
                                      'segment_type':
                                      c_const.NETWORK_TYPE_MULTI_SEGMENT}
TEST_NETWORK_PROFILE_VLAN_TRUNK = {'name': 'test_profile',
                                   'segment_type': c_const.NETWORK_TYPE_TRUNK,
                                   'sub_type': c_const.NETWORK_TYPE_VLAN}
TEST_NETWORK_PROFILE_VXLAN_TRUNK = {'name': 'test_profile',
                                    'segment_type': c_const.NETWORK_TYPE_TRUNK,
                                    'sub_type': c_const.NETWORK_TYPE_OVERLAY}
def _create_test_network_profile_if_not_there(session,
                                              profile=TEST_NETWORK_PROFILE):
    """Return the network profile named in *profile*, creating it on demand."""
    lookup = session.query(n1kv_models_v2.NetworkProfile)
    try:
        existing = lookup.filter_by(name=profile['name']).one()
    except s_exc.NoResultFound:
        # Not persisted yet: create it through the plugin DB layer.
        existing = n1kv_db_v2.create_network_profile(session, profile)
    return existing
def _create_test_policy_profile_if_not_there(session,
                                             profile=TEST_POLICY_PROFILE):
    """Return the policy profile named in *profile*, creating it on demand."""
    lookup = session.query(n1kv_models_v2.PolicyProfile)
    try:
        existing = lookup.filter_by(name=profile['name']).one()
    except s_exc.NoResultFound:
        # Policy profiles are created without an explicit session argument.
        existing = n1kv_db_v2.create_policy_profile(profile)
    return existing
class VlanAllocationsTest(testlib_api.SqlTestCase):
    """Tests for VLAN allocation bookkeeping synced from a network profile."""

    def setUp(self):
        super(VlanAllocationsTest, self).setUp()
        self.session = db.get_session()
        self.net_p = _create_test_network_profile_if_not_there(self.session)
        n1kv_db_v2.sync_vlan_allocations(self.session, self.net_p)

    def test_sync_vlan_allocations_outside_segment_range(self):
        """IDs outside the range, or on an unknown physnet, are not tracked."""
        self.assertRaises(c_exc.VlanIDNotFound,
                          n1kv_db_v2.get_vlan_allocation,
                          self.session,
                          PHYS_NET,
                          VLAN_MIN - 1)
        self.assertRaises(c_exc.VlanIDNotFound,
                          n1kv_db_v2.get_vlan_allocation,
                          self.session,
                          PHYS_NET,
                          VLAN_MAX + 1)
        # physnet2 has no profile, so nothing is tracked there at all.
        # (The original test issued the VLAN_MIN + 20 check twice verbatim;
        # the duplicate assertion has been removed.)
        self.assertRaises(c_exc.VlanIDNotFound,
                          n1kv_db_v2.get_vlan_allocation,
                          self.session,
                          PHYS_NET_2,
                          VLAN_MIN + 20)
        self.assertRaises(c_exc.VlanIDNotFound,
                          n1kv_db_v2.get_vlan_allocation,
                          self.session,
                          PHYS_NET_2,
                          VLAN_MAX + 20)

    def test_sync_vlan_allocations_unallocated_vlans(self):
        """Freshly synced IDs at both range boundaries start unallocated."""
        self.assertFalse(n1kv_db_v2.get_vlan_allocation(self.session,
                                                        PHYS_NET,
                                                        VLAN_MIN).allocated)
        self.assertFalse(n1kv_db_v2.get_vlan_allocation(self.session,
                                                        PHYS_NET,
                                                        VLAN_MIN + 1).
                         allocated)
        self.assertFalse(n1kv_db_v2.get_vlan_allocation(self.session,
                                                        PHYS_NET,
                                                        VLAN_MAX - 1).
                         allocated)
        self.assertFalse(n1kv_db_v2.get_vlan_allocation(self.session,
                                                        PHYS_NET,
                                                        VLAN_MAX).allocated)

    def test_vlan_pool(self):
        """Reserving drains the pool; releasing makes IDs reusable again."""
        vlan_ids = set()
        for x in moves.xrange(VLAN_MIN, VLAN_MAX + 1):
            (physical_network, seg_type,
             vlan_id, m_ip) = n1kv_db_v2.reserve_vlan(self.session, self.net_p)
            self.assertEqual(physical_network, PHYS_NET)
            self.assertThat(vlan_id, matchers.GreaterThan(VLAN_MIN - 1))
            self.assertThat(vlan_id, matchers.LessThan(VLAN_MAX + 1))
            vlan_ids.add(vlan_id)
        # The pool is exhausted now: one more reservation must fail.
        self.assertRaises(n_exc.NoNetworkAvailable,
                          n1kv_db_v2.reserve_vlan,
                          self.session,
                          self.net_p)
        # Releasing a single ID makes exactly one reservation possible again.
        n1kv_db_v2.release_vlan(self.session, PHYS_NET, vlan_ids.pop())
        physical_network, seg_type, vlan_id, m_ip = (n1kv_db_v2.reserve_vlan(
            self.session, self.net_p))
        self.assertEqual(physical_network, PHYS_NET)
        self.assertThat(vlan_id, matchers.GreaterThan(VLAN_MIN - 1))
        self.assertThat(vlan_id, matchers.LessThan(VLAN_MAX + 1))
        vlan_ids.add(vlan_id)
        for vlan_id in vlan_ids:
            n1kv_db_v2.release_vlan(self.session, PHYS_NET, vlan_id)

    def test_specific_vlan_inside_pool(self):
        """An in-range ID can be explicitly reserved exactly once."""
        vlan_id = VLAN_MIN + 5
        self.assertFalse(n1kv_db_v2.get_vlan_allocation(self.session,
                                                        PHYS_NET,
                                                        vlan_id).allocated)
        n1kv_db_v2.reserve_specific_vlan(self.session, PHYS_NET, vlan_id)
        self.assertTrue(n1kv_db_v2.get_vlan_allocation(self.session,
                                                       PHYS_NET,
                                                       vlan_id).allocated)
        self.assertRaises(n_exc.VlanIdInUse,
                          n1kv_db_v2.reserve_specific_vlan,
                          self.session,
                          PHYS_NET,
                          vlan_id)
        n1kv_db_v2.release_vlan(self.session, PHYS_NET, vlan_id)
        self.assertFalse(n1kv_db_v2.get_vlan_allocation(self.session,
                                                        PHYS_NET,
                                                        vlan_id).allocated)

    def test_specific_vlan_outside_pool(self):
        """Out-of-range IDs can be neither looked up nor reserved."""
        vlan_id = VLAN_MAX + 5
        self.assertRaises(c_exc.VlanIDNotFound,
                          n1kv_db_v2.get_vlan_allocation,
                          self.session,
                          PHYS_NET,
                          vlan_id)
        self.assertRaises(c_exc.VlanIDOutsidePool,
                          n1kv_db_v2.reserve_specific_vlan,
                          self.session,
                          PHYS_NET,
                          vlan_id)
class VxlanAllocationsTest(testlib_api.SqlTestCase,
                           n1kv_db_v2.NetworkProfile_db_mixin):
    """Tests for VXLAN allocation bookkeeping synced from an overlay profile."""
    def setUp(self):
        super(VxlanAllocationsTest, self).setUp()
        self.session = db.get_session()
        self.net_p = _create_test_network_profile_if_not_there(
            self.session, TEST_NETWORK_PROFILE_VXLAN)
        n1kv_db_v2.sync_vxlan_allocations(self.session, self.net_p)
    def test_sync_vxlan_allocations_outside_segment_range(self):
        """IDs just outside the profile range must not have allocation rows."""
        self.assertRaises(c_exc.VxlanIDNotFound,
                          n1kv_db_v2.get_vxlan_allocation,
                          self.session,
                          VXLAN_MIN - 1)
        self.assertRaises(c_exc.VxlanIDNotFound,
                          n1kv_db_v2.get_vxlan_allocation,
                          self.session,
                          VXLAN_MAX + 1)
    def test_sync_vxlan_allocations_unallocated_vxlans(self):
        """Freshly synced IDs at both range boundaries start unallocated."""
        self.assertFalse(n1kv_db_v2.get_vxlan_allocation(self.session,
                                                         VXLAN_MIN).allocated)
        self.assertFalse(n1kv_db_v2.get_vxlan_allocation(self.session,
                                                         VXLAN_MIN + 1).
                         allocated)
        self.assertFalse(n1kv_db_v2.get_vxlan_allocation(self.session,
                                                         VXLAN_MAX - 1).
                         allocated)
        self.assertFalse(n1kv_db_v2.get_vxlan_allocation(self.session,
                                                         VXLAN_MAX).allocated)
    def test_vxlan_pool(self):
        """Reserving drains the pool; releasing makes IDs reusable again."""
        vxlan_ids = set()
        for x in moves.xrange(VXLAN_MIN, VXLAN_MAX + 1):
            # reserve_vxlan returns a tuple; index 2 is the segment id.
            vxlan = n1kv_db_v2.reserve_vxlan(self.session, self.net_p)
            vxlan_id = vxlan[2]
            self.assertThat(vxlan_id, matchers.GreaterThan(VXLAN_MIN - 1))
            self.assertThat(vxlan_id, matchers.LessThan(VXLAN_MAX + 1))
            vxlan_ids.add(vxlan_id)
        # Pool exhausted: the next reservation must fail.
        self.assertRaises(n_exc.NoNetworkAvailable,
                          n1kv_db_v2.reserve_vxlan,
                          self.session,
                          self.net_p)
        n1kv_db_v2.release_vxlan(self.session, vxlan_ids.pop())
        vxlan = n1kv_db_v2.reserve_vxlan(self.session, self.net_p)
        vxlan_id = vxlan[2]
        self.assertThat(vxlan_id, matchers.GreaterThan(VXLAN_MIN - 1))
        self.assertThat(vxlan_id, matchers.LessThan(VXLAN_MAX + 1))
        vxlan_ids.add(vxlan_id)
        for vxlan_id in vxlan_ids:
            n1kv_db_v2.release_vxlan(self.session, vxlan_id)
        n1kv_db_v2.delete_network_profile(self.session, self.net_p.id)
    def test_specific_vxlan_inside_pool(self):
        """An in-range ID can be explicitly reserved exactly once."""
        vxlan_id = VXLAN_MIN + 5
        self.assertFalse(n1kv_db_v2.get_vxlan_allocation(self.session,
                                                         vxlan_id).allocated)
        n1kv_db_v2.reserve_specific_vxlan(self.session, vxlan_id)
        self.assertTrue(n1kv_db_v2.get_vxlan_allocation(self.session,
                                                        vxlan_id).allocated)
        self.assertRaises(c_exc.VxlanIDInUse,
                          n1kv_db_v2.reserve_specific_vxlan,
                          self.session,
                          vxlan_id)
        n1kv_db_v2.release_vxlan(self.session, vxlan_id)
        self.assertFalse(n1kv_db_v2.get_vxlan_allocation(self.session,
                                                         vxlan_id).allocated)
    def test_specific_vxlan_outside_pool(self):
        """Out-of-range IDs can be neither looked up nor reserved."""
        vxlan_id = VXLAN_MAX + 5
        self.assertRaises(c_exc.VxlanIDNotFound,
                          n1kv_db_v2.get_vxlan_allocation,
                          self.session,
                          vxlan_id)
        self.assertRaises(c_exc.VxlanIDOutsidePool,
                          n1kv_db_v2.reserve_specific_vxlan,
                          self.session,
                          vxlan_id)
class NetworkBindingsTest(test_plugin.NeutronDbPluginV2TestCase):
    """Tests for network-to-segment binding persistence helpers.

    Fix over the original: both trunk-binding tests verified member removal
    with get_multi_segment_members (trivially empty for a trunk network);
    they now query get_trunk_members, matching the earlier assertions in the
    same tests.
    """

    def setUp(self):
        super(NetworkBindingsTest, self).setUp()
        self.session = db.get_session()

    def test_add_network_binding(self):
        """A VLAN binding can be added and read back intact."""
        with self.network() as network:
            TEST_NETWORK_ID = network['network']['id']
            self.assertRaises(c_exc.NetworkBindingNotFound,
                              n1kv_db_v2.get_network_binding,
                              self.session,
                              TEST_NETWORK_ID)
            p = _create_test_network_profile_if_not_there(self.session)
            n1kv_db_v2.add_network_binding(
                self.session, TEST_NETWORK_ID, c_const.NETWORK_TYPE_VLAN,
                PHYS_NET, 1234, '0.0.0.0', p.id, None)
            binding = n1kv_db_v2.get_network_binding(
                self.session, TEST_NETWORK_ID)
            self.assertIsNotNone(binding)
            self.assertEqual(binding.network_id, TEST_NETWORK_ID)
            self.assertEqual(binding.network_type, c_const.NETWORK_TYPE_VLAN)
            self.assertEqual(binding.physical_network, PHYS_NET)
            self.assertEqual(binding.segmentation_id, 1234)

    def test_create_multi_segment_network(self):
        """A multi-segment binding has no physnet and segmentation id 0."""
        with self.network() as network:
            TEST_NETWORK_ID = network['network']['id']
            self.assertRaises(c_exc.NetworkBindingNotFound,
                              n1kv_db_v2.get_network_binding,
                              self.session,
                              TEST_NETWORK_ID)
            p = _create_test_network_profile_if_not_there(
                self.session,
                TEST_NETWORK_PROFILE_MULTI_SEGMENT)
            n1kv_db_v2.add_network_binding(
                self.session, TEST_NETWORK_ID,
                c_const.NETWORK_TYPE_MULTI_SEGMENT,
                None, 0, '0.0.0.0', p.id, None)
            binding = n1kv_db_v2.get_network_binding(
                self.session, TEST_NETWORK_ID)
            self.assertIsNotNone(binding)
            self.assertEqual(binding.network_id, TEST_NETWORK_ID)
            self.assertEqual(binding.network_type,
                             c_const.NETWORK_TYPE_MULTI_SEGMENT)
            self.assertIsNone(binding.physical_network)
            self.assertEqual(binding.segmentation_id, 0)

    def test_add_multi_segment_binding(self):
        """Member segment pairs can be added, listed and removed."""
        with self.network() as network:
            TEST_NETWORK_ID = network['network']['id']
            self.assertRaises(c_exc.NetworkBindingNotFound,
                              n1kv_db_v2.get_network_binding,
                              self.session,
                              TEST_NETWORK_ID)
            p = _create_test_network_profile_if_not_there(
                self.session,
                TEST_NETWORK_PROFILE_MULTI_SEGMENT)
            n1kv_db_v2.add_network_binding(
                self.session, TEST_NETWORK_ID,
                c_const.NETWORK_TYPE_MULTI_SEGMENT,
                None, 0, '0.0.0.0', p.id,
                [(TEST_NETWORK_ID2, TEST_NETWORK_ID3)])
            binding = n1kv_db_v2.get_network_binding(
                self.session, TEST_NETWORK_ID)
            self.assertIsNotNone(binding)
            self.assertEqual(binding.network_id, TEST_NETWORK_ID)
            self.assertEqual(binding.network_type,
                             c_const.NETWORK_TYPE_MULTI_SEGMENT)
            self.assertIsNone(binding.physical_network)
            self.assertEqual(binding.segmentation_id, 0)
            ms_binding = (n1kv_db_v2.get_multi_segment_network_binding(
                self.session, TEST_NETWORK_ID,
                (TEST_NETWORK_ID2, TEST_NETWORK_ID3)))
            self.assertIsNotNone(ms_binding)
            self.assertEqual(ms_binding.multi_segment_id, TEST_NETWORK_ID)
            self.assertEqual(ms_binding.segment1_id, TEST_NETWORK_ID2)
            self.assertEqual(ms_binding.segment2_id, TEST_NETWORK_ID3)
            ms_members = (n1kv_db_v2.get_multi_segment_members(
                self.session, TEST_NETWORK_ID))
            self.assertEqual(ms_members,
                             [(TEST_NETWORK_ID2, TEST_NETWORK_ID3)])
            self.assertTrue(n1kv_db_v2.is_multi_segment_member(
                self.session, TEST_NETWORK_ID2))
            self.assertTrue(n1kv_db_v2.is_multi_segment_member(
                self.session, TEST_NETWORK_ID3))
            n1kv_db_v2.del_multi_segment_binding(
                self.session, TEST_NETWORK_ID,
                [(TEST_NETWORK_ID2, TEST_NETWORK_ID3)])
            ms_members = (n1kv_db_v2.get_multi_segment_members(
                self.session, TEST_NETWORK_ID))
            self.assertEqual(ms_members, [])

    def test_create_vlan_trunk_network(self):
        """A VLAN trunk binding has no physnet and segmentation id 0."""
        with self.network() as network:
            TEST_NETWORK_ID = network['network']['id']
            self.assertRaises(c_exc.NetworkBindingNotFound,
                              n1kv_db_v2.get_network_binding,
                              self.session,
                              TEST_NETWORK_ID)
            p = _create_test_network_profile_if_not_there(
                self.session,
                TEST_NETWORK_PROFILE_VLAN_TRUNK)
            n1kv_db_v2.add_network_binding(
                self.session, TEST_NETWORK_ID, c_const.NETWORK_TYPE_TRUNK,
                None, 0, '0.0.0.0', p.id, None)
            binding = n1kv_db_v2.get_network_binding(
                self.session, TEST_NETWORK_ID)
            self.assertIsNotNone(binding)
            self.assertEqual(binding.network_id, TEST_NETWORK_ID)
            self.assertEqual(binding.network_type, c_const.NETWORK_TYPE_TRUNK)
            self.assertIsNone(binding.physical_network)
            self.assertEqual(binding.segmentation_id, 0)

    def test_create_vxlan_trunk_network(self):
        """A VXLAN trunk binding has no physnet and segmentation id 0."""
        with self.network() as network:
            TEST_NETWORK_ID = network['network']['id']
            self.assertRaises(c_exc.NetworkBindingNotFound,
                              n1kv_db_v2.get_network_binding,
                              self.session,
                              TEST_NETWORK_ID)
            p = _create_test_network_profile_if_not_there(
                self.session,
                TEST_NETWORK_PROFILE_VXLAN_TRUNK)
            n1kv_db_v2.add_network_binding(
                self.session, TEST_NETWORK_ID, c_const.NETWORK_TYPE_TRUNK,
                None, 0, '0.0.0.0', p.id, None)
            binding = n1kv_db_v2.get_network_binding(
                self.session, TEST_NETWORK_ID)
            self.assertIsNotNone(binding)
            self.assertEqual(binding.network_id, TEST_NETWORK_ID)
            self.assertEqual(binding.network_type, c_const.NETWORK_TYPE_TRUNK)
            self.assertIsNone(binding.physical_network)
            self.assertEqual(binding.segmentation_id, 0)

    def test_add_vlan_trunk_binding(self):
        """A VLAN member can be trunked (tag 0), listed and removed."""
        with self.network() as network1:
            with self.network() as network2:
                TEST_NETWORK_ID = network1['network']['id']
                self.assertRaises(c_exc.NetworkBindingNotFound,
                                  n1kv_db_v2.get_network_binding,
                                  self.session,
                                  TEST_NETWORK_ID)
                TEST_NETWORK_ID2 = network2['network']['id']
                self.assertRaises(c_exc.NetworkBindingNotFound,
                                  n1kv_db_v2.get_network_binding,
                                  self.session,
                                  TEST_NETWORK_ID2)
                p_v = _create_test_network_profile_if_not_there(self.session)
                n1kv_db_v2.add_network_binding(
                    self.session, TEST_NETWORK_ID2, c_const.NETWORK_TYPE_VLAN,
                    PHYS_NET, 1234, '0.0.0.0', p_v.id, None)
                p = _create_test_network_profile_if_not_there(
                    self.session,
                    TEST_NETWORK_PROFILE_VLAN_TRUNK)
                n1kv_db_v2.add_network_binding(
                    self.session, TEST_NETWORK_ID, c_const.NETWORK_TYPE_TRUNK,
                    None, 0, '0.0.0.0', p.id, [(TEST_NETWORK_ID2, 0)])
                binding = n1kv_db_v2.get_network_binding(
                    self.session, TEST_NETWORK_ID)
                self.assertIsNotNone(binding)
                self.assertEqual(binding.network_id, TEST_NETWORK_ID)
                self.assertEqual(binding.network_type,
                                 c_const.NETWORK_TYPE_TRUNK)
                self.assertEqual(binding.physical_network, PHYS_NET)
                self.assertEqual(binding.segmentation_id, 0)
                t_binding = (n1kv_db_v2.get_trunk_network_binding(
                    self.session, TEST_NETWORK_ID,
                    (TEST_NETWORK_ID2, 0)))
                self.assertIsNotNone(t_binding)
                self.assertEqual(t_binding.trunk_segment_id, TEST_NETWORK_ID)
                self.assertEqual(t_binding.segment_id, TEST_NETWORK_ID2)
                self.assertEqual(t_binding.dot1qtag, '0')
                t_members = (n1kv_db_v2.get_trunk_members(
                    self.session, TEST_NETWORK_ID))
                self.assertEqual(t_members,
                                 [(TEST_NETWORK_ID2, '0')])
                self.assertTrue(n1kv_db_v2.is_trunk_member(
                    self.session, TEST_NETWORK_ID2))
                n1kv_db_v2.del_trunk_segment_binding(
                    self.session, TEST_NETWORK_ID,
                    [(TEST_NETWORK_ID2, '0')])
                # Fixed: query the trunk member table (the original queried
                # get_multi_segment_members, which is empty regardless).
                t_members = (n1kv_db_v2.get_trunk_members(
                    self.session, TEST_NETWORK_ID))
                self.assertEqual(t_members, [])

    def test_add_vxlan_trunk_binding(self):
        """A VXLAN member can be trunked (tag 5), listed and removed."""
        with self.network() as network1:
            with self.network() as network2:
                TEST_NETWORK_ID = network1['network']['id']
                self.assertRaises(c_exc.NetworkBindingNotFound,
                                  n1kv_db_v2.get_network_binding,
                                  self.session,
                                  TEST_NETWORK_ID)
                TEST_NETWORK_ID2 = network2['network']['id']
                self.assertRaises(c_exc.NetworkBindingNotFound,
                                  n1kv_db_v2.get_network_binding,
                                  self.session,
                                  TEST_NETWORK_ID2)
                p_v = _create_test_network_profile_if_not_there(
                    self.session, TEST_NETWORK_PROFILE_VXLAN_TRUNK)
                n1kv_db_v2.add_network_binding(
                    self.session, TEST_NETWORK_ID2,
                    c_const.NETWORK_TYPE_OVERLAY,
                    None, 5100, '224.10.10.10', p_v.id, None)
                p = _create_test_network_profile_if_not_there(
                    self.session,
                    TEST_NETWORK_PROFILE_VXLAN_TRUNK)
                n1kv_db_v2.add_network_binding(
                    self.session, TEST_NETWORK_ID, c_const.NETWORK_TYPE_TRUNK,
                    None, 0, '0.0.0.0', p.id,
                    [(TEST_NETWORK_ID2, 5)])
                binding = n1kv_db_v2.get_network_binding(
                    self.session, TEST_NETWORK_ID)
                self.assertIsNotNone(binding)
                self.assertEqual(binding.network_id, TEST_NETWORK_ID)
                self.assertEqual(binding.network_type,
                                 c_const.NETWORK_TYPE_TRUNK)
                self.assertIsNone(binding.physical_network)
                self.assertEqual(binding.segmentation_id, 0)
                t_binding = (n1kv_db_v2.get_trunk_network_binding(
                    self.session, TEST_NETWORK_ID,
                    (TEST_NETWORK_ID2, '5')))
                self.assertIsNotNone(t_binding)
                self.assertEqual(t_binding.trunk_segment_id, TEST_NETWORK_ID)
                self.assertEqual(t_binding.segment_id, TEST_NETWORK_ID2)
                self.assertEqual(t_binding.dot1qtag, '5')
                t_members = (n1kv_db_v2.get_trunk_members(
                    self.session, TEST_NETWORK_ID))
                self.assertEqual(t_members,
                                 [(TEST_NETWORK_ID2, '5')])
                self.assertTrue(n1kv_db_v2.is_trunk_member(
                    self.session, TEST_NETWORK_ID2))
                n1kv_db_v2.del_trunk_segment_binding(
                    self.session, TEST_NETWORK_ID,
                    [(TEST_NETWORK_ID2, '5')])
                # Fixed: query the trunk member table (the original queried
                # get_multi_segment_members, which is empty regardless).
                t_members = (n1kv_db_v2.get_trunk_members(
                    self.session, TEST_NETWORK_ID))
                self.assertEqual(t_members, [])
class NetworkProfileTests(testlib_api.SqlTestCase,
                          n1kv_db_v2.NetworkProfile_db_mixin):
    """Tests for network profile CRUD helpers.

    Fix over the original: test_create_network_profile_overlap mutated the
    module-level TEST_NETWORK_PROFILE_2 dict in place (leaking a changed
    name/segment_range into any later test); it now works on a local copy.
    """

    def setUp(self):
        super(NetworkProfileTests, self).setUp()
        self.session = db.get_session()

    def test_create_network_profile(self):
        """A created VLAN profile is persisted with matching fields."""
        _db_profile = n1kv_db_v2.create_network_profile(self.session,
                                                        TEST_NETWORK_PROFILE)
        self.assertIsNotNone(_db_profile)
        db_profile = (self.session.query(n1kv_models_v2.NetworkProfile).
                      filter_by(name=TEST_NETWORK_PROFILE['name']).one())
        self.assertIsNotNone(db_profile)
        self.assertEqual(_db_profile.id, db_profile.id)
        self.assertEqual(_db_profile.name, db_profile.name)
        self.assertEqual(_db_profile.segment_type, db_profile.segment_type)
        self.assertEqual(_db_profile.segment_range, db_profile.segment_range)
        self.assertEqual(_db_profile.multicast_ip_index,
                         db_profile.multicast_ip_index)
        self.assertEqual(_db_profile.multicast_ip_range,
                         db_profile.multicast_ip_range)
        n1kv_db_v2.delete_network_profile(self.session, _db_profile.id)

    def test_create_multi_segment_network_profile(self):
        """A created multi-segment profile is persisted with matching fields."""
        _db_profile = (n1kv_db_v2.create_network_profile(
            self.session, TEST_NETWORK_PROFILE_MULTI_SEGMENT))
        self.assertIsNotNone(_db_profile)
        db_profile = (
            self.session.query(
                n1kv_models_v2.NetworkProfile).filter_by(
                    name=TEST_NETWORK_PROFILE_MULTI_SEGMENT['name'])
            .one())
        self.assertIsNotNone(db_profile)
        self.assertEqual(_db_profile.id, db_profile.id)
        self.assertEqual(_db_profile.name, db_profile.name)
        self.assertEqual(_db_profile.segment_type, db_profile.segment_type)
        self.assertEqual(_db_profile.segment_range, db_profile.segment_range)
        self.assertEqual(_db_profile.multicast_ip_index,
                         db_profile.multicast_ip_index)
        self.assertEqual(_db_profile.multicast_ip_range,
                         db_profile.multicast_ip_range)
        n1kv_db_v2.delete_network_profile(self.session, _db_profile.id)

    def test_create_vlan_trunk_network_profile(self):
        """A created VLAN trunk profile is persisted, including sub_type."""
        _db_profile = (n1kv_db_v2.create_network_profile(
            self.session, TEST_NETWORK_PROFILE_VLAN_TRUNK))
        self.assertIsNotNone(_db_profile)
        db_profile = (self.session.query(n1kv_models_v2.NetworkProfile).
                      filter_by(name=TEST_NETWORK_PROFILE_VLAN_TRUNK['name']).
                      one())
        self.assertIsNotNone(db_profile)
        self.assertEqual(_db_profile.id, db_profile.id)
        self.assertEqual(_db_profile.name, db_profile.name)
        self.assertEqual(_db_profile.segment_type, db_profile.segment_type)
        self.assertEqual(_db_profile.segment_range, db_profile.segment_range)
        self.assertEqual(_db_profile.multicast_ip_index,
                         db_profile.multicast_ip_index)
        self.assertEqual(_db_profile.multicast_ip_range,
                         db_profile.multicast_ip_range)
        self.assertEqual(_db_profile.sub_type, db_profile.sub_type)
        n1kv_db_v2.delete_network_profile(self.session, _db_profile.id)

    def test_create_vxlan_trunk_network_profile(self):
        """A created VXLAN trunk profile is persisted, including sub_type."""
        _db_profile = (n1kv_db_v2.create_network_profile(
            self.session, TEST_NETWORK_PROFILE_VXLAN_TRUNK))
        self.assertIsNotNone(_db_profile)
        db_profile = (self.session.query(n1kv_models_v2.NetworkProfile).
                      filter_by(name=TEST_NETWORK_PROFILE_VXLAN_TRUNK['name']).
                      one())
        self.assertIsNotNone(db_profile)
        self.assertEqual(_db_profile.id, db_profile.id)
        self.assertEqual(_db_profile.name, db_profile.name)
        self.assertEqual(_db_profile.segment_type, db_profile.segment_type)
        self.assertEqual(_db_profile.segment_range, db_profile.segment_range)
        self.assertEqual(_db_profile.multicast_ip_index,
                         db_profile.multicast_ip_index)
        self.assertEqual(_db_profile.multicast_ip_range,
                         db_profile.multicast_ip_range)
        self.assertEqual(_db_profile.sub_type, db_profile.sub_type)
        n1kv_db_v2.delete_network_profile(self.session, _db_profile.id)

    def test_create_network_profile_overlap(self):
        """Creating a profile overlapping an existing segment range fails."""
        _db_profile = n1kv_db_v2.create_network_profile(self.session,
                                                        TEST_NETWORK_PROFILE_2)
        ctx = context.get_admin_context()
        # Work on a copy so the module-level constant is never mutated.
        overlap_profile = dict(TEST_NETWORK_PROFILE_2)
        for name, segment_range in (
                ('net-profile-min-overlap', SEGMENT_RANGE_MIN_OVERLAP),
                ('net-profile-max-overlap', SEGMENT_RANGE_MAX_OVERLAP),
                ('net-profile-overlap', SEGMENT_RANGE_OVERLAP)):
            overlap_profile['name'] = name
            overlap_profile['segment_range'] = segment_range
            test_net_profile = {'network_profile': overlap_profile}
            self.assertRaises(n_exc.InvalidInput,
                              self.create_network_profile,
                              ctx,
                              test_net_profile)
        n1kv_db_v2.delete_network_profile(self.session, _db_profile.id)

    def test_delete_network_profile(self):
        """A deleted profile can no longer be found by name."""
        try:
            profile = (self.session.query(n1kv_models_v2.NetworkProfile).
                       filter_by(name=TEST_NETWORK_PROFILE['name']).one())
        except s_exc.NoResultFound:
            profile = n1kv_db_v2.create_network_profile(self.session,
                                                        TEST_NETWORK_PROFILE)
        n1kv_db_v2.delete_network_profile(self.session, profile.id)
        try:
            self.session.query(n1kv_models_v2.NetworkProfile).filter_by(
                name=TEST_NETWORK_PROFILE['name']).one()
        except s_exc.NoResultFound:
            pass
        else:
            self.fail("Network Profile (%s) was not deleted" %
                      TEST_NETWORK_PROFILE['name'])

    def test_update_network_profile(self):
        """Updating a profile changes the stored name."""
        TEST_PROFILE_1 = {'name': 'test_profile_1'}
        profile = _create_test_network_profile_if_not_there(self.session)
        updated_profile = n1kv_db_v2.update_network_profile(self.session,
                                                            profile.id,
                                                            TEST_PROFILE_1)
        self.assertEqual(updated_profile.name, TEST_PROFILE_1['name'])
        n1kv_db_v2.delete_network_profile(self.session, profile.id)

    def test_get_network_profile(self):
        """A profile fetched by id matches the created one."""
        profile = n1kv_db_v2.create_network_profile(self.session,
                                                    TEST_NETWORK_PROFILE)
        got_profile = n1kv_db_v2.get_network_profile(self.session, profile.id)
        self.assertEqual(profile.id, got_profile.id)
        self.assertEqual(profile.name, got_profile.name)
        n1kv_db_v2.delete_network_profile(self.session, profile.id)

    def test_get_network_profiles(self):
        """Listing returns every profile that was created."""
        test_profiles = [{'name': 'test_profile1',
                          'segment_type': c_const.NETWORK_TYPE_VLAN,
                          'physical_network': 'phys1',
                          'segment_range': '200-210'},
                         {'name': 'test_profile2',
                          'segment_type': c_const.NETWORK_TYPE_VLAN,
                          'physical_network': 'phys1',
                          'segment_range': '211-220'},
                         {'name': 'test_profile3',
                          'segment_type': c_const.NETWORK_TYPE_VLAN,
                          'physical_network': 'phys1',
                          'segment_range': '221-230'},
                         {'name': 'test_profile4',
                          'segment_type': c_const.NETWORK_TYPE_VLAN,
                          'physical_network': 'phys1',
                          'segment_range': '231-240'},
                         {'name': 'test_profile5',
                          'segment_type': c_const.NETWORK_TYPE_VLAN,
                          'physical_network': 'phys1',
                          'segment_range': '241-250'},
                         {'name': 'test_profile6',
                          'segment_type': c_const.NETWORK_TYPE_VLAN,
                          'physical_network': 'phys1',
                          'segment_range': '251-260'},
                         {'name': 'test_profile7',
                          'segment_type': c_const.NETWORK_TYPE_VLAN,
                          'physical_network': 'phys1',
                          'segment_range': '261-270'}]
        # Plain loop: the original abused a list comprehension for side
        # effects.
        for p in test_profiles:
            n1kv_db_v2.create_network_profile(self.session, p)
        # TODO(abhraut): Fix this test to work with real tenant_td
        profiles = n1kv_db_v2._get_network_profiles(db_session=self.session)
        self.assertEqual(len(test_profiles), len(list(profiles)))
class PolicyProfileTests(testlib_api.SqlTestCase):
    """Tests for policy profile CRUD helpers."""

    def setUp(self):
        super(PolicyProfileTests, self).setUp()
        self.session = db.get_session()

    def test_create_policy_profile(self):
        """A created policy profile is persisted with matching id and name."""
        _db_profile = n1kv_db_v2.create_policy_profile(TEST_POLICY_PROFILE)
        self.assertIsNotNone(_db_profile)
        # Original wrote "...one)()" (calling the bound .one attribute) and
        # assertTrue(a == b); use the plain call and assertEqual, which
        # reports both values on failure.
        db_profile = (self.session.query(n1kv_models_v2.PolicyProfile).
                      filter_by(name=TEST_POLICY_PROFILE['name']).one())
        self.assertIsNotNone(db_profile)
        self.assertEqual(_db_profile.id, db_profile.id)
        self.assertEqual(_db_profile.name, db_profile.name)

    def test_delete_policy_profile(self):
        """A deleted policy profile can no longer be found by name."""
        profile = _create_test_policy_profile_if_not_there(self.session)
        n1kv_db_v2.delete_policy_profile(profile.id)
        try:
            self.session.query(n1kv_models_v2.PolicyProfile).filter_by(
                name=TEST_POLICY_PROFILE['name']).one()
        except s_exc.NoResultFound:
            pass
        else:
            self.fail("Policy Profile (%s) was not deleted" %
                      TEST_POLICY_PROFILE['name'])

    def test_update_policy_profile(self):
        """Updating a policy profile changes the stored name."""
        TEST_PROFILE_1 = {'name': 'test_profile_1'}
        profile = _create_test_policy_profile_if_not_there(self.session)
        updated_profile = n1kv_db_v2.update_policy_profile(self.session,
                                                           profile.id,
                                                           TEST_PROFILE_1)
        self.assertEqual(updated_profile.name, TEST_PROFILE_1['name'])

    def test_get_policy_profile(self):
        """A policy profile fetched by id matches the created one."""
        profile = _create_test_policy_profile_if_not_there(self.session)
        got_profile = n1kv_db_v2.get_policy_profile(self.session, profile.id)
        self.assertEqual(profile.id, got_profile.id)
        self.assertEqual(profile.name, got_profile.name)
class ProfileBindingTests(testlib_api.SqlTestCase,
n1kv_db_v2.NetworkProfile_db_mixin,
common_db_mixin.CommonDbMixin):
def setUp(self):
super(ProfileBindingTests, self).setUp()
self.session = db.get_session()
def _create_test_binding_if_not_there(self, tenant_id, profile_id,
profile_type):
try:
_binding = (self.session.query(n1kv_models_v2.ProfileBinding).
filter_by(profile_type=profile_type,
tenant_id=tenant_id,
profile_id=profile_id).one())
except s_exc.NoResultFound:
_binding = n1kv_db_v2.create_profile_binding(self.session,
tenant_id,
profile_id,
profile_type)
return _binding
def test_create_profile_binding(self):
test_tenant_id = "d434dd90-76ec-11e2-bcfd-0800200c9a66"
test_profile_id = "dd7b9741-76ec-11e2-bcfd-0800200c9a66"
test_profile_type = "network"
n1kv_db_v2.create_profile_binding(self.session,
test_tenant_id,
test_profile_id,
test_profile_type)
try:
self.session.query(n1kv_models_v2.ProfileBinding).filter_by(
profile_type=test_profile_type,
tenant_id=test_tenant_id,
profile_id=test_profile_id).one()
except s_exc.MultipleResultsFound:
self.fail("Bindings must be unique")
except s_exc.NoResultFound:
self.fail("Could not create Profile Binding")
def test_update_profile_binding(self):
test_tenant_id = "d434dd90-76ec-11e2-bcfd-0800200c9a66"
test_profile_id = "dd7b9741-76ec-11e2-bcfd-0800200c9a66"
test_profile_type = "network"
n1kv_db_v2.create_profile_binding(self.session,
test_tenant_id,
test_profile_id,
test_profile_type)
new_tenants = ['d434dd90-76ec-11e2-bcfd-0800200c9a67',
'd434dd90-76ec-11e2-bcfd-0800200c9a68',
'd434dd90-76ec-11e2-bcfd-0800200c9a69']
n1kv_db_v2.update_profile_binding(self.session,
test_profile_id,
new_tenants,
test_profile_type)
result = self.session.query(n1kv_models_v2.ProfileBinding).filter_by(
profile_type=test_profile_type,
profile_id=test_profile_id).all()
self.assertEqual(3, len(result))
def test_get_profile_binding(self):
test_tenant_id = "d434dd90-76ec-11e2-bcfd-0800200c9a66"
test_profile_id = "dd7b9741-76ec-11e2-bcfd-0800200c9a66"
test_profile_type = "network"
self._create_test_binding_if_not_there(test_tenant_id,
test_profile_id,
test_profile_type)
binding = n1kv_db_v2.get_profile_binding(self.session,
test_tenant_id,
test_profile_id)
self.assertEqual(binding.tenant_id, test_tenant_id)
self.assertEqual(binding.profile_id, test_profile_id)
self.assertEqual(binding.profile_type, test_profile_type)
def test_get_profile_binding_not_found(self):
self.assertRaises(
c_exc.ProfileTenantBindingNotFound,
n1kv_db_v2.get_profile_binding, self.session, "123", "456")
def test_delete_profile_binding(self):
test_tenant_id = "d434dd90-76ec-11e2-bcfd-0800200c9a66"
test_profile_id = "dd7b9741-76ec-11e2-bcfd-0800200c9a66"
test_profile_type = "network"
self._create_test_binding_if_not_there(test_tenant_id,
test_profile_id,
test_profile_type)
n1kv_db_v2.delete_profile_binding(self.session,
test_tenant_id,
test_profile_id)
q = (self.session.query(n1kv_models_v2.ProfileBinding).filter_by(
profile_type=test_profile_type,
tenant_id=test_tenant_id,
profile_id=test_profile_id))
self.assertFalse(q.count())
def test_default_tenant_replace(self):
    """Creating a network profile re-homes the TENANT_ID_NOT_SET
    placeholder binding onto the requesting tenant."""
    ctx = context.get_admin_context()
    ctx.tenant_id = "d434dd90-76ec-11e2-bcfd-0800200c9a66"
    profile_id = "AAAAAAAA-76ec-11e2-bcfd-0800200c9a66"
    profile_type = "policy"
    n1kv_db_v2.create_profile_binding(
        self.session, c_const.TENANT_ID_NOT_SET, profile_id, profile_type)
    self.create_network_profile(
        ctx, {"network_profile": TEST_NETWORK_PROFILE})
    binding = n1kv_db_v2.get_profile_binding(
        self.session, ctx.tenant_id, profile_id)
    # The placeholder binding must be gone ...
    self.assertRaises(
        c_exc.ProfileTenantBindingNotFound,
        n1kv_db_v2.get_profile_binding,
        self.session,
        c_const.TENANT_ID_NOT_SET,
        profile_id)
    # ... and the surviving binding must belong to a real tenant.
    self.assertNotEqual(binding.tenant_id, c_const.TENANT_ID_NOT_SET)
| apache-2.0 |
tux-00/ansible | lib/ansible/modules/network/panos/panos_cert_gen_ssh.py | 49 | 6394 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Ansible module to manage PaloAltoNetworks Firewall
# (c) 2016, techbizdev <techbizdev@paloaltonetworks.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: panos_cert_gen_ssh
short_description: generates a self-signed certificate using SSH protocol with SSH key
description:
    - This module generates a self-signed certificate that can be used by GlobalProtect client, SSL connector, or
    - otherwise. Root certificate must be preset on the system first. This module depends on paramiko for ssh.
author: "Luigi Mori (@jtschichold), Ivan Bojer (@ivanbojer)"
version_added: "2.3"
requirements:
    - paramiko
notes:
    - Check mode is not supported.
options:
    ip_address:
        description:
            - IP address (or hostname) of PAN-OS device being configured.
        required: true
        default: null
    key_filename:
        description:
            - Location of the filename that is used for the auth. Either I(key_filename) or I(password) is required.
        required: true
        default: null
    password:
        description:
            - Password credentials to use for auth. Either I(key_filename) or I(password) is required.
        required: true
        default: null
    cert_friendly_name:
        description:
            - Human friendly certificate name (not CN but just a friendly name).
        required: true
        default: null
    cert_cn:
        description:
            - Certificate CN (common name) embedded in the certificate signature.
        required: true
        default: null
    signed_by:
        description:
            - Undersigning authority (CA) that MUST already be present on the device.
        required: true
        default: null
    rsa_nbits:
        description:
            - Number of bits used by the RSA algorithm for the certificate generation.
        required: false
        default: "2048"
'''

EXAMPLES = '''
# Generates a new self-signed certificate using ssh
- name: generate self signed certificate
  panos_cert_gen_ssh:
    ip_address: "192.168.1.1"
    password: "paloalto"
    cert_cn: "1.1.1.1"
    cert_friendly_name: "test123"
    signed_by: "root-ca"
'''

RETURN = '''
# Default return values
'''

# Module metadata consumed by ansible-doc.
ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import get_exception
import time
# paramiko is an optional third-party dependency; main() fails cleanly with
# an explanatory message when it is absent.
try:
    import paramiko
    HAS_LIB = True
except ImportError:
    HAS_LIB = False
# Maximum number of bytes read from the SSH channel per recv() call.
_PROMPTBUFF = 4096

def wait_with_timeout(module, shell, prompt, timeout=60):
    """Read from *shell* until the accumulated output ends with *prompt*.

    :param module: AnsibleModule-like object; its fail_json() (which exits)
        is invoked when *timeout* seconds elapse without seeing the prompt.
    :param shell: paramiko-style channel exposing recv_ready()/recv().
    :param prompt: single character expected as the last non-blank character
        of the device output (e.g. ">").
    :param timeout: seconds to wait before giving up.
    :returns: everything read from the channel, prompt included.
    """
    deadline = time.time() + timeout
    result = ""
    while True:
        if shell.recv_ready():
            result += shell.recv(_PROMPTBUFF)
            endresult = result.strip()
            if len(endresult) != 0 and endresult[-1] == prompt:
                break
        else:
            # Previously this loop busy-waited at 100% CPU while the device
            # was thinking; sleep briefly when no data is pending.
            time.sleep(0.05)
        if time.time() > deadline:
            module.fail_json(msg="Timeout waiting for prompt")
    return result
def generate_cert(module, ip_address, key_filename, password,
                  cert_cn, cert_friendly_name, signed_by, rsa_nbits):
    """Generate a self-signed certificate on a PAN-OS device over SSH.

    Connects as user "admin" with either a key file or a password, drives the
    interactive CLI, and returns the captured shell output.  On failure it
    calls module.fail_json(), which exits the process.
    """
    stdout = ""
    client = paramiko.SSHClient()
    # add policy to accept all host keys, I haven't found
    # a way to retreive the instance SSH key fingerprint from AWS
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    if not key_filename:
        client.connect(ip_address, username="admin", password=password)
    else:
        client.connect(ip_address, username="admin", key_filename=key_filename)
    shell = client.invoke_shell()
    # wait for the shell to start (PAN-OS CLI prompt ends with '>')
    buff = wait_with_timeout(module, shell, ">")
    stdout += buff
    # generate self-signed certificate
    # NOTE(review): cert_cn may arrive as a list from Ansible; only the
    # first element is used -- confirm callers never pass several CNs.
    if isinstance(cert_cn, list):
        cert_cn = cert_cn[0]
    cmd = 'request certificate generate signed-by {0} certificate-name {1} name {2} algorithm RSA rsa-nbits {3}\n'.format(
        signed_by, cert_friendly_name, cert_cn, rsa_nbits)
    shell.send(cmd)
    # wait for the shell to complete
    buff = wait_with_timeout(module, shell, ">")
    stdout += buff
    # exit
    shell.send('exit\n')
    # Success is looked for only in the output of the generate command itself.
    if 'Success' not in buff:
        module.fail_json(msg="Error generating self signed certificate: "+stdout)
    client.close()
    return stdout
def main():
    """Module entry point: validate arguments and drive the certificate
    generation; exits through AnsibleModule's exit_json/fail_json."""
    argument_spec = dict(
        ip_address=dict(required=True),
        key_filename=dict(),
        password=dict(no_log=True),
        cert_cn=dict(required=True),
        cert_friendly_name=dict(required=True),
        rsa_nbits=dict(default='2048'),
        signed_by=dict(required=True)
    )
    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=False,
                           required_one_of=[['key_filename', 'password']])
    if not HAS_LIB:
        module.fail_json(msg='paramiko is required for this module')

    params = module.params
    try:
        stdout = generate_cert(module,
                               params["ip_address"],
                               params["key_filename"],
                               params["password"],
                               params["cert_cn"],
                               params["cert_friendly_name"],
                               params["signed_by"],
                               params["rsa_nbits"])
    except Exception:
        exc = get_exception()
        module.fail_json(msg=exc.message)

    module.exit_json(changed=True, msg="okey dokey")

if __name__ == '__main__':
    main()
| gpl-3.0 |
pipsiscool/audacity | lib-src/lv2/lv2/plugins/eg03-metro.lv2/waflib/Tools/errcheck.py | 331 | 5821 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
# Commonly mistyped task-generator keyword names mapped to the correct form.
typos = {'feature': 'features', 'sources': 'source', 'targets': 'target', 'include': 'includes', 'export_include': 'export_includes', 'define': 'defines', 'importpath': 'includes', 'installpath': 'install_path', 'iscopy': 'is_copy', }
# BuildContext methods that get wrapped (see replace()) to report the typos above.
meths_typos = ['__call__', 'program', 'shlib', 'stlib', 'objects']
from waflib import Logs,Build,Node,Task,TaskGen,ConfigSet,Errors,Utils
import waflib.Tools.ccroot
def check_same_targets(self):
    """Post-build sanity check on a BuildContext: report output nodes that
    are produced by more than one task, and (failing that) distinct tasks
    that share the same uid."""
    # node -> list of tasks creating it
    mp = Utils.defaultdict(list)
    # task uid -> list of tasks with that uid
    uids = {}
    def check_task(tsk):
        if not isinstance(tsk, Task.Task):
            return
        for node in tsk.outputs:
            mp[node].append(tsk)
        try:
            uids[tsk.uid()].append(tsk)
        except KeyError:
            uids[tsk.uid()] = [tsk]
    for g in self.groups:
        for tg in g:
            try:
                for tsk in tg.tasks:
                    check_task(tsk)
            except AttributeError:
                # not a task generator: a task was added to the group directly
                check_task(tg)
    dupe = False
    for (k, v) in mp.items():
        if len(v) > 1:
            dupe = True
            msg = '* Node %r is created more than once%s. The task generators are:' % (k, Logs.verbose == 1 and " (full message on 'waf -v -v')" or "")
            Logs.error(msg)
            for x in v:
                if Logs.verbose > 1:
                    Logs.error(' %d. %r' % (1+v.index(x), x.generator))
                else:
                    Logs.error(' %d. %r in %r' % (1+v.index(x), x.generator.name, getattr(x.generator, 'path', None)))
    # the uid check is only meaningful when no duplicate nodes were found
    if not dupe:
        for (k, v) in uids.items():
            if len(v) > 1:
                Logs.error('* Several tasks use the same identifier. Please check the information on\n http://docs.waf.googlecode.com/git/apidocs_16/Task.html#waflib.Task.Task.uid')
                for tsk in v:
                    Logs.error(' - object %r (%r) defined in %r' % (tsk.__class__.__name__, tsk, tsk.generator))
def check_invalid_constraints(self):
    """Report annotation misuse on a BuildContext: methods that are both an
    @extension mapping and a feature/order method, plus order constraints
    and 'rule' attributes declared on task classes."""
    feat = set()
    for x in list(TaskGen.feats.values()):
        # BUG FIX: set.union() returns a NEW set and does not mutate 'feat',
        # so the original code discarded the result and the check below
        # silently missed these members.  update() mutates in place.
        feat.update(x)
    for (x, y) in TaskGen.task_gen.prec.items():
        feat.add(x)
        feat.update(y)
    ext = set()
    for x in TaskGen.task_gen.mappings.values():
        ext.add(x.__name__)
    # a method cannot be both an extension handler and a feature method
    invalid = ext & feat
    if invalid:
        Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method' % list(invalid))
    # order constraints on task classes must reference existing classes
    for cls in list(Task.classes.values()):
        for x in ('before', 'after'):
            for y in Utils.to_list(getattr(cls, x, [])):
                if not Task.classes.get(y, None):
                    Logs.error('Erroneous order constraint %r=%r on task class %r' % (x, y, cls.__name__))
        if getattr(cls, 'rule', None):
            Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")' % cls.__name__)
def replace(m):
    """Wrap BuildContext method *m* so that calls using a typo'd keyword
    (see the 'typos' map) are reported with the correct spelling."""
    oldcall = getattr(Build.BuildContext, m)
    def call(self, *k, **kw):
        ret = oldcall(self, *k, **kw)
        for x in typos:
            if x in kw:
                # 'iscopy' is legitimate on subst task generators
                if x == 'iscopy' and 'subst' in getattr(self, 'features', ''):
                    continue
                # (the dead 'err = True' flag assignment was removed: its
                # value was never read)
                Logs.error('Fix the typo %r -> %r on %r' % (x, typos[x], ret))
        return ret
    setattr(Build.BuildContext, m, call)
def enhance_lib():
    """Install the error-checking wrappers (monkey-patches) into waflib.

    Called once from options(); patches Node.ant_glob, Task.is_before,
    BuildContext.compile, task_gen.use_rec and ConfigSet attribute access,
    and registers two '*'-feature methods that validate task generators.
    """
    for m in meths_typos:
        replace(m)
    # catch '..' in ant_glob patterns and globbing on the build folder
    def ant_glob(self, *k, **kw):
        if k:
            lst = Utils.to_list(k[0])
            for pat in lst:
                if '..' in pat.split('/'):
                    Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'" % k[0])
        if kw.get('remove', True):
            try:
                if self.is_child_of(self.ctx.bldnode) and not kw.get('quiet', False):
                    Logs.error('Using ant_glob on the build folder (%r) is dangerous (quiet=True to disable this warning)' % self)
            except AttributeError:
                pass
        return self.old_ant_glob(*k, **kw)
    Node.Node.old_ant_glob = Node.Node.ant_glob
    Node.Node.ant_glob = ant_glob
    # detect contradictory a-before-b AND b-before-a constraints
    old = Task.is_before
    def is_before(t1, t2):
        ret = old(t1, t2)
        if ret and old(t2, t1):
            Logs.error('Contradictory order constraints in classes %r %r' % (t1, t2))
        return ret
    Task.is_before = is_before
    # warn about 'shlib' used without a language prefix and about a missing
    # c/cxx/d/fc feature when a matching *program/*shlib/*stlib is requested
    def check_err_features(self):
        lst = self.to_list(self.features)
        if 'shlib' in lst:
            Logs.error('feature shlib -> cshlib, dshlib or cxxshlib')
        for x in ('c', 'cxx', 'd', 'fc'):
            if not x in lst and lst and lst[0] in [x+y for y in ('program', 'shlib', 'stlib')]:
                Logs.error('%r features is probably missing %r' % (self, x))
    TaskGen.feature('*')(check_err_features)
    # warn about order constraints placed on non-rule task generators
    def check_err_order(self):
        if not hasattr(self, 'rule') and not 'subst' in Utils.to_list(self.features):
            for x in ('before', 'after', 'ext_in', 'ext_out'):
                if hasattr(self, x):
                    Logs.warn('Erroneous order constraint %r on non-rule based task generator %r' % (x, self))
        else:
            for x in ('before', 'after'):
                for y in self.to_list(getattr(self, x, [])):
                    if not Task.classes.get(y, None):
                        Logs.error('Erroneous order constraint %s=%r on %r (no such class)' % (x, y, self))
    TaskGen.feature('*')(check_err_order)
    # run the duplicate-target/uid checks after every build, even on failure
    def check_compile(self):
        check_invalid_constraints(self)
        try:
            ret = self.orig_compile()
        finally:
            check_same_targets(self)
        return ret
    Build.BuildContext.orig_compile = Build.BuildContext.compile
    Build.BuildContext.compile = check_compile
    # forbid 'use' references pointing at task generators in later build groups
    def use_rec(self, name, **kw):
        try:
            y = self.bld.get_tgen_by_name(name)
        except Errors.WafError:
            pass
        else:
            idx = self.bld.get_group_idx(self)
            odx = self.bld.get_group_idx(y)
            if odx > idx:
                msg = "Invalid 'use' across build groups:"
                if Logs.verbose > 1:
                    msg += '\n target %r\n uses:\n %r' % (self, y)
                else:
                    msg += " %r uses %r (try 'waf -v -v' for the full error)" % (self.name, name)
                raise Errors.WafError(msg)
        self.orig_use_rec(name, **kw)
    TaskGen.task_gen.orig_use_rec = TaskGen.task_gen.use_rec
    TaskGen.task_gen.use_rec = use_rec
    # give helpful error messages for common ConfigSet mistakes
    def getattri(self, name, default=None):
        if name == 'append' or name == 'add':
            raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique')
        elif name == 'prepend':
            raise Errors.WafError('env.prepend does not exist: use env.prepend_value')
        if name in self.__slots__:
            return object.__getattr__(self, name, default)
        else:
            return self[name]
    ConfigSet.ConfigSet.__getattr__ = getattri
def options(opt):
    """Waf hook: installing the checks at options time covers all commands."""
    enhance_lib()
def configure(conf):
    """Waf hook: nothing to configure for this tool."""
    pass
| mit |
alanjw/GreenOpenERP-Win-X86 | openerp/addons/crm_partner_assign/report/crm_lead_report.py | 53 | 6327 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,osv
from openerp import tools
from openerp.addons.crm import crm
# Selection values for the report's 'state' column (mirrors crm.lead states).
AVAILABLE_STATES = [
    ('draft', 'Draft'),
    ('open', 'Open'),
    ('cancel', 'Cancelled'),
    ('done', 'Closed'),
    ('pending', 'Pending')
]
class crm_lead_report_assign(osv.osv):
    """ CRM Lead Report

    Read-only reporting model backed by the SQL view created in init():
    one row per crm.lead, enriched with the assigned partner's grade.
    """
    _name = "crm.lead.report.assign"
    _auto = False  # no table is created; the model reads the SQL view below
    _description = "CRM Lead Report"
    _columns = {
        'year': fields.char('Year', size=64, required=False, readonly=True),
        'partner_assigned_id': fields.many2one('res.partner', 'Partner', readonly=True),
        'grade_id': fields.many2one('res.partner.grade', 'Grade', readonly=True),
        'user_id': fields.many2one('res.users', 'User', readonly=True),
        'country_id': fields.many2one('res.country', 'Country', readonly=True),
        'section_id': fields.many2one('crm.case.section', 'Sales Team', readonly=True),
        'state': fields.selection(AVAILABLE_STATES, 'Status', size=16, readonly=True),
        'month': fields.selection([('01', 'January'), ('02', 'February'),
                                   ('03', 'March'), ('04', 'April'),
                                   ('05', 'May'), ('06', 'June'),
                                   ('07', 'July'), ('08', 'August'),
                                   ('09', 'September'), ('10', 'October'),
                                   ('11', 'November'), ('12', 'December')], 'Month', readonly=True),
        # NOTE: 'company_id' was previously declared twice with identical
        # definitions in this dict literal; the redundant duplicate entry
        # has been removed (dict semantics made the second one win anyway).
        'company_id': fields.many2one('res.company', 'Company', readonly=True),
        'date_assign': fields.date('Partner Date', readonly=True),
        'create_date': fields.datetime('Create Date', readonly=True),
        'day': fields.char('Day', size=128, readonly=True),
        'delay_open': fields.float('Delay to Open', digits=(16, 2), readonly=True, group_operator="avg", help="Number of Days to open the case"),
        'delay_close': fields.float('Delay to Close', digits=(16, 2), readonly=True, group_operator="avg", help="Number of Days to close the case"),
        'delay_expected': fields.float('Overpassed Deadline', digits=(16, 2), readonly=True, group_operator="avg"),
        'probability': fields.float('Avg Probability', digits=(16, 2), readonly=True, group_operator="avg"),
        'probability_max': fields.float('Max Probability', digits=(16, 2), readonly=True, group_operator="max"),
        'planned_revenue': fields.float('Planned Revenue', digits=(16, 2), readonly=True),
        'probable_revenue': fields.float('Probable Revenue', digits=(16, 2), readonly=True),
        'stage_id': fields.many2one('crm.case.stage', 'Stage', domain="[('section_ids', '=', section_id)]"),
        'partner_id': fields.many2one('res.partner', 'Customer', readonly=True),
        'opening_date': fields.date('Opening Date', readonly=True),
        'creation_date': fields.date('Creation Date', readonly=True),
        'date_closed': fields.date('Close Date', readonly=True),
        # always 1 per row: lets group-by aggregation count leads
        'nbr': fields.integer('# of Cases', readonly=True),
        'priority': fields.selection(crm.AVAILABLE_PRIORITIES, 'Priority'),
        'type': fields.selection([
            ('lead', 'Lead'),
            ('opportunity', 'Opportunity'),
        ], 'Type', help="Type is used to separate Leads and Opportunities"),
    }

    def init(self, cr):
        """
        CRM Lead Report
        @param cr: the current row, from the database cursor
        """
        tools.drop_view_if_exists(cr, 'crm_lead_report_assign')
        cr.execute("""
            CREATE OR REPLACE VIEW crm_lead_report_assign AS (
                SELECT
                    c.id,
                    to_char(c.date_assign, 'YYYY') as year,
                    to_char(c.date_assign, 'MM') as month,
                    to_char(c.date_assign, 'YYYY-MM-DD') as day,
                    to_char(c.create_date, 'YYYY-MM-DD') as creation_date,
                    to_char(c.date_open, 'YYYY-MM-DD') as opening_date,
                    to_char(c.date_closed, 'YYYY-mm-dd') as date_closed,
                    c.state,
                    c.date_assign,
                    c.user_id,
                    c.probability,
                    c.probability as probability_max,
                    c.stage_id,
                    c.type,
                    c.company_id,
                    c.priority,
                    c.section_id,
                    c.partner_id,
                    c.country_id,
                    c.planned_revenue,
                    c.partner_assigned_id,
                    p.grade_id,
                    p.date as partner_date,
                    c.planned_revenue*(c.probability/100) as probable_revenue,
                    1 as nbr,
                    date_trunc('day',c.create_date) as create_date,
                    extract('epoch' from (c.date_closed-c.create_date))/(3600*24) as delay_close,
                    extract('epoch' from (c.date_deadline - c.date_closed))/(3600*24) as delay_expected,
                    extract('epoch' from (c.date_open-c.create_date))/(3600*24) as delay_open
                FROM
                    crm_lead c
                    left join res_partner p on (c.partner_assigned_id=p.id)
            )""")

crm_lead_report_assign()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
hlin117/scikit-learn | sklearn/tests/test_multioutput.py | 23 | 12429 | from __future__ import division
import numpy as np
import scipy.sparse as sp
from sklearn.utils import shuffle
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_false
from sklearn.utils.testing import assert_raises_regex
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_not_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.exceptions import NotFittedError
from sklearn import datasets
from sklearn.base import clone
from sklearn.ensemble import GradientBoostingRegressor, RandomForestClassifier
from sklearn.linear_model import Lasso
from sklearn.linear_model import SGDClassifier
from sklearn.linear_model import SGDRegressor
from sklearn.linear_model import LogisticRegression
from sklearn.svm import LinearSVC
from sklearn.multiclass import OneVsRestClassifier
from sklearn.multioutput import MultiOutputRegressor, MultiOutputClassifier
def test_multi_target_regression():
    """MultiOutputRegressor must reproduce per-column fits of its base."""
    X, y = datasets.make_regression(n_targets=3)
    X_train, y_train = X[:50], y[:50]
    X_test, y_test = X[50:], y[50:]

    expected = np.zeros_like(y_test)
    for col in range(3):
        single = GradientBoostingRegressor(random_state=0)
        single.fit(X_train, y_train[:, col])
        expected[:, col] = single.predict(X_test)

    multi = MultiOutputRegressor(GradientBoostingRegressor(random_state=0))
    multi.fit(X_train, y_train)
    assert_almost_equal(expected, multi.predict(X_test))
def test_multi_target_regression_partial_fit():
    """partial_fit must match per-column SGD partial fits."""
    X, y = datasets.make_regression(n_targets=3)
    X_train, y_train = X[:50], y[:50]
    X_test, y_test = X[50:], y[50:]

    expected = np.zeros_like(y_test)
    split = 25
    for col in range(3):
        single = SGDRegressor(random_state=0)
        single.partial_fit(X_train[:split], y_train[:split, col])
        single.partial_fit(X_train[split:], y_train[split:, col])
        expected[:, col] = single.predict(X_test)

    multi = MultiOutputRegressor(SGDRegressor(random_state=0))
    multi.partial_fit(X_train[:split], y_train[:split])
    multi.partial_fit(X_train[split:], y_train[split:])
    assert_almost_equal(expected, multi.predict(X_test))
def test_multi_target_regression_one_target():
    """A 1-D single-target y must be rejected by the multi-output wrapper."""
    X, y = datasets.make_regression(n_targets=1)
    wrapper = MultiOutputRegressor(GradientBoostingRegressor(random_state=0))
    assert_raises(ValueError, wrapper.fit, X, y)
def test_multi_target_sparse_regression():
    """Fitting on every scipy sparse format must agree with the dense fit."""
    X, y = datasets.make_regression(n_targets=3)
    X_train, y_train = X[:50], y[:50]
    X_test = X[50:]

    sparse_formats = (sp.csr_matrix, sp.csc_matrix, sp.coo_matrix,
                      sp.dok_matrix, sp.lil_matrix)
    for to_sparse in sparse_formats:
        dense_rgr = MultiOutputRegressor(Lasso(random_state=0))
        sparse_rgr = MultiOutputRegressor(Lasso(random_state=0))
        dense_rgr.fit(X_train, y_train)
        sparse_rgr.fit(to_sparse(X_train), y_train)
        assert_almost_equal(dense_rgr.predict(X_test),
                            sparse_rgr.predict(to_sparse(X_test)))
def test_multi_target_sample_weights_api():
    """sample_weight is rejected iff the base estimator cannot use it."""
    X = [[1, 2, 3], [4, 5, 6]]
    y = [[3.141, 2.718], [2.718, 3.141]]
    weights = [0.8, 0.6]

    unweightable = MultiOutputRegressor(Lasso())
    assert_raises_regex(ValueError, "does not support sample weights",
                        unweightable.fit, X, y, weights)

    # GradientBoostingRegressor supports weights, so this must not raise.
    weightable = MultiOutputRegressor(GradientBoostingRegressor(random_state=0))
    weightable.fit(X, y, weights)
def test_multi_target_sample_weight_partial_fit():
    """Different sample weights must lead to different partial_fit models."""
    X = [[1, 2, 3], [4, 5, 6]]
    y = [[3.141, 2.718], [2.718, 3.141]]

    weighted = MultiOutputRegressor(SGDRegressor(random_state=0))
    weighted.partial_fit(X, y, [2., 1.])

    uniform = MultiOutputRegressor(SGDRegressor(random_state=0))
    uniform.partial_fit(X, y, [2., 2.])

    assert_not_equal(uniform.predict(X)[0][0], weighted.predict(X)[0][0])
def test_multi_target_sample_weights():
    """Integer sample weights must behave like repeating the samples."""
    Xw = [[1, 2, 3], [4, 5, 6]]
    yw = [[3.141, 2.718], [2.718, 3.141]]
    weighted = MultiOutputRegressor(GradientBoostingRegressor(random_state=0))
    weighted.fit(Xw, yw, [2., 1.])

    X_rep = [[1, 2, 3], [1, 2, 3], [4, 5, 6]]
    y_rep = [[3.141, 2.718], [3.141, 2.718], [2.718, 3.141]]
    plain = MultiOutputRegressor(GradientBoostingRegressor(random_state=0))
    plain.fit(X_rep, y_rep)

    X_test = [[1.5, 2.5, 3.5], [3.5, 4.5, 5.5]]
    assert_almost_equal(plain.predict(X_test), weighted.predict(X_test))
# --- module-level fixture: iris features with three label columns ---
# Import the data
iris = datasets.load_iris()
# create a multiple targets by randomized shuffling and concatenating y.
X = iris.data
y1 = iris.target
y2 = shuffle(y1, random_state=1)
y3 = shuffle(y1, random_state=2)
y = np.column_stack((y1, y2, y3))
n_samples, n_features = X.shape
n_outputs = y.shape[1]
n_classes = len(np.unique(y1))
# per-output class arrays, as expected by partial_fit(classes=...)
classes = list(map(np.unique, (y1, y2, y3)))
def test_multi_output_classification_partial_fit_parallelism():
    """With n_jobs=-1, partial_fit must replace (not mutate) the estimators."""
    base = SGDClassifier(loss='log', random_state=1)
    clf = MultiOutputClassifier(base, n_jobs=-1)
    clf.partial_fit(X, y, classes)
    first = clf.estimators_[0]
    clf.partial_fit(X, y)
    second = clf.estimators_[0]
    # parallelism requires this to be the case for a sane implementation
    assert_false(first is second)
def test_multi_output_classification_partial_fit():
    # test if multi_target initializes correctly with base estimator and fit
    # assert predictions work as expected for predict
    sgd_linear_clf = SGDClassifier(loss='log', random_state=1)
    multi_target_linear = MultiOutputClassifier(sgd_linear_clf)
    # train the multi_target_linear and also get the predictions.
    half_index = X.shape[0] // 2
    multi_target_linear.partial_fit(
        X[:half_index], y[:half_index], classes=classes)
    first_predictions = multi_target_linear.predict(X)
    assert_equal((n_samples, n_outputs), first_predictions.shape)
    # a second partial_fit call must not require 'classes' again
    multi_target_linear.partial_fit(X[half_index:], y[half_index:])
    second_predictions = multi_target_linear.predict(X)
    assert_equal((n_samples, n_outputs), second_predictions.shape)
    # train the linear classification with each column and assert that
    # predictions are equal after first partial_fit and second partial_fit
    for i in range(3):
        # create a clone with the same state
        sgd_linear_clf = clone(sgd_linear_clf)
        sgd_linear_clf.partial_fit(
            X[:half_index], y[:half_index, i], classes=classes[i])
        assert_array_equal(sgd_linear_clf.predict(X), first_predictions[:, i])
        sgd_linear_clf.partial_fit(X[half_index:], y[half_index:, i])
        assert_array_equal(sgd_linear_clf.predict(X), second_predictions[:, i])
# NOTE(review): the function name contains typos ("mutli", "classifiation");
# it is kept as-is here because renaming would change the collected test id.
def test_mutli_output_classifiation_partial_fit_no_first_classes_exception():
    sgd_linear_clf = SGDClassifier(loss='log', random_state=1)
    multi_target_linear = MultiOutputClassifier(sgd_linear_clf)
    # 'classes' is mandatory on the very first partial_fit call
    assert_raises_regex(ValueError, "classes must be passed on the first call "
                        "to partial_fit.",
                        multi_target_linear.partial_fit, X, y)
def test_multi_output_classification():
    # test if multi_target initializes correctly with base estimator and fit
    # assert predictions work as expected for predict, prodict_proba and score
    forest = RandomForestClassifier(n_estimators=10, random_state=1)
    multi_target_forest = MultiOutputClassifier(forest)
    # train the multi_target_forest and also get the predictions.
    multi_target_forest.fit(X, y)
    predictions = multi_target_forest.predict(X)
    assert_equal((n_samples, n_outputs), predictions.shape)
    # predict_proba returns one probability array per output column
    predict_proba = multi_target_forest.predict_proba(X)
    assert len(predict_proba) == n_outputs
    for class_probabilities in predict_proba:
        assert_equal((n_samples, n_classes), class_probabilities.shape)
    # hard predictions must be the argmax of the per-class probabilities
    assert_array_equal(np.argmax(np.dstack(predict_proba), axis=1),
                       predictions)
    # train the forest with each column and assert that predictions are equal
    for i in range(3):
        forest_ = clone(forest)  # create a clone with the same state
        forest_.fit(X, y[:, i])
        assert_equal(list(forest_.predict(X)), list(predictions[:, i]))
        assert_array_equal(list(forest_.predict_proba(X)),
                           list(predict_proba[i]))
def test_multiclass_multioutput_estimator():
    """MultiOutputClassifier wrapping OneVsRest must match per-column fits."""
    svc = LinearSVC(random_state=0)
    multi_class_svc = OneVsRestClassifier(svc)
    stacked = MultiOutputClassifier(multi_class_svc)
    stacked.fit(X, y)
    predictions = stacked.predict(X)
    assert_equal((n_samples, n_outputs), predictions.shape)

    # fit the same OvR classifier on each column separately and compare
    for col in range(3):
        per_column = clone(multi_class_svc)
        per_column.fit(X, y[:, col])
        assert_equal(list(per_column.predict(X)),
                     list(predictions[:, col]))
def test_multiclass_multioutput_estimator_predict_proba():
    seed = 542
    # make test deterministic
    rng = np.random.RandomState(seed)
    # random features
    X = rng.normal(size=(5, 5))
    # random labels
    y1 = np.array(['b', 'a', 'a', 'b', 'a']).reshape(5, 1)  # 2 classes
    y2 = np.array(['d', 'e', 'f', 'e', 'd']).reshape(5, 1)  # 3 classes
    Y = np.concatenate([y1, y2], axis=1)
    clf = MultiOutputClassifier(LogisticRegression(random_state=seed))
    clf.fit(X, Y)
    y_result = clf.predict_proba(X)
    # NOTE: these hard-coded probabilities pin the exact RNG draw and the
    # LogisticRegression solver behaviour for this seed; regenerate them
    # deliberately if either changes.
    y_actual = [np.array([[0.23481764, 0.76518236],
                          [0.67196072, 0.32803928],
                          [0.54681448, 0.45318552],
                          [0.34883923, 0.65116077],
                          [0.73687069, 0.26312931]]),
                np.array([[0.5171785, 0.23878628, 0.24403522],
                          [0.22141451, 0.64102704, 0.13755846],
                          [0.16751315, 0.18256843, 0.64991843],
                          [0.27357372, 0.55201592, 0.17441036],
                          [0.65745193, 0.26062899, 0.08191907]])]
    for i in range(len(y_actual)):
        assert_almost_equal(y_result[i], y_actual[i])
def test_multi_output_classification_sample_weights():
    """Integer sample weights must behave like sample repetition."""
    Xw = [[1, 2, 3], [4, 5, 6]]
    yw = [[3, 2], [2, 3]]
    weighted = MultiOutputClassifier(
        RandomForestClassifier(n_estimators=10, random_state=1))
    weighted.fit(Xw, yw, np.asarray([2., 1.]))

    X_rep = [[1, 2, 3], [1, 2, 3], [4, 5, 6]]
    y_rep = [[3, 2], [3, 2], [2, 3]]
    plain = MultiOutputClassifier(
        RandomForestClassifier(n_estimators=10, random_state=1))
    plain.fit(X_rep, y_rep)

    X_test = [[1.5, 2.5, 3.5], [3.5, 4.5, 5.5]]
    assert_almost_equal(plain.predict(X_test), weighted.predict(X_test))
def test_multi_output_classification_partial_fit_sample_weights():
    """Sample weights passed to fit must behave like sample repetition."""
    Xw = [[1, 2, 3], [4, 5, 6], [1.5, 2.5, 3.5]]
    yw = [[3, 2], [2, 3], [3, 2]]
    weighted = MultiOutputClassifier(SGDClassifier(random_state=1))
    weighted.fit(Xw, yw, np.asarray([2., 1., 1.]))

    X_rep = [[1, 2, 3], [1, 2, 3], [4, 5, 6], [1.5, 2.5, 3.5]]
    y_rep = [[3, 2], [3, 2], [2, 3], [3, 2]]
    plain = MultiOutputClassifier(SGDClassifier(random_state=1))
    plain.fit(X_rep, y_rep)

    X_test = [[1.5, 2.5, 3.5]]
    assert_array_almost_equal(plain.predict(X_test), weighted.predict(X_test))
def test_multi_output_exceptions():
    """Unfitted use must raise NotFittedError; mismatched output counts
    between fit and score must raise ValueError."""
    moc = MultiOutputClassifier(LinearSVC(random_state=0))
    # FIX: the original passed the label matrix y as the features argument;
    # it only worked because NotFittedError is raised before validation.
    assert_raises(NotFittedError, moc.predict, X)
    assert_raises(NotFittedError, moc.predict_proba, X)
    assert_raises(NotFittedError, moc.score, X, y)
    # ValueError when number of outputs is different for fit and score
    y_new = np.column_stack((y1, y2))
    moc.fit(X, y)
    assert_raises(ValueError, moc.score, X, y_new)
| bsd-3-clause |
hn8841182/20150623-test02 | static/editor.py | 92 | 5584 | # -*- coding: utf-8 -*-
import sys
import time
import traceback
import dis
from browser import document as doc, window,alert
# set height of container to 66% of screen
_height = doc.documentElement.clientHeight
_s = doc['container']
_s.style.height = '%spx' % int(_height*0.66)
# Prefer the Ace editor when its JS library is available; otherwise fall
# back to a plain <textarea> and emulate the small part of the Ace API
# (getValue/setValue) used by the rest of this script.
has_ace = True
try:
    editor = window.ace.edit("editor")
    session = editor.getSession()
    session.setMode("ace/mode/python")
    editor.setOptions({
        'width': '390px;',
        'enableLiveAutocompletion': True,
        'enableSnippets': True,
        'highlightActiveLine': False,
        'highlightSelectedWord': True
    })
# NOTE(review): bare except is intentionally broad here (any JS-side failure
# means Ace is unusable), but it also hides programming errors.
except:
    from browser import html
    editor = html.TEXTAREA(rows=20, cols=70)
    doc["editor"] <= editor
    def get_value(): return editor.value
    def set_value(x): editor.value = x
    editor.getValue = get_value
    editor.setValue = set_value
    has_ace = False
# Persist the user's source in HTML5 local storage when the browser allows it.
if sys.has_local_storage:
    from browser.local_storage import storage
else:
    storage = None

# Honour the page's "debug" checkbox, when present.
if 'set_debug' in doc:
    __BRYTHON__.debug = int(doc['set_debug'].checked)
def reset_src():
    """Load the saved program from local storage into the Ace editor, or
    fall back to the bundled demo (a number-guessing game), then scroll
    the editor back to the top."""
    if storage is not None and "py_src" in storage:
        editor.setValue(storage["py_src"])
    else:
        editor.setValue('''#coding: utf-8
# 猜數字遊戲
import random
標準答案 = random.randint(1, 100)
print(標準答案)
你猜的數字 = int(input("請輸入您所猜的整數:"))
猜測次數 = 1
while 標準答案 != 你猜的數字:
    if 標準答案 < 你猜的數字:
        print("太大了,再猜一次 :)加油")
    else:
        print("太小了,再猜一次 :)加油")
    你猜的數字 = int(input("請輸入您所猜的整數:"))
    猜測次數 += 1
print("猜對了!總共猜了", 猜測次數, "次")
''')
    editor.scrollToRow(0)
    editor.gotoLine(0)
def reset_src_area():
    """Textarea variant of reset_src().

    NOTE(review): this definition is dead code -- it is immediately shadowed
    by the second ``reset_src_area`` defined right below, and it also mixes
    the textarea ``.value`` API with Ace-only calls (``setValue``,
    ``scrollToRow``).  Kept unchanged; consider deleting it.
    """
    if storage and "py_src" in storage:
        editor.value = storage["py_src"]
    else:
        editor.setValue('''#coding: utf-8
# 猜數字遊戲
import random
標準答案 = random.randint(1, 100)
print(標準答案)
你猜的數字 = int(input("請輸入您所猜的整數:"))
猜測次數 = 1
while 標準答案 != 你猜的數字:
    if 標準答案 < 你猜的數字:
        print("太大了,再猜一次 :)加油")
    else:
        print("太小了,再猜一次 :)加油")
    你猜的數字 = int(input("請輸入您所猜的整數:"))
    猜測次數 += 1
print("猜對了!總共猜了", 猜測次數, "次")
''')
    editor.scrollToRow(0)
    editor.gotoLine(0)
def reset_src_area():
    """Load the saved program (or the bundled demo) into the plain
    <textarea> fallback editor."""
    if storage and "py_src" in storage:
        editor.value = storage["py_src"]
    else:
        editor.value = '''#coding: utf-8
# 猜數字遊戲
import random
標準答案 = random.randint(1, 100)
print(標準答案)
你猜的數字 = int(input("請輸入您所猜的整數:"))
猜測次數 = 1
while 標準答案 != 你猜的數字:
    if 標準答案 < 你猜的數字:
        print("太大了,再猜一次 :)加油")
    else:
        print("太小了,再猜一次 :)加油")
    你猜的數字 = int(input("請輸入您所猜的整數:"))
    猜測次數 += 1
print("猜對了!總共猜了", 猜測次數, "次")
'''
def write(data):
    """Append *data* to the on-page console textarea; installed below as
    the write() method of both sys.stdout and sys.stderr."""
    doc["console"].value += '%s' % data

#sys.stdout = object() #not needed when importing sys via src/Lib/sys.py
sys.stdout.write = write
#sys.stderr = object() # ditto
sys.stderr.write = write
def to_str(xx):
    """Return the string representation of *xx* (tiny helper kept so the
    JavaScript side can stringify arbitrary Python objects)."""
    converted = str(xx)
    return converted
# Interpreter version info; the on-page version label is currently disabled.
info = sys.implementation.version
# Yen
#doc['version'].text = '%s.%s.%s' %(info.major,info.minor,info.micro)

# Last program output, cached by run() so show_console() can restore it.
output = ''
def show_console(ev):
    """Restore the cached program output into the console and widen it."""
    doc["console"].value = output
    doc["console"].cols = 60
def run(in_globals=False):
    """Execute the editor's source; return 1 on success, 0 on an exception.

    Side effects: clears the console, saves the source to local storage,
    caches the console text in the module-level ``output`` and prints a
    timing line (which, via the patched stdout, also lands in the console).
    """
    global output
    doc["console"].value = ''
    src = editor.getValue()
    if storage is not None:
        storage["py_src"] = src
    t0 = time.perf_counter()
    try:
        if(in_globals):
            # NOTE(review): exec(src) here runs in run()'s local scope,
            # not the module globals -- confirm this is the intent.
            exec(src)
        else:
            # run in a fresh, isolated namespace
            ns = {}
            exec(src, ns)
        state = 1
    except Exception as exc:
        # the traceback goes to the patched stderr, i.e. the console
        traceback.print_exc()
        state = 0
    output = doc["console"].value
    print('<completed in %6.2f ms>' % ((time.perf_counter()-t0)*1000.0))
    return state
# load a Python script
def load_script(evt):
    """Load the selected example file into the editor; the timestamp query
    argument defeats the browser cache."""
    _name = evt.target.value + '?foo=%s' % time.time()
    editor.setValue(open(_name).read())
def show_js(ev):
    """Disassemble the current source into the console.

    NOTE(review): CPython's dis.dis() returns None and prints to stdout;
    under Brython the text presumably reaches the console through the
    patched sys.stdout.write above -- confirm the assignment's value is
    what is intended here.
    """
    src = editor.getValue()
    doc["console"].value = dis.dis(src)
# Yen defined
def clear_text(ev):
    """Clear the editor, the saved local-storage copy and the console."""
    editor.setValue('')
    if sys.has_local_storage:
        storage["py_src"] = ''
    doc["console"].value = ''
def clear_src(ev):
    """Clear the editor and the saved local-storage copy (console untouched)."""
    editor.setValue('')
    if sys.has_local_storage:
        storage["py_src"] = ''
def clear_canvas(ev):
    """Erase the plot canvas while preserving its current transform."""
    canvas = doc["plotarea"]
    ctx = canvas.getContext("2d")
    # Store the current transformation matrix
    ctx.save();
    # Use the identity matrix while clearing the canvas
    ctx.setTransform(1, 0, 0, 1, 0, 0);
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    # Restore the transform
    ctx.restore();
    #ctx.clearRect(0, 0, canvas.width, canvas.height)
def clear_console(ev):
    """Event handler: empty the console textarea."""
    doc["console"].value=''
def change_theme(evt):
    """Event handler: apply the theme picked in the <select> and persist it."""
    _theme=evt.target.value
    editor.setTheme(_theme)
    if storage:
        storage["ace_theme"]=_theme
# Re-apply the theme whenever the theme <select> changes.
doc["ace_theme"].bind("change",change_theme)
def reset_theme():
    """Re-apply the Ace editor theme persisted in local storage, if any."""
    if storage and "ace_theme" in storage:
        saved = storage["ace_theme"]
        editor.setTheme(saved)
        doc["ace_theme"].value = saved
def reset_the_src(ev):
    """Event handler: reload the saved source (and theme with Ace active)."""
    if has_ace:
        reset_src()
        reset_theme()
    else:
        # Plain <textarea> fallback when the Ace editor is unavailable.
        reset_src_area()
if has_ace:
reset_src()
else:
reset_src_area() | gpl-3.0 |
SummerLW/Perf-Insight-Report | third_party/gsutil/third_party/crcmod_osx/crcmod/_crcfunpy.py | 68 | 3073 | #-----------------------------------------------------------------------------
# Low level CRC functions for use by crcmod. This version is implemented in
# Python for a couple of reasons. 1) Provide a reference implememtation.
# 2) Provide a version that can be used on systems where a C compiler is not
# available for building extension modules.
#
# Copyright (c) 2004 Raymond L. Buvel
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#-----------------------------------------------------------------------------
def _crc8(data, crc, table):
crc = crc & 0xFF
for x in data:
crc = table[ord(x) ^ crc]
return crc
def _crc8r(data, crc, table):
crc = crc & 0xFF
for x in data:
crc = table[ord(x) ^ crc]
return crc
def _crc16(data, crc, table):
crc = crc & 0xFFFF
for x in data:
crc = table[ord(x) ^ ((crc>>8) & 0xFF)] ^ ((crc << 8) & 0xFF00)
return crc
def _crc16r(data, crc, table):
crc = crc & 0xFFFF
for x in data:
crc = table[ord(x) ^ (crc & 0xFF)] ^ (crc >> 8)
return crc
def _crc24(data, crc, table):
crc = crc & 0xFFFFFF
for x in data:
crc = table[ord(x) ^ (int(crc>>16) & 0xFF)] ^ ((crc << 8) & 0xFFFF00)
return crc
def _crc24r(data, crc, table):
crc = crc & 0xFFFFFF
for x in data:
crc = table[ord(x) ^ int(crc & 0xFF)] ^ (crc >> 8)
return crc
def _crc32(data, crc, table):
crc = crc & 0xFFFFFFFFL
for x in data:
crc = table[ord(x) ^ (int(crc>>24) & 0xFF)] ^ ((crc << 8) & 0xFFFFFF00L)
return crc
def _crc32r(data, crc, table):
crc = crc & 0xFFFFFFFFL
for x in data:
crc = table[ord(x) ^ int(crc & 0xFFL)] ^ (crc >> 8)
return crc
def _crc64(data, crc, table):
crc = crc & 0xFFFFFFFFFFFFFFFFL
for x in data:
crc = table[ord(x) ^ (int(crc>>56) & 0xFF)] ^ ((crc << 8) & 0xFFFFFFFFFFFFFF00L)
return crc
def _crc64r(data, crc, table):
crc = crc & 0xFFFFFFFFFFFFFFFFL
for x in data:
crc = table[ord(x) ^ int(crc & 0xFFL)] ^ (crc >> 8)
return crc
| bsd-3-clause |
garyjyao1/ansible | lib/ansible/modules/extras/windows/win_iis_website.py | 84 | 3462 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Henrik Wallström <henrik@wallstroms.nu>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: win_iis_website
version_added: "2.0"
short_description: Configures an IIS Web site.
description:
    - Creates, removes and configures an IIS Web site
options:
name:
description:
- Names of web site
required: true
default: null
aliases: []
state:
description:
- State of the web site
choices:
- started
- restarted
- stopped
- absent
required: false
default: null
aliases: []
physical_path:
description:
- The physical path on the remote host to use for the new site. The specified folder must already exist.
required: false
default: null
aliases: []
application_pool:
description:
- The application pool in which the new site executes.
required: false
default: null
aliases: []
port:
description:
- The port to bind to / use for the new site.
required: false
default: null
aliases: []
ip:
description:
- The IP address to bind to / use for the new site.
required: false
default: null
aliases: []
hostname:
description:
- The host header to bind to / use for the new site.
required: false
default: null
aliases: []
ssl:
description:
      - Enables HTTPS binding on the site.
required: false
default: null
aliases: []
parameters:
description:
      - Custom site parameters from a string where properties are separated by a pipe and property name/values by a colon. Ex. "foo:1|bar:2"
required: false
default: null
aliases: []
author: Henrik Wallström
'''
EXAMPLES = '''
# This return information about an existing host
$ ansible -i vagrant-inventory -m win_iis_website -a "name='Default Web Site'" window
host | success >> {
"changed": false,
"site": {
"ApplicationPool": "DefaultAppPool",
"Bindings": [
"*:80:"
],
"ID": 1,
"Name": "Default Web Site",
"PhysicalPath": "%SystemDrive%\\inetpub\\wwwroot",
"State": "Stopped"
}
}
# This stops an existing site.
$ ansible -i hosts -m win_iis_website -a "name='Default Web Site' state=stopped" host
# This creates a new site.
$ ansible -i hosts -m win_iis_website -a "name=acme physical_path=c:\\sites\\acme" host
# Change logfile .
$ ansible -i hosts -m win_iis_website -a "name=acme physical_path=c:\\sites\\acme" host
# Playbook example
---
- name: Acme IIS site
win_iis_website:
name: "Acme"
state: started
port: 80
ip: 127.0.0.1
hostname: acme.local
application_pool: "acme"
physical_path: 'c:\\sites\\acme'
parameters: 'logfile.directory:c:\\sites\\logs'
register: website
'''
| gpl-3.0 |
theirc/CTS | shipments/migrations/0014_rename_location_model.py | 2 | 1076 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from shipments.db_views import drop_views, add_views
class Migration(migrations.Migration):
    """Rename the ``Location`` model to ``PackageScan``.

    The underlying database table keeps its old name
    (``shipments_location``), so no table rename happens in the DB, and
    the ``last_location*`` fields on Package/Shipment are renamed to
    ``last_scan*`` to match the new model name.
    """
    dependencies = [
        ('shipments', '0013_merge'),
    ]
    operations = [
        # Drop the SQL views first (presumably they reference the renamed
        # objects — see shipments.db_views); they are re-created at the end.
        migrations.RunPython(drop_views, add_views),
        migrations.RenameModel("Location", "PackageScan"),
        # Pin the table name to the pre-rename value.
        migrations.AlterModelTable(
            name='packagescan',
            table='shipments_location',
        ),
        migrations.RenameField(
            model_name='package',
            old_name='last_location',
            new_name='last_scan',
        ),
        migrations.RenameField(
            model_name='package',
            old_name='last_location_status_label',
            new_name='last_scan_status_label',
        ),
        migrations.RenameField(
            model_name='shipment',
            old_name='last_location_status_label',
            new_name='last_scan_status_label',
        ),
        # Restore the views on apply; drop them again on rollback.
        migrations.RunPython(add_views, drop_views),
    ]
| bsd-3-clause |
smartforceplus/SmartForceplus | .local/share/Odoo/addons/8.0/builder/models/demo/normal_distribution.py | 1 | 1081 | import random
from openerp import models, api, fields
__author__ = 'one'
class NormalDistributionGenerator(models.Model):
    """Demo-data generator drawing values from a normal distribution.

    Specializes the generic demo generator (delegation via ``_inherits``)
    and yields Gaussian samples with the configured mean/stdev, coerced
    to int for integer target fields.
    """
    _name = 'builder.ir.model.demo.generator.normal_distribution'
    _description = 'Normal Distribution Generator'
    _inherits = {
        'builder.ir.model.demo.generator': 'base_id'
    }
    _inherit = ['ir.mixin.polymorphism.subclass', 'builder.ir.model.demo.generator.base']
    _target_type = 'integer'
    # Delegation link to the base generator record (see _inherits above).
    base_id = fields.Many2one(
        comodel_name='builder.ir.model.demo.generator',
        string='Base',
        ondelete='cascade',
        required=True
    )
    # Parameters of the normal distribution fed to random.gauss().
    mean = fields.Float('Mean')
    stdev = fields.Float('Std Deviation')
    _defaults = {
        'subclass_model': lambda s, c, u, cxt=None: s._name
    }
    @api.multi
    def get_generator(self, field):
        """Yield an endless stream of samples formatted for *field*.

        The generator never terminates; the caller decides how many
        values to consume.
        """
        while True:
            yield self.format_value(field, random.gauss(self.mean, self.stdev))
    @api.model
    def format_value(self, field, value):
        """Coerce *value* to the target field's type (int for integers)."""
        if field.ttype == 'integer':
            return int(value)
        else:
            return value
prefetchnta/questlab | bin/script/python/luckyman.py | 1 | 4810 | #-*- coding: utf-8 -*-
##################################
# 彩票中奖测试脚本
#
# 创建时间: 2018-09-12
##################################
import sys
#######################################
# 体彩 6+1 有效判断
#######################################
def check_sixa1(num):
    """Validate a sports-lottery 6+1 ticket string.

    Expects six digits, a '+', and one more digit in positions 0-7;
    characters beyond position 7 are tolerated (lines are stripped by
    the caller).  Returns True when the prefix is well-formed.
    """
    if len(num) < 6 + 1 + 1:
        return False
    if num[:6].isdigit() and num[6] == "+" and num[7].isdigit():
        return True
    return False
#######################################
# 体彩 6+1 中奖判断
#######################################
def lucky_sixa1(zhong, ur_num):
    """Print the 6+1 prize tier of ticket *ur_num* against winner *zhong*.

    Both strings must be in 'dddddd+d' form (see check_sixa1).  Returns
    True when any prize was won, False otherwise; always prints one line.
    """
    if zhong == ur_num:
        print(ur_num + ": <T HIT !!!!!!!!!!!!>")
        return True
    if zhong[:6] == ur_num[:6]:
        print(ur_num + ": <1 HIT !!!!!!!!>")
        return True
    if zhong[:5] == ur_num[:5] or zhong[1:6] == ur_num[1:6]:
        print(ur_num + ": <2 HIT !!!!>")
        return True
    # Cash tiers: any aligned run of 4/3/2 consecutive matching digits
    # inside the six-digit body wins the corresponding amount.
    for prize, width in (("300", 4), ("20", 3), ("5", 2)):
        for start in range(7 - width):
            if zhong[start:start + width] == ur_num[start:start + width]:
                print(ur_num + ": <" + prize + " YUAN ^_^>")
                return True
    print(ur_num + ":")
    return False
#######################################
# 双色球有效判断
#######################################
def check_dball(num):
    """Validate a double-color-ball ticket string 'RRRRRRRRRRRR+BB'.

    Six two-digit red balls (01-33, no repeats), a '+', and a two-digit
    blue ball (01-16).  Characters past position 14 are tolerated.
    """
    if len(num) < 6 * 2 + 1 + 2:
        return False
    reds, sep, blue = num[:12], num[12], num[13:15]
    if not (reds.isdigit() and blue.isdigit() and sep == "+"):
        return False
    seen = []
    for pos in range(0, 12, 2):
        ball = int(num[pos:pos + 2])
        if ball < 1 or ball > 33 or ball in seen:
            return False
        seen.append(ball)
    if 1 <= int(blue) <= 16:
        return True
    return False
#######################################
# 双色球中奖判断
#######################################
def lucky_dball(zhong, ur_num):
    """Print the double-color-ball prize tier of *ur_num* against the
    winning number *zhong*; return True when any prize was won.

    Both arguments are expected in 'RRRRRRRRRRRR+BB' form, already
    validated by check_dball().
    """
    ll = 0              # number of matching red balls
    rr = 0              # 1 when the blue ball matches, else 0
    zhong_last = 0
    zhong_list = []
    ur_num_last = 0
    ur_num_list = []
    # Split the first 12 characters into six two-digit red balls.
    for idx in range(0, 12, 2):
        ball1 = int(zhong[idx:idx+2])
        ball2 = int(ur_num[idx:idx+2])
        zhong_list.append(ball1)
        ur_num_list.append(ball2)
    zhong_last = int(zhong[13:15])
    ur_num_last = int(ur_num[13:15])
    # Count red matches; a matched winning ball is zeroed out so it
    # cannot be counted twice.
    ii = 0
    while ii < len(ur_num_list):
        jj = 0
        while jj < len(zhong_list):
            if ur_num_list[ii] == zhong_list[jj]:
                ll += 1
                zhong_list[jj] = 0
                break
            jj += 1
        ii += 1
    if ur_num_last == zhong_last:
        rr += 1
    # Prize tiers, highest first.
    if (ll == 6 and rr == 1):
        print(ur_num + ": <1 HIT !!!!!!!!>")
        return True
    if (ll == 6):
        print(ur_num + ": <2 HIT !!!!>")
        return True
    if (ll == 5 and rr == 1):
        print(ur_num + ": <3000 YUAN ^_^>")
        return True
    if (ll == 5) or (ll == 4 and rr == 1):
        print(ur_num + ": <200 YUAN ^_^>")
        return True
    if (ll == 4) or (ll == 3 and rr == 1):
        print(ur_num + ": <10 YUAN ^_^>")
        return True
    # NOTE: rr == 1 alone already implies the other two clauses below.
    if (rr == 1) or (ll == 1 and rr == 1) or (ll == 2 and rr == 1):
        print(ur_num + ": <5 YUAN ^_^>")
        return True
    print(ur_num + ":")
    return False
# Validate the command-line arguments.
if len(sys.argv) != 4:
    print("Usage: luckyman.py <type> <number> <ur numbers>")
    exit()
luck_type = (sys.argv[1]).strip()   # lottery type: "sixa1" or "dball"
luck_numb = (sys.argv[2]).strip()   # the winning number
luck_file = (sys.argv[3]).strip()   # file with one ticket number per line
try:
    fp = open(luck_file, "r")
    lines = fp.readlines()
    fp.close()
# NOTE(review): a bare except also swallows KeyboardInterrupt/SystemExit;
# catching IOError/OSError would be safer.
except:
    print("invalid number file: " + luck_file)
    exit()
# Pick the validator / prize-check pair for the requested lottery type.
if luck_type == "sixa1":
    check_it = check_sixa1
    lucky_it = lucky_sixa1
elif luck_type == "dball":
    check_it = check_dball
    lucky_it = lucky_dball
else:
    print("invalid type: " + luck_type)
    exit()
# The winning number itself must be well-formed.
if not check_it(luck_numb):
    print("invalid number: " + luck_numb)
    exit()
# Check every ticket line, printing its prize tier (or nothing won).
for numb in lines:
    numb = numb.strip()
    if not check_it(numb):
        print("invalid ur number: " + numb)
    else:
        lucky_it(luck_numb, numb)
folleselavida/freebefore30 | node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/scripts/vim2pygments.py | 127 | 26283 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Vim Colorscheme Converter
~~~~~~~~~~~~~~~~~~~~~~~~~
This script converts vim colorscheme files to valid pygments
style classes meant for putting into modules.
:copyright 2006 by Armin Ronacher.
:license: BSD, see LICENSE for details.
"""
import sys
import re
from os import path
from cStringIO import StringIO
# Split on whitespace runs not preceded by a backslash (vim allows
# backslash-escaped spaces inside highlight values).
split_re = re.compile(r'(?<!\\)\s+')
SCRIPT_NAME = 'Vim Colorscheme Converter'
SCRIPT_VERSION = '0.1'
COLORS = {
# Numeric Colors
'0': '#000000',
'1': '#c00000',
'2': '#008000',
'3': '#808000',
'4': '#0000c0',
'5': '#c000c0',
'6': '#008080',
'7': '#c0c0c0',
'8': '#808080',
'9': '#ff6060',
'10': '#00ff00',
'11': '#ffff00',
'12': '#8080ff',
'13': '#ff40ff',
'14': '#00ffff',
'15': '#ffffff',
# Named Colors
'alice': '#f0f8ff',
'aliceblue': '#f0f8ff',
'antique': '#faebd7',
'antiquewhite': '#faebd7',
'antiquewhite1': '#ffefdb',
'antiquewhite2': '#eedfcc',
'antiquewhite3': '#cdc0b0',
'antiquewhite4': '#8b8378',
'aquamarine': '#7fffd4',
'aquamarine1': '#7fffd4',
'aquamarine2': '#76eec6',
'aquamarine3': '#66cdaa',
'aquamarine4': '#458b74',
'azure': '#f0ffff',
'azure1': '#f0ffff',
'azure2': '#e0eeee',
'azure3': '#c1cdcd',
'azure4': '#838b8b',
'beige': '#f5f5dc',
'bisque': '#ffe4c4',
'bisque1': '#ffe4c4',
'bisque2': '#eed5b7',
'bisque3': '#cdb79e',
'bisque4': '#8b7d6b',
'black': '#000000',
'blanched': '#ffebcd',
'blanchedalmond': '#ffebcd',
'blue': '#8a2be2',
'blue1': '#0000ff',
'blue2': '#0000ee',
'blue3': '#0000cd',
'blue4': '#00008b',
'blueviolet': '#8a2be2',
'brown': '#a52a2a',
'brown1': '#ff4040',
'brown2': '#ee3b3b',
'brown3': '#cd3333',
'brown4': '#8b2323',
'burlywood': '#deb887',
'burlywood1': '#ffd39b',
'burlywood2': '#eec591',
'burlywood3': '#cdaa7d',
'burlywood4': '#8b7355',
'cadet': '#5f9ea0',
'cadetblue': '#5f9ea0',
'cadetblue1': '#98f5ff',
'cadetblue2': '#8ee5ee',
'cadetblue3': '#7ac5cd',
'cadetblue4': '#53868b',
'chartreuse': '#7fff00',
'chartreuse1': '#7fff00',
'chartreuse2': '#76ee00',
'chartreuse3': '#66cd00',
'chartreuse4': '#458b00',
'chocolate': '#d2691e',
'chocolate1': '#ff7f24',
'chocolate2': '#ee7621',
'chocolate3': '#cd661d',
'chocolate4': '#8b4513',
'coral': '#ff7f50',
'coral1': '#ff7256',
'coral2': '#ee6a50',
'coral3': '#cd5b45',
'coral4': '#8b3e2f',
'cornflower': '#6495ed',
'cornflowerblue': '#6495ed',
'cornsilk': '#fff8dc',
'cornsilk1': '#fff8dc',
'cornsilk2': '#eee8cd',
'cornsilk3': '#cdc8b1',
'cornsilk4': '#8b8878',
'cyan': '#00ffff',
'cyan1': '#00ffff',
'cyan2': '#00eeee',
'cyan3': '#00cdcd',
'cyan4': '#008b8b',
'dark': '#8b0000',
'darkblue': '#00008b',
'darkcyan': '#008b8b',
'darkgoldenrod': '#b8860b',
'darkgoldenrod1': '#ffb90f',
'darkgoldenrod2': '#eead0e',
'darkgoldenrod3': '#cd950c',
'darkgoldenrod4': '#8b6508',
'darkgray': '#a9a9a9',
'darkgreen': '#006400',
'darkgrey': '#a9a9a9',
'darkkhaki': '#bdb76b',
'darkmagenta': '#8b008b',
'darkolivegreen': '#556b2f',
'darkolivegreen1': '#caff70',
'darkolivegreen2': '#bcee68',
'darkolivegreen3': '#a2cd5a',
'darkolivegreen4': '#6e8b3d',
'darkorange': '#ff8c00',
'darkorange1': '#ff7f00',
'darkorange2': '#ee7600',
'darkorange3': '#cd6600',
'darkorange4': '#8b4500',
'darkorchid': '#9932cc',
'darkorchid1': '#bf3eff',
'darkorchid2': '#b23aee',
'darkorchid3': '#9a32cd',
'darkorchid4': '#68228b',
'darkred': '#8b0000',
'darksalmon': '#e9967a',
'darkseagreen': '#8fbc8f',
'darkseagreen1': '#c1ffc1',
'darkseagreen2': '#b4eeb4',
'darkseagreen3': '#9bcd9b',
'darkseagreen4': '#698b69',
'darkslateblue': '#483d8b',
'darkslategray': '#2f4f4f',
'darkslategray1': '#97ffff',
'darkslategray2': '#8deeee',
'darkslategray3': '#79cdcd',
'darkslategray4': '#528b8b',
'darkslategrey': '#2f4f4f',
'darkturquoise': '#00ced1',
'darkviolet': '#9400d3',
'deep': '#ff1493',
'deeppink': '#ff1493',
'deeppink1': '#ff1493',
'deeppink2': '#ee1289',
'deeppink3': '#cd1076',
'deeppink4': '#8b0a50',
'deepskyblue': '#00bfff',
'deepskyblue1': '#00bfff',
'deepskyblue2': '#00b2ee',
'deepskyblue3': '#009acd',
'deepskyblue4': '#00688b',
'dim': '#696969',
'dimgray': '#696969',
'dimgrey': '#696969',
'dodger': '#1e90ff',
'dodgerblue': '#1e90ff',
'dodgerblue1': '#1e90ff',
'dodgerblue2': '#1c86ee',
'dodgerblue3': '#1874cd',
'dodgerblue4': '#104e8b',
'firebrick': '#b22222',
'firebrick1': '#ff3030',
'firebrick2': '#ee2c2c',
'firebrick3': '#cd2626',
'firebrick4': '#8b1a1a',
'floral': '#fffaf0',
'floralwhite': '#fffaf0',
'forest': '#228b22',
'forestgreen': '#228b22',
'gainsboro': '#dcdcdc',
'ghost': '#f8f8ff',
'ghostwhite': '#f8f8ff',
'gold': '#ffd700',
'gold1': '#ffd700',
'gold2': '#eec900',
'gold3': '#cdad00',
'gold4': '#8b7500',
'goldenrod': '#daa520',
'goldenrod1': '#ffc125',
'goldenrod2': '#eeb422',
'goldenrod3': '#cd9b1d',
'goldenrod4': '#8b6914',
'gray': '#bebebe',
'gray0': '#000000',
'gray1': '#030303',
'gray10': '#1a1a1a',
'gray100': '#ffffff',
'gray11': '#1c1c1c',
'gray12': '#1f1f1f',
'gray13': '#212121',
'gray14': '#242424',
'gray15': '#262626',
'gray16': '#292929',
'gray17': '#2b2b2b',
'gray18': '#2e2e2e',
'gray19': '#303030',
'gray2': '#050505',
'gray20': '#333333',
'gray21': '#363636',
'gray22': '#383838',
'gray23': '#3b3b3b',
'gray24': '#3d3d3d',
'gray25': '#404040',
'gray26': '#424242',
'gray27': '#454545',
'gray28': '#474747',
'gray29': '#4a4a4a',
'gray3': '#080808',
'gray30': '#4d4d4d',
'gray31': '#4f4f4f',
'gray32': '#525252',
'gray33': '#545454',
'gray34': '#575757',
'gray35': '#595959',
'gray36': '#5c5c5c',
'gray37': '#5e5e5e',
'gray38': '#616161',
'gray39': '#636363',
'gray4': '#0a0a0a',
'gray40': '#666666',
'gray41': '#696969',
'gray42': '#6b6b6b',
'gray43': '#6e6e6e',
'gray44': '#707070',
'gray45': '#737373',
'gray46': '#757575',
'gray47': '#787878',
'gray48': '#7a7a7a',
'gray49': '#7d7d7d',
'gray5': '#0d0d0d',
'gray50': '#7f7f7f',
'gray51': '#828282',
'gray52': '#858585',
'gray53': '#878787',
'gray54': '#8a8a8a',
'gray55': '#8c8c8c',
'gray56': '#8f8f8f',
'gray57': '#919191',
'gray58': '#949494',
'gray59': '#969696',
'gray6': '#0f0f0f',
'gray60': '#999999',
'gray61': '#9c9c9c',
'gray62': '#9e9e9e',
'gray63': '#a1a1a1',
'gray64': '#a3a3a3',
'gray65': '#a6a6a6',
'gray66': '#a8a8a8',
'gray67': '#ababab',
'gray68': '#adadad',
'gray69': '#b0b0b0',
'gray7': '#121212',
'gray70': '#b3b3b3',
'gray71': '#b5b5b5',
'gray72': '#b8b8b8',
'gray73': '#bababa',
'gray74': '#bdbdbd',
'gray75': '#bfbfbf',
'gray76': '#c2c2c2',
'gray77': '#c4c4c4',
'gray78': '#c7c7c7',
'gray79': '#c9c9c9',
'gray8': '#141414',
'gray80': '#cccccc',
'gray81': '#cfcfcf',
'gray82': '#d1d1d1',
'gray83': '#d4d4d4',
'gray84': '#d6d6d6',
'gray85': '#d9d9d9',
'gray86': '#dbdbdb',
'gray87': '#dedede',
'gray88': '#e0e0e0',
'gray89': '#e3e3e3',
'gray9': '#171717',
'gray90': '#e5e5e5',
'gray91': '#e8e8e8',
'gray92': '#ebebeb',
'gray93': '#ededed',
'gray94': '#f0f0f0',
'gray95': '#f2f2f2',
'gray96': '#f5f5f5',
'gray97': '#f7f7f7',
'gray98': '#fafafa',
'gray99': '#fcfcfc',
'green': '#adff2f',
'green1': '#00ff00',
'green2': '#00ee00',
'green3': '#00cd00',
'green4': '#008b00',
'greenyellow': '#adff2f',
'grey': '#bebebe',
'grey0': '#000000',
'grey1': '#030303',
'grey10': '#1a1a1a',
'grey100': '#ffffff',
'grey11': '#1c1c1c',
'grey12': '#1f1f1f',
'grey13': '#212121',
'grey14': '#242424',
'grey15': '#262626',
'grey16': '#292929',
'grey17': '#2b2b2b',
'grey18': '#2e2e2e',
'grey19': '#303030',
'grey2': '#050505',
'grey20': '#333333',
'grey21': '#363636',
'grey22': '#383838',
'grey23': '#3b3b3b',
'grey24': '#3d3d3d',
'grey25': '#404040',
'grey26': '#424242',
'grey27': '#454545',
'grey28': '#474747',
'grey29': '#4a4a4a',
'grey3': '#080808',
'grey30': '#4d4d4d',
'grey31': '#4f4f4f',
'grey32': '#525252',
'grey33': '#545454',
'grey34': '#575757',
'grey35': '#595959',
'grey36': '#5c5c5c',
'grey37': '#5e5e5e',
'grey38': '#616161',
'grey39': '#636363',
'grey4': '#0a0a0a',
'grey40': '#666666',
'grey41': '#696969',
'grey42': '#6b6b6b',
'grey43': '#6e6e6e',
'grey44': '#707070',
'grey45': '#737373',
'grey46': '#757575',
'grey47': '#787878',
'grey48': '#7a7a7a',
'grey49': '#7d7d7d',
'grey5': '#0d0d0d',
'grey50': '#7f7f7f',
'grey51': '#828282',
'grey52': '#858585',
'grey53': '#878787',
'grey54': '#8a8a8a',
'grey55': '#8c8c8c',
'grey56': '#8f8f8f',
'grey57': '#919191',
'grey58': '#949494',
'grey59': '#969696',
'grey6': '#0f0f0f',
'grey60': '#999999',
'grey61': '#9c9c9c',
'grey62': '#9e9e9e',
'grey63': '#a1a1a1',
'grey64': '#a3a3a3',
'grey65': '#a6a6a6',
'grey66': '#a8a8a8',
'grey67': '#ababab',
'grey68': '#adadad',
'grey69': '#b0b0b0',
'grey7': '#121212',
'grey70': '#b3b3b3',
'grey71': '#b5b5b5',
'grey72': '#b8b8b8',
'grey73': '#bababa',
'grey74': '#bdbdbd',
'grey75': '#bfbfbf',
'grey76': '#c2c2c2',
'grey77': '#c4c4c4',
'grey78': '#c7c7c7',
'grey79': '#c9c9c9',
'grey8': '#141414',
'grey80': '#cccccc',
'grey81': '#cfcfcf',
'grey82': '#d1d1d1',
'grey83': '#d4d4d4',
'grey84': '#d6d6d6',
'grey85': '#d9d9d9',
'grey86': '#dbdbdb',
'grey87': '#dedede',
'grey88': '#e0e0e0',
'grey89': '#e3e3e3',
'grey9': '#171717',
'grey90': '#e5e5e5',
'grey91': '#e8e8e8',
'grey92': '#ebebeb',
'grey93': '#ededed',
'grey94': '#f0f0f0',
'grey95': '#f2f2f2',
'grey96': '#f5f5f5',
'grey97': '#f7f7f7',
'grey98': '#fafafa',
'grey99': '#fcfcfc',
'honeydew': '#f0fff0',
'honeydew1': '#f0fff0',
'honeydew2': '#e0eee0',
'honeydew3': '#c1cdc1',
'honeydew4': '#838b83',
'hot': '#ff69b4',
'hotpink': '#ff69b4',
'hotpink1': '#ff6eb4',
'hotpink2': '#ee6aa7',
'hotpink3': '#cd6090',
'hotpink4': '#8b3a62',
'indian': '#cd5c5c',
'indianred': '#cd5c5c',
'indianred1': '#ff6a6a',
'indianred2': '#ee6363',
'indianred3': '#cd5555',
'indianred4': '#8b3a3a',
'ivory': '#fffff0',
'ivory1': '#fffff0',
'ivory2': '#eeeee0',
'ivory3': '#cdcdc1',
'ivory4': '#8b8b83',
'khaki': '#f0e68c',
'khaki1': '#fff68f',
'khaki2': '#eee685',
'khaki3': '#cdc673',
'khaki4': '#8b864e',
'lavender': '#fff0f5',
'lavenderblush': '#fff0f5',
'lavenderblush1': '#fff0f5',
'lavenderblush2': '#eee0e5',
'lavenderblush3': '#cdc1c5',
'lavenderblush4': '#8b8386',
'lawn': '#7cfc00',
'lawngreen': '#7cfc00',
'lemon': '#fffacd',
'lemonchiffon': '#fffacd',
'lemonchiffon1': '#fffacd',
'lemonchiffon2': '#eee9bf',
'lemonchiffon3': '#cdc9a5',
'lemonchiffon4': '#8b8970',
'light': '#90ee90',
'lightblue': '#add8e6',
'lightblue1': '#bfefff',
'lightblue2': '#b2dfee',
'lightblue3': '#9ac0cd',
'lightblue4': '#68838b',
'lightcoral': '#f08080',
'lightcyan': '#e0ffff',
'lightcyan1': '#e0ffff',
'lightcyan2': '#d1eeee',
'lightcyan3': '#b4cdcd',
'lightcyan4': '#7a8b8b',
'lightgoldenrod': '#eedd82',
'lightgoldenrod1': '#ffec8b',
'lightgoldenrod2': '#eedc82',
'lightgoldenrod3': '#cdbe70',
'lightgoldenrod4': '#8b814c',
'lightgoldenrodyellow': '#fafad2',
'lightgray': '#d3d3d3',
'lightgreen': '#90ee90',
'lightgrey': '#d3d3d3',
'lightpink': '#ffb6c1',
'lightpink1': '#ffaeb9',
'lightpink2': '#eea2ad',
'lightpink3': '#cd8c95',
'lightpink4': '#8b5f65',
'lightsalmon': '#ffa07a',
'lightsalmon1': '#ffa07a',
'lightsalmon2': '#ee9572',
'lightsalmon3': '#cd8162',
'lightsalmon4': '#8b5742',
'lightseagreen': '#20b2aa',
'lightskyblue': '#87cefa',
'lightskyblue1': '#b0e2ff',
'lightskyblue2': '#a4d3ee',
'lightskyblue3': '#8db6cd',
'lightskyblue4': '#607b8b',
'lightslateblue': '#8470ff',
'lightslategray': '#778899',
'lightslategrey': '#778899',
'lightsteelblue': '#b0c4de',
'lightsteelblue1': '#cae1ff',
'lightsteelblue2': '#bcd2ee',
'lightsteelblue3': '#a2b5cd',
'lightsteelblue4': '#6e7b8b',
'lightyellow': '#ffffe0',
'lightyellow1': '#ffffe0',
'lightyellow2': '#eeeed1',
'lightyellow3': '#cdcdb4',
'lightyellow4': '#8b8b7a',
'lime': '#32cd32',
'limegreen': '#32cd32',
'linen': '#faf0e6',
'magenta': '#ff00ff',
'magenta1': '#ff00ff',
'magenta2': '#ee00ee',
'magenta3': '#cd00cd',
'magenta4': '#8b008b',
'maroon': '#b03060',
'maroon1': '#ff34b3',
'maroon2': '#ee30a7',
'maroon3': '#cd2990',
'maroon4': '#8b1c62',
'medium': '#9370db',
'mediumaquamarine': '#66cdaa',
'mediumblue': '#0000cd',
'mediumorchid': '#ba55d3',
'mediumorchid1': '#e066ff',
'mediumorchid2': '#d15fee',
'mediumorchid3': '#b452cd',
'mediumorchid4': '#7a378b',
'mediumpurple': '#9370db',
'mediumpurple1': '#ab82ff',
'mediumpurple2': '#9f79ee',
'mediumpurple3': '#8968cd',
'mediumpurple4': '#5d478b',
'mediumseagreen': '#3cb371',
'mediumslateblue': '#7b68ee',
'mediumspringgreen': '#00fa9a',
'mediumturquoise': '#48d1cc',
'mediumvioletred': '#c71585',
'midnight': '#191970',
'midnightblue': '#191970',
'mint': '#f5fffa',
'mintcream': '#f5fffa',
'misty': '#ffe4e1',
'mistyrose': '#ffe4e1',
'mistyrose1': '#ffe4e1',
'mistyrose2': '#eed5d2',
'mistyrose3': '#cdb7b5',
'mistyrose4': '#8b7d7b',
'moccasin': '#ffe4b5',
'navajo': '#ffdead',
'navajowhite': '#ffdead',
'navajowhite1': '#ffdead',
'navajowhite2': '#eecfa1',
'navajowhite3': '#cdb38b',
'navajowhite4': '#8b795e',
'navy': '#000080',
'navyblue': '#000080',
'old': '#fdf5e6',
'oldlace': '#fdf5e6',
'olive': '#6b8e23',
'olivedrab': '#6b8e23',
'olivedrab1': '#c0ff3e',
'olivedrab2': '#b3ee3a',
'olivedrab3': '#9acd32',
'olivedrab4': '#698b22',
'orange': '#ff4500',
'orange1': '#ffa500',
'orange2': '#ee9a00',
'orange3': '#cd8500',
'orange4': '#8b5a00',
'orangered': '#ff4500',
'orangered1': '#ff4500',
'orangered2': '#ee4000',
'orangered3': '#cd3700',
'orangered4': '#8b2500',
'orchid': '#da70d6',
'orchid1': '#ff83fa',
'orchid2': '#ee7ae9',
'orchid3': '#cd69c9',
'orchid4': '#8b4789',
'pale': '#db7093',
'palegoldenrod': '#eee8aa',
'palegreen': '#98fb98',
'palegreen1': '#9aff9a',
'palegreen2': '#90ee90',
'palegreen3': '#7ccd7c',
'palegreen4': '#548b54',
'paleturquoise': '#afeeee',
'paleturquoise1': '#bbffff',
'paleturquoise2': '#aeeeee',
'paleturquoise3': '#96cdcd',
'paleturquoise4': '#668b8b',
'palevioletred': '#db7093',
'palevioletred1': '#ff82ab',
'palevioletred2': '#ee799f',
'palevioletred3': '#cd6889',
'palevioletred4': '#8b475d',
'papaya': '#ffefd5',
'papayawhip': '#ffefd5',
'peach': '#ffdab9',
'peachpuff': '#ffdab9',
'peachpuff1': '#ffdab9',
'peachpuff2': '#eecbad',
'peachpuff3': '#cdaf95',
'peachpuff4': '#8b7765',
'peru': '#cd853f',
'pink': '#ffc0cb',
'pink1': '#ffb5c5',
'pink2': '#eea9b8',
'pink3': '#cd919e',
'pink4': '#8b636c',
'plum': '#dda0dd',
'plum1': '#ffbbff',
'plum2': '#eeaeee',
'plum3': '#cd96cd',
'plum4': '#8b668b',
'powder': '#b0e0e6',
'powderblue': '#b0e0e6',
'purple': '#a020f0',
'purple1': '#9b30ff',
'purple2': '#912cee',
'purple3': '#7d26cd',
'purple4': '#551a8b',
'red': '#ff0000',
'red1': '#ff0000',
'red2': '#ee0000',
'red3': '#cd0000',
'red4': '#8b0000',
'rosy': '#bc8f8f',
'rosybrown': '#bc8f8f',
'rosybrown1': '#ffc1c1',
'rosybrown2': '#eeb4b4',
'rosybrown3': '#cd9b9b',
'rosybrown4': '#8b6969',
'royal': '#4169e1',
'royalblue': '#4169e1',
'royalblue1': '#4876ff',
'royalblue2': '#436eee',
'royalblue3': '#3a5fcd',
'royalblue4': '#27408b',
'saddle': '#8b4513',
'saddlebrown': '#8b4513',
'salmon': '#fa8072',
'salmon1': '#ff8c69',
'salmon2': '#ee8262',
'salmon3': '#cd7054',
'salmon4': '#8b4c39',
'sandy': '#f4a460',
'sandybrown': '#f4a460',
'sea': '#2e8b57',
'seagreen': '#2e8b57',
'seagreen1': '#54ff9f',
'seagreen2': '#4eee94',
'seagreen3': '#43cd80',
'seagreen4': '#2e8b57',
'seashell': '#fff5ee',
'seashell1': '#fff5ee',
'seashell2': '#eee5de',
'seashell3': '#cdc5bf',
'seashell4': '#8b8682',
'sienna': '#a0522d',
'sienna1': '#ff8247',
'sienna2': '#ee7942',
'sienna3': '#cd6839',
'sienna4': '#8b4726',
'sky': '#87ceeb',
'skyblue': '#87ceeb',
'skyblue1': '#87ceff',
'skyblue2': '#7ec0ee',
'skyblue3': '#6ca6cd',
'skyblue4': '#4a708b',
'slate': '#6a5acd',
'slateblue': '#6a5acd',
'slateblue1': '#836fff',
'slateblue2': '#7a67ee',
'slateblue3': '#6959cd',
'slateblue4': '#473c8b',
'slategray': '#708090',
'slategray1': '#c6e2ff',
'slategray2': '#b9d3ee',
'slategray3': '#9fb6cd',
'slategray4': '#6c7b8b',
'slategrey': '#708090',
'snow': '#fffafa',
'snow1': '#fffafa',
'snow2': '#eee9e9',
'snow3': '#cdc9c9',
'snow4': '#8b8989',
'spring': '#00ff7f',
'springgreen': '#00ff7f',
'springgreen1': '#00ff7f',
'springgreen2': '#00ee76',
'springgreen3': '#00cd66',
'springgreen4': '#008b45',
'steel': '#4682b4',
'steelblue': '#4682b4',
'steelblue1': '#63b8ff',
'steelblue2': '#5cacee',
'steelblue3': '#4f94cd',
'steelblue4': '#36648b',
'tan': '#d2b48c',
'tan1': '#ffa54f',
'tan2': '#ee9a49',
'tan3': '#cd853f',
'tan4': '#8b5a2b',
'thistle': '#d8bfd8',
'thistle1': '#ffe1ff',
'thistle2': '#eed2ee',
'thistle3': '#cdb5cd',
'thistle4': '#8b7b8b',
'tomato': '#ff6347',
'tomato1': '#ff6347',
'tomato2': '#ee5c42',
'tomato3': '#cd4f39',
'tomato4': '#8b3626',
'turquoise': '#40e0d0',
'turquoise1': '#00f5ff',
'turquoise2': '#00e5ee',
'turquoise3': '#00c5cd',
'turquoise4': '#00868b',
'violet': '#ee82ee',
'violetred': '#d02090',
'violetred1': '#ff3e96',
'violetred2': '#ee3a8c',
'violetred3': '#cd3278',
'violetred4': '#8b2252',
'wheat': '#f5deb3',
'wheat1': '#ffe7ba',
'wheat2': '#eed8ae',
'wheat3': '#cdba96',
'wheat4': '#8b7e66',
'white': '#ffffff',
'whitesmoke': '#f5f5f5',
'yellow': '#ffff00',
'yellow1': '#ffff00',
'yellow2': '#eeee00',
'yellow3': '#cdcd00',
'yellow4': '#8b8b00',
'yellowgreen': '#9acd32'
}
# Map of vim highlight-group names (lowercased) to pygments token names.
# A tuple value means the vim group feeds several pygments tokens.
# Fixes: removed a duplicate 'number' key (same value, no behavior
# change) and replaced dict.itervalues() with values(), which works on
# both Python 2 and Python 3.
TOKENS = {
    'normal': '',
    'string': 'String',
    'number': 'Number',
    'float': 'Number.Float',
    'constant': 'Name.Constant',
    'statement': ('Keyword', 'Name.Tag'),
    'identifier': 'Name.Variable',
    'operator': 'Operator.Word',
    'label': 'Name.Label',
    'exception': 'Name.Exception',
    'function': ('Name.Function', 'Name.Attribute'),
    'preproc': 'Comment.Preproc',
    'comment': 'Comment',
    'type': 'Keyword.Type',
    'diffadd': 'Generic.Inserted',
    'diffdelete': 'Generic.Deleted',
    'error': 'Generic.Error',
    'errormsg': 'Generic.Traceback',
    'title': ('Generic.Heading', 'Generic.Subheading'),
    'underlined': 'Generic.Emph',
    'special': 'Name.Entity',
    'nontext': 'Generic.Output'
}

# Top-level pygments token-type names referenced by TOKENS (e.g. 'Name',
# 'Generic'); used by StyleWriter.write_header for the import line.
TOKEN_TYPES = set()
for value in TOKENS.values():
    names = value if isinstance(value, tuple) else (value,)
    for name in names:
        if name:
            TOKEN_TYPES.add(name.split('.')[0])
def get_vim_color(color):
    """Normalize a vim color value to a '#rrggbb' string.

    Full 7-character '#rrggbb' values pass through unchanged; short
    '#rgb' values are expanded by padding each nibble with a trailing
    zero ('#abc' -> '#a0b0c0'); anything else is looked up
    case-insensitively in the COLORS table (None when unknown).
    """
    if color.startswith('#'):
        if len(color) == 7:
            return color
        else:
            # Bug fix: drop the leading '#' *before* joining; the original
            # sliced after the join, producing an invalid 8-character color
            # such as '#0a0b0c0'.
            return '#%s0' % '0'.join(color[1:])
    return COLORS.get(color.lower())
def find_colors(code):
    """Parse vim colorscheme source and extract its color definitions.

    Returns ``(default_token_def, color_map)`` where default_token_def
    is the style string for the 'normal' group and color_map maps
    pygments token names to pygments style strings.

    Fixes: renamed the inner closure from ``set`` (shadowed the builtin)
    to ``assign``, and replaced dict.iteritems() with items() so the
    function also runs on Python 3.
    """
    colors = {'Normal': {}}
    bg_color = None

    def assign(attrib, value):
        # gui* keys override cterm*/term keys for the same attribute.
        if token not in colors:
            colors[token] = {}
        if key.startswith('gui') or attrib not in colors[token]:
            colors[token][attrib] = value

    for line in code.splitlines():
        if line.startswith('"'):
            continue
        parts = split_re.split(line.strip())
        if len(parts) == 2 and parts[0] == 'set':
            p = parts[1].split()
            if p[0] == 'background' and p[1] == 'dark':
                token = 'Normal'
                bg_color = '#000000'
        elif len(parts) > 2 and \
           len(parts[0]) >= 2 and \
           'highlight'.startswith(parts[0]):
            token = parts[1].lower()
            if token not in TOKENS:
                continue
            for item in parts[2:]:
                p = item.split('=', 1)
                if not len(p) == 2:
                    continue
                key, value = p
                if key in ('ctermfg', 'guifg'):
                    color = get_vim_color(value)
                    if color:
                        assign('color', color)
                elif key in ('ctermbg', 'guibg'):
                    color = get_vim_color(value)
                    if color:
                        assign('bgcolor', color)
                elif key in ('term', 'cterm', 'gui'):
                    items = value.split(',')
                    for item in items:
                        item = item.lower()
                        if item == 'none':
                            assign('noinherit', True)
                        elif item == 'bold':
                            assign('bold', True)
                        elif item == 'underline':
                            assign('underline', True)
                        elif item == 'italic':
                            assign('italic', True)
    if bg_color is not None and not colors['Normal'].get('bgcolor'):
        colors['Normal']['bgcolor'] = bg_color
    color_map = {}
    # NOTE(review): the capitalized 'Normal' entry (including the bg_color
    # fallback above) is skipped here because TOKENS keys are lowercase
    # ('normal') — possibly unintended; confirm against upstream.
    for token, styles in colors.items():
        if token in TOKENS:
            tmp = []
            if styles.get('noinherit'):
                tmp.append('noinherit')
            if 'color' in styles:
                tmp.append(styles['color'])
            if 'bgcolor' in styles:
                tmp.append('bg:' + styles['bgcolor'])
            if styles.get('bold'):
                tmp.append('bold')
            if styles.get('italic'):
                tmp.append('italic')
            if styles.get('underline'):
                tmp.append('underline')
            tokens = TOKENS[token]
            if not isinstance(tokens, tuple):
                tokens = (tokens,)
            for token in tokens:
                color_map[token] = ' '.join(tmp)
    default_token = color_map.pop('')
    return default_token, color_map
class StyleWriter(object):
    """Serialise a vim colorscheme (parsed by find_colors) as a Pygments
    style module written to a file-like object."""

    def __init__(self, code, name):
        self.code = code            # raw .vim colorscheme source
        self.name = name.lower()    # scheme name, lowercased for the class name

    def write_header(self, out):
        """Write the generated module's docstring, imports and class line."""
        out.write('# -*- coding: utf-8 -*-\n"""\n')
        out.write(' %s Colorscheme\n' % self.name.title())
        out.write(' %s\n\n' % ('~' * (len(self.name) + 12)))
        out.write(' Converted by %s\n' % SCRIPT_NAME)
        out.write('"""\nfrom pygments.style import Style\n')
        out.write('from pygments.token import Token, %s\n\n' % ', '.join(TOKEN_TYPES))
        out.write('class %sStyle(Style):\n\n' % self.name.title())

    def write(self, out):
        """Write the complete style module to *out*."""
        self.write_header(out)
        default_token, tokens = find_colors(self.code)
        # Emit shorter (more general) token names first.  Bug fix: the
        # original used a cmp() comparator that compared an entry's name
        # length against its *own* definition length -- a no-op -- and
        # cmp()/comparator sorting no longer exists in Python 3.
        tokens = sorted(tokens.items(), key=lambda item: len(item[0]))
        bg_color = [x[3:] for x in default_token.split() if x.startswith('bg:')]
        if bg_color:
            out.write(' background_color = %r\n' % bg_color[0])
        out.write(' styles = {\n')
        out.write(' %-20s%r,\n' % ('Token:', default_token))
        for token, definition in tokens:
            if definition:
                out.write(' %-20s%r,\n' % (token + ':', definition))
        out.write(' }')

    def __repr__(self):
        out = StringIO()
        # Bug fix: this previously called self.write_style(), which does
        # not exist, so repr() always raised AttributeError.
        self.write(out)
        return out.getvalue()
def convert(filename, stream=None):
    """Convert the vim colorscheme at *filename* into a Pygments style
    module.

    If *stream* is given the module source is written to it and ``None``
    is returned; otherwise the source is returned as a string.
    """
    name = path.basename(filename)
    if name.endswith('.vim'):
        name = name[:-4]
    # Bug fix: use open() (file() was removed in Python 3) and a context
    # manager so the handle is closed even if read() raises.
    with open(filename) as f:
        code = f.read()
    writer = StyleWriter(code, name)
    if stream is not None:
        out = stream
    else:
        out = StringIO()
    writer.write(out)
    if stream is None:
        return out.getvalue()
def main():
    """Command-line entry point: convert sys.argv[1] to stdout.

    Returns 2 on usage error, 1 when the file is missing, None on
    success (mapped to exit status 0 below).  NOTE: Python 2 code
    (print statements).
    """
    if len(sys.argv) != 2 or sys.argv[1] in ('-h', '--help'):
        print 'Usage: %s <filename.vim>' % sys.argv[0]
        return 2
    if sys.argv[1] in ('-v', '--version'):
        print '%s %s' % (SCRIPT_NAME, SCRIPT_VERSION)
        return
    filename = sys.argv[1]
    if not (path.exists(filename) and path.isfile(filename)):
        print 'Error: %s not found' % filename
        return 1
    convert(filename, sys.stdout)
    sys.stdout.write('\n')
if __name__ == '__main__':
    # main() returns None on success; 'or 0' turns that into exit code 0.
    sys.exit(main() or 0)
| mit |
mlperf/training_results_v0.7 | Google/benchmarks/dlrm/implementations/dlrm-research-TF-tpu-v4-128/utils.py | 1 | 3037 | # Lint as: python3
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for configuring training DLRM training script."""
from absl import flags
import tensorflow.compat.v1 as tf
from REDACTED.tensorflow_models.mlperf.models.rough.mlp_log import mlp_log
FLAGS = flags.FLAGS  # Global absl flag registry; flags themselves are defined elsewhere.
# Metric names.
ACC_KEY = "accuracy"
AUC_KEY = "auc"
PRECISION_KEY = "precision"
RECALL_KEY = "recall"
def create_scalar_summary(name, simple_value):
  """Return a TF Summary proto with a single scalar tagged *name*."""
  return tf.Summary(
      value=[tf.Summary.Value(tag=name, simple_value=simple_value)])
def train_loop_iters():
  """Number of outer train/eval loop iterations.

  Equals ceil(FLAGS.train_steps / FLAGS.steps_between_evals).
  """
  total_steps = FLAGS.train_steps
  steps_per_iter = FLAGS.steps_between_evals
  # Integer ceiling division without floats.
  return (total_steps + steps_per_iter - 1) // steps_per_iter
def lr_fn(params, global_step):
  """Calculates adjusted LR based on global step.
  Linear warmup and polynomial decay.
  Args:
    params: Params dict for the model.
    global_step: Variable representing the current step.
  Returns:
    New learning rate tensor (float32).
  """
  # Polynomial decay exponent (quadratic decay).
  decay_exp = 2
  base_learning_rate = params["learning_rate"]
  global_step = tf.cast(global_step, tf.float32)
  lr_warmup_steps = tf.constant(params["lr_warmup_steps"], tf.float32)
  decay_steps_float = tf.constant(params["decay_steps"], tf.float32)
  decay_start_step_float = tf.constant(params["decay_start_step"], tf.float32)
  # Linear scaling rule: scale the base LR by global_batch_size / 2048.
  global_batch_size = params["batch_size"]
  scaling_factor = global_batch_size / 2048.0
  adjusted_lr = base_learning_rate * scaling_factor
  adjusted_lr = tf.constant(adjusted_lr, tf.float32)
  if not params["lr_warmup_steps"]:
    # No warmup configured: constant (scaled) LR for the whole run.
    return adjusted_lr
  # Linear ramp from 0 up to adjusted_lr over lr_warmup_steps.
  change_rate = adjusted_lr / lr_warmup_steps
  warmup_lr = adjusted_lr - (lr_warmup_steps - global_step) * change_rate
  steps_since_decay_start_float = global_step - decay_start_step_float
  already_decayed_steps = tf.minimum(steps_since_decay_start_float,
                                     decay_steps_float)
  decay_lr = adjusted_lr * ((decay_steps_float - already_decayed_steps) /
                            decay_steps_float)**decay_exp
  # Floor so the LR never reaches exactly zero at the end of decay.
  decay_lr = tf.maximum(decay_lr, tf.constant(0.0000001))
  # Select between warmup / constant / decay phases with 0-1 arithmetic
  # masks instead of graph control flow (exactly one mask is 1).
  is_warmup_step = tf.cast(global_step < lr_warmup_steps, tf.float32)
  is_decay_step = tf.cast(global_step > decay_start_step_float, tf.float32)
  is_middle_step = tf.cast(
      tf.equal(is_warmup_step + is_decay_step, 0.0), tf.float32)
  lr = (is_warmup_step * warmup_lr + is_middle_step * adjusted_lr +
        is_decay_step * decay_lr)
  return lr
| apache-2.0 |
dxwu/BinderFilter | resources/android-toolchain-16/lib/python2.7/lib-tk/ScrolledText.py | 133 | 1836 | """A ScrolledText widget feels like a text widget but also has a
vertical scroll bar on its right. (Later, options may be added to
add a horizontal bar as well, to make the bars disappear
automatically when not needed, to move them to the other side of the
window, etc.)
Configuration options are passed to the Text widget.
A Frame widget is inserted between the master and the text, to hold
the Scrollbar widget.
Most methods calls are inherited from the Text widget; Pack, Grid and
Place methods are redirected to the Frame widget however.
"""
__all__ = ['ScrolledText']
from Tkinter import Frame, Text, Scrollbar, Pack, Grid, Place
from Tkconstants import RIGHT, LEFT, Y, BOTH
class ScrolledText(Text):
    """A Text widget with a vertical Scrollbar, packed inside a Frame.

    The Frame's geometry-manager methods are copied onto the instance so
    callers can pack/grid/place the composite as a single widget.
    NOTE: Python 2 code (Tkinter module names, dict.keys() concatenation).
    """
    def __init__(self, master=None, **kw):
        self.frame = Frame(master)
        self.vbar = Scrollbar(self.frame)
        self.vbar.pack(side=RIGHT, fill=Y)
        # Wire the scrollbar and the text widget to each other.
        kw.update({'yscrollcommand': self.vbar.set})
        Text.__init__(self, self.frame, **kw)
        self.pack(side=LEFT, fill=BOTH, expand=True)
        self.vbar['command'] = self.yview
        # Copy geometry methods of self.frame without overriding Text
        # methods -- hack!
        text_meths = vars(Text).keys()
        methods = vars(Pack).keys() + vars(Grid).keys() + vars(Place).keys()
        methods = set(methods).difference(text_meths)
        for m in methods:
            if m[0] != '_' and m != 'config' and m != 'configure':
                setattr(self, m, getattr(self.frame, m))
    def __str__(self):
        # Report the frame's widget path so geometry managers treat the
        # composite as one unit.
        return str(self.frame)
def example():
    """Small demo: display this module's docstring in a ScrolledText."""
    import __main__
    from Tkconstants import END
    stext = ScrolledText(bg='white', height=10)
    stext.insert(END, __main__.__doc__)
    stext.pack(fill=BOTH, side=LEFT, expand=True)
    stext.focus_set()
    stext.mainloop()
if __name__ == "__main__":
    example()
| mit |
micahmicah/new-years-2016 | node_modules/node-sass/node_modules/pangyp/gyp/tools/pretty_sln.py | 1831 | 5099 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Prints the information in a sln file in a diffable way.
It first outputs each projects in alphabetical order with their
dependencies.
Then it outputs a possible build order.
"""
__author__ = 'nsylvain (Nicolas Sylvain)'
import os
import re
import sys
import pretty_vcproj
def BuildProject(project, built, projects, deps):
  """Print *project* after recursively printing its not-yet-built deps.

  *built* is mutated in place and acts as the visited set.
  NOTE: Python 2 code (print statement).
  """
  # if all dependencies are done, we can build it, otherwise we try to build the
  # dependency.
  # This is not infinite-recursion proof.
  for dep in deps[project]:
    if dep not in built:
      BuildProject(dep, built, projects, deps)
  print project
  built.append(project)
def ParseSolution(solution_file):
  """Parse a Visual Studio .sln file.

  Returns ``(projects, dependencies)`` where *projects* maps a project
  name to ``[path, clsid, original_path]`` and *dependencies* maps a
  project name to a sorted list of project names it depends on.
  NOTE: `current_project` deliberately persists across loop iterations so
  dependency lines are attributed to the most recent Project block.
  """
  # All projects, their clsid and paths.
  projects = dict()
  # A list of dependencies associated with a project.
  dependencies = dict()
  # Regular expressions that matches the SLN format.
  # The first line of a project definition.
  begin_project = re.compile(r'^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
                             r'}"\) = "(.*)", "(.*)", "(.*)"$')
  # The last line of a project definition.
  end_project = re.compile('^EndProject$')
  # The first line of a dependency list.
  begin_dep = re.compile(
      r'ProjectSection\(ProjectDependencies\) = postProject$')
  # The last line of a dependency list.
  end_dep = re.compile('EndProjectSection$')
  # A line describing a dependency.
  dep_line = re.compile(' *({.*}) = ({.*})$')
  in_deps = False
  solution = open(solution_file)
  for line in solution:
    results = begin_project.search(line)
    if results:
      # Hack to remove icu because the diff is too different.
      if results.group(1).find('icu') != -1:
        continue
      # We remove "_gyp" from the names because it helps to diff them.
      current_project = results.group(1).replace('_gyp', '')
      projects[current_project] = [results.group(2).replace('_gyp', ''),
                                   results.group(3),
                                   results.group(2)]
      dependencies[current_project] = []
      continue
    results = end_project.search(line)
    if results:
      current_project = None
      continue
    results = begin_dep.search(line)
    if results:
      in_deps = True
      continue
    results = end_dep.search(line)
    if results:
      in_deps = False
      continue
    results = dep_line.search(line)
    if results and in_deps and current_project:
      dependencies[current_project].append(results.group(1))
      continue
  # Change all dependencies clsid to name instead.
  for project in dependencies:
    # For each dependencies in this project
    new_dep_array = []
    for dep in dependencies[project]:
      # Look for the project name matching this cldis
      for project_info in projects:
        if projects[project_info][1] == dep:
          new_dep_array.append(project_info)
    dependencies[project] = sorted(new_dep_array)
  return (projects, dependencies)
def PrintDependencies(projects, deps):
  """Print every project with its path and sorted dependency list."""
  print "---------------------------------------"
  print "Dependencies for all projects"
  print "---------------------------------------"
  print "-- --"
  for (project, dep_list) in sorted(deps.items()):
    print "Project : %s" % project
    print "Path : %s" % projects[project][0]
    if dep_list:
      for dep in dep_list:
        print " - %s" % dep
    print ""
  print "-- --"
def PrintBuildOrder(projects, deps):
  """Print one valid dependency-respecting build order (via BuildProject)."""
  print "---------------------------------------"
  print "Build order "
  print "---------------------------------------"
  print "-- --"
  built = []
  for (project, _) in sorted(deps.items()):
    if project not in built:
      BuildProject(project, built, projects, deps)
  print "-- --"
def PrintVCProj(projects):
  """Pretty-print each project's .vcproj via the pretty_vcproj module."""
  for project in projects:
    print "-------------------------------------"
    print "-------------------------------------"
    print project
    print project
    print project
    print "-------------------------------------"
    print "-------------------------------------"
    # Resolve the vcproj path relative to the solution file (sys.argv[1]).
    project_path = os.path.abspath(os.path.join(os.path.dirname(sys.argv[1]),
                                                projects[project][2]))
    pretty = pretty_vcproj
    argv = [ '',
             project_path,
             '$(SolutionDir)=%s\\' % os.path.dirname(sys.argv[1]),
           ]
    argv.extend(sys.argv[3:])
    pretty.main(argv)
def main():
  """Entry point: parse the .sln given on the command line and report.

  Returns 1 on usage error, 0 on success.
  """
  # check if we have exactly 1 parameter.
  if len(sys.argv) < 2:
    print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]
    return 1
  (projects, deps) = ParseSolution(sys.argv[1])
  PrintDependencies(projects, deps)
  PrintBuildOrder(projects, deps)
  # Optional: also pretty-print every referenced vcproj.
  if '--recursive' in sys.argv:
    PrintVCProj(projects)
  return 0
if __name__ == '__main__':
  sys.exit(main())
| mit |
badloop/SickRage | lib/unidecode/x053.py | 252 | 4616 | data = (
'Yun ', # 0x00
'Mwun ', # 0x01
'Nay ', # 0x02
'Gai ', # 0x03
'Gai ', # 0x04
'Bao ', # 0x05
'Cong ', # 0x06
'[?] ', # 0x07
'Xiong ', # 0x08
'Peng ', # 0x09
'Ju ', # 0x0a
'Tao ', # 0x0b
'Ge ', # 0x0c
'Pu ', # 0x0d
'An ', # 0x0e
'Pao ', # 0x0f
'Fu ', # 0x10
'Gong ', # 0x11
'Da ', # 0x12
'Jiu ', # 0x13
'Qiong ', # 0x14
'Bi ', # 0x15
'Hua ', # 0x16
'Bei ', # 0x17
'Nao ', # 0x18
'Chi ', # 0x19
'Fang ', # 0x1a
'Jiu ', # 0x1b
'Yi ', # 0x1c
'Za ', # 0x1d
'Jiang ', # 0x1e
'Kang ', # 0x1f
'Jiang ', # 0x20
'Kuang ', # 0x21
'Hu ', # 0x22
'Xia ', # 0x23
'Qu ', # 0x24
'Bian ', # 0x25
'Gui ', # 0x26
'Qie ', # 0x27
'Zang ', # 0x28
'Kuang ', # 0x29
'Fei ', # 0x2a
'Hu ', # 0x2b
'Tou ', # 0x2c
'Gui ', # 0x2d
'Gui ', # 0x2e
'Hui ', # 0x2f
'Dan ', # 0x30
'Gui ', # 0x31
'Lian ', # 0x32
'Lian ', # 0x33
'Suan ', # 0x34
'Du ', # 0x35
'Jiu ', # 0x36
'Qu ', # 0x37
'Xi ', # 0x38
'Pi ', # 0x39
'Qu ', # 0x3a
'Yi ', # 0x3b
'Qia ', # 0x3c
'Yan ', # 0x3d
'Bian ', # 0x3e
'Ni ', # 0x3f
'Qu ', # 0x40
'Shi ', # 0x41
'Xin ', # 0x42
'Qian ', # 0x43
'Nian ', # 0x44
'Sa ', # 0x45
'Zu ', # 0x46
'Sheng ', # 0x47
'Wu ', # 0x48
'Hui ', # 0x49
'Ban ', # 0x4a
'Shi ', # 0x4b
'Xi ', # 0x4c
'Wan ', # 0x4d
'Hua ', # 0x4e
'Xie ', # 0x4f
'Wan ', # 0x50
'Bei ', # 0x51
'Zu ', # 0x52
'Zhuo ', # 0x53
'Xie ', # 0x54
'Dan ', # 0x55
'Mai ', # 0x56
'Nan ', # 0x57
'Dan ', # 0x58
'Ji ', # 0x59
'Bo ', # 0x5a
'Shuai ', # 0x5b
'Bu ', # 0x5c
'Kuang ', # 0x5d
'Bian ', # 0x5e
'Bu ', # 0x5f
'Zhan ', # 0x60
'Qia ', # 0x61
'Lu ', # 0x62
'You ', # 0x63
'Lu ', # 0x64
'Xi ', # 0x65
'Gua ', # 0x66
'Wo ', # 0x67
'Xie ', # 0x68
'Jie ', # 0x69
'Jie ', # 0x6a
'Wei ', # 0x6b
'Ang ', # 0x6c
'Qiong ', # 0x6d
'Zhi ', # 0x6e
'Mao ', # 0x6f
'Yin ', # 0x70
'Wei ', # 0x71
'Shao ', # 0x72
'Ji ', # 0x73
'Que ', # 0x74
'Luan ', # 0x75
'Shi ', # 0x76
'Juan ', # 0x77
'Xie ', # 0x78
'Xu ', # 0x79
'Jin ', # 0x7a
'Que ', # 0x7b
'Wu ', # 0x7c
'Ji ', # 0x7d
'E ', # 0x7e
'Qing ', # 0x7f
'Xi ', # 0x80
'[?] ', # 0x81
'Han ', # 0x82
'Zhan ', # 0x83
'E ', # 0x84
'Ting ', # 0x85
'Li ', # 0x86
'Zhe ', # 0x87
'Han ', # 0x88
'Li ', # 0x89
'Ya ', # 0x8a
'Ya ', # 0x8b
'Yan ', # 0x8c
'She ', # 0x8d
'Zhi ', # 0x8e
'Zha ', # 0x8f
'Pang ', # 0x90
'[?] ', # 0x91
'He ', # 0x92
'Ya ', # 0x93
'Zhi ', # 0x94
'Ce ', # 0x95
'Pang ', # 0x96
'Ti ', # 0x97
'Li ', # 0x98
'She ', # 0x99
'Hou ', # 0x9a
'Ting ', # 0x9b
'Zui ', # 0x9c
'Cuo ', # 0x9d
'Fei ', # 0x9e
'Yuan ', # 0x9f
'Ce ', # 0xa0
'Yuan ', # 0xa1
'Xiang ', # 0xa2
'Yan ', # 0xa3
'Li ', # 0xa4
'Jue ', # 0xa5
'Sha ', # 0xa6
'Dian ', # 0xa7
'Chu ', # 0xa8
'Jiu ', # 0xa9
'Qin ', # 0xaa
'Ao ', # 0xab
'Gui ', # 0xac
'Yan ', # 0xad
'Si ', # 0xae
'Li ', # 0xaf
'Chang ', # 0xb0
'Lan ', # 0xb1
'Li ', # 0xb2
'Yan ', # 0xb3
'Yan ', # 0xb4
'Yuan ', # 0xb5
'Si ', # 0xb6
'Gong ', # 0xb7
'Lin ', # 0xb8
'Qiu ', # 0xb9
'Qu ', # 0xba
'Qu ', # 0xbb
'Uk ', # 0xbc
'Lei ', # 0xbd
'Du ', # 0xbe
'Xian ', # 0xbf
'Zhuan ', # 0xc0
'San ', # 0xc1
'Can ', # 0xc2
'Can ', # 0xc3
'Can ', # 0xc4
'Can ', # 0xc5
'Ai ', # 0xc6
'Dai ', # 0xc7
'You ', # 0xc8
'Cha ', # 0xc9
'Ji ', # 0xca
'You ', # 0xcb
'Shuang ', # 0xcc
'Fan ', # 0xcd
'Shou ', # 0xce
'Guai ', # 0xcf
'Ba ', # 0xd0
'Fa ', # 0xd1
'Ruo ', # 0xd2
'Shi ', # 0xd3
'Shu ', # 0xd4
'Zhuo ', # 0xd5
'Qu ', # 0xd6
'Shou ', # 0xd7
'Bian ', # 0xd8
'Xu ', # 0xd9
'Jia ', # 0xda
'Pan ', # 0xdb
'Sou ', # 0xdc
'Gao ', # 0xdd
'Wei ', # 0xde
'Sou ', # 0xdf
'Die ', # 0xe0
'Rui ', # 0xe1
'Cong ', # 0xe2
'Kou ', # 0xe3
'Gu ', # 0xe4
'Ju ', # 0xe5
'Ling ', # 0xe6
'Gua ', # 0xe7
'Tao ', # 0xe8
'Kou ', # 0xe9
'Zhi ', # 0xea
'Jiao ', # 0xeb
'Zhao ', # 0xec
'Ba ', # 0xed
'Ding ', # 0xee
'Ke ', # 0xef
'Tai ', # 0xf0
'Chi ', # 0xf1
'Shi ', # 0xf2
'You ', # 0xf3
'Qiu ', # 0xf4
'Po ', # 0xf5
'Xie ', # 0xf6
'Hao ', # 0xf7
'Si ', # 0xf8
'Tan ', # 0xf9
'Chi ', # 0xfa
'Le ', # 0xfb
'Diao ', # 0xfc
'Ji ', # 0xfd
'[?] ', # 0xfe
'Hong ', # 0xff
)
| gpl-3.0 |
sgml/popcorn_maker | vendor-local/lib/python/blessings/tests.py | 3 | 8082 | # -*- coding: utf-8 -*-
"""Automated tests (as opposed to human-verified test patterns)
It was tempting to mock out curses to get predictable output from ``tigetstr``,
but there are concrete integration-testing benefits in not doing so. For
instance, ``tigetstr`` changed its return type in Python 3.2.3. So instead, we
simply create all our test ``Terminal`` instances with a known terminal type.
All we require from the host machine is that a standard terminfo definition of
xterm-256color exists.
"""
from __future__ import with_statement # Make 2.5-compatible
from curses import tigetstr, tparm
from functools import partial
from StringIO import StringIO
import sys
from nose import SkipTest
from nose.tools import eq_
# This tests that __all__ is correct, since we use below everything that should
# be imported:
from blessings import *
# Terminal factory pinned to a known terminfo entry so capability output
# is predictable across machines.
TestTerminal = partial(Terminal, kind='xterm-256color')
def unicode_cap(cap):
    """Return the result of ``tigetstr`` except as Unicode."""
    return tigetstr(cap).decode('utf-8')
def unicode_parm(cap, *parms):
    """Return the result of ``tparm(tigetstr())`` except as Unicode."""
    return tparm(tigetstr(cap), *parms).decode('utf-8')
# Capability lookup tests (eq_ is nose's assert-equal helper).
def test_capability():
    """Check that a capability lookup works.
    Also test that Terminal grabs a reasonable default stream. This test
    assumes it will be run from a tty.
    """
    t = TestTerminal()
    sc = unicode_cap('sc')
    eq_(t.save, sc)
    eq_(t.save, sc) # Make sure caching doesn't screw it up.
def test_capability_without_tty():
    """Assert capability templates are '' when stream is not a tty."""
    t = TestTerminal(stream=StringIO())
    eq_(t.save, u'')
    eq_(t.red, u'')
def test_capability_with_forced_tty():
    """If we force styling, capabilities had better not (generally) be empty."""
    t = TestTerminal(stream=StringIO(), force_styling=True)
    eq_(t.save, unicode_cap('sc'))
def test_parametrization():
    """Test parametrizing a capability."""
    eq_(TestTerminal().cup(3, 4), unicode_parm('cup', 3, 4))
def height_and_width():
    """Assert that ``height_and_width()`` returns ints."""
    # NOTE(review): this function lacks the test_ prefix, so nose does not
    # collect it automatically -- presumably intentional; confirm upstream.
    t = TestTerminal()  # kind shouldn't matter.
    # Bug fix: isinstance() takes (object, classinfo); the original calls
    # had the arguments reversed, which raises TypeError rather than
    # checking anything.
    assert isinstance(t.height, int)
    assert isinstance(t.width, int)
def test_stream_attr():
    """Make sure Terminal exposes a ``stream`` attribute that defaults to something sane."""
    eq_(Terminal().stream, sys.__stdout__)
def test_location():
    """Make sure ``location()`` does what it claims."""
    t = TestTerminal(stream=StringIO(), force_styling=True)
    with t.location(3, 4):
        t.stream.write(u'hi')
    # Expected escape sequence: save cursor, move, payload, restore cursor.
    eq_(t.stream.getvalue(), unicode_cap('sc') +
                             unicode_parm('cup', 4, 3) +
                             u'hi' +
                             unicode_cap('rc'))
def test_horizontal_location():
    """Make sure we can move the cursor horizontally without changing rows."""
    t = TestTerminal(stream=StringIO(), force_styling=True)
    with t.location(x=5):
        pass
    eq_(t.stream.getvalue(), unicode_cap('sc') +
                             unicode_parm('hpa', 5) +
                             unicode_cap('rc'))
def test_null_location():
    """Make sure ``location()`` with no args just does position restoration."""
    t = TestTerminal(stream=StringIO(), force_styling=True)
    with t.location():
        pass
    eq_(t.stream.getvalue(), unicode_cap('sc') +
                             unicode_cap('rc'))
def test_zero_location():
    """Make sure ``location()`` pays attention to 0-valued args."""
    t = TestTerminal(stream=StringIO(), force_styling=True)
    with t.location(0, 0):
        pass
    eq_(t.stream.getvalue(), unicode_cap('sc') +
                             unicode_parm('cup', 0, 0) +
                             unicode_cap('rc'))
def test_null_fileno():
    """Make sure ``Terminal`` works when ``fileno`` is ``None``.
    This simulates piping output to another program.
    """
    out = StringIO()
    out.fileno = None
    t = TestTerminal(stream=out)
    eq_(t.save, u'')
def test_mnemonic_colors():
    """Make sure color shortcuts work."""
    def color(num):
        return unicode_parm('setaf', num)
    def on_color(num):
        return unicode_parm('setab', num)
    # Avoid testing red, blue, yellow, and cyan, since they might someday
    # change depending on terminal type.
    t = TestTerminal()
    eq_(t.white, color(7))
    eq_(t.green, color(2)) # Make sure it's different than white.
    eq_(t.on_black, on_color(0))
    eq_(t.on_green, on_color(2))
    eq_(t.bright_black, color(8))
    eq_(t.bright_green, color(10))
    eq_(t.on_bright_black, on_color(8))
    eq_(t.on_bright_green, on_color(10))
def test_callable_numeric_colors():
    """``color(n)`` should return a formatting wrapper."""
    t = TestTerminal()
    eq_(t.color(5)('smoo'), t.magenta + 'smoo' + t.normal)
    eq_(t.color(5)('smoo'), t.color(5) + 'smoo' + t.normal)
    eq_(t.on_color(2)('smoo'), t.on_green + 'smoo' + t.normal)
    eq_(t.on_color(2)('smoo'), t.on_color(2) + 'smoo' + t.normal)
def test_null_callable_numeric_colors():
    """``color(n)`` should be a no-op on null terminals."""
    t = TestTerminal(stream=StringIO())
    eq_(t.color(5)('smoo'), 'smoo')
    eq_(t.on_color(6)('smoo'), 'smoo')
def test_naked_color_cap():
    """``term.color`` should return a stringlike capability."""
    t = TestTerminal()
    eq_(t.color + '', t.setaf + '')
def test_number_of_colors_without_tty():
    """``number_of_colors`` should return 0 when there's no tty."""
    # Hypothesis: once setupterm() has run and decided the tty supports 256
    # colors, it never changes its mind.
    # NOTE: everything below the raise is intentionally dead code, kept for
    # when the hypothesis above is resolved.
    raise SkipTest
    t = TestTerminal(stream=StringIO())
    eq_(t.number_of_colors, 0)
    t = TestTerminal(stream=StringIO(), force_styling=True)
    eq_(t.number_of_colors, 0)
def test_number_of_colors_with_tty():
    """``number_of_colors`` should work."""
    t = TestTerminal()
    eq_(t.number_of_colors, 256)
def test_formatting_functions():
    """Test crazy-ass formatting wrappers, both simple and compound."""
    t = TestTerminal()
    # By now, it should be safe to use sugared attributes. Other tests test those.
    eq_(t.bold(u'hi'), t.bold + u'hi' + t.normal)
    eq_(t.green('hi'), t.green + u'hi' + t.normal) # Plain strs for Python 2.x
    # Test some non-ASCII chars, probably not necessary:
    eq_(t.bold_green(u'boö'), t.bold + t.green + u'boö' + t.normal)
    eq_(t.bold_underline_green_on_red('boo'),
        t.bold + t.underline + t.green + t.on_red + u'boo' + t.normal)
    # Don't spell things like this:
    eq_(t.on_bright_red_bold_bright_green_underline('meh'),
        t.on_bright_red + t.bold + t.bright_green + t.underline + u'meh' + t.normal)
def test_formatting_functions_without_tty():
    """Test crazy-ass formatting wrappers when there's no tty."""
    t = TestTerminal(stream=StringIO())
    eq_(t.bold(u'hi'), u'hi')
    eq_(t.green('hi'), u'hi')
    # Test non-ASCII chars, no longer really necessary:
    eq_(t.bold_green(u'boö'), u'boö')
    eq_(t.bold_underline_green_on_red('loo'), u'loo')
    eq_(t.on_bright_red_bold_bright_green_underline('meh'), u'meh')
def test_nice_formatting_errors():
    """Make sure you get nice hints if you misspell a formatting wrapper."""
    # NOTE: 'except TypeError, e' is Python 2-only syntax; this module is
    # Python 2 code throughout.
    t = TestTerminal()
    try:
        t.bold_misspelled('hey')
    except TypeError, e:
        assert 'probably misspelled' in e.args[0]
    try:
        t.bold_misspelled(u'hey') # unicode
    except TypeError, e:
        assert 'probably misspelled' in e.args[0]
    try:
        t.bold_misspelled(None) # an arbitrary non-string
    except TypeError, e:
        assert 'probably misspelled' not in e.args[0]
    try:
        t.bold_misspelled('a', 'b') # >1 string arg
    except TypeError, e:
        assert 'probably misspelled' not in e.args[0]
def test_init_descriptor_always_initted():
    """We should be able to get a height and width even on no-tty Terminals."""
    t = Terminal(stream=StringIO())
    eq_(type(t.height), int)
| bsd-3-clause |
aptivate/ckanext-datasetversions | ckanext/datasetversions/tests/logic/action/test_get.py | 1 | 13763 | import ckan.logic as logic
import ckan.tests.helpers as helpers
import ckan.tests.factories as factories
from ckanext.datasetversions.tests.helpers import (
assert_equals,
assert_raises,
assert_true,
TestBase,
)
class TestPackageShowBase(TestBase):
    """Shared fixture: a user, their organization, and a logged-out context."""
    def setup(self):
        super(TestPackageShowBase, self).setup()
        self.user = factories.User()
        self.organization = factories.Organization(
            user=self.user)
        # Context simulating an anonymous (not logged in) request.
        self.logged_out_context = {'ignore_auth': False,
                                   'auth_user_obj': None}
class TestPackageShowThreeVersions(TestPackageShowBase):
    """package_show behaviour for a dataset with versions 1, 2 and 10.

    Version 10 is the highest, so it must be treated as "latest"; creating
    version 10 before 1 and 2 also checks that creation order is
    irrelevant.
    """
    def setup(self):
        super(TestPackageShowThreeVersions, self).setup()
        self.v2 = helpers.call_action('package_create',
                                      context={'user': self.user['id']},
                                      name='189-ma001-2',
                                      version='2',
                                      owner_org=self.organization['id'])
        self.v1 = helpers.call_action('package_create',
                                      context={'user': self.user['id']},
                                      name='189-ma001-1',
                                      version='1',
                                      owner_org=self.organization['id'])
        self.v10 = helpers.call_action('package_create',
                                       context={'user': self.user['id']},
                                       name='189-ma001-10',
                                       version='10',
                                       owner_org=self.organization['id'])
        helpers.call_action('dataset_version_create',
                            context={'user': self.user['id']},
                            id=self.v10['id'],
                            base_name='189-ma001',
                            owner_org=self.organization['id'])
        helpers.call_action('dataset_version_create',
                            context={'user': self.user['id']},
                            id=self.v1['id'],
                            base_name='189-ma001',
                            owner_org=self.organization['id'])
        helpers.call_action('dataset_version_create',
                            context={'user': self.user['id']},
                            id=self.v2['id'],
                            base_name='189-ma001',
                            owner_org=self.organization['id'])
        # The unversioned parent record, fetched via the core action.
        self.parent = helpers.call_action('ckan_package_show',
                                          id='189-ma001')
    def test_latest_version_displayed_when_showing_parent(self):
        dataset = helpers.call_action('package_show',
                                      id=self.parent['id'])
        assert_equals(dataset['name'], self.v10['name'])
    def test_child_version_displayed_when_showing_child(self):
        dataset = helpers.call_action('package_show',
                                      id=self.v2['id'])
        assert_equals(dataset['name'], self.v2['name'])
    def test_all_versions_displayed_when_showing_parent(self):
        dataset = helpers.call_action('package_show',
                                      id=self.parent['id'])
        self.assert_version_names(dataset, [
            self.v10['name'],
            self.v2['name'],
            self.v1['name']])
    def test_all_versions_displayed_when_showing_child(self):
        dataset = helpers.call_action('package_show',
                                      id=self.v2['id'])
        self.assert_version_names(dataset, [
            self.v10['name'],
            self.v2['name'],
            self.v1['name'],
        ])
    def test_tracking_summary_returned_for_parent(self):
        dataset = helpers.call_action('package_show',
                                      id=self.parent['id'],
                                      include_tracking=True)
        assert_true('tracking_summary' in dataset)
    def test_relationships_not_included_for_parent(self):
        dataset = helpers.call_action('package_show',
                                      id=self.parent['id'])
        assert_true('relationships_as_subject' not in dataset)
        assert_true('relationships_as_object' not in dataset)
    def test_relationships_not_included_for_child(self):
        dataset = helpers.call_action('package_show',
                                      id=self.v1['id'])
        assert_true('relationships_as_subject' not in dataset)
        assert_true('relationships_as_object' not in dataset)
    def test_versions_dont_accumulate(self):
        # An update cycle must not duplicate the child_of relationships.
        [rel_10] = helpers.call_action(
            'package_relationships_list',
            id=self.v10['id'],
            rel='child_of')
        assert_equals(rel_10['subject'], '189-ma001-10')
        assert_equals(rel_10['type'], 'child_of')
        assert_equals(rel_10['object'], '189-ma001')
        dataset_dict = helpers.call_action('package_show',
                                           id='189-ma001')
        updated_dict = helpers.call_action('package_update',
                                           context={'user': self.user['id']},
                                           **dataset_dict)
        [rel_10] = helpers.call_action(
            'package_relationships_list',
            id=self.v10['id'],
            rel='child_of')
        assert_equals(rel_10['subject'], '189-ma001-10')
        assert_equals(rel_10['type'], 'child_of')
        assert_equals(rel_10['object'], '189-ma001')
        assert_true('_versions' in updated_dict)
        # Versions would appear twice here if they accumulated, or they would
        # if the validators didn't complain
        self.assert_version_names(updated_dict, [
            self.v10['name'],
            self.v2['name'],
            self.v1['name'],
        ])
    def test_versions_do_not_include_deleted_items(self):
        helpers.call_action('package_delete',
                            id=self.v2['name'])
        dataset = helpers.call_action('package_show',
                                      id=self.parent['id'])
        assert_true(self.v2['name'] not in
                    self.get_version_names(dataset))
    def test_versions_do_not_include_private_items(self):
        v12 = helpers.call_action('package_create',
                                  context={'user': self.user['id']},
                                  name='189-ma001-12',
                                  private=True,
                                  owner_org=self.organization['id'],
                                  version=12)
        helpers.call_action('dataset_version_create',
                            id=v12['id'],
                            base_name='189-ma001')
        dataset = helpers.call_action('package_show',
                                      id=self.parent['id'])
        assert_true(v12['name'] not in self.get_version_names(dataset))
    def test_versions_empty_if_all_deleted(self):
        helpers.call_action('package_delete',
                            id=self.v1['name'])
        helpers.call_action('package_delete',
                            id=self.v2['name'])
        helpers.call_action('package_delete',
                            id=self.v10['name'])
        dataset = helpers.call_action('package_show',
                                      id=self.parent['id'])
        assert_equals(dataset['_versions'], [])
    def test_latest_url_is_parent(self):
        dataset = helpers.call_action('package_show',
                                      id=self.parent['id'])
        self.assert_version_urls(dataset, [
            self.parent['name'],
            self.v2['name'],
            self.v1['name']])
    def test_can_see_latest_version_when_logged_out(self):
        dataset = helpers.call_action('package_show',
                                      self.logged_out_context,
                                      id=self.parent['id'])
        assert_equals(dataset['name'], self.v10['name'])
class TestLoggedOutPackageShow(TestPackageShowBase):
    """Visibility of private/public versions for anonymous requests."""
    def setup(self):
        # No extra fixtures beyond the base class.
        super(TestLoggedOutPackageShow, self).setup()
    def test_private_versioned_dataset_not_available_as_latest(self):
        v12 = helpers.call_action('package_create',
                                  context={'user': self.user['id']},
                                  name='189-ma001-12',
                                  private=True,
                                  owner_org=self.organization['id'],
                                  version=12)
        helpers.call_action('dataset_version_create',
                            id=v12['id'],
                            base_name='189-ma001')
        dataset = helpers.call_action('package_show',
                                      context=self.logged_out_context,
                                      id='189-ma001')
        assert_true(v12['name'] not in self.get_version_names(dataset))
    def test_not_authorized_for_private_unversioned_dataset(self):
        dataset = helpers.call_action('package_create',
                                      context={'user': self.user['id']},
                                      name='dataset-without-versions',
                                      private=True,
                                      owner_org=self.organization['id'])
        assert_raises(logic.NotAuthorized,
                      helpers.call_action,
                      'package_show',
                      self.logged_out_context,
                      id=dataset['id'])
    def test_not_authorized_for_private_versioned_dataset(self):
        v12 = helpers.call_action('package_create',
                                  context={'user': self.user['id']},
                                  name='189-ma001-12',
                                  private=True,
                                  owner_org=self.organization['id'],
                                  version=12)
        helpers.call_action('dataset_version_create',
                            id=v12['id'],
                            base_name='189-ma001')
        assert_raises(logic.NotAuthorized,
                      helpers.call_action,
                      'package_show',
                      self.logged_out_context,
                      id='189-ma001-12')
    def test_authorized_for_public_versioned_dataset_when_other_private(self):
        v1 = helpers.call_action('package_create',
                                 context={'user': self.user['id']},
                                 name='189-ma001-1',
                                 private=False,
                                 owner_org=self.organization['id'],
                                 version=1)
        v2 = helpers.call_action('package_create',
                                 context={'user': self.user['id']},
                                 name='189-ma001-2',
                                 private=True,
                                 owner_org=self.organization['id'],
                                 version=2)
        helpers.call_action('dataset_version_create',
                            id=v1['id'],
                            base_name='189-ma001')
        helpers.call_action('dataset_version_create',
                            id=v2['id'],
                            base_name='189-ma001')
        dataset = helpers.call_action('package_show',
                                      context=self.logged_out_context,
                                      id='189-ma001-1')
        assert_true(v1['name'] in self.get_version_names(dataset))
class TestPackageSearch(TestBase):
    """Search results must not expose the internal parent record."""
    def setup(self):
        super(TestPackageSearch, self).setup()
        self.user = factories.User()
        self.organization = factories.Organization(user=self.user)
        self.v1 = helpers.call_action('package_create',
                                      name='189-ma001-1',
                                      version='1')
        helpers.call_action('dataset_version_create',
                            id=self.v1['id'],
                            base_name='189-ma001',
                            context={'user': self.user['name']},
                            owner_org=self.organization['id'])
        self.parent = helpers.call_action('ckan_package_show',
                                          id='189-ma001')
    def test_search_results_do_not_include_parent_version_if_private(self):
        results = helpers.call_action('package_search',
                                      q='*:*',
                                      start='0',
                                      rows='20',
                                      sort='metadata_modified desc')
        names = [r['name'] for r in results['results']]
        assert_true('189-ma001' not in names)
class TestVersionNumber(TestBase):
    """Version ordering when a version string is not numeric."""
    def test_non_numeric_version_number_treated_as_zero(self):
        v1 = helpers.call_action('package_create',
                                 name='189-ma001-1',
                                 version='1')
        # 'v2' cannot be parsed as a number, so it should sort as 0,
        # i.e. *below* version '1'.
        v2 = helpers.call_action('package_create',
                                 name='189-ma001-2',
                                 version='v2')
        helpers.call_action('dataset_version_create',
                            id=v2['id'],
                            base_name='189-ma001')
        helpers.call_action('dataset_version_create',
                            id=v1['id'],
                            base_name='189-ma001')
        dataset = helpers.call_action('package_show',
                                      id='189-ma001')
        self.assert_version_names(dataset, [v1['name'], v2['name']])
| agpl-3.0 |
klen/marshmallow-peewee | marshmallow_peewee/schema.py | 1 | 2800 | import marshmallow as ma
import peewee as pw
from .convert import ModelConverter
from .fields import Related
class SchemaOpts(ma.SchemaOpts):
    """Marshmallow schema options extended with peewee-specific settings.

    Reads ``model``, ``dump_only_pk``, ``string_keys`` and
    ``model_converter`` from the schema's ``Meta`` class.
    """
    def __init__(self, meta, **kwargs):
        super(SchemaOpts, self).__init__(meta, **kwargs)
        self.model = getattr(meta, 'model', None)
        # Defaults: primary key is dump-only; keys serialised as strings.
        self.dump_only_pk = getattr(meta, 'dump_only_pk', True)
        self.string_keys = getattr(meta, 'string_keys', True)
        if self.model and not issubclass(self.model, pw.Model):
            raise ValueError("`model` must be a subclass of peewee.Model")
        # Converter class that generates fields from the peewee model.
        self.model_converter = getattr(meta, 'model_converter', ModelConverter)
# Meta options propagated from a parent schema's Meta to a subclass's Meta.
INHERITANCE_OPTIONS = 'model', 'model_converter', 'dump_only_pk', 'string_keys'
class SchemaMeta(ma.schema.SchemaMeta):
    """Metaclass that inherits Meta options and auto-generates model fields."""
    def __new__(mcs, name, bases, attrs):
        """Support inheritance for model and model_converter Meta options."""
        if 'Meta' in attrs and bases:
            meta = attrs['Meta']
            base_meta = getattr(bases[0], 'Meta', None)
            for option in INHERITANCE_OPTIONS:
                # Copy the option from the first base's Meta unless the
                # subclass's Meta already defines it.
                if hasattr(meta, option) or not hasattr(base_meta, option):
                    continue
                setattr(meta, option, getattr(base_meta, option))
        return super(SchemaMeta, mcs).__new__(mcs, name, bases, attrs)
    @classmethod
    def get_declared_fields(mcs, klass, cls_fields, inherited_fields, dict_cls):
        declared_fields = dict_cls()
        opts = klass.opts
        base_fields = super(SchemaMeta, mcs).get_declared_fields(
            klass, cls_fields, inherited_fields, dict_cls
        )
        model = getattr(opts, 'model', None)
        if model:
            # Bind Related fields declared without an explicit nested schema
            # to the model before converting.
            for name, field in base_fields.items():
                if isinstance(field, Related) and field.nested is None:
                    field.init_model(model, name)
            converter = opts.model_converter(opts=opts)
            # Model-derived fields first; explicitly declared fields win.
            declared_fields.update(converter.fields_for_model(model))
        declared_fields.update(base_fields)
        return declared_fields
class ModelSchema(ma.Schema, metaclass=SchemaMeta):
    """Marshmallow schema bound to a peewee model."""

    OPTIONS_CLASS = SchemaOpts

    def __init__(self, instance=None, **kwargs):
        self.instance = instance
        super(ModelSchema, self).__init__(**kwargs)

    @ma.post_load
    def make_instance(self, data, **kwargs):
        """Build object from data."""
        if not self.opts.model:
            return data
        if self.instance is None:
            # No instance to update -- construct a fresh model object.
            return self.opts.model(**data)
        # Update the existing instance attribute by attribute.
        for field_name, field_value in data.items():
            setattr(self.instance, field_name, field_value)
        return self.instance

    def load(self, data, instance=None, *args, **kwargs):
        self.instance = instance or self.instance
        return super(ModelSchema, self).load(data, *args, **kwargs)
| mit |
algorhythms/LeetCode | 841 Keys and Rooms.py | 1 | 1758 | #!/usr/bin/python3
"""
There are N rooms and you start in room 0. Each room has a distinct number in
0, 1, 2, ..., N-1, and each room may have some keys to access the next room.
Formally, each room i has a list of keys rooms[i], and each key rooms[i][j] is
an integer in [0, 1, ..., N-1] where N = rooms.length. A key rooms[i][j] = v
opens the room with number v.
Initially, all the rooms start locked (except for room 0).
You can walk back and forth between rooms freely.
Return true if and only if you can enter every room.
Example 1:
Input: [[1],[2],[3],[]]
Output: true
Explanation:
We start in room 0, and pick up key 1.
We then go to room 1, and pick up key 2.
We then go to room 2, and pick up key 3.
We then go to room 3. Since we were able to go to every room, we return true.
Example 2:
Input: [[1,3],[3,0,1],[2],[0]]
Output: false
Explanation: We can't enter the room with number 2.
Note:
1 <= rooms.length <= 1000
0 <= rooms[i].length <= 1000
The number of keys in all rooms combined is at most 3000.
"""
from typing import List
class Solution:
    def canVisitAllRooms(self, G: List[List[int]]) -> bool:
        """Return True iff every room is reachable from room 0.

        G[i] is the list of room keys found in room i.  Uses an explicit
        stack (iterative DFS) instead of recursion: with rooms.length up
        to 1000 the original recursive DFS could exceed Python's default
        recursion limit on a long key chain.
        """
        n = len(G)
        visited = [0] * n  # 0 = locked, 1 = visited
        visited[0] = 1
        stack = [0]
        while stack:
            room = stack.pop()
            for nbr in G[room]:
                # Mark on push so a room is never queued twice.
                if not visited[nbr]:
                    visited[nbr] = 1
                    stack.append(nbr)
        return all(e == 1 for e in visited)

    def dfs(self, G, u, visited):
        """Recursive DFS kept for backward compatibility with any callers."""
        visited[u] = 1
        for nbr in G[u]:
            if not visited[nbr]:
                self.dfs(G, nbr, visited)
if __name__ == "__main__":
    # Smoke tests mirroring the two examples from the problem statement.
    assert Solution().canVisitAllRooms([[1],[2],[3],[]]) == True
    assert Solution().canVisitAllRooms([[1,3],[3,0,1],[2],[0]]) == False
| mit |
byterom/android_external_chromium_org | cc/PRESUBMIT.py | 26 | 11222 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for cc.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
import re
import string
# Source files subject to the cc/ presubmit checks below.
CC_SOURCE_FILES = (r'^cc/.*\.(cc|h)$',)


def CheckChangeLintsClean(input_api, output_api):
    """Run cpplint over the affected cc/ sources; warn if any errors."""
    input_api.cpplint._cpplint_state.ResetErrorCounts()  # reset global state

    def source_filter(affected_file):
        return input_api.FilterSourceFile(
            affected_file, white_list=CC_SOURCE_FILES, black_list=None)

    verbosity = 1  # strict, but just warn
    for affected_file in input_api.AffectedSourceFiles(source_filter):
        input_api.cpplint.ProcessFile(
            affected_file.AbsoluteLocalPath(), verbosity)

    if input_api.cpplint._cpplint_state.error_count:
        return [output_api.PresubmitPromptWarning(
            'Changelist failed cpplint.py check.')]
    return []
def CheckAsserts(input_api, output_api, white_list=None, black_list=None):
    """Disallow WebKit-style ASSERT()/ASSERT_NOT_REACHED() in cc code.

    DCHECK should be used instead of ASSERT, and NOTREACHED instead of
    ASSERT_NOT_REACHED.  Unlike the original, this reports BOTH problem
    classes when both are present instead of only the first one.
    """
    # Resolve the default lazily, mirroring how black_list is handled below.
    white_list = white_list or CC_SOURCE_FILES
    black_list = tuple(black_list or input_api.DEFAULT_BLACK_LIST)
    source_file_filter = lambda x: input_api.FilterSourceFile(x, white_list, black_list)

    assert_files = []
    notreached_files = []
    for f in input_api.AffectedSourceFiles(source_file_filter):
        contents = input_api.ReadFile(f, 'rb')
        # WebKit ASSERT() is not allowed.
        if re.search(r"\bASSERT\(", contents):
            assert_files.append(f.LocalPath())
        # WebKit ASSERT_NOT_REACHED() is not allowed.
        if re.search(r"ASSERT_NOT_REACHED\(", contents):
            notreached_files.append(f.LocalPath())

    errors = []
    if assert_files:
        errors.append(output_api.PresubmitError(
            'These files use ASSERT instead of using DCHECK:',
            items=assert_files))
    if notreached_files:
        errors.append(output_api.PresubmitError(
            'These files use ASSERT_NOT_REACHED instead of using NOTREACHED:',
            items=notreached_files))
    return errors
def CheckStdAbs(input_api, output_api, white_list=None, black_list=None):
    """Enforce std::abs() usage conventions in cc code.

    Flags: "using std::abs" declarations, any use of fabs()/fabsf(), and
    abs()/absf()/fabs()/fabsf() calls missing the std:: qualifier.
    """
    # Resolve the default lazily, mirroring how black_list is handled below.
    white_list = white_list or CC_SOURCE_FILES
    black_list = tuple(black_list or input_api.DEFAULT_BLACK_LIST)
    source_file_filter = lambda x: input_api.FilterSourceFile(x,
                                                             white_list,
                                                             black_list)

    # These patterns are loop-invariant; build them once instead of per file.
    no_std_prefix = r"(?<!std::)"
    # Matches occurrences of abs/absf/fabs/fabsf without a "std::" prefix.
    abs_without_prefix = r"%s(\babsf?\()" % no_std_prefix
    fabs_without_prefix = r"%s(\bfabsf?\()" % no_std_prefix
    # Skips matching any lines that have "// NOLINT".
    no_nolint = r"(?![^\n]*//\s+NOLINT)"
    missing_prefix_expression = re.compile(
        "(%s|%s)%s" % (abs_without_prefix, fabs_without_prefix, no_nolint))

    using_std_abs_files = []
    found_fabs_files = []
    missing_std_prefix_files = []
    for f in input_api.AffectedSourceFiles(source_file_filter):
        contents = input_api.ReadFile(f, 'rb')
        if re.search(r"using std::f?abs;", contents):
            using_std_abs_files.append(f.LocalPath())
        if re.search(r"\bfabsf?\(", contents):
            found_fabs_files.append(f.LocalPath())
        if missing_prefix_expression.search(contents):
            missing_std_prefix_files.append(f.LocalPath())

    result = []
    if using_std_abs_files:
        result.append(output_api.PresubmitError(
            'These files have "using std::abs" which is not permitted.',
            items=using_std_abs_files))
    if found_fabs_files:
        result.append(output_api.PresubmitError(
            'std::abs() should be used instead of std::fabs() for consistency.',
            items=found_fabs_files))
    if missing_std_prefix_files:
        result.append(output_api.PresubmitError(
            'These files use abs(), absf(), fabs(), or fabsf() without qualifying '
            'the std namespace. Please use std::abs() in all places.',
            items=missing_std_prefix_files))
    return result
def CheckPassByValue(input_api,
                     output_api,
                     white_list=None,
                     black_list=None):
    """Check that small value types are passed by value, not by const ref."""
    # Resolve the default lazily, mirroring how black_list is handled below.
    white_list = white_list or CC_SOURCE_FILES
    black_list = tuple(black_list or input_api.DEFAULT_BLACK_LIST)
    source_file_filter = lambda x: input_api.FilterSourceFile(x,
                                                             white_list,
                                                             black_list)

    local_errors = []
    # Well-defined simple classes containing only <= 4 ints, or <= 2 floats.
    pass_by_value_types = ['base::Time',
                           'base::TimeTicks',
                          ]
    # str.join replaces the Python-2-only string.join(); the pattern is
    # loop-invariant, so compile it once outside the file loop.
    pattern = re.compile(
        r'\bconst +' + '(?P<type>(%s))&' % '|'.join(pass_by_value_types))
    for f in input_api.AffectedSourceFiles(source_file_filter):
        contents = input_api.ReadFile(f, 'rb')
        match = pattern.search(contents)
        if match:
            local_errors.append(output_api.PresubmitError(
                '%s passes %s by const ref instead of by value.' %
                (f.LocalPath(), match.group('type'))))
    return local_errors
def CheckTodos(input_api, output_api):
    """Flag files containing FIX-ME markers; TODO(name) should be used."""
    # Spelled as a concatenation so this presubmit does not flag itself.
    forbidden = 'FIX' + 'ME'
    offenders = []
    for f in input_api.AffectedSourceFiles(lambda x: x):
        if forbidden in input_api.ReadFile(f, 'rb'):
            offenders.append(f.LocalPath())
    if not offenders:
        return []
    return [output_api.PresubmitError(
        'All TODO comments should be of the form TODO(name). ' +
        'Use TODO instead of FIX' + 'ME',
        items=offenders)]
def FindUnquotedQuote(contents, pos):
    """Return the index of the first unescaped '"' at or after pos, or -1."""
    match = re.search(r'(?<!\\)"', contents[pos:])
    return match.start() + pos if match else -1
def FindUselessIfdefs(input_api, output_api):
    """Flag files containing '#if 0' blocks; dead code should be deleted."""
    offenders = [
        f.LocalPath()
        for f in input_api.AffectedSourceFiles(lambda x: x)
        if re.search(r'#if\s*0\s', input_api.ReadFile(f, 'rb'))
    ]
    if offenders:
        return [output_api.PresubmitError(
            'Don\'t use #if '+'0; just delete the code.',
            items=offenders)]
    return []
def FindNamespaceInBlock(pos, namespace, contents, whitelist=()):
    """Return True if 'namespace::' is used inside the brace block at pos.

    Scans contents forward from pos (assumed to be just inside an opening
    brace, hence brace_count starts at 1), tracking brace nesting and
    skipping uses that appear inside double-quoted string literals or that
    match one of the regexes in whitelist.

    The whitelist default is an immutable () instead of the original
    mutable [] so the default object cannot be shared across calls.
    """
    open_brace = -1
    close_brace = -1
    quote = -1
    name = -1
    brace_count = 1
    quote_count = 0
    while pos < len(contents) and brace_count > 0:
        # Each marker position is only re-searched once the scan passes it.
        if open_brace < pos:
            open_brace = contents.find("{", pos)
        if close_brace < pos:
            close_brace = contents.find("}", pos)
        if quote < pos:
            quote = FindUnquotedQuote(contents, pos)
        if name < pos:
            name = contents.find("%s::" % namespace, pos)
        if name < 0:
            return False  # The namespace is not used at all.
        # Treat "not found" as end-of-input so min() below still works.
        if open_brace < 0:
            open_brace = len(contents)
        if close_brace < 0:
            close_brace = len(contents)
        if quote < 0:
            quote = len(contents)
        # 'nearest' replaces the original 'next', which shadowed the builtin.
        nearest = min(open_brace, close_brace, quote, name)
        if nearest == open_brace:
            brace_count += 1
        elif nearest == close_brace:
            brace_count -= 1
        elif nearest == quote:
            quote_count = 0 if quote_count else 1  # toggle in/out of a string
        elif nearest == name and not quote_count:
            if not any(re.match(w, contents[nearest:]) for w in whitelist):
                return True
        pos = nearest + 1
    return False
# Checks for the use of cc:: within the cc namespace, which is usually
# redundant.
def CheckNamespace(input_api, output_api):
    """Flag files that write cc:: inside 'namespace cc { ... }'."""
    offenders = []
    for f in input_api.AffectedSourceFiles(lambda x: x):
        contents = input_api.ReadFile(f, 'rb')
        opening = re.search(r'namespace\s*cc\s*{', contents)
        if not opening:
            continue
        # cc::remove_if is deliberately allowed.
        allowed = [
            r"cc::remove_if\b",
        ]
        if FindNamespaceInBlock(opening.end(), 'cc', contents,
                                whitelist=allowed):
            offenders.append(f.LocalPath())
    if offenders:
        return [output_api.PresubmitError(
            'Do not use cc:: inside of the cc namespace.',
            items=offenders)]
    return []
def CheckForUseOfWrongClock(input_api,
                            output_api,
                            white_list=None,
                            black_list=None):
    """Make sure new lines of code don't use a clock susceptible to skew."""
    # Resolve the default lazily, mirroring how black_list is handled below.
    white_list = white_list or CC_SOURCE_FILES
    black_list = tuple(black_list or input_api.DEFAULT_BLACK_LIST)
    source_file_filter = lambda x: input_api.FilterSourceFile(x,
                                                              white_list,
                                                              black_list)
    # Regular expression that should detect any explicit references to the
    # base::Time type (or base::Clock/DefaultClock), whether in using decls,
    # typedefs, or to call static methods.
    base_time_type_pattern = r'(^|\W)base::(Time|Clock|DefaultClock)(\W|$)'
    # Regular expression that should detect references to the base::Time class
    # members, such as a call to base::Time::Now.
    base_time_member_pattern = r'(^|\W)(Time|Clock|DefaultClock)::'
    # Regular expression to detect "using base::Time" declarations. We want to
    # prevent these from triggerring a warning. For example, it's perfectly
    # reasonable for code to be written like this:
    #
    #   using base::Time;
    #   ...
    #   int64 foo_us = foo_s * Time::kMicrosecondsPerSecond;
    using_base_time_decl_pattern = r'^\s*using\s+(::)?base::Time\s*;'
    # Regular expression to detect references to the kXXX constants in the
    # base::Time class. We want to prevent these from triggerring a warning.
    base_time_konstant_pattern = r'(^|\W)Time::k\w+'
    problem_re = input_api.re.compile(
        r'(' + base_time_type_pattern + r')|(' + base_time_member_pattern + r')')
    exception_re = input_api.re.compile(
        r'(' + using_base_time_decl_pattern + r')|(' +
        base_time_konstant_pattern + r')')
    problems = []
    for f in input_api.AffectedSourceFiles(source_file_filter):
        for line_number, line in f.ChangedContents():
            if problem_re.search(line):
                if not exception_re.search(line):
                    problems.append(
                        '  %s:%d\n    %s' % (f.LocalPath(), line_number, line.strip()))
    if problems:
        return [output_api.PresubmitPromptOrNotify(
            'You added one or more references to the base::Time class and/or one\n'
            'of its member functions (or base::Clock/DefaultClock). In cc code,\n'
            'it is most certainly incorrect! Instead use base::TimeTicks.\n\n'
            '\n'.join(problems))]
    return []
def CheckChangeOnUpload(input_api, output_api):
    """Run every cc/ presubmit check and collect their results."""
    checks = (
        CheckAsserts,
        CheckStdAbs,
        CheckPassByValue,
        CheckChangeLintsClean,
        CheckTodos,
        CheckNamespace,
        CheckForUseOfWrongClock,
        FindUselessIfdefs,
    )
    results = []
    for check in checks:
        results += check(input_api, output_api)
    results += input_api.canned_checks.CheckPatchFormatted(input_api, output_api)
    return results
def GetPreferredTryMasters(project, change):
    """Return the default try masters and builders for cc changes."""
    default_tests = set(['defaulttests'])
    return {
        'tryserver.blink': {
            'linux_blink_rel': set(default_tests),
        },
        'tryserver.chromium.gpu': {
            # Each builder gets its own copy of the test set.
            'linux_gpu': set(default_tests),
            'mac_gpu': set(default_tests),
            'win_gpu': set(default_tests),
        },
    }
| bsd-3-clause |
GirlsCodePy/girlscode-coursebuilder | modules/oeditor/oeditor_tests.py | 3 | 7414 | # Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functional tests for the oeditor module."""
__author__ = [
'John Cox (johncox@google.com)',
]
import re
import time
from common import crypto
from common import users
from controllers import sites
from models import config
from models import courses
from models import transforms
from modules.oeditor import oeditor
from tests.functional import actions
from google.appengine.api import namespace_manager
class ObjectEditorTest(actions.TestBase):
    """Tests for the object editor page markup."""

    def tearDown(self):
        config.Registry.test_overrides = {}
        super(ObjectEditorTest, self).tearDown()

    def get_oeditor_dom(self):
        """Fetch the admin config-edit page as an admin and parse it."""
        actions.login('test@example.com', is_admin=True)
        page = self.get(
            '/admin?action=config_edit&name=gcb_admin_user_emails')
        return self.parse_html_string(page.body)

    def get_script_tag_by_src(self, src):
        return self.get_oeditor_dom().find('.//script[@src="%s"]' % src)

    def test_get_drive_tag_parent_frame_script_src_set(self):
        tag = self.get_script_tag_by_src(
            '/modules/core_tags/_static/js/drive_tag_parent_frame.js')
        self.assertIsNotNone(tag)

    def test_get_drive_tag_script_manager_script_src_set(self):
        tag = self.get_script_tag_by_src(
            '/modules/core_tags/_static/js/drive_tag_script_manager.js')
        self.assertIsNotNone(tag)
class ButtonbarCssHandlerTests(actions.TestBase):
    """Tests for the buttonbar.css handler's HTTP response headers."""

    def _get(self):
        return self.get('/modules/oeditor/buttonbar.css')

    def test_response_is_cacheable(self):
        headers = self._get().headers
        self.assertEqual('max-age=600, public', headers['Cache-Control'])

    def test_content_type_is_css(self):
        headers = self._get().headers
        self.assertEqual('text/css', headers['Content-Type'])
class EditorPrefsTests(actions.TestBase):
    """Tests for persisting per-user editor preferences via the REST handler."""

    COURSE_NAME = 'test_editor_state'
    # The editor state payload saved and expected back in these tests.
    EDITOR_STATE = {'objectives': {'editorType': 'html'}}

    def setUp(self):
        super(EditorPrefsTests, self).setUp()
        self.base = '/' + self.COURSE_NAME
        app_context = actions.simple_add_course(
            self.COURSE_NAME, 'admin@example.com', 'Test Editor State')
        self.old_namespace = namespace_manager.get_namespace()
        namespace_manager.set_namespace('ns_%s' % self.COURSE_NAME)
        # Build a course containing one unit with one lesson to edit.
        self.course = courses.Course(None, app_context)
        self.unit = self.course.add_unit()
        self.lesson = self.course.add_lesson(self.unit)
        self.course.save()
        # location/key identify which editor the stored preferences belong to.
        self.location = '/%s/rest/course/lesson' % self.COURSE_NAME
        self.key = self.lesson.lesson_id

    def tearDown(self):
        del sites.Registry.test_overrides[sites.GCB_COURSES_CONFIG.name]
        namespace_manager.set_namespace(self.old_namespace)
        super(EditorPrefsTests, self).tearDown()

    def _post(self, xsrf_token=None, payload=None):
        """POST a save-editor-prefs request, defaulting the token/payload."""
        request = {}
        if xsrf_token is None:
            xsrf_token = crypto.XsrfTokenManager.create_xsrf_token(
                oeditor.EditorPrefsRestHandler.XSRF_TOKEN)
        request['xsrf_token'] = xsrf_token
        if payload is None:
            payload = {
                'location': self.location,
                'key': self.key,
                'state': self.EDITOR_STATE
            }
        request['payload'] = transforms.dumps(payload)
        data = {'request': transforms.dumps(request)}
        return self.post('oeditor/rest/editor_prefs', data, expect_errors=True)

    def test_safe_key(self):
        # safe_key must transform only the user-id part of the key name.
        def transform_function(pii_str):
            return 'tr(%s)' % pii_str
        key_name = oeditor.EditorPrefsDao.create_key_name(
            321, self.location, self.key)
        dto = oeditor.EditorPrefsDto(key_name, {})
        oeditor.EditorPrefsDao.save(dto)
        entity = oeditor.EditorPrefsEntity.get_by_key_name(key_name)
        safe_key = oeditor.EditorPrefsEntity.safe_key(
            entity.key(), transform_function)
        self.assertEqual(
            'tr(321):/%s/rest/course/lesson:%s' % (
                self.COURSE_NAME, self.lesson.lesson_id),
            safe_key.name())

    def test_rest_handler_requires_user_in_session(self):
        response = self._post()
        self.assertEquals(401, response.status_int)

    def test_rest_handler_requires_course_admin(self):
        actions.login('user@example.com', is_admin=False)
        response = self._post()
        # The HTTP response is 200; the REST status inside the body is 401.
        self.assertEquals(200, response.status_int)
        body = transforms.loads(response.body)
        self.assertEquals(401, body['status'])

    def test_rest_handler_requires_xsrf_token(self):
        response = self._post(xsrf_token='bad_token')
        self.assertEquals(200, response.status_int)
        body = transforms.loads(response.body)
        self.assertEquals(403, body['status'])

    def test_rest_handler_saves_state(self):
        actions.login('user@example.com', is_admin=True)
        response = self._post()
        self.assertEquals(200, response.status_int)
        body = transforms.loads(response.body)
        self.assertEquals(200, body['status'])
        # The stored DTO must round-trip the posted editor state exactly.
        user = users.get_current_user()
        key_name = oeditor.EditorPrefsDao.create_key_name(
            user.user_id(), self.location, self.key)
        dto = oeditor.EditorPrefsDao.load(key_name)
        self.assertEquals(self.EDITOR_STATE, dto.dict)

    def test_oeditor_returns_state(self):
        actions.login('user@example.com', is_admin=True)
        xsrf_timestamp = long(time.time())
        xsrf_token = crypto.XsrfTokenManager.create_xsrf_token(
            oeditor.EditorPrefsRestHandler.XSRF_TOKEN)
        self._post(xsrf_token=xsrf_token)
        response = self.get('dashboard?action=edit_lesson&key=%s' % (
            self.lesson.lesson_id))
        expected = {
            'xsrf_token': xsrf_token,
            'location': self.location,
            'key': str(self.key),
            'prefs': self.EDITOR_STATE
        }
        # Round-trip through JSON so types match what the page embeds.
        expected = transforms.loads(transforms.dumps(expected))
        match = re.search(
            r'cb_global.editor_prefs = JSON.parse\((.*)\);', response.body)
        actual = match.group(1)
        # The page embeds a doubly-encoded JSON string; unwrap both layers.
        actual = transforms.loads('"%s"' % actual)
        actual = transforms.loads(actual[1:-1])
        # If the time moves up to the next second between the moment we
        # generate our expected XSRF token and the response to the GET call is
        # made, the XSRF tokens will mismatch, and the test will fail. Allow
        # a tolerance of up to 5 seconds to allow for that (and thus suppress
        # test flakes.
        tolerance = 0
        while actual['xsrf_token'] != expected['xsrf_token'] and tolerance < 5:
            tolerance += 1
            expected['xsrf_token'] = crypto.XsrfTokenManager._create_token(
                oeditor.EditorPrefsRestHandler.XSRF_TOKEN,
                xsrf_timestamp + tolerance)
        self.assertEquals(expected, actual)
| gpl-3.0 |
kingvuplus/nn-gui | lib/python/Plugins/SystemPlugins/Hotplug/plugin.py | 54 | 1931 | from Plugins.Plugin import PluginDescriptor
from Components.Harddisk import harddiskmanager
from twisted.internet.protocol import Protocol, Factory
hotplugNotifier = []
def processHotplugData(self, v):
    # Dispatch one parsed hotplug event dict (VAR -> value) to the harddisk
    # manager, then notify all registered hotplugNotifier callbacks.
    # NOTE(review): 'self' is the Hotplug protocol instance passed in by
    # connectionLost; it is never used here -- confirm before removing.
    print "hotplug:", v
    action = v.get("ACTION")
    device = v.get("DEVPATH")
    physdevpath = v.get("PHYSDEVPATH")
    media_state = v.get("X_E2_MEDIA_STATUS")
    # Use only the last component of the device path as the device name.
    dev = device.split('/')[-1]
    if action == "add":
        error, blacklisted, removable, is_cdrom, partitions, medium_found = harddiskmanager.addHotplugPartition(dev, physdevpath)
    elif action == "remove":
        harddiskmanager.removeHotplugPartition(dev)
    elif media_state is not None:
        # No add/remove action: fall back to the Enigma2 media status field.
        if media_state == '1':
            # Medium present: re-register the partition from scratch.
            harddiskmanager.removeHotplugPartition(dev)
            harddiskmanager.addHotplugPartition(dev, physdevpath)
        elif media_state == '0':
            harddiskmanager.removeHotplugPartition(dev)
    for callback in hotplugNotifier:
        try:
            callback(dev, action or media_state)
        except AttributeError:
            # Callback is no longer usable; drop it from the notifier list.
            hotplugNotifier.remove(callback)
class Hotplug(Protocol):
    # Twisted protocol that accumulates one hotplug event sent over a UNIX
    # socket as NUL-separated VAR=value pairs and processes it on disconnect.
    def connectionMade(self):
        print "HOTPLUG connection!"
        self.received = ""
    def dataReceived(self, data):
        # Accumulate the raw payload; it is only parsed in connectionLost.
        print "hotplug:", data
        self.received += data
        print "complete", self.received
    def connectionLost(self, reason):
        print "HOTPLUG connection lost!"
        # The payload is NUL-separated with a trailing NUL, so the final
        # (empty) element of the split is dropped.
        data = self.received.split('\0')[:-1]
        v = {}
        for x in data:
            # Split each entry on the first '=' into variable and value.
            i = x.find('=')
            var, val = x[:i], x[i+1:]
            v[var] = val
        processHotplugData(self, v)
def autostart(reason, **kwargs):
if reason == 0:
print "starting hotplug handler"
from twisted.internet import reactor
import os
try:
os.remove("/tmp/hotplug.socket")
except OSError:
pass
factory = Factory()
factory.protocol = Hotplug
reactor.listenUNIX("/tmp/hotplug.socket", factory)
def Plugins(**kwargs):
    """Register the hotplug listener as an autostart plugin."""
    descriptor = PluginDescriptor(
        name = "Hotplug",
        description = "listens to hotplug events",
        where = PluginDescriptor.WHERE_AUTOSTART,
        needsRestart = True,
        fnc = autostart)
    return descriptor
| gpl-2.0 |
rhelmer/inquisitor | docs/conf.py | 50 | 7243 | # -*- coding: utf-8 -*-
#
# playdoh documentation build configuration file, created by
# sphinx-quickstart on Tue Jan 4 15:11:09 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'a playdoh-based project'
copyright = u'2011, the authors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'playdohdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'playdoh.tex', u'playdoh Documentation',
u'Mozilla', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'a-playdoh-app', u"a-playdoh-app's Documentation",
[u'the authors'], 1)
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| bsd-3-clause |
akshay-bhardwaj/python-webdriver | test/selenium/webdriver/common/appcache_tests.py | 24 | 1387 | #Copyright 2007-2009 WebDriver committers
#Copyright 2007-2009 Google Inc.
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
from selenium.webdriver.common.html5.application_cache import ApplicationCache
import unittest
class AppCacheTests(unittest.TestCase):
    """Functional tests for the HTML5 application cache."""

    def testWeCanGetTheStatusOfTheAppCache(self):
        self._loadPage('html5Page')
        self.driver.implicitly_wait(2)
        cache = self.driver.application_cache
        # Busy-wait until the cache leaves the DOWNLOADING state.
        state = cache.status
        while state == ApplicationCache.DOWNLOADING:
            state = cache.status
        self.assertEquals(ApplicationCache.UNCACHED, cache.status)

    def _pageURL(self, name):
        return "http://localhost:%d/%s.html" % (self.webserver.port, name)

    def _loadSimplePage(self):
        self._loadPage("simpleTest")

    def _loadPage(self, name):
        self.driver.get(self._pageURL(name))
mitsuhiko/django | django/views/decorators/clickjacking.py | 550 | 1759 | from functools import wraps
from django.utils.decorators import available_attrs
def xframe_options_deny(view_func):
    """
    Decorator that sets the X-Frame-Options HTTP header of the view's
    response to 'DENY', unless the response already carries that header.

    e.g.
    @xframe_options_deny
    def some_view(request):
        ...
    """
    @wraps(view_func, assigned=available_attrs(view_func))
    def wrapped_view(*args, **kwargs):
        resp = view_func(*args, **kwargs)
        # Respect a header the view (or another decorator) already set.
        if resp.get('X-Frame-Options') is None:
            resp['X-Frame-Options'] = 'DENY'
        return resp
    return wrapped_view
def xframe_options_sameorigin(view_func):
    """
    Decorator that sets the X-Frame-Options HTTP header of the view's
    response to 'SAMEORIGIN', unless the response already carries that
    header.

    e.g.
    @xframe_options_sameorigin
    def some_view(request):
        ...
    """
    @wraps(view_func, assigned=available_attrs(view_func))
    def wrapped_view(*args, **kwargs):
        resp = view_func(*args, **kwargs)
        # Respect a header the view (or another decorator) already set.
        if resp.get('X-Frame-Options') is None:
            resp['X-Frame-Options'] = 'SAMEORIGIN'
        return resp
    return wrapped_view
def xframe_options_exempt(view_func):
    """
    Decorator that marks the view's response so XFrameOptionsMiddleware
    will NOT set the X-Frame-Options HTTP header on it.

    e.g.
    @xframe_options_exempt
    def some_view(request):
        ...
    """
    @wraps(view_func, assigned=available_attrs(view_func))
    def wrapped_view(*args, **kwargs):
        resp = view_func(*args, **kwargs)
        resp.xframe_options_exempt = True
        return resp
    return wrapped_view
| bsd-3-clause |
Kongsea/tensorflow | tensorflow/contrib/predictor/contrib_estimator_predictor.py | 58 | 3152 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A `Predictor constructed from a `tf.contrib.learn.Estimator`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.learn.python.learn.utils import saved_model_export_utils
from tensorflow.contrib.predictor import predictor
from tensorflow.python.framework import ops
from tensorflow.python.training import monitored_session
from tensorflow.python.training import saver
class ContribEstimatorPredictor(predictor.Predictor):
  """A `Predictor` constructed from a `tf.contrib.learn.Estimator`."""
  def __init__(self,
               estimator,
               prediction_input_fn,
               input_alternative_key=None,
               output_alternative_key=None,
               graph=None):
    """Initialize a `ContribEstimatorPredictor`.
    Args:
      estimator: an instance of `tf.contrib.learn.Estimator`.
      prediction_input_fn: a function that takes no arguments and returns an
        instance of `InputFnOps`.
      input_alternative_key: Optional. Specify the input alternative used for
        prediction.
      output_alternative_key: Specify the output alternative used for
        prediction. Not needed for single-headed models but required for
        multi-headed models.
      graph: Optional. The Tensorflow `graph` in which prediction should be
        done.
    """
    self._graph = graph or ops.Graph()
    with self._graph.as_default():
      # Build the prediction sub-graph from the estimator's model_fn.
      input_fn_ops = prediction_input_fn()
      # pylint: disable=protected-access
      model_fn_ops = estimator._get_predict_ops(input_fn_ops.features)
      # pylint: enable=protected-access
      # Restore the most recent checkpoint; MonitoredSession handles
      # variable initialization and recovery.
      checkpoint_path = saver.latest_checkpoint(estimator.model_dir)
      self._session = monitored_session.MonitoredSession(
          session_creator=monitored_session.ChiefSessionCreator(
              checkpoint_filename_with_path=checkpoint_path))
    # Resolve which named input alternative supplies the feed tensors.
    input_alternative_key = (
        input_alternative_key or
        saved_model_export_utils.DEFAULT_INPUT_ALTERNATIVE_KEY)
    input_alternatives, _ = saved_model_export_utils.get_input_alternatives(
        input_fn_ops)
    self._feed_tensors = input_alternatives[input_alternative_key]
    # Resolve the output alternative (single-headed models have only one,
    # so the key may be None there).
    (output_alternatives,
     output_alternative_key) = saved_model_export_utils.get_output_alternatives(
         model_fn_ops, output_alternative_key)
    _, fetch_tensors = output_alternatives[output_alternative_key]
    self._fetch_tensors = fetch_tensors
| apache-2.0 |
souzainf3/namebench | nb_third_party/jinja2/bccache.py | 284 | 9994 | # -*- coding: utf-8 -*-
"""
jinja2.bccache
~~~~~~~~~~~~~~
This module implements the bytecode cache system Jinja is optionally
    using. This is useful if you have very complex template situations and
    the compilation of all those templates slows down your application too
    much.
Situations where this is useful are often forking web applications that
are initialized on the first request.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD.
"""
from os import path, listdir
import marshal
import tempfile
import cPickle as pickle
import fnmatch
from cStringIO import StringIO
try:
from hashlib import sha1
except ImportError:
from sha import new as sha1
from jinja2.utils import open_if_exists
# Bump bc_version whenever the on-disk bytecode format changes.
bc_version = 1
# Magic header: codec identifier followed by the pickled format version.
bc_magic = 'j2'.encode('ascii') + pickle.dumps(bc_version, 2)
class Bucket(object):
    """Container for the compiled bytecode of one template.
    Buckets are created and initialized by the bytecode cache and passed
    to the loading functions.  Each bucket carries a checksum assigned by
    the cache and uses it to reject outdated cache material automatically,
    so individual bytecode cache subclasses never deal with invalidation.
    """
    def __init__(self, environment, key, checksum):
        self.environment = environment
        self.key = key
        self.checksum = checksum
        self.reset()
    def reset(self):
        """Resets the bucket (unloads the bytecode)."""
        self.code = None
    def load_bytecode(self, f):
        """Loads bytecode from a file or file like object."""
        # reject anything that does not start with our magic header
        if f.read(len(bc_magic)) != bc_magic:
            self.reset()
            return
        # a checksum mismatch means the template source changed
        if pickle.load(f) != self.checksum:
            self.reset()
            return
        # marshal can only read from real file objects, so fall back to
        # loading from an in-memory string for everything else
        if isinstance(f, file):
            self.code = marshal.load(f)
        else:
            self.code = marshal.loads(f.read())
    def write_bytecode(self, f):
        """Dump the bytecode into the file or file like object passed."""
        if self.code is None:
            raise TypeError('can\'t write empty bucket')
        f.write(bc_magic)
        pickle.dump(self.checksum, f, 2)
        if isinstance(f, file):
            marshal.dump(self.code, f)
        else:
            f.write(marshal.dumps(self.code))
    def bytecode_from_string(self, string):
        """Load bytecode from a string."""
        self.load_bytecode(StringIO(string))
    def bytecode_to_string(self):
        """Return the bytecode as string."""
        buf = StringIO()
        self.write_bytecode(buf)
        return buf.getvalue()
class BytecodeCache(object):
    """Abstract base for bytecode caches.  Subclasses override
    :meth:`load_bytecode` and :meth:`dump_bytecode`; both receive a
    :class:`~jinja2.bccache.Bucket`.
    A very basic bytecode cache that saves the bytecode on the file system::
        from os import path
        class MyCache(BytecodeCache):
            def __init__(self, directory):
                self.directory = directory
            def load_bytecode(self, bucket):
                filename = path.join(self.directory, bucket.key)
                if path.exists(filename):
                    with open(filename, 'rb') as f:
                        bucket.load_bytecode(f)
            def dump_bytecode(self, bucket):
                filename = path.join(self.directory, bucket.key)
                with open(filename, 'wb') as f:
                    bucket.write_bytecode(f)
    A more advanced version of a filesystem based bytecode cache is part of
    Jinja2.
    """
    def load_bytecode(self, bucket):
        """Load bytecode into *bucket*.  When the cache holds no code for
        the bucket this must do nothing.  Must be overridden.
        """
        raise NotImplementedError()
    def dump_bytecode(self, bucket):
        """Write the bytecode from *bucket* back to the cache.  Must not
        fail silently; raise an exception on error.  Must be overridden.
        """
        raise NotImplementedError()
    def clear(self):
        """Clears the cache.  Not used by Jinja2 itself but should be
        implemented so applications can wipe the bytecode cache used by a
        particular environment.
        """
    def get_cache_key(self, name, filename=None):
        """Returns the unique hash key for this template name."""
        digest = sha1(name.encode('utf-8'))
        if filename is not None:
            if isinstance(filename, unicode):
                filename = filename.encode('utf-8')
            # '|' separates template name from filename in the hash input
            digest.update('|' + filename)
        return digest.hexdigest()
    def get_source_checksum(self, source):
        """Returns a checksum for the source."""
        return sha1(source.encode('utf-8')).hexdigest()
    def get_bucket(self, environment, name, filename, source):
        """Return a cache bucket for the given template.  All arguments are
        mandatory but filename may be `None`.
        """
        bucket = Bucket(environment,
                        self.get_cache_key(name, filename),
                        self.get_source_checksum(source))
        self.load_bytecode(bucket)
        return bucket
    def set_bucket(self, bucket):
        """Put the bucket into the cache."""
        self.dump_bytecode(bucket)
class FileSystemBytecodeCache(BytecodeCache):
    """Bytecode cache backed by the filesystem.  Takes the directory where
    cache items live and a pattern used to build the filenames.
    When no directory is given the system temporary folder is used.  The
    pattern lets multiple separate caches share one directory; it defaults
    to ``'__jinja2_%s.cache'`` where ``%s`` is replaced by the cache key.
    >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache')
    This bytecode cache supports clearing of the cache using the clear method.
    """
    def __init__(self, directory=None, pattern='__jinja2_%s.cache'):
        # fall back to the system temp folder when no directory is given
        self.directory = tempfile.gettempdir() if directory is None \
            else directory
        self.pattern = pattern
    def _get_cache_filename(self, bucket):
        return path.join(self.directory, self.pattern % bucket.key)
    def load_bytecode(self, bucket):
        f = open_if_exists(self._get_cache_filename(bucket), 'rb')
        if f is None:
            return
        try:
            bucket.load_bytecode(f)
        finally:
            f.close()
    def dump_bytecode(self, bucket):
        f = open(self._get_cache_filename(bucket), 'wb')
        try:
            bucket.write_bytecode(f)
        finally:
            f.close()
    def clear(self):
        # imported lazily here because google app-engine doesn't support
        # write access on the file system and the function does not exist
        # normally.
        from os import remove
        for name in fnmatch.filter(listdir(self.directory),
                                   self.pattern % '*'):
            try:
                remove(path.join(self.directory, name))
            except OSError:
                # best effort: skip entries we cannot delete
                pass
class MemcachedBytecodeCache(BytecodeCache):
    """Bytecode cache that stores entries in a memcache-style backend.  No
    specific memcache library is enforced (tummy's memcache or cmemcache
    both work); any client object offering the minimal interface below is
    accepted.
    Libraries compatible with this class:
    -   `werkzeug <http://werkzeug.pocoo.org/>`_.contrib.cache
    -   `python-memcached <http://www.tummy.com/Community/software/python-memcached/>`_
    -   `cmemcache <http://gijsbert.org/cmemcache/>`_
    (Unfortunately the django cache interface is not compatible because it
    does not support storing binary data, only unicode.  You can however
    pass the underlying cache client to the bytecode cache which is
    available as `django.core.cache.cache._client`.)
    The minimal interface for the client passed to the constructor is this:
    .. class:: MinimalClientInterface
        .. method:: set(key, value[, timeout])
            Stores the bytecode in the cache.  `value` is a string and
            `timeout` the timeout of the key.  If timeout is not provided
            a default timeout or no timeout should be assumed, if it's
            provided it's an integer with the number of seconds the cache
            item should exist.
        .. method:: get(key)
            Returns the value for the cache key.  If the item does not
            exist in the cache the return value must be `None`.
    `prefix` is prepended to every actual cache key and `timeout` is the
    lifetime of the bytecode in the cache system.  We recommend a high (or
    no) timeout.
    This bytecode cache does not support clearing of used items in the
    cache.  The clear method is a no-operation function.
    """
    def __init__(self, client, prefix='jinja2/bytecode/', timeout=None):
        self.client = client
        self.prefix = prefix
        self.timeout = timeout
    def load_bytecode(self, bucket):
        cached = self.client.get(self.prefix + bucket.key)
        if cached is not None:
            bucket.bytecode_from_string(cached)
    def dump_bytecode(self, bucket):
        key = self.prefix + bucket.key
        blob = bucket.bytecode_to_string()
        # only pass the timeout through when one was configured
        if self.timeout is None:
            self.client.set(key, blob)
        else:
            self.client.set(key, blob, self.timeout)
| apache-2.0 |
Edmonton-Public-Library/centennial | util/email/email_template.py | 1 | 1374 | import sys
from Crypto.Cipher import AES
import urllib
def getRegistrationNotification(name, baseUrl, email, creationTime, MIME):
    """Build the registration notification message for the given MIME kind.
    The activation key encrypts "<email>=<creationTime>"; the URL and the
    final text are rendered by the _formatUrlAs<MIME> / _formatTextAs<MIME>
    helpers, looked up dynamically on this module.
    """
    activationKey = aesEncrypt(email + "=" + creationTime)
    keyValue = urllib.urlencode({"key" : activationKey})
    module = sys.modules[__name__]
    rawUrl = "%s/account/activate?%s" % (baseUrl, keyValue)
    url = getattr(module, "_formatUrlAs%s" % (MIME))(rawUrl)
    body = _REGISTRATION_NOTIFICATION % (name, url)
    return getattr(module, "_formatTextAs%s" % (MIME))(body)
def _formatUrlAshtml(raw):
    """Wrap *raw* in an HTML anchor whose link text is the URL itself."""
    anchor = "<a href=\"%s\">%s</a>"
    return anchor % (raw, raw)
def _formatTextAshtml(raw):
    """Embed *raw* in the standard EPL HTML e-mail body."""
    template = """\
<html>
<head></head>
<body>
<p>Greetings from EPL<br>
%s
</p>
</body>
</html>
"""
    return template % (raw)
def aesEncrypt(msg):
    """Space-pad *msg* to a multiple of the AES block size and encrypt it.
    NOTE(review): when len(msg) is already a multiple of 16 this still
    appends a full block of 16 spaces; aesDecrypt never strips padding, so
    consumers apparently tolerate trailing spaces -- confirm before changing.
    NOTE(review): AES.new() with only a key selects ECB mode with a
    hard-coded key -- acceptable only for low-value activation tokens.
    """
    diff = len(msg) % 16
    padd = " "*(16-diff)
    msg += padd
    cipher = AES.new(_ENCRYPTION_KEY)
    return cipher.encrypt(msg)
def aesDecrypt(msg):
    """Decrypt *msg* with the module key; any trailing space padding added
    by aesEncrypt is left in place for the caller to handle."""
    cipher = AES.new(_ENCRYPTION_KEY)
    return cipher.decrypt(msg)
# 16-byte key -> AES-128.  NOTE(review): key is hard-coded in source
# control; consider loading it from configuration or secret storage.
_ENCRYPTION_KEY = b'Sixteen byte key'
# %s placeholders: (user name, activation URL)
_REGISTRATION_NOTIFICATION = """Thank you %s for registering with EPL TimeMap. To activate your account, you will need to verify your account by navigating to the following URL: %s"""
# %s placeholder: password-reset URL
PASSWORD_RESET_EMAIL = """To reset your password, please click on the following link: %s"""
| mit |
JimCallahanOrlando/DistrictBuilder | django/publicmapping/redistricting/templatetags/redistricting_extras.py | 1 | 5091 | """
Extra tags and templates used in the django
templating system for the redistricting app.
This file is part of The Public Mapping Project
https://github.com/PublicMapping/
License:
Copyright 2010 Micah Altman, Michael McDonald
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Author:
Andrew Jennings, David Zwarg
"""
from django import template
from django.template.defaultfilters import floatformat
from django.utils.translation import ugettext as _
register = template.Library()
@register.filter
def spellnumber(value):
    """
    This filter converts a number into its spelled-out equivalent.
    Note: not all numbers are implemented. Values outside the range
    0..20 (including negative numbers) and non-integer values are
    returned unchanged.
    Parameters:
    value - A number value
    """
    words = [_("zero"), _("one"), _("two"), _("three"), _("four"), _("five"), _("six"), _("seven"), _("eight"), _("nine"),
             _("ten"), _("eleven"), _("twelve"), _("thirteen"), _("fourteen"), _("fifteen"), _("sixteen"),
             _("seventeen"), _("eighteen"), _("nineteen"), _("twenty")]
    try:
        # Explicit range check: plain words[value] would silently wrap
        # negative indices (e.g. -1 previously yielded "twenty").
        if 0 <= value <= 20:
            return words[value]
        return value
    except TypeError:
        # non-numeric input falls through unchanged
        return value
@register.filter
def dictsort_ignorecase(value, arg):
    """
    Takes a list of dicts, returns that list sorted by the property given in
    the argument. Sort is case insensitive.
    """
    def _casefolded(obj):
        # strings sort by their lowercased form; anything else as-is
        try:
            return obj.lower()
        except AttributeError:
            return obj
    resolve = template.Variable(arg).resolve
    keyed = [(_casefolded(resolve(entry)), entry) for entry in value]
    keyed.sort()
    return [entry for _key, entry in keyed]
@register.filter
def count_true_values(value, key):
    """
    This filter accepts a list of dicts and returns the count of "True"
    values in the list, as a string. The "key" value is the key in the
    dict to check for True. Returns '' for malformed input.
    """
    try:
        # A generator sum works on both Python 2 and 3, unlike the previous
        # len(filter(...)) which breaks on Python 3 where filter() returns
        # an iterator without a length.
        return str(sum(1 for item in value if item[key]))
    except Exception:
        # best-effort template filter: any malformed input yields ''
        return ''
@register.filter
def avg_report_column(districtscores, row):
    """
    This filter extracts all scores in a set of districtscores that are
    related to the row, by using 'avg_key', and returns an average of the score.
    Parameters:
    districtscores - A list of districtscores
    row - A single score row
    """
    # rows without an avg_key cannot be averaged
    if 'avg_key' not in row:
        return ''
    try:
        avg_key = row['avg_key']
        total = 0
        num_items = 0
        for districtscore in districtscores:
            # district_id 0 is excluded from the average
            # (presumably the unassigned district -- confirm)
            if districtscore['district'].district_id == 0:
                continue
            for score in districtscore['scores']:
                for scorerow in score['score']:
                    if 'avg_key' in scorerow and avg_key == scorerow['avg_key']:
                        num_items += 1
                        total += float(scorerow['value'])
    except:
        # any malformed score structure yields 'N/A'
        return 'N/A'
    # reuse the row's display type; average 0 when nothing matched
    return format_report_value({ 'type': row['type'], 'value': 0 if not num_items else total / num_items })
@register.filter
def count_report_row_elements(row):
    """
    This filter returns the length of a list found in a score row,
    formatted via floatformat; non-list rows and malformed input yield ''.
    Parameters:
    row - A single score row
    """
    try:
        if (row['type'] == 'list'):
            return floatformat(len(row['value']), 0)
    except:
        # missing keys or a value without len() fall back to ''
        return ''
    return ''
@register.filter
def format_report_value(row):
    """
    This filter formats a score based on its type.
    Parameters:
    row - A single score row
    """
    try:
        if row['type'] == 'integer':
            return floatformat(row['value'], 0)
        if row['type'] == 'percent':
            # value is stored as a fraction, e.g. 0.25 -> '25.00%'
            return floatformat(row['value'] * 100, 2) + '%'
        if row['type'] == 'boolean':
            # Rather than using Upper on the string value, we'll be specific
            # for the sake of i18n
            return _('True') if row['value'] is True else _('False')
        if row['type'] == 'list':
            return ' '.join([str(x) for x in row['value']])
    except:
        # any malformed row yields 'N/A'
        return 'N/A'
    # unknown types (e.g. plain floats) fall through unformatted
    return row['value']
@register.filter
def format_report_class(row):
    """
    This filter returns a css class (text alignment, and for booleans the
    lowercased value) based on the score's type.
    Parameters:
    row - A single score row
    """
    try:
        if row['type'] in ['integer', 'float', 'percent']:
            return 'right'
        if row['type'] == 'list':
            return 'left'
        if row['type'] == 'boolean':
            # e.g. 'center true' / 'center false'
            return 'center ' + str(row['value']).lower()
    except:
        pass
    # default alignment for unknown or malformed rows
    return 'center'
| apache-2.0 |
chvrga/outdoor-explorer | java/play-1.4.4/python/Lib/encodings/zlib_codec.py | 88 | 3117 | """ Python 'zlib_codec' Codec - zlib compression encoding
Unlike most of the other codecs which target Unicode, this codec
will return Python string objects for both encode and decode.
Written by Marc-Andre Lemburg (mal@lemburg.com).
"""
import codecs
import zlib # this codec needs the optional zlib module !
### Codec APIs
def zlib_encode(input, errors='strict'):
    """Compress *input* with zlib and return the tuple
    ``(compressed output, length consumed)``.
    Only 'strict' error handling is supported for this codec.
    """
    assert errors == 'strict'
    compressed = zlib.compress(input)
    return (compressed, len(input))
def zlib_decode(input, errors='strict'):
    """Decompress zlib-compressed *input* and return the tuple
    ``(decompressed output, length consumed)``.
    *input* must support the buffer interface (bytes, memory mapped
    files, ...).  Only 'strict' error handling is supported.
    """
    assert errors == 'strict'
    decompressed = zlib.decompress(input)
    return (decompressed, len(input))
class Codec(codecs.Codec):
    # Stateless codec interface: delegates straight to the module-level
    # zlib_encode / zlib_decode helpers.
    def encode(self, input, errors='strict'):
        return zlib_encode(input, errors)
    def decode(self, input, errors='strict'):
        return zlib_decode(input, errors)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental zlib compressor; only 'strict' errors are supported."""
    def __init__(self, errors='strict'):
        assert errors == 'strict'
        self.errors = errors
        self.compressobj = zlib.compressobj()
    def encode(self, input, final=False):
        chunk = self.compressobj.compress(input)
        if not final:
            return chunk
        # last chunk: flush out whatever the compressor still buffers
        return chunk + self.compressobj.flush()
    def reset(self):
        # a compress object cannot be rewound; start a fresh one
        self.compressobj = zlib.compressobj()
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental zlib decompressor; only 'strict' errors are supported."""
    def __init__(self, errors='strict'):
        assert errors == 'strict'
        self.errors = errors
        self.decompressobj = zlib.decompressobj()
    def decode(self, input, final=False):
        chunk = self.decompressobj.decompress(input)
        if not final:
            return chunk
        # last chunk: flush any remaining buffered output
        return chunk + self.decompressobj.flush()
    def reset(self):
        # a decompress object cannot be rewound; start a fresh one
        self.decompressobj = zlib.decompressobj()
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream interface: combines the stateless Codec with codecs' writer.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream interface: combines the stateless Codec with codecs' reader.
    pass
### encodings module API
def getregentry():
    """Return the `codecs.CodecInfo` used to register the 'zlib' codec."""
    return codecs.CodecInfo(
        name='zlib',
        encode=zlib_encode,
        decode=zlib_decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
| mit |
erdewit/ib_insync | ib_insync/wrapper.py | 1 | 43752 | """Wrapper to handle incoming messages."""
import asyncio
import logging
from collections import defaultdict
from contextlib import suppress
from datetime import datetime, timezone
from typing import (
Any, Dict, List, Optional, Set, Tuple, Union, cast)
from ib_insync.contract import (
Contract, ContractDescription, ContractDetails, ScanData)
from ib_insync.objects import (
AccountValue, BarData, BarDataList, CommissionReport,
DOMLevel, DepthMktDataDescription,
Dividends, Execution, Fill, FundamentalRatios, HistogramData,
HistoricalNews, HistoricalTick, HistoricalTickBidAsk, HistoricalTickLast,
MktDepthData, NewsArticle, NewsBulletin, NewsProvider, NewsTick,
OptionChain, OptionComputation, PnL, PnLSingle, PortfolioItem, Position,
PriceIncrement, RealTimeBar, RealTimeBarList, TickAttribBidAsk,
TickAttribLast, TickByTickAllLast, TickByTickBidAsk, TickByTickMidPoint,
TickData, TradeLogEntry)
from ib_insync.order import Order, OrderState, OrderStatus, Trade
from ib_insync.ticker import Ticker
from ib_insync.util import (
UNSET_DOUBLE, UNSET_INTEGER, dataclassAsDict, dataclassUpdate,
globalErrorEvent, isNan, parseIBDatetime)
__all__ = ['RequestError', 'Wrapper']
# Key type for Wrapper.trades: (clientId, orderId) for API-placed orders,
# a bare permId for orders entered manually in TWS (see Wrapper.orderKey).
OrderKeyType = Union[int, Tuple[int, int]]
class RequestError(Exception):
    """
    Exception raised when the API reports an error that can be tied to
    a single request.
    """
    def __init__(self, reqId: int, code: int, message: str):
        """
        Args:
            reqId: Original request ID.
            code: Original error code.
            message: Original error message.
        """
        # keep the raw error details around for programmatic handling
        self.reqId = reqId
        self.code = code
        self.message = message
        super().__init__(f'API error: {code}: {message}')
class Wrapper:
"""Wrapper implementation for use with the IB class."""
    def __init__(self, ib):
        """Create a wrapper bound to *ib*, the owning IB instance."""
        self.ib = ib
        self._logger = logging.getLogger('ib_insync.wrapper')
        # handle of the pending idle-timeout callback, if any
        self._timeoutHandle = None
        self.reset()
    def reset(self):
        """(Re)initialize all request/response bookkeeping state."""
        self.accountValues: Dict[tuple, AccountValue] = {}
        # (account, tag, currency, modelCode) -> AccountValue
        self.acctSummary: Dict[tuple, AccountValue] = {}
        # (account, tag, currency) -> AccountValue
        self.portfolio: Dict[str, Dict[int, PortfolioItem]] = defaultdict(dict)
        # account -> conId -> PortfolioItem
        self.positions = defaultdict(dict)
        # account -> conId -> Position
        self.trades: Dict[OrderKeyType, Trade] = {}
        # (client, orderId) or permId -> Trade
        self.permId2Trade: Dict[int, Trade] = {}
        # permId -> Trade
        self.fills: Dict[str, Fill] = {}
        # execId -> Fill
        self.newsTicks: List[NewsTick] = []
        self.msgId2NewsBulletin: Dict[int, NewsBulletin] = {}
        # msgId -> NewsBulletin
        self.tickers: Dict[int, Ticker] = {}
        # id(Contract) -> Ticker
        self.pendingTickers: Set[Ticker] = set()
        self.reqId2Ticker: Dict[int, Ticker] = {}
        # reqId -> Ticker
        self.ticker2ReqId: Dict[Union[int, str], Dict[Ticker, int]] = \
            defaultdict(dict)
        # tickType -> Ticker -> reqId
        self.reqId2MarketDataType: Dict[int, int] = {}
        # reqId -> marketDataType
        self.reqId2Subscriber: Dict[int, Any] = {}
        # live bars or live scan data
        self.reqId2PnL: Dict[int, PnL] = {}
        # reqId -> PnL
        self.reqId2PnlSingle: Dict[int, PnLSingle] = {}
        # reqId -> PnLSingle
        self.pnlKey2ReqId: Dict[tuple, int] = {}
        # (account, modelCode) -> reqId
        self.pnlSingleKey2ReqId: Dict[tuple, int] = {}
        # (account, modelCode, conId) -> reqId
        # futures and results are linked by key:
        self._futures: Dict[Any, asyncio.Future] = {}
        self._results: Dict[Any, Any] = {}
        # UTC time of last network packet arrival:
        self.lastTime: Optional[datetime] = None
        self._reqId2Contract: Dict[int, Contract] = {}
        self.accounts: List[str] = []
        self.clientId: int = -1
        self._timeout: float = 0
        self.setTimeout(0)
    def connectionClosed(self):
        """Handle a socket disconnect: finish all live event streams, fail
        every outstanding request future and reset all state."""
        for ticker in self.tickers.values():
            ticker.updateEvent.set_done()
        for sub in self.reqId2Subscriber.values():
            sub.updateEvent.set_done()
        error = ConnectionError('Socket disconnect')
        for future in self._futures.values():
            if not future.done():
                future.set_exception(error)
        globalErrorEvent.emit(error)
        self.reset()
    def startReq(self, key, contract=None, container=None):
        """
        Start a new request and return the future that is associated
        with the key and container. The container is a list by default.
        """
        future = asyncio.Future()
        self._futures[key] = future
        self._results[key] = container if container is not None else []
        if contract:
            self._reqId2Contract[key] = contract
        return future
    def _endReq(self, key, result=None, success=True):
        """
        Finish the future of corresponding key with the given result.
        If no result is given then it will be popped off the general results.
        """
        future = self._futures.pop(key, None)
        self._reqId2Contract.pop(key, None)
        if future:
            if result is None:
                result = self._results.pop(key, [])
            if not future.done():
                if success:
                    future.set_result(result)
                else:
                    # on failure the result is expected to be an exception
                    future.set_exception(result)
    def startTicker(
            self, reqId: int, contract: Contract, tickType: Union[int, str]):
        """
        Start a tick request that has the reqId associated with the contract.
        Return the ticker.
        """
        ticker = self.tickers.get(id(contract))
        if not ticker:
            # one Ticker per contract object, shared across tick types
            ticker = Ticker(
                contract=contract, ticks=[], tickByTicks=[],
                domBids=[], domAsks=[], domTicks=[])
            self.tickers[id(contract)] = ticker
        self.reqId2Ticker[reqId] = ticker
        self._reqId2Contract[reqId] = contract
        self.ticker2ReqId[tickType][ticker] = reqId
        return ticker
    def endTicker(self, ticker: Ticker, tickType: Union[int, str]):
        """Unregister the tick request for *ticker*; return its reqId
        (0 when no request was registered)."""
        reqId = self.ticker2ReqId[tickType].pop(ticker, 0)
        self._reqId2Contract.pop(reqId, None)
        return reqId
    def startSubscription(self, reqId, subscriber, contract=None):
        """Register a live subscription (live bars or live scan data)."""
        self._reqId2Contract[reqId] = contract
        self.reqId2Subscriber[reqId] = subscriber
    def endSubscription(self, subscriber):
        """Unregister a live subscription."""
        # the subscriber object carries its own reqId
        self._reqId2Contract.pop(subscriber.reqId, None)
        self.reqId2Subscriber.pop(subscriber.reqId, None)
def orderKey(self, clientId: int, orderId: int, permId: int) -> \
OrderKeyType:
key: OrderKeyType
if orderId <= 0:
# order is placed manually from TWS
key = permId
else:
key = (clientId, orderId)
return key
    def setTimeout(self, timeout: float):
        """Arm (or, with 0, disarm) the idle watchdog: *timeout* seconds
        without incoming messages triggers ``ib.timeoutEvent``."""
        self.lastTime = datetime.now(timezone.utc)
        if self._timeoutHandle:
            self._timeoutHandle.cancel()
        self._timeoutHandle = None
        self._timeout = timeout
        if timeout:
            self._setTimer(timeout)
    def _setTimer(self, delay: float = 0):
        """Re-arm the watchdog timer, or emit ``ib.timeoutEvent`` when the
        idle window has fully elapsed without incoming messages."""
        if not self.lastTime:
            return
        now = datetime.now(timezone.utc)
        diff = (now - self.lastTime).total_seconds()
        if not delay:
            delay = self._timeout - diff
        if delay > 0:
            # data arrived in the meantime; sleep for the remaining window
            loop = asyncio.get_event_loop()
            self._timeoutHandle = loop.call_later(delay, self._setTimer)
        else:
            self._logger.debug('Timeout')
            # disarm before emitting so handlers can re-arm safely
            self.setTimeout(0)
            self.ib.timeoutEvent.emit(diff)
# wrapper methods
    def connectAck(self):
        # no-op: the connect acknowledgement needs no client-side action
        pass
    def nextValidId(self, reqId: int):
        # no-op: order id bookkeeping is handled via client.updateReqId
        # in openOrder instead
        pass
def managedAccounts(self, accountsList: str):
self.accounts = [a for a in accountsList.split(',') if a]
    def updateAccountTime(self, timestamp: str):
        # no-op: the account update timestamp is not tracked
        pass
    def updateAccountValue(
            self, tag: str, val: str, currency: str, account: str):
        """Store one account value (empty modelCode) and emit
        ``accountValueEvent``."""
        key = (account, tag, currency, '')
        acctVal = AccountValue(account, tag, val, currency, '')
        self.accountValues[key] = acctVal
        self.ib.accountValueEvent.emit(acctVal)
    def accountDownloadEnd(self, _account: str):
        """Resolve the pending 'accountValues' request."""
        # sent after updateAccountValue and updatePortfolio both finished
        self._endReq('accountValues')
    def accountUpdateMulti(
            self, reqId: int, account: str, modelCode: str, tag: str, val: str,
            currency: str):
        """Store one model-scoped account value and emit
        ``accountValueEvent``."""
        key = (account, tag, currency, modelCode)
        acctVal = AccountValue(account, tag, val, currency, modelCode)
        self.accountValues[key] = acctVal
        self.ib.accountValueEvent.emit(acctVal)
    def accountUpdateMultiEnd(self, reqId: int):
        """Resolve the pending request for *reqId*."""
        self._endReq(reqId)
    def accountSummary(
            self, _reqId: int, account: str, tag: str, value: str,
            currency: str):
        """Store one account summary value and emit
        ``accountSummaryEvent``."""
        key = (account, tag, currency)
        acctVal = AccountValue(account, tag, value, currency, '')
        self.acctSummary[key] = acctVal
        self.ib.accountSummaryEvent.emit(acctVal)
    def accountSummaryEnd(self, reqId: int):
        """Resolve the pending account summary request."""
        self._endReq(reqId)
    def updatePortfolio(
            self, contract: Contract, posSize: float, marketPrice: float,
            marketValue: float, averageCost: float, unrealizedPNL: float,
            realizedPNL: float, account: str):
        """Store or update a portfolio item and emit
        ``updatePortfolioEvent``; a size of 0 removes the item."""
        contract = Contract.create(**dataclassAsDict(contract))
        portfItem = PortfolioItem(
            contract, posSize, marketPrice, marketValue,
            averageCost, unrealizedPNL, realizedPNL, account)
        portfolioItems = self.portfolio[account]
        if posSize == 0:
            # position closed: drop it from the portfolio
            portfolioItems.pop(contract.conId, None)
        else:
            portfolioItems[contract.conId] = portfItem
        self._logger.info(f'updatePortfolio: {portfItem}')
        self.ib.updatePortfolioEvent.emit(portfItem)
    def position(
            self, account: str, contract: Contract, posSize: float,
            avgCost: float):
        """Store or update a position and emit ``positionEvent``; a size of
        0 removes it.  Also appended to a pending 'positions' request."""
        contract = Contract.create(**dataclassAsDict(contract))
        position = Position(account, contract, posSize, avgCost)
        positions = self.positions[account]
        if posSize == 0:
            # position closed: drop it
            positions.pop(contract.conId, None)
        else:
            positions[contract.conId] = position
        self._logger.info(f'position: {position}')
        results = self._results.get('positions')
        if results is not None:
            results.append(position)
        self.ib.positionEvent.emit(position)
    def positionEnd(self):
        """Resolve the pending 'positions' request."""
        self._endReq('positions')
    def pnl(
            self, reqId: int, dailyPnL: float, unrealizedPnL: float,
            realizedPnL: float):
        """Update the PnL subscribed under *reqId* and emit ``pnlEvent``;
        unknown reqIds are ignored."""
        pnl = self.reqId2PnL.get(reqId)
        if not pnl:
            return
        pnl.dailyPnL = dailyPnL
        pnl.unrealizedPnL = unrealizedPnL
        pnl.realizedPnL = realizedPnL
        self.ib.pnlEvent.emit(pnl)
    def pnlSingle(
            self, reqId: int, pos: int, dailyPnL: float, unrealizedPnL: float,
            realizedPnL: float, value: float):
        """Update the single-position PnL subscribed under *reqId* and emit
        ``pnlSingleEvent``; unknown reqIds are ignored."""
        pnlSingle = self.reqId2PnlSingle.get(reqId)
        if not pnlSingle:
            return
        pnlSingle.position = pos
        pnlSingle.dailyPnL = dailyPnL
        pnlSingle.unrealizedPnL = unrealizedPnL
        pnlSingle.realizedPnL = realizedPnL
        pnlSingle.value = value
        self.ib.pnlSingleEvent.emit(pnlSingle)
    def openOrder(
            self, orderId: int, contract: Contract, order: Order,
            orderState: OrderState):
        """
        This wrapper is called to:
        * feed in open orders at startup;
        * feed in open orders or order updates from other clients and TWS
          if clientId=master id;
        * feed in manual orders and order updates from TWS if clientId=0;
        * handle openOrders and allOpenOrders responses.
        """
        if order.whatIf:
            # response to whatIfOrder
            if orderState.commissionCurrency:
                self._endReq(order.orderId, orderState)
        else:
            key = self.orderKey(order.clientId, order.orderId, order.permId)
            trade = self.trades.get(key)
            if trade:
                # known trade: refresh the mutable order fields in place
                trade.order.permId = order.permId
                trade.order.totalQuantity = order.totalQuantity
                trade.order.lmtPrice = order.lmtPrice
                trade.order.auxPrice = order.auxPrice
                trade.order.orderType = order.orderType
            else:
                # ignore '?' values in the order
                order = Order(**{
                    k: v for k, v in dataclassAsDict(order).items()
                    if v != '?'})
                contract = Contract.create(**dataclassAsDict(contract))
                orderStatus = OrderStatus(
                    orderId=orderId, status=orderState.status)
                trade = Trade(contract, order, orderStatus, [], [])
                self.trades[key] = trade
                self._logger.info(f'openOrder: {trade}')
            self.permId2Trade.setdefault(order.permId, trade)
            results = self._results.get('openOrders')
            if results is None:
                self.ib.openOrderEvent.emit(trade)
            else:
                # response to reqOpenOrders or reqAllOpenOrders
                results.append(order)
        # make sure that the client issues order ids larger then any
        # order id encountered (even from other clients) to avoid
        # "Duplicate order id" error
        self.ib.client.updateReqId(orderId + 1)
    def openOrderEnd(self):
        """Resolve the pending 'openOrders' request."""
        self._endReq('openOrders')
    def completedOrder(
            self, contract: Contract, order: Order, orderState: OrderState):
        """Collect one completed order for the pending 'completedOrders'
        request, indexing it by permId when not seen before."""
        contract = Contract.create(**dataclassAsDict(contract))
        orderStatus = OrderStatus(
            orderId=order.orderId, status=orderState.status)
        trade = Trade(contract, order, orderStatus, [], [])
        self._results['completedOrders'].append(trade)
        if order.permId not in self.permId2Trade:
            self.trades[order.permId] = trade
            self.permId2Trade[order.permId] = trade
    def completedOrdersEnd(self):
        """Resolve the pending 'completedOrders' request."""
        self._endReq('completedOrders')
    def orderStatus(
            self, orderId: int, status: str, filled: float, remaining: float,
            avgFillPrice: float, permId: int, parentId: int,
            lastFillPrice: float, clientId: int, whyHeld: str,
            mktCapPrice: float = 0.0):
        """Update the matching trade's order status, append a log entry and
        emit the status/filled/cancelled events as appropriate."""
        key = self.orderKey(clientId, orderId, permId)
        trade = self.trades.get(key)
        if trade:
            msg: Optional[str]
            oldStatus = trade.orderStatus.status
            new = dict(
                status=status, filled=filled,
                remaining=remaining, avgFillPrice=avgFillPrice,
                permId=permId, parentId=parentId,
                lastFillPrice=lastFillPrice, clientId=clientId,
                whyHeld=whyHeld, mktCapPrice=mktCapPrice)
            curr = dataclassAsDict(trade.orderStatus)
            # did any of the reported fields actually change?
            isChanged = curr != {**curr, **new}
            if isChanged:
                dataclassUpdate(trade.orderStatus, **new)
                msg = ''
            elif (status == 'Submitted' and trade.log
                    and trade.log[-1].message == 'Modify'):
                # order modifications are acknowledged
                msg = 'Modified'
            else:
                # nothing changed: no log entry, no events
                msg = None
            if msg is not None:
                logEntry = TradeLogEntry(self.lastTime, status, msg)
                trade.log.append(logEntry)
                self._logger.info(f'orderStatus: {trade}')
                self.ib.orderStatusEvent.emit(trade)
                trade.statusEvent.emit(trade)
                if status != oldStatus:
                    if status == OrderStatus.Filled:
                        trade.filledEvent.emit(trade)
                    elif status == OrderStatus.Cancelled:
                        trade.cancelledEvent.emit(trade)
        else:
            self._logger.error(
                'orderStatus: No order found for '
                'orderId %s and clientId %s', orderId, clientId)
def execDetails(
        self, reqId: int, contract: Contract, execution: Execution):
    """
    This wrapper handles both live fills and responses to
    reqExecutions.

    Live fills (reqId not tied to a pending request) are stamped with
    the receive time and emitted; request responses are collected into
    the request's result list instead.
    """
    self._logger.info(f'execDetails {execution}')
    if execution.orderId == UNSET_INTEGER:
        # bug in TWS: executions of manual orders have unset value
        execution.orderId = 0
    # Prefer lookup by permId; fall back to the (clientId, orderId,
    # permId) composite key.
    trade = self.permId2Trade.get(execution.permId)
    if not trade:
        key = self.orderKey(
            execution.clientId, execution.orderId, execution.permId)
        trade = self.trades.get(key)
    if trade and contract == trade.contract:
        # Reuse the trade's (already specialized) contract object.
        contract = trade.contract
    else:
        contract = Contract.create(**dataclassAsDict(contract))
    execId = execution.execId
    isLive = reqId not in self._futures
    time = self.lastTime if isLive else execution.time
    fill = Fill(contract, execution, CommissionReport(), time)
    if execId not in self.fills:
        # first time we see this execution so add it
        self.fills[execId] = fill
        if trade:
            trade.fills.append(fill)
            logEntry = TradeLogEntry(
                time,
                trade.orderStatus.status,
                f'Fill {execution.shares}@{execution.price}')
            trade.log.append(logEntry)
            if isLive:
                self._logger.info(f'execDetails: {fill}')
                self.ib.execDetailsEvent.emit(trade, fill)
                trade.fillEvent(trade, fill)
    if not isLive:
        self._results[reqId].append(fill)
def execDetailsEnd(self, reqId: int):
    """Finish the reqExecutions request, delivering collected fills."""
    self._endReq(reqId)
def commissionReport(self, commissionReport: CommissionReport):
    """Attach a commission report to its fill and emit events."""
    # Normalize IB's "unset" sentinel values to 0.0.
    if commissionReport.yield_ == UNSET_DOUBLE:
        commissionReport.yield_ = 0.0
    if commissionReport.realizedPNL == UNSET_DOUBLE:
        commissionReport.realizedPNL = 0.0
    fill = self.fills.get(commissionReport.execId)
    if fill:
        # Update the fill's report in place.
        report = dataclassUpdate(fill.commissionReport, commissionReport)
        self._logger.info(f'commissionReport: {report}')
        trade = self.permId2Trade.get(fill.execution.permId)
        if trade:
            self.ib.commissionReportEvent.emit(trade, fill, report)
            trade.commissionReportEvent.emit(trade, fill, report)
        else:
            # this is not a live execution and the order was filled
            # before this connection started
            pass
    else:
        # commission report is not for this client
        pass
def orderBound(self, reqId: int, apiClientId: int, apiOrderId: int):
    """Ignored: order-bound notifications are not used."""
    pass
def contractDetails(self, reqId: int, contractDetails: ContractDetails):
    """Collect one ContractDetails result for the request."""
    self._results[reqId].append(contractDetails)

# Bond contract details arrive through the same code path.
bondContractDetails = contractDetails
def contractDetailsEnd(self, reqId: int):
    """Finish the contract-details request."""
    self._endReq(reqId)
def symbolSamples(
        self, reqId: int, contractDescriptions: List[ContractDescription]):
    """Deliver matching-symbols results and finish the request."""
    self._endReq(reqId, contractDescriptions)
def marketRule(
        self, marketRuleId: int, priceIncrements: List[PriceIncrement]):
    """Deliver a market rule's price increments (string-keyed request)."""
    self._endReq(f'marketRule-{marketRuleId}', priceIncrements)
def marketDataType(self, reqId: int, marketDataId: int):
    """Remember the market data type (live/frozen/delayed...) per reqId."""
    self.reqId2MarketDataType[reqId] = marketDataId
def realtimeBar(
        self, reqId: int, time: int, open_: float, high: float, low: float,
        close: float, volume: int, wap: float, count: int):
    """Append a realtime bar to its subscriber list and emit updates."""
    dt = datetime.fromtimestamp(time, timezone.utc)
    # -1: the endTime field is unused for realtime bars.
    bar = RealTimeBar(dt, -1, open_, high, low, close, volume, wap, count)
    bars = self.reqId2Subscriber.get(reqId)
    if bars is not None:
        bars.append(bar)
        self.ib.barUpdateEvent.emit(bars, True)
        bars.updateEvent.emit(bars, True)
def historicalData(self, reqId: int, bar: BarData):
    """Collect one historical bar, converting its date string in place."""
    results = self._results.get(reqId)
    if results is not None:
        bar.date = parseIBDatetime(bar.date)  # type: ignore
        results.append(bar)
def historicalDataEnd(self, reqId: int, _start: str, _end: str):
    """Finish the historical-data request."""
    self._endReq(reqId)
def historicalDataUpdate(self, reqId: int, bar: BarData):
    """Apply a keepUpToDate bar update to the subscribed bar list.

    A strictly newer bar is appended; a same-time bar replaces the
    last one if it differs; older or identical bars are dropped.
    """
    bars = self.reqId2Subscriber.get(reqId)
    if not bars:
        return
    bar.date = parseIBDatetime(bar.date)  # type: ignore
    lastDate = bars[-1].date
    if bar.date < lastDate:
        # Out-of-order update: ignore.
        return
    # NOTE: bars is non-empty here (guarded above), so the len() == 0
    # branch of this expression can never be taken.
    hasNewBar = len(bars) == 0 or bar.date > lastDate
    if hasNewBar:
        bars.append(bar)
    elif bars[-1] != bar:
        bars[-1] = bar
    else:
        # Identical to the current last bar: nothing to emit.
        return
    self.ib.barUpdateEvent.emit(bars, hasNewBar)
    bars.updateEvent.emit(bars, hasNewBar)
def headTimestamp(self, reqId: int, headTimestamp: str):
    """Deliver the earliest-data timestamp, or fail the request if the
    timestamp string cannot be parsed."""
    try:
        dt = parseIBDatetime(headTimestamp)
        self._endReq(reqId, dt)
    except ValueError as exc:
        self._endReq(reqId, exc, False)
def historicalTicks(
        self, reqId: int, ticks: List[HistoricalTick], done: bool):
    """Accumulate midpoint historical ticks; finish when done."""
    result = self._results.get(reqId)
    if result is not None:
        result += ticks
    if done:
        self._endReq(reqId)
def historicalTicksBidAsk(
        self, reqId: int, ticks: List[HistoricalTickBidAsk], done: bool):
    """Accumulate bid/ask historical ticks; finish when done."""
    result = self._results.get(reqId)
    if result is not None:
        result += ticks
    if done:
        self._endReq(reqId)
def historicalTicksLast(
        self, reqId: int, ticks: List[HistoricalTickLast], done: bool):
    """Accumulate last-trade historical ticks; finish when done."""
    result = self._results.get(reqId)
    if result is not None:
        result += ticks
    if done:
        self._endReq(reqId)
# additional wrapper method provided by Client
def priceSizeTick(
        self, reqId: int, tickType: int, price: float, size: int):
    """Route a combined price+size tick into the matching Ticker.

    Tick type numbers follow
    https://interactivebrokers.github.io/tws-api/tick_types.html
    (the second number of each pair is the delayed variant).
    """
    ticker = self.reqId2Ticker.get(reqId)
    if not ticker:
        self._logger.error(f'priceSizeTick: Unknown reqId: {reqId}')
        return
    # https://interactivebrokers.github.io/tws-api/tick_types.html
    if tickType in (1, 66):
        # Bid price/size; skip if nothing changed.
        if price == ticker.bid and size == ticker.bidSize:
            return
        if price != ticker.bid:
            ticker.prevBid = ticker.bid
            ticker.bid = price
        if size != ticker.bidSize:
            ticker.prevBidSize = ticker.bidSize
            ticker.bidSize = size
    elif tickType in (2, 67):
        # Ask price/size; skip if nothing changed.
        if price == ticker.ask and size == ticker.askSize:
            return
        if price != ticker.ask:
            ticker.prevAsk = ticker.ask
            ticker.ask = price
        if size != ticker.askSize:
            ticker.prevAskSize = ticker.askSize
            ticker.askSize = size
    elif tickType in (4, 68):
        # Last trade price/size.
        if price != ticker.last:
            ticker.prevLast = ticker.last
            ticker.last = price
        if size != ticker.lastSize:
            ticker.prevLastSize = ticker.lastSize
            ticker.lastSize = size
    elif tickType in (6, 72):
        ticker.high = price
    elif tickType in (7, 73):
        ticker.low = price
    elif tickType in (9, 75):
        ticker.close = price
    elif tickType in (14, 76):
        ticker.open = price
    elif tickType == 15:
        ticker.low13week = price
    elif tickType == 16:
        ticker.high13week = price
    elif tickType == 17:
        ticker.low26week = price
    elif tickType == 18:
        ticker.high26week = price
    elif tickType == 19:
        ticker.low52week = price
    elif tickType == 20:
        ticker.high52week = price
    elif tickType == 35:
        ticker.auctionPrice = price
    elif tickType == 37:
        ticker.markPrice = price
    elif tickType == 50:
        ticker.bidYield = price
    elif tickType == 51:
        ticker.askYield = price
    elif tickType == 52:
        ticker.lastYield = price
    # Record the raw tick (zero price AND zero size is noise) and mark
    # the ticker as pending for the next update cycle.
    if price or size:
        tick = TickData(self.lastTime, tickType, price, size)
        ticker.ticks.append(tick)
    ticker.marketDataType = self.reqId2MarketDataType.get(reqId, 0)
    self.pendingTickers.add(ticker)
def tickSize(self, reqId: int, tickType: int, size: int):
    """Route a size-only tick into the matching Ticker.

    Tick type numbers follow
    https://interactivebrokers.github.io/tws-api/tick_types.html
    """
    ticker = self.reqId2Ticker.get(reqId)
    if not ticker:
        self._logger.error(f'tickSize: Unknown reqId: {reqId}')
        return
    # Placeholder price recorded in the TickData when the tick carries
    # no associated price.
    price = -1.0
    # https://interactivebrokers.github.io/tws-api/tick_types.html
    if tickType in (0, 69):
        # Bid size.
        if size == ticker.bidSize:
            return
        price = ticker.bid
        ticker.prevBidSize = ticker.bidSize
        ticker.bidSize = size
    elif tickType in (3, 70):
        # Ask size.
        if size == ticker.askSize:
            return
        price = ticker.ask
        ticker.prevAskSize = ticker.askSize
        ticker.askSize = size
    elif tickType in (5, 71):
        # Last size; meaningless without a known last price.
        price = ticker.last
        if isNan(price):
            return
        if size != ticker.lastSize:
            ticker.prevLastSize = ticker.lastSize
            ticker.lastSize = size
    elif tickType in (8, 74):
        ticker.volume = size
    elif tickType == 21:
        ticker.avVolume = size
    elif tickType == 27:
        ticker.callOpenInterest = size
    elif tickType == 28:
        ticker.putOpenInterest = size
    elif tickType == 29:
        ticker.callVolume = size
    elif tickType == 30:
        ticker.putVolume = size
    elif tickType == 34:
        ticker.auctionVolume = size
    elif tickType == 36:
        ticker.auctionImbalance = size
    elif tickType == 86:
        ticker.futuresOpenInterest = size
    elif tickType == 87:
        ticker.avOptionVolume = size
    elif tickType == 89:
        ticker.shortableShares = size
    # Record the raw tick and mark the ticker pending.
    if price or size:
        tick = TickData(self.lastTime, tickType, price, size)
        ticker.ticks.append(tick)
    ticker.marketDataType = self.reqId2MarketDataType.get(reqId, 0)
    self.pendingTickers.add(ticker)
def tickSnapshotEnd(self, reqId: int):
    """Finish a snapshot market data request."""
    self._endReq(reqId)
def tickByTickAllLast(
        self, reqId: int, tickType: int, time: int, price: float,
        size: int, tickAttribLast: TickAttribLast,
        exchange, specialConditions):
    """Handle a tick-by-tick last/all-last trade tick."""
    ticker = self.reqId2Ticker.get(reqId)
    if not ticker:
        self._logger.error(f'tickByTickAllLast: Unknown reqId: {reqId}')
        return
    # Track previous last price/size for change detection.
    if price != ticker.last:
        ticker.prevLast = ticker.last
        ticker.last = price
    if size != ticker.lastSize:
        ticker.prevLastSize = ticker.lastSize
        ticker.lastSize = size
    tick = TickByTickAllLast(
        tickType, self.lastTime, price, size, tickAttribLast,
        exchange, specialConditions)
    ticker.tickByTicks.append(tick)
    self.pendingTickers.add(ticker)
def tickByTickBidAsk(
        self, reqId: int, time: int, bidPrice: float, askPrice: float,
        bidSize: int, askSize: int, tickAttribBidAsk: TickAttribBidAsk):
    """Handle a tick-by-tick bid/ask quote tick."""
    ticker = self.reqId2Ticker.get(reqId)
    if not ticker:
        self._logger.error(f'tickByTickBidAsk: Unknown reqId: {reqId}')
        return
    # Update bid side, remembering previous values on change.
    if bidPrice != ticker.bid:
        ticker.prevBid = ticker.bid
        ticker.bid = bidPrice
    if bidSize != ticker.bidSize:
        ticker.prevBidSize = ticker.bidSize
        ticker.bidSize = bidSize
    # Update ask side likewise.
    if askPrice != ticker.ask:
        ticker.prevAsk = ticker.ask
        ticker.ask = askPrice
    if askSize != ticker.askSize:
        ticker.prevAskSize = ticker.askSize
        ticker.askSize = askSize
    tick = TickByTickBidAsk(
        self.lastTime, bidPrice, askPrice, bidSize, askSize,
        tickAttribBidAsk)
    ticker.tickByTicks.append(tick)
    self.pendingTickers.add(ticker)
def tickByTickMidPoint(self, reqId: int, time: int, midPoint: float):
    """Handle a tick-by-tick midpoint tick."""
    ticker = self.reqId2Ticker.get(reqId)
    if not ticker:
        self._logger.error(f'tickByTickMidPoint: Unknown reqId: {reqId}')
        return
    tick = TickByTickMidPoint(self.lastTime, midPoint)
    ticker.tickByTicks.append(tick)
    self.pendingTickers.add(ticker)
def tickString(self, reqId: int, tickType: int, value: str):
    """Parse string-valued ticks (fundamentals, RT volume, dividends).

    Malformed values are logged and dropped rather than raised.
    """
    ticker = self.reqId2Ticker.get(reqId)
    if not ticker:
        return
    try:
        if tickType == 47:
            # https://interactivebrokers.github.io/tws-api/fundamental_ratios_tags.html
            d = dict(t.split('=')  # type: ignore
                     for t in value.split(';') if t)  # type: ignore
            for k, v in d.items():
                with suppress(ValueError):
                    # -99999.99 is IB's "not available" marker.
                    if v == '-99999.99':
                        v = 'nan'
                    # Try float first; the int() conversion then only
                    # sticks for integral strings (its ValueError is
                    # suppressed otherwise, leaving the float).
                    d[k] = float(v)
                    d[k] = int(v)
            ticker.fundamentalRatios = FundamentalRatios(**d)
        elif tickType in (48, 77):
            # RT Volume or RT Trade Volume string format:
            # price;size;ms since epoch;total volume;VWAP;single trade
            # example:
            # 701.28;1;1348075471534;67854;701.46918464;true
            priceStr, sizeStr, rtTime, volume, vwap, _ = value.split(';')
            if volume:
                if tickType == 48:
                    ticker.rtVolume = int(volume)
                elif tickType == 77:
                    ticker.rtTradeVolume = int(volume)
            if vwap:
                ticker.vwap = float(vwap)
            if rtTime:
                ticker.rtTime = datetime.fromtimestamp(
                    int(rtTime) / 1000, timezone.utc)
            if priceStr == '':
                # No trade in this update; nothing more to record.
                return
            price = float(priceStr)
            size = int(sizeStr)
            if price and size:
                if ticker.prevLast != ticker.last:
                    ticker.prevLast = ticker.last
                    ticker.last = price
                if ticker.prevLastSize != ticker.lastSize:
                    ticker.prevLastSize = ticker.lastSize
                    ticker.lastSize = size
                tick = TickData(self.lastTime, tickType, price, size)
                ticker.ticks.append(tick)
        elif tickType == 59:
            # Dividend tick:
            # https://interactivebrokers.github.io/tws-api/tick_types.html#ib_dividends
            # example value: '0.83,0.92,20130219,0.23'
            past12, next12, nextDate, nextAmount = value.split(',')
            ticker.dividends = Dividends(
                float(past12) if past12 else None,
                float(next12) if next12 else None,
                parseIBDatetime(nextDate) if nextDate else None,
                float(nextAmount) if nextAmount else None)
        self.pendingTickers.add(ticker)
    except ValueError:
        self._logger.error(
            f'tickString with tickType {tickType}: '
            f'malformed value: {value!r}')
def tickGeneric(self, reqId: int, tickType: int, value: float):
    """Route generic numeric ticks (volatility, halted, rates...)."""
    ticker = self.reqId2Ticker.get(reqId)
    if not ticker:
        return
    try:
        value = float(value)
    except ValueError:
        self._logger.error(f'genericTick: malformed value: {value!r}')
        return
    if tickType == 23:
        ticker.histVolatility = value
    elif tickType == 24:
        ticker.impliedVolatility = value
    elif tickType == 31:
        ticker.indexFuturePremium = value
    elif tickType == 49:
        ticker.halted = value
    elif tickType == 54:
        ticker.tradeCount = value
    elif tickType == 55:
        ticker.tradeRate = value
    elif tickType == 56:
        ticker.volumeRate = value
    elif tickType == 58:
        ticker.rtHistVolatility = value
    # Record the raw tick regardless of type; size is unused here.
    tick = TickData(self.lastTime, tickType, value, 0)
    ticker.ticks.append(tick)
    self.pendingTickers.add(ticker)
def tickReqParams(
        self, reqId: int, minTick: float, bboExchange: str,
        snapshotPermissions: int):
    """Ignored: tick request parameters are not used."""
    pass
def mktDepthExchanges(
        self, depthMktDataDescriptions: List[DepthMktDataDescription]):
    """Deliver the exchanges offering market depth (string-keyed)."""
    self._endReq('mktDepthExchanges', depthMktDataDescriptions)
def updateMktDepth(
        self, reqId: int, position: int, operation: int, side: int,
        price: float, size: int):
    """Level-1 depth update: forwarded to the L2 handler with an
    empty market maker."""
    self.updateMktDepthL2(
        reqId, position, '', operation, side, price, size)
def updateMktDepthL2(
        self, reqId: int, position: int, marketMaker: str, operation: int,
        side: int, price: float, size: int, isSmartDepth: bool = False):
    """Apply one DOM book change and record it as a MktDepthData tick."""
    # operation: 0 = insert, 1 = update, 2 = delete
    # side: 0 = ask, 1 = bid
    ticker = self.reqId2Ticker[reqId]
    dom = ticker.domBids if side else ticker.domAsks
    if operation == 0:
        dom.insert(position, DOMLevel(price, size, marketMaker))
    elif operation == 1:
        dom[position] = DOMLevel(price, size, marketMaker)
    elif operation == 2:
        if position < len(dom):
            level = dom.pop(position)
            # Record the delete with the removed level's price and
            # zero size.
            price = level.price
            size = 0
    tick = MktDepthData(
        self.lastTime, position, marketMaker, operation, side, price, size)
    ticker.domTicks.append(tick)
    self.pendingTickers.add(ticker)
def tickOptionComputation(
        self, reqId: int, tickType: int, impliedVol: float, delta: float,
        optPrice: float, pvDividend: float, gamma: float, vega: float,
        theta: float, undPrice: float):
    """Handle option greeks, for both streaming ticks and one-shot
    calculateImpliedVolatility/calculateOptionPrice requests."""
    comp = OptionComputation(
        impliedVol, delta, optPrice, pvDividend,
        gamma, vega, theta, undPrice)
    ticker = self.reqId2Ticker.get(reqId)
    if ticker:
        # reply from reqMktData
        # https://interactivebrokers.github.io/tws-api/tick_types.html
        if tickType in (10, 80):
            ticker.bidGreeks = comp
        elif tickType in (11, 81):
            ticker.askGreeks = comp
        elif tickType in (12, 82):
            ticker.lastGreeks = comp
        elif tickType in (13, 83):
            ticker.modelGreeks = comp
        self.pendingTickers.add(ticker)
    elif reqId in self._futures:
        # reply from calculateImpliedVolatility or calculateOptionPrice
        self._endReq(reqId, comp)
    else:
        self._logger.error(
            f'tickOptionComputation: Unknown reqId: {reqId}')
def fundamentalData(self, reqId: int, data: str):
    """Deliver the fundamental-data XML and finish the request."""
    self._endReq(reqId, data)
def scannerParameters(self, xml: str):
    """Deliver the scanner-parameters XML (string-keyed request)."""
    self._endReq('scannerParams', xml)
def scannerData(
        self, reqId: int, rank: int,
        contractDetails: ContractDetails,
        distance: str, benchmark: str, projection: str,
        legsStr: str):
    """Collect one scanner row, for both subscriptions and one-shot
    requests; rank 0 starts a fresh result set."""
    data = ScanData(
        rank, contractDetails, distance, benchmark, projection, legsStr)
    # Prefer the live subscription list, else the one-shot results.
    dataList = self.reqId2Subscriber.get(reqId)
    if dataList is None:
        dataList = self._results.get(reqId)
    if dataList is not None:
        if rank == 0:
            dataList.clear()
        dataList.append(data)
def scannerDataEnd(self, reqId: int):
    """Finish a one-shot scan, or emit update events for a
    scanner subscription."""
    dataList = self._results.get(reqId)
    if dataList is not None:
        self._endReq(reqId)
    else:
        dataList = self.reqId2Subscriber.get(reqId)
        if dataList is not None:
            self.ib.scannerDataEvent.emit(dataList)
            dataList.updateEvent.emit(dataList)
def histogramData(self, reqId: int, items: List[HistogramData]):
    """Deliver histogram entries as fresh HistogramData copies."""
    result = [HistogramData(item.price, item.count) for item in items]
    self._endReq(reqId, result)
def securityDefinitionOptionParameter(
        self, reqId: int, exchange: str, underlyingConId: int,
        tradingClass: str, multiplier: str, expirations: List[str],
        strikes: List[float]):
    """Collect one option chain (per-exchange) for the request."""
    chain = OptionChain(
        exchange, underlyingConId, tradingClass, multiplier,
        expirations, strikes)
    self._results[reqId].append(chain)
def securityDefinitionOptionParameterEnd(self, reqId: int):
    """Finish the option-chains request."""
    self._endReq(reqId)
def newsProviders(self, newsProviders: List[NewsProvider]):
    """Deliver subscribed news providers as NewsProvider copies."""
    newsProviders = [
        NewsProvider(code=p.code, name=p.name)
        for p in newsProviders]
    self._endReq('newsProviders', newsProviders)
def tickNews(
        self, _reqId: int, timeStamp: int, providerCode: str,
        articleId: str, headline: str, extraData: str):
    """Store a live news headline and emit it."""
    news = NewsTick(
        timeStamp, providerCode, articleId, headline, extraData)
    self.newsTicks.append(news)
    self.ib.tickNewsEvent.emit(news)
def newsArticle(self, reqId: int, articleType: int, articleText: str):
    """Deliver the requested news article and finish the request."""
    article = NewsArticle(articleType, articleText)
    self._endReq(reqId, article)
def historicalNews(
        self, reqId: int, time: str, providerCode: str,
        articleId: str, headline: str):
    """Collect one historical news headline for the request."""
    dt = parseIBDatetime(time)
    dt = cast(datetime, dt)
    article = HistoricalNews(dt, providerCode, articleId, headline)
    self._results[reqId].append(article)
def historicalNewsEnd(self, reqId: int, _hasMore: bool):
    """Finish the historical-news request."""
    self._endReq(reqId)
def updateNewsBulletin(
        self, msgId: int, msgType: int, message: str,
        origExchange: str):
    """Store the latest bulletin per msgId and emit it."""
    bulletin = NewsBulletin(msgId, msgType, message, origExchange)
    self.msgId2NewsBulletin[msgId] = bulletin
    self.ib.newsBulletinEvent.emit(bulletin)
def receiveFA(self, _faDataType: int, faXmlData: str):
    """Deliver financial-advisor XML (string-keyed request)."""
    self._endReq('requestFA', faXmlData)
def currentTime(self, time: int):
    """Deliver the server time as an aware UTC datetime."""
    dt = datetime.fromtimestamp(time, timezone.utc)
    self._endReq('currentTime', dt)
def tickEFP(
        self, reqId: int, tickType: int, basisPoints: float,
        formattedBasisPoints: str, totalDividends: float,
        holdDays: int, futureLastTradeDate: str, dividendImpact: float,
        dividendsToLastTradeDate: float):
    """Ignored: EFP ticks are not used."""
    pass
def error(self, reqId: int, errorCode: int, errorString: str):
    """Handle an error/warning message from TWS.

    Warnings are only logged. Real errors fail the pending request
    (or raise, depending on RaiseRequestErrors) or cancel the
    affected order. A few specific codes get special recovery
    handling before the generic errorEvent is emitted.
    """
    # https://interactivebrokers.github.io/tws-api/message_codes.html
    warningCodes = {165, 202, 399, 404, 434, 492, 10167}
    isWarning = errorCode in warningCodes or 2100 <= errorCode < 2200
    msg = (
        f'{"Warning" if isWarning else "Error"} '
        f'{errorCode}, reqId {reqId}: {errorString}')
    contract = self._reqId2Contract.get(reqId)
    if contract:
        msg += f', contract: {contract}'
    if isWarning:
        self._logger.info(msg)
    else:
        self._logger.error(msg)
        if reqId in self._futures:
            # the request failed
            if self.ib.RaiseRequestErrors:
                error = RequestError(reqId, errorCode, errorString)
                self._endReq(reqId, error, success=False)
            else:
                self._endReq(reqId)
        elif (self.clientId, reqId) in self.trades:
            # something is wrong with the order, cancel it
            trade = self.trades[(self.clientId, reqId)]
            if not trade.isDone():
                status = trade.orderStatus.status = OrderStatus.Cancelled
                logEntry = TradeLogEntry(self.lastTime, status, msg)
                trade.log.append(logEntry)
                self._logger.warning(f'Canceled order: {trade}')
                self.ib.orderStatusEvent.emit(trade)
                trade.statusEvent.emit(trade)
                trade.cancelledEvent.emit(trade)
    if errorCode == 165:
        # for scan data subscription there are no longer matching results
        dataList = self.reqId2Subscriber.get(reqId)
        if dataList:
            dataList.clear()
            dataList.updateEvent.emit(dataList)
    elif errorCode == 317:
        # Market depth data has been RESET
        ticker = self.reqId2Ticker.get(reqId)
        if ticker:
            # clear all DOM levels
            ticker.domTicks += [MktDepthData(
                self.lastTime, 0, '', 2, 0, level.price, 0)
                for level in ticker.domAsks]
            ticker.domTicks += [MktDepthData(
                self.lastTime, 0, '', 2, 1, level.price, 0)
                for level in ticker.domBids]
            ticker.domAsks.clear()
            ticker.domBids.clear()
            self.pendingTickers.add(ticker)
    elif errorCode == 10225:
        # Bust event occurred, current subscription is deactivated.
        # Please resubscribe real-time bars immediately
        bars = self.reqId2Subscriber.get(reqId)
        if isinstance(bars, RealTimeBarList):
            self.ib.client.cancelRealTimeBars(reqId)
            self.ib.client.reqRealTimeBars(
                reqId, bars.contract, bars.barSize, bars.whatToShow,
                bars.useRTH, bars.realTimeBarsOptions)
        elif isinstance(bars, BarDataList):
            self.ib.client.cancelHistoricalData(reqId)
            self.ib.client.reqHistoricalData(
                reqId, bars.contract, bars.endDateTime,
                bars.durationStr, bars.barSizeSetting, bars.whatToShow,
                bars.useRTH, bars.formatDate, bars.keepUpToDate,
                bars.chartOptions)
    self.ib.errorEvent.emit(reqId, errorCode, errorString, contract)
def tcpDataArrived(self):
    """Start a new update cycle: stamp the arrival time and reset the
    per-cycle tick buffers of the previously pending tickers."""
    self.lastTime = datetime.now(timezone.utc)
    for ticker in self.pendingTickers:
        ticker.ticks = []
        ticker.tickByTicks = []
        ticker.domTicks = []
    self.pendingTickers = set()
def tcpDataProcessed(self):
    """End the update cycle: emit updateEvent and flush pending
    tickers to their subscribers."""
    self.ib.updateEvent.emit()
    if self.pendingTickers:
        for ticker in self.pendingTickers:
            ticker.time = self.lastTime
            ticker.updateEvent.emit(ticker)
        self.ib.pendingTickersEvent.emit(self.pendingTickers)
| bsd-2-clause |
kaedroho/django | django/template/loaders/cached.py | 42 | 3505 | """
Wrapper class that takes a list of template loaders as an argument and attempts
to load templates from them in order, caching the result.
"""
import hashlib
from django.template import TemplateDoesNotExist
from django.template.backends.django import copy_exception
from .base import Loader as BaseLoader
class Loader(BaseLoader):
    """Chain of template loaders fronted by an in-memory result cache.

    Successful lookups cache the compiled template. Failed lookups are
    cached too so repeated misses stay cheap: as the bare
    TemplateDoesNotExist class when debugging is off, or as an unraised
    exception copy (to preserve debug data without leaking traceback
    references) when it is on.
    """

    def __init__(self, engine, loaders):
        self.get_template_cache = {}
        self.loaders = engine.get_template_loaders(loaders)
        super().__init__(engine)

    def get_contents(self, origin):
        # Delegate to whichever wrapped loader produced this origin.
        return origin.loader.get_contents(origin)

    def get_template(self, template_name, skip=None):
        """
        Perform the caching that gives this loader its name. Many of the
        attempted templates are typically missing, so both hits and
        misses are cached; see ticket #26306 for why misses are stored
        as classes or unraised exception copies depending on the debug
        setting.
        """
        key = self.cache_key(template_name, skip)
        hit = self.get_template_cache.get(key)
        if hit:
            # A cached miss is either the exception class itself
            # (non-debug) or an unraised exception instance (debug).
            if isinstance(hit, type) and issubclass(hit, TemplateDoesNotExist):
                raise hit(template_name)
            if isinstance(hit, TemplateDoesNotExist):
                raise copy_exception(hit)
            return hit
        try:
            template = super().get_template(template_name, skip)
        except TemplateDoesNotExist as exc:
            if self.engine.debug:
                # Cache an unraised copy to avoid keeping tracebacks alive.
                self.get_template_cache[key] = copy_exception(exc)
            else:
                self.get_template_cache[key] = TemplateDoesNotExist
            raise
        self.get_template_cache[key] = template
        return template

    def get_template_sources(self, template_name):
        for wrapped in self.loaders:
            yield from wrapped.get_template_sources(template_name)

    def cache_key(self, template_name, skip=None):
        """
        Generate a cache key for the template name and skip.

        If skip is provided, only origins whose template_name matches
        are folded into the key, so a template shared by several extend
        chains (x -> a -> a, y -> a -> a, ...) is parsed and cached
        only once.
        """
        skip_prefix = ''
        if skip:
            matching = [
                origin.name
                for origin in skip
                if origin.template_name == template_name
            ]
            if matching:
                skip_prefix = self.generate_hash(matching)
        parts = (str(template_name), skip_prefix)
        return '-'.join(part for part in parts if part)

    def generate_hash(self, values):
        # Stable digest of the joined origin names.
        return hashlib.sha1('|'.join(values).encode()).hexdigest()

    def reset(self):
        """Empty the template cache."""
        self.get_template_cache.clear()
| bsd-3-clause |
apporc/cinder | cinder/tests/unit/api/v2/stubs.py | 3 | 9006 | # Copyright 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import iso8601
from cinder import exception as exc
from cinder import objects
from cinder.tests.unit import fake_volume
# Well-known fake identifiers reused across the v2 API tests.
FAKE_UUID = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
FAKE_UUIDS = {}
TEST_SNAPSHOT_UUID = '00000000-0000-0000-0000-000000000001'

# Defaults baked into the stub volumes below.
DEFAULT_VOL_NAME = "displayname"
DEFAULT_VOL_DESCRIPTION = "displaydesc"
DEFAULT_VOL_SIZE = 1
DEFAULT_VOL_TYPE = "vol_type_name"
DEFAULT_VOL_STATUS = "fakestatus"
DEFAULT_VOL_ID = '1'
# TODO(vbala): api.v1 tests use hard-coded "fakeaz" for verifying
# post-conditions. Update value to "zone1:host1" once we remove
# api.v1 tests and use it in api.v2 tests.
DEFAULT_AZ = "fakeaz"
def stub_volume(id, **kwargs):
    """Build a fake volume dict for the given id.

    Any keyword argument overrides the corresponding default field.
    Passing volume_glance_metadata forces bootable=True; passing
    attach_status='detached' drops the 'attached_mode' admin metadata.
    """
    volume = {
        'id': id,
        'user_id': 'fakeuser',
        'project_id': 'fakeproject',
        'host': 'fakehost',
        'size': DEFAULT_VOL_SIZE,
        'availability_zone': DEFAULT_AZ,
        'status': DEFAULT_VOL_STATUS,
        'migration_status': None,
        'attach_status': 'attached',
        'name': 'vol name',
        'display_name': DEFAULT_VOL_NAME,
        'display_description': DEFAULT_VOL_DESCRIPTION,
        'updated_at': datetime.datetime(1900, 1, 1, 1, 1, 1,
                                        tzinfo=iso8601.iso8601.Utc()),
        'created_at': datetime.datetime(1900, 1, 1, 1, 1, 1,
                                        tzinfo=iso8601.iso8601.Utc()),
        'snapshot_id': None,
        'source_volid': None,
        'volume_type_id': '3e196c20-3c06-11e2-81c1-0800200c9a66',
        'encryption_key_id': None,
        'volume_admin_metadata': [{'key': 'attached_mode', 'value': 'rw'},
                                  {'key': 'readonly', 'value': 'False'}],
        'bootable': False,
        'launched_at': datetime.datetime(1900, 1, 1, 1, 1, 1,
                                         tzinfo=iso8601.iso8601.Utc()),
        'volume_type': {'name': DEFAULT_VOL_TYPE},
        'replication_status': 'disabled',
        'replication_extended_status': None,
        'replication_driver_data': None,
        'volume_attachment': [],
        'multiattach': False,
    }
    volume.update(kwargs)
    if kwargs.get('volume_glance_metadata', None):
        # Volumes created from images are bootable.
        volume['bootable'] = True
    if kwargs.get('attach_status') == 'detached':
        # Detached volumes have no 'attached_mode' admin metadata.
        del volume['volume_admin_metadata'][0]
    return volume
def stub_volume_create(self, context, size, name, description, snapshot=None,
                       **param):
    """Fake volume create: return a stub volume reflecting the request."""
    source_volume = param.get('source_volume') or {}
    try:
        snapshot_id = snapshot['id']
    except (KeyError, TypeError):
        # No snapshot given, or it carries no id.
        snapshot_id = None
    vol = stub_volume(DEFAULT_VOL_ID)
    vol.update({
        'size': size,
        'display_name': name,
        'display_description': description,
        'source_volid': source_volume.get('id'),
        'bootable': False,
        'volume_attachment': [],
        'snapshot_id': snapshot_id,
        'availability_zone': param.get('availability_zone', 'fakeaz'),
    })
    return vol
def stub_volume_api_create(self, context, *args, **kwargs):
    """Like stub_volume_create, but return a fake Volume object."""
    vol = stub_volume_create(self, context, *args, **kwargs)
    return fake_volume.fake_volume_obj(context, **vol)
def stub_image_service_detail(self, context, **kwargs):
    """Fake image listing, filtered by exact image name."""
    name = kwargs.get('filters', {'name': ''})['name']
    known_images = {
        "Fedora-x86_64-20-20140618-sda": [
            {'id': "c905cedb-7281-47e4-8a62-f26bc5fc4c77"}],
        "multi": [
            {'id': "c905cedb-7281-47e4-8a62-f26bc5fc4c77"},
            {'id': "c905cedb-abcd-47e4-8a62-f26bc5fc4c77"}],
    }
    return known_images.get(name, [])
def stub_volume_create_from_image(self, context, size, name, description,
                                  snapshot, volume_type, metadata,
                                  availability_zone):
    """Fake create-from-image: stub volume left in 'creating' state."""
    vol = stub_volume('1')
    vol.update({
        'status': 'creating',
        'size': size,
        'display_name': name,
        'display_description': description,
        'availability_zone': 'cinder',
        'bootable': False,
    })
    return vol
def stub_volume_update(self, context, *args, **param):
    """No-op stand-in for volume update."""
    pass
def stub_volume_delete(self, context, *args, **param):
    """No-op stand-in for volume delete."""
    pass
def stub_volume_get(self, context, volume_id, viewable_admin_meta=False):
    """Fake volume get; admin metadata is stripped unless requested."""
    volume = stub_volume(volume_id)
    if not viewable_admin_meta:
        del volume['volume_admin_metadata']
    return volume
def stub_volume_get_notfound(self, context,
                             volume_id, viewable_admin_meta=False):
    """Always raise VolumeNotFound for the requested volume."""
    # NOTE(review): volume_id is passed positionally; cinder exceptions
    # are usually raised with keyword args (volume_id=volume_id) so the
    # message template is filled in -- confirm against exc.VolumeNotFound.
    raise exc.VolumeNotFound(volume_id)
def stub_volume_get_db(context, volume_id):
    """DB-level fake get; only admins see the admin metadata."""
    volume = stub_volume(volume_id)
    if not context.is_admin:
        del volume['volume_admin_metadata']
    return volume
def stub_volume_api_get(self, context, volume_id, viewable_admin_meta=False):
    """Fake API-level get returning a fake Volume object."""
    vol = stub_volume(volume_id)
    return fake_volume.fake_volume_obj(context, **vol)
def stub_volume_get_all(context, search_opts=None, marker=None, limit=None,
                        sort_keys=None, sort_dirs=None, filters=None,
                        viewable_admin_meta=False, offset=None):
    """Return three fake volumes owned by different projects."""
    projects = ['fake', 'superfake', 'superduperfake']
    return [stub_volume(100 + i, project_id=project)
            for i, project in enumerate(projects)]
def stub_volume_get_all_by_project(self, context, marker, limit,
                                   sort_keys=None, sort_dirs=None,
                                   filters=None,
                                   viewable_admin_meta=False, offset=None):
    """Return a single fake volume, admin metadata included."""
    # NOTE: filters is normalized but otherwise unused by this stub.
    filters = filters or {}
    return [stub_volume_get(self, context, '1', viewable_admin_meta=True)]
def stub_volume_api_get_all_by_project(self, context, marker, limit,
                                       sort_keys=None, sort_dirs=None,
                                       filters=None,
                                       viewable_admin_meta=False,
                                       offset=None):
    """Return a one-element VolumeList wrapping a fake volume."""
    # NOTE: filters is normalized but otherwise unused by this stub.
    filters = filters or {}
    vol = stub_volume_get(self, context, '1',
                          viewable_admin_meta=viewable_admin_meta)
    vol_obj = fake_volume.fake_volume_obj(context, **vol)
    return objects.VolumeList(objects=[vol_obj])
def stub_snapshot(id, **kwargs):
    """Build a fake snapshot dict; kwargs override the defaults."""
    defaults = {
        'id': id,
        'volume_id': 12,
        'status': 'available',
        'volume_size': 100,
        'created_at': None,
        'display_name': 'Default name',
        'display_description': 'Default description',
        'project_id': 'fake',
        'snapshot_metadata': [],
    }
    return {**defaults, **kwargs}
def stub_snapshot_get_all(context, filters=None, marker=None, limit=None,
                          sort_keys=None, sort_dirs=None, offset=None):
    """Return three fake snapshots owned by different projects."""
    projects = ['fake', 'superfake', 'superduperfake']
    return [stub_snapshot(100 + i, project_id=project)
            for i, project in enumerate(projects)]
def stub_snapshot_get_all_by_project(context, project_id, filters=None,
                                     marker=None, limit=None, sort_keys=None,
                                     sort_dirs=None, offset=None):
    """Always return a single fake snapshot, regardless of project."""
    return [stub_snapshot(1)]
def stub_snapshot_update(self, context, *args, **param):
    """No-op stand-in for snapshot update."""
    pass
def stub_service_get_all_by_topic(context, topic, disabled=None):
    """Return one fake enabled service, regardless of topic/disabled."""
    fake_service = {'availability_zone': "zone1:host1", "disabled": 0}
    return [fake_service]
def stub_snapshot_get(self, context, snapshot_id):
    """Fake snapshot get: only TEST_SNAPSHOT_UUID exists."""
    if snapshot_id != TEST_SNAPSHOT_UUID:
        raise exc.SnapshotNotFound(snapshot_id=snapshot_id)
    return stub_snapshot(snapshot_id)
def stub_consistencygroup_get_notfound(self, context, cg_id):
    """Always raise ConsistencyGroupNotFound for the requested group."""
    raise exc.ConsistencyGroupNotFound(consistencygroup_id=cg_id)
def stub_volume_type_get(context, id, *args, **kwargs):
    """Return a fake public volume-type row carrying the given id."""
    vol_type = {
        'name': 'vol_type_name',
        'description': 'A fake volume type',
        'is_public': True,
        'projects': [],
        'extra_specs': {},
        'created_at': None,
        'deleted_at': None,
        'updated_at': None,
        'deleted': False,
    }
    vol_type['id'] = id
    return vol_type
def stub_volume_admin_metadata_get(context, volume_id, **kwargs):
    """Return fake admin metadata; detached volumes lose 'attached_mode'."""
    if kwargs.get('attach_status') == 'detached':
        return {'readonly': 'False'}
    return {'attached_mode': 'rw', 'readonly': 'False'}
| apache-2.0 |
miguelinux/vbox | src/VBox/ValidationKit/testmanager/db/partial-db-dump.py | 1 | 14427 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# $Id: partial-db-dump.py $
# pylint: disable=C0301
"""
Utility for dumping the last X days of data.
"""
__copyright__ = \
"""
Copyright (C) 2012-2016 Oracle Corporation
This file is part of VirtualBox Open Source Edition (OSE), as
available from http://www.virtualbox.org. This file is free software;
you can redistribute it and/or modify it under the terms of the GNU
General Public License (GPL) as published by the Free Software
Foundation, in version 2 as it comes in the "COPYING" file of the
VirtualBox OSE distribution. VirtualBox OSE is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
The contents of this file may alternatively be used under the terms
of the Common Development and Distribution License Version 1.0
(CDDL) only, as it comes in the "COPYING.CDDL" file of the
VirtualBox OSE distribution, in which case the provisions of the
CDDL are applicable instead of those of the GPL.
You may elect to license modified versions of this file under the
terms and conditions of either the GPL or the CDDL or both.
"""
__version__ = "$Revision: 107722 $"
# Standard python imports
import sys;
import os;
import zipfile;
from optparse import OptionParser;
import xml.etree.ElementTree as ET;
# Add Test Manager's modules path
g_ksTestManagerDir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))));
sys.path.append(g_ksTestManagerDir);
# Test Manager imports
from testmanager.core.db import TMDatabaseConnection;
from common import utils;
class PartialDbDump(object): # pylint: disable=R0903
    """
    Dumps or loads the last X days of database data.

    This is a useful tool when hacking on the test manager locally. You can get
    a small sample from the last few days from the production test manager server
    without spending hours dumping, downloading, and loading the whole database
    (because it is gigantic).
    """

    def __init__(self):
        """
        Parse command line.
        """
        oParser = OptionParser()
        oParser.add_option('-q', '--quiet', dest = 'fQuiet', action = 'store_true',
                           help = 'Quiet execution');
        oParser.add_option('-f', '--filename', dest = 'sFilename', metavar = '<filename>',
                           default = 'partial-db-dump.zip', help = 'The name of the partial database zip file to write/load.');
        oParser.add_option('-t', '--tmp-file', dest = 'sTempFile', metavar = '<temp-file>',
                           default = '/tmp/tm-partial-db-dump.pgtxt',
                           help = 'Name of temporary file for duping tables. Must be absolute');
        oParser.add_option('--days-to-dump', dest = 'cDays', metavar = '<days>', type = 'int', default = 14,
                           help = 'How many days to dump (counting backward from current date).');
        oParser.add_option('--load-dump-into-database', dest = 'fLoadDumpIntoDatabase', action = 'store_true',
                           default = False, help = 'For loading instead of dumping.');

        (self.oConfig, _) = oParser.parse_args();

    ##
    # Tables dumped in full because they're either needed in full or they normally
    # aren't large enough to bother reducing.
    kasTablesToDumpInFull = [
        'Users',
        'BuildBlacklist',
        'BuildCategories',
        'BuildSources',
        'FailureCategories',
        'FailureReasons',
        'GlobalResources',
        'Testcases',
        'TestcaseArgs',
        'TestcaseDeps',
        'TestcaseGlobalRsrcDeps',
        'TestGroups',
        'TestGroupMembers',
        'SchedGroups',
        'SchedGroupMembers', # ?
        'SchedQueues',
        'Builds', # ??
        'VcsRevisions', # ?
        'TestResultStrTab', # 36K rows, never mind complicated then.
    ];

    ##
    # Tables where we only dump partial info (the TestResult* tables are rather
    # gigantic).
    kasTablesToPartiallyDump = [
        'TestBoxes',            # 2016-05-25: ca.  641 MB
        'TestSets',             # 2016-05-25: ca.  525 MB
        'TestResults',          # 2016-05-25: ca.   13 GB
        'TestResultFiles',      # 2016-05-25: ca.   87 MB
        'TestResultMsgs',       # 2016-05-25: ca.   29 MB
        'TestResultValues',     # 2016-05-25: ca. 3728 MB
        'TestResultFailures',
        'SystemLog',
    ];

    def _doCopyTo(self, sTable, oZipFile, oDb, sSql, aoArgs = None):
        """ Does one COPY TO job: streams sTable into the temp file, then
        stores the temp file in the zip under the table name. """
        print('Dumping %s...' % (sTable,));
        if aoArgs is not None:
            sSql = oDb.formatBindArgs(sSql, aoArgs);

        # Context manager so the temporary file is closed even when
        # copyExpert raises (the old code leaked the handle on error).
        with open(self.oConfig.sTempFile, 'w') as oFile:
            oDb.copyExpert(sSql, oFile);
            cRows = oDb.getRowCount();
        print('... %s rows.' % (cRows,));

        oZipFile.write(self.oConfig.sTempFile, sTable);
        return True;

    def _doDump(self, oDb):
        """ Does the dumping of the database. Returns exit code (0). """

        oZipFile = zipfile.ZipFile(self.oConfig.sFilename, 'w', zipfile.ZIP_DEFLATED);

        oDb.begin();

        # Dumping full tables is simple.
        for sTable in self.kasTablesToDumpInFull:
            self._doCopyTo(sTable, oZipFile, oDb, 'COPY ' + sTable + ' TO STDOUT WITH (FORMAT TEXT)');

        # Figure out how far back we need to go.
        oDb.execute('SELECT CURRENT_TIMESTAMP - INTERVAL \'%s days\'' % (self.oConfig.cDays,));
        tsEffective = oDb.fetchOne()[0];
        oDb.execute('SELECT CURRENT_TIMESTAMP - INTERVAL \'%s days\'' % (self.oConfig.cDays + 2,));
        tsEffectiveSafe = oDb.fetchOne()[0];
        print('Going back to: %s (safe: %s)' % (tsEffective, tsEffectiveSafe));

        # We dump test boxes back to the safe timestamp because the test sets may
        # use slightly dated test box references and we don't wish to have dangling
        # references when loading.
        for sTable in [ 'TestBoxes', ]:
            self._doCopyTo(sTable, oZipFile, oDb,
                           'COPY (SELECT * FROM ' + sTable + ' WHERE tsExpire >= %s) TO STDOUT WITH (FORMAT TEXT)',
                           (tsEffectiveSafe,));

        # The test results needs to start with test sets and then dump everything
        # related to them.  So, figure the lowest (oldest) test set ID we'll be
        # dumping first.
        # BUGFIX: use MIN() - the previous plain SELECT fetched an arbitrary
        # row, not the lowest ID, making the idTestSet >= filters unreliable.
        oDb.execute('SELECT MIN(idTestSet) FROM TestSets WHERE tsCreated >= %s', (tsEffective, ));
        idFirstTestSet = 0;
        if oDb.getRowCount() > 0:
            idFirstTestSet = oDb.fetchOne()[0] or 0; # Aggregates yield NULL when no row matches.
        print('First test set ID: %s' % (idFirstTestSet,));

        oDb.execute('SELECT MAX(idTestSet) FROM TestSets WHERE tsCreated >= %s', (tsEffective, ));
        idLastTestSet = 0;
        if oDb.getRowCount() > 0:
            idLastTestSet = oDb.fetchOne()[0] or 0; # Ditto NULL handling.
        print('Last test set ID: %s' % (idLastTestSet,));

        oDb.execute('SELECT MAX(idTestResult) FROM TestResults WHERE tsCreated >= %s', (tsEffective, ));
        idLastTestResult = 0;
        if oDb.getRowCount() > 0:
            idLastTestResult = oDb.fetchOne()[0] or 0; # Ditto NULL handling.
        print('Last test result ID: %s' % (idLastTestResult,));

        # Tables with idTestSet member.
        for sTable in [ 'TestSets', 'TestResults', 'TestResultValues' ]:
            self._doCopyTo(sTable, oZipFile, oDb,
                           'COPY (SELECT *\n'
                           '      FROM   ' + sTable + '\n'
                           '      WHERE  idTestSet >= %s\n'
                           '         AND idTestSet <= %s\n'
                           '         AND idTestResult <= %s\n'
                           ') TO STDOUT WITH (FORMAT TEXT)'
                           , ( idFirstTestSet, idLastTestSet, idLastTestResult,));

        # Tables where we have to go via TestResult.
        for sTable in [ 'TestResultFiles', 'TestResultMsgs', 'TestResultFailures' ]:
            self._doCopyTo(sTable, oZipFile, oDb,
                           'COPY (SELECT it.*\n'
                           '      FROM   ' + sTable + ' it, TestResults tr\n'
                           '      WHERE  tr.idTestSet >= %s\n'
                           '         AND tr.idTestSet <= %s\n'
                           '         AND tr.idTestResult <= %s\n'
                           '         AND tr.tsCreated >= %s\n' # performance hack.
                           '         AND it.idTestResult = tr.idTestResult\n'
                           ') TO STDOUT WITH (FORMAT TEXT)'
                           , ( idFirstTestSet, idLastTestSet, idLastTestResult, tsEffective,));

        # Tables which goes exclusively by tsCreated.
        for sTable in [ 'SystemLog', ]:
            self._doCopyTo(sTable, oZipFile, oDb,
                           'COPY (SELECT * FROM ' + sTable + ' WHERE tsCreated >= %s) TO STDOUT WITH (FORMAT TEXT)',
                           (tsEffective,));

        oZipFile.close();
        print("Done!");
        return 0;

    def _doLoad(self, oDb):
        """ Does the loading of the dumped data into the database.
        Returns exit code (0 on success, 1 if the database isn't empty). """

        oZipFile = zipfile.ZipFile(self.oConfig.sFilename, 'r');

        # Must honour foreign key dependencies: referenced tables first.
        asTablesInLoadOrder = [
            'Users',
            'BuildBlacklist',
            'BuildCategories',
            'BuildSources',
            'FailureCategories',
            'FailureReasons',
            'GlobalResources',
            'Testcases',
            'TestcaseArgs',
            'TestcaseDeps',
            'TestcaseGlobalRsrcDeps',
            'TestGroups',
            'TestGroupMembers',
            'SchedGroups',
            'TestBoxes',
            'SchedGroupMembers',
            'SchedQueues',
            'Builds',
            'SystemLog',
            'VcsRevisions',
            'TestResultStrTab',
            'TestSets',
            'TestResults',
            'TestResultFiles',
            'TestResultMsgs',
            'TestResultValues',
            'TestResultFailures',
        ];
        assert len(asTablesInLoadOrder) == len(self.kasTablesToDumpInFull) + len(self.kasTablesToPartiallyDump);

        oDb.begin();
        oDb.execute('SET CONSTRAINTS ALL DEFERRED;');

        print('Checking if the database looks empty...\n');
        for sTable in asTablesInLoadOrder + [ 'TestBoxStatuses', 'GlobalResourceStatuses' ]:
            oDb.execute('SELECT COUNT(*) FROM ' + sTable);
            cRows = oDb.fetchOne()[0];
            cMaxRows = 0;
            # These three tables get a single default row at creation time.
            if sTable in [ 'SchedGroups', 'TestResultStrTab', 'Users' ]: cMaxRows = 1;
            if cRows > cMaxRows:
                print('error: Table %s has %u rows which is more than %u - refusing to delete and load.'
                      % (sTable, cRows, cMaxRows,));
                print('info: Please drop and recreate the database before loading!');
                return 1;

        print('Dropping default table content...\n');
        for sTable in [ 'SchedGroups', 'TestResultStrTab', 'Users']:
            oDb.execute('DELETE FROM ' + sTable);

        # Temporarily drop the TestSets -> TestResults FK so the two tables
        # can be loaded independently; it is recreated after the load.
        oDb.execute('ALTER TABLE TestSets DROP CONSTRAINT IF EXISTS TestSets_idTestResult_fkey');

        for sTable in asTablesInLoadOrder:
            print('Loading %s...' % (sTable,));
            oFile = oZipFile.open(sTable);
            try:
                oDb.copyExpert('COPY ' + sTable + ' FROM STDIN WITH (FORMAT TEXT)', oFile);
                cRows = oDb.getRowCount();
            finally:
                oFile.close(); # Close each member stream instead of leaking it.
            print('... %s rows.' % (cRows,));
        oZipFile.close();

        oDb.execute('ALTER TABLE TestSets ADD FOREIGN KEY (idTestResult) REFERENCES TestResults(idTestResult)');
        oDb.commit();

        # Correct sequences.
        atSequences = [
            ( 'UserIdSeq',              'Users',                'uid' ),
            ( 'GlobalResourceIdSeq',    'GlobalResources',      'idGlobalRsrc' ),
            ( 'BuildSourceIdSeq',       'BuildSources',         'idBuildSrc' ),
            ( 'TestCaseIdSeq',          'TestCases',            'idTestCase' ),
            ( 'TestCaseGenIdSeq',       'TestCases',            'idGenTestCase' ),
            ( 'TestCaseArgsIdSeq',      'TestCaseArgs',         'idTestCaseArgs' ),
            ( 'TestCaseArgsGenIdSeq',   'TestCaseArgs',         'idGenTestCaseArgs' ),
            ( 'TestGroupIdSeq',         'TestGroups',           'idTestGroup' ),
            ( 'SchedGroupIdSeq',        'SchedGroups',          'idSchedGroup' ),
            ( 'TestBoxIdSeq',           'TestBoxes',            'idTestBox' ),
            ( 'TestBoxGenIdSeq',        'TestBoxes',            'idGenTestBox' ),
            ( 'FailureCategoryIdSeq',   'FailureCategories',    'idFailureCategory' ),
            ( 'FailureReasonIdSeq',     'FailureReasons',       'idFailureReason' ),
            ( 'BuildBlacklistIdSeq',    'BuildBlacklist',       'idBlacklisting' ),
            ( 'BuildCategoryIdSeq',     'BuildCategories',      'idBuildCategory' ),
            ( 'BuildIdSeq',             'Builds',               'idBuild' ),
            ( 'TestResultStrTabIdSeq',  'TestResultStrTab',     'idStr' ),
            ( 'TestResultIdSeq',        'TestResults',          'idTestResult' ),
            ( 'TestResultValueIdSeq',   'TestResultValues',     'idTestResultValue' ),
            # NOTE(review): name differs from the usual *Seq pattern - confirm against the schema.
            ( 'TestResultFileId',       'TestResultFiles',      'idTestResultFile' ),
            ( 'TestResultMsgIdSeq',     'TestResultMsgs',       'idTestResultMsg' ),
            ( 'TestSetIdSeq',           'TestSets',             'idTestSet' ),
            ( 'SchedQueueItemIdSeq',    'SchedQueues',          'idItem' ),
        ];
        for (sSeq, sTab, sCol) in atSequences:
            oDb.execute('SELECT MAX(%s) FROM %s' % (sCol, sTab,));
            idMax = oDb.fetchOne()[0];
            print('%s: idMax=%s' % (sSeq, idMax));
            if idMax is not None:
                oDb.execute('SELECT setval(\'%s\', %s)' % (sSeq, idMax));

        # Last step.
        print('Analyzing...');
        oDb.execute('ANALYZE');
        oDb.commit();

        print('Done!');
        return 0;

    def main(self):
        """
        Main function.  Returns the exit status of the selected operation.
        """
        oDb = TMDatabaseConnection();

        if self.oConfig.fLoadDumpIntoDatabase is not True:
            rc = self._doDump(oDb);
        else:
            rc = self._doLoad(oDb);

        oDb.close();
        # BUGFIX: previously hardcoded 'return 0', which discarded the
        # failure status reported by _doLoad.
        return rc;
if __name__ == '__main__':
    # Script entry point: exit with the status reported by main().
    sys.exit(PartialDbDump().main());
| gpl-2.0 |
YOTOV-LIMITED/kitsune | kitsune/users/monkeypatch.py | 23 | 1200 | from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from kitsune.sumo.urlresolvers import reverse
def _activate_users(admin, request, qs):
    """Admin action: mark every selected user as active."""
    num = qs.update(is_active=True)
    if num == 1:
        msg = 'One user activated.'
    else:
        msg = '%s users activated.' % num
    admin.message_user(request, msg)
_activate_users.short_description = u'Activate selected users'
def _deactivate_users(admin, request, qs):
    """Admin action: mark every selected user as inactive."""
    num = qs.update(is_active=False)
    if num == 1:
        msg = 'One user deactivated.'
    else:
        msg = '%s users deactivated.' % num
    admin.message_user(request, msg)
_deactivate_users.short_description = u'Deactivate selected users'
def patch_user_admin():
    """Prevent User objects from being deleted, even by superusers."""
    if getattr(UserAdmin, '_monkeyed', False):
        return  # Already patched; repeated calls are harmless no-ops.
    UserAdmin._monkeyed = True
    UserAdmin.actions = [_activate_users, _deactivate_users]
def patch_user_model():
    """Add a more accurate User.get_absolute_url."""
    def get_absolute_url(self):
        # Point at the SUMO profile page instead of Django's default.
        return reverse('users.profile', args=[self.pk])
    User.get_absolute_url = get_absolute_url
def patch_all():
    """Apply all of the monkeypatches defined in this module."""
    patch_user_admin()
    patch_user_model()
| bsd-3-clause |
jrowan/zulip | zerver/webhooks/circleci/tests.py | 43 | 1806 | # -*- coding: utf-8 -*-
from zerver.lib.test_classes import WebhookTestCase
class CircleCiHookTests(WebhookTestCase):
    """Webhook tests covering the four CircleCI build-outcome messages:
    succeeded, failed, still failing, and fixed."""
    STREAM_NAME = 'circleci'
    URL_TEMPLATE = u"/api/v1/external/circleci?stream={stream}&api_key={api_key}"
    FIXTURE_DIR_NAME = 'circleci'

    def test_circleci_build_in_success_status(self):
        # type: () -> None
        expected_subject = u"RepoName"
        expected_message = u"[Build](https://circleci.com/gh/username/project/build_number) triggered by username on master branch succeeded."
        self.send_and_test_stream_message('build_passed', expected_subject, expected_message)

    def test_circleci_build_in_failed_status(self):
        # type: () -> None
        expected_subject = u"RepoName"
        expected_message = u"[Build](https://circleci.com/gh/username/project/build_number) triggered by username on master branch failed."
        self.send_and_test_stream_message('build_failed', expected_subject, expected_message)

    def test_circleci_build_in_failed_status_when_previous_build_failed_too(self):
        # type: () -> None
        expected_subject = u"RepoName"
        expected_message = u"[Build](https://circleci.com/gh/username/project/build_number) triggered by username on master branch is still failing."
        self.send_and_test_stream_message('build_failed_when_previous_build_failed', expected_subject, expected_message)

    def test_circleci_build_in_success_status_when_previous_build_failed_too(self):
        # type: () -> None
        expected_subject = u"RepoName"
        expected_message = u"[Build](https://circleci.com/gh/username/project/build_number) triggered by username on master branch fixed."
        self.send_and_test_stream_message('build_passed_when_previous_build_failed', expected_subject, expected_message)
| apache-2.0 |
PaulKinlan/cli-caniuse | site/app/scripts/bower_components/jsrepl-build/extern/python/unclosured/lib/python2.7/xml/sax/saxutils.py | 111 | 9823 | """\
A library of useful helper classes to the SAX classes, for the
convenience of application and driver writers.
"""
import os, urlparse, urllib, types
import handler
import xmlreader
# Interpreters built without unicode support lack types.UnicodeType, so
# fall back to byte strings only.
try:
    _StringTypes = [types.StringType, types.UnicodeType]
except AttributeError:
    _StringTypes = [types.StringType]

# See whether the xmlcharrefreplace error handler is
# supported; very old codecs modules lack it, in which case we fall
# back to strict encoding.
try:
    from codecs import xmlcharrefreplace_errors
    _error_handling = "xmlcharrefreplace"
    del xmlcharrefreplace_errors
except ImportError:
    _error_handling = "strict"
def __dict_replace(s, d):
    """Return *s* with every key of *d* replaced by its value.

    Replacements are applied one mapping entry at a time, in the
    dictionary's iteration order.
    """
    result = s
    for sOld, sNew in d.items():
        result = result.replace(sOld, sNew)
    return result
def escape(data, entities={}):
    """Escape &, <, and > in a string of data.

    You can escape other strings of data by passing a dictionary as
    the optional entities parameter.  The keys and values must all be
    strings; each key will be replaced with its corresponding value.
    """
    # The ampersand must be handled first so the "&" produced for the
    # other characters is not escaped a second time.
    data = data.replace("&", "&").replace(">", ">").replace("<", "<")
    for sEntity, sReplacement in entities.items():
        data = data.replace(sEntity, sReplacement)
    return data
def unescape(data, entities={}):
"""Unescape &, <, and > in a string of data.
You can unescape other strings of data by passing a dictionary as
the optional entities parameter. The keys and values must all be
strings; each key will be replaced with its corresponding value.
"""
data = data.replace("<", "<")
data = data.replace(">", ">")
if entities:
data = __dict_replace(data, entities)
# must do ampersand last
return data.replace("&", "&")
def quoteattr(data, entities={}):
"""Escape and quote an attribute value.
Escape &, <, and > in a string of data, then quote it for use as
an attribute value. The \" character will be escaped as well, if
necessary.
You can escape other strings of data by passing a dictionary as
the optional entities parameter. The keys and values must all be
strings; each key will be replaced with its corresponding value.
"""
entities = entities.copy()
entities.update({'\n': ' ', '\r': ' ', '\t':'	'})
data = escape(data, entities)
if '"' in data:
if "'" in data:
data = '"%s"' % data.replace('"', """)
else:
data = "'%s'" % data
else:
data = '"%s"' % data
return data
class XMLGenerator(handler.ContentHandler):
    """SAX ContentHandler that serializes the event stream back out as XML.

    Output is written to *out* (default: sys.stdout); unicode text is
    encoded with *encoding* before being written.
    """

    def __init__(self, out=None, encoding="iso-8859-1"):
        if out is None:
            import sys
            out = sys.stdout
        handler.ContentHandler.__init__(self)
        self._out = out
        self._ns_contexts = [{}] # contains uri -> prefix dicts
        self._current_context = self._ns_contexts[-1]
        self._undeclared_ns_maps = []
        self._encoding = encoding

    def _write(self, text):
        # Byte strings pass through untouched; unicode is encoded with the
        # configured encoding and module-level error handler.
        if isinstance(text, str):
            self._out.write(text)
        else:
            self._out.write(text.encode(self._encoding, _error_handling))

    def _qname(self, name):
        """Builds a qualified name from a (ns_url, localname) pair"""
        if name[0]:
            # Per http://www.w3.org/XML/1998/namespace, The 'xml' prefix is
            # bound by definition to http://www.w3.org/XML/1998/namespace.  It
            # does not need to be declared and will not usually be found in
            # self._current_context.
            if 'http://www.w3.org/XML/1998/namespace' == name[0]:
                return 'xml:' + name[1]
            # The name is in a non-empty namespace
            prefix = self._current_context[name[0]]
            if prefix:
                # If it is not the default namespace, prepend the prefix
                return prefix + ":" + name[1]
        # Return the unqualified name
        return name[1]

    # ContentHandler methods

    def startDocument(self):
        self._write('<?xml version="1.0" encoding="%s"?>\n' %
                    self._encoding)

    def startPrefixMapping(self, prefix, uri):
        # Push a copy of the current context so endPrefixMapping can
        # restore it; remember the mapping until the next startElementNS.
        self._ns_contexts.append(self._current_context.copy())
        self._current_context[uri] = prefix
        self._undeclared_ns_maps.append((prefix, uri))

    def endPrefixMapping(self, prefix):
        self._current_context = self._ns_contexts[-1]
        del self._ns_contexts[-1]

    def startElement(self, name, attrs):
        self._write('<' + name)
        for (name, value) in attrs.items():
            self._write(' %s=%s' % (name, quoteattr(value)))
        self._write('>')

    def endElement(self, name):
        self._write('</%s>' % name)

    def startElementNS(self, name, qname, attrs):
        self._write('<' + self._qname(name))

        for prefix, uri in self._undeclared_ns_maps:
            # NOTE(review): these write via self._out.write directly,
            # bypassing _write's unicode encoding - confirm intentional.
            if prefix:
                self._out.write(' xmlns:%s="%s"' % (prefix, uri))
            else:
                self._out.write(' xmlns="%s"' % uri)
        self._undeclared_ns_maps = []

        for (name, value) in attrs.items():
            self._write(' %s=%s' % (self._qname(name), quoteattr(value)))
        self._write('>')

    def endElementNS(self, name, qname):
        self._write('</%s>' % self._qname(name))

    def characters(self, content):
        self._write(escape(content))

    def ignorableWhitespace(self, content):
        self._write(content)

    def processingInstruction(self, target, data):
        self._write('<?%s %s?>' % (target, data))
class XMLFilterBase(xmlreader.XMLReader):
    """This class is designed to sit between an XMLReader and the
    client application's event handlers.  By default, it does nothing
    but pass requests up to the reader and events on to the handlers
    unmodified, but subclasses can override specific methods to modify
    the event stream or the configuration requests as they pass
    through."""

    def __init__(self, parent = None):
        # The upstream reader; may also be supplied later via setParent().
        xmlreader.XMLReader.__init__(self)
        self._parent = parent

    # ErrorHandler methods -- forwarded to the registered error handler.

    def error(self, exception):
        self._err_handler.error(exception)

    def fatalError(self, exception):
        self._err_handler.fatalError(exception)

    def warning(self, exception):
        self._err_handler.warning(exception)

    # ContentHandler methods -- forwarded to the registered content handler.

    def setDocumentLocator(self, locator):
        self._cont_handler.setDocumentLocator(locator)

    def startDocument(self):
        self._cont_handler.startDocument()

    def endDocument(self):
        self._cont_handler.endDocument()

    def startPrefixMapping(self, prefix, uri):
        self._cont_handler.startPrefixMapping(prefix, uri)

    def endPrefixMapping(self, prefix):
        self._cont_handler.endPrefixMapping(prefix)

    def startElement(self, name, attrs):
        self._cont_handler.startElement(name, attrs)

    def endElement(self, name):
        self._cont_handler.endElement(name)

    def startElementNS(self, name, qname, attrs):
        self._cont_handler.startElementNS(name, qname, attrs)

    def endElementNS(self, name, qname):
        self._cont_handler.endElementNS(name, qname)

    def characters(self, content):
        self._cont_handler.characters(content)

    def ignorableWhitespace(self, chars):
        self._cont_handler.ignorableWhitespace(chars)

    def processingInstruction(self, target, data):
        self._cont_handler.processingInstruction(target, data)

    def skippedEntity(self, name):
        self._cont_handler.skippedEntity(name)

    # DTDHandler methods -- forwarded to the registered DTD handler.

    def notationDecl(self, name, publicId, systemId):
        self._dtd_handler.notationDecl(name, publicId, systemId)

    def unparsedEntityDecl(self, name, publicId, systemId, ndata):
        self._dtd_handler.unparsedEntityDecl(name, publicId, systemId, ndata)

    # EntityResolver methods -- forwarded to the registered entity resolver.

    def resolveEntity(self, publicId, systemId):
        return self._ent_handler.resolveEntity(publicId, systemId)

    # XMLReader methods -- parse() inserts this filter between the parent
    # reader and the handlers registered on this object.

    def parse(self, source):
        self._parent.setContentHandler(self)
        self._parent.setErrorHandler(self)
        self._parent.setEntityResolver(self)
        self._parent.setDTDHandler(self)
        self._parent.parse(source)

    def setLocale(self, locale):
        self._parent.setLocale(locale)

    def getFeature(self, name):
        return self._parent.getFeature(name)

    def setFeature(self, name, state):
        self._parent.setFeature(name, state)

    def getProperty(self, name):
        return self._parent.getProperty(name)

    def setProperty(self, name, value):
        self._parent.setProperty(name, value)

    # XMLFilter methods

    def getParent(self):
        return self._parent

    def setParent(self, parent):
        self._parent = parent
# --- Utility functions
def prepare_input_source(source, base = ""):
"""This function takes an InputSource and an optional base URL and
returns a fully resolved InputSource object ready for reading."""
if type(source) in _StringTypes:
source = xmlreader.InputSource(source)
elif hasattr(source, "read"):
f = source
source = xmlreader.InputSource()
source.setByteStream(f)
if hasattr(f, "name"):
source.setSystemId(f.name)
if source.getByteStream() is None:
sysid = source.getSystemId()
basehead = os.path.dirname(os.path.normpath(base))
sysidfilename = os.path.join(basehead, sysid)
if os.path.isfile(sysidfilename):
source.setSystemId(sysidfilename)
f = open(sysidfilename, "rb")
else:
source.setSystemId(urlparse.urljoin(base, sysid))
f = urllib.urlopen(source.getSystemId())
source.setByteStream(f)
return source
| apache-2.0 |
cpcloud/logpy | examples/user-classes.py | 1 | 1183 | from account import Account
from logpy import unifiable, run, var, eq, membero, variables
from logpy.core import lall
from logpy.arith import add, gt, sub
unifiable(Account)  # Register the Account class with logpy's unification

# Sample data the logic goals below will match against.
accounts = (Account('Adam', 'Smith', 1, 20),
            Account('Carl', 'Marx', 2, 3),
            Account('John', 'Rockefeller', 3, 1000))

# variables are arbitrary Python objects, not LogPy Var objects; the
# `variables` context manager below treats them as logic variables.
first = 'FIRST'
last = 'LAST'
ident = -1111
balance = -2222
newbalance = -3333

# NOTE(review): `vars` shadows the builtin of the same name - harmless
# here, but a different name would be clearer.
vars = {first, last, ident, balance, newbalance}

# Describe a couple of transformations on accounts
source = Account(first, last, ident, balance)
target = Account(first, last, ident, newbalance)

theorists = ('Adam', 'Carl')
# Give $10 to theorists
theorist_bonus = lall((membero, source, accounts),
                      (membero, first, theorists),
                      (add, 10, balance, newbalance))

# Take $10 from anyone with more than $100
tax_the_rich = lall((membero, source, accounts),
                    (gt, balance, 100),
                    (sub, balance, 10, newbalance))

# Python 2 script: `print` statement syntax.
with variables(*vars):
    print run(0, target, tax_the_rich)
    print run(0, target, theorist_bonus)
| bsd-3-clause |
lancezlin/ml_template_py | lib/python2.7/site-packages/sklearn/datasets/tests/test_samples_generator.py | 181 | 15664 | from __future__ import division
from collections import defaultdict
from functools import partial
import numpy as np
import scipy.sparse as sp
from sklearn.externals.six.moves import zip
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_raises
from sklearn.datasets import make_classification
from sklearn.datasets import make_multilabel_classification
from sklearn.datasets import make_hastie_10_2
from sklearn.datasets import make_regression
from sklearn.datasets import make_blobs
from sklearn.datasets import make_friedman1
from sklearn.datasets import make_friedman2
from sklearn.datasets import make_friedman3
from sklearn.datasets import make_low_rank_matrix
from sklearn.datasets import make_sparse_coded_signal
from sklearn.datasets import make_sparse_uncorrelated
from sklearn.datasets import make_spd_matrix
from sklearn.datasets import make_swiss_roll
from sklearn.datasets import make_s_curve
from sklearn.datasets import make_biclusters
from sklearn.datasets import make_checkerboard
from sklearn.utils.validation import assert_all_finite
def test_make_classification():
    """Check shapes, class count and weighted class sizes of make_classification."""
    X, y = make_classification(n_samples=100, n_features=20, n_informative=5,
                               n_redundant=1, n_repeated=1, n_classes=3,
                               n_clusters_per_class=1, hypercube=False,
                               shift=None, scale=None, weights=[0.1, 0.25],
                               random_state=0)

    assert_equal(X.shape, (100, 20), "X shape mismatch")
    assert_equal(y.shape, (100,), "y shape mismatch")
    assert_equal(np.unique(y).shape, (3,), "Unexpected number of classes")
    # weights=[0.1, 0.25] leaves 0.65 for the implicit third class.
    assert_equal(sum(y == 0), 10, "Unexpected number of samples in class #0")
    assert_equal(sum(y == 1), 25, "Unexpected number of samples in class #1")
    assert_equal(sum(y == 2), 65, "Unexpected number of samples in class #2")
def test_make_classification_informative_features():
    """Test the construction of informative features in make_classification

    Also tests `n_clusters_per_class`, `n_classes`, `hypercube` and
    fully-specified `weights`.
    """
    # Create very separate clusters; check that vertices are unique and
    # correspond to classes
    class_sep = 1e6
    make = partial(make_classification, class_sep=class_sep, n_redundant=0,
                   n_repeated=0, flip_y=0, shift=0, scale=1, shuffle=False)

    for n_informative, weights, n_clusters_per_class in [(2, [1], 1),
                                                         (2, [1/3] * 3, 1),
                                                         (2, [1/4] * 4, 1),
                                                         (2, [1/2] * 2, 2),
                                                         (2, [3/4, 1/4], 2),
                                                         (10, [1/3] * 3, 10)
                                                         ]:
        n_classes = len(weights)
        n_clusters = n_classes * n_clusters_per_class
        n_samples = n_clusters * 50

        for hypercube in (False, True):
            X, y = make(n_samples=n_samples, n_classes=n_classes,
                        weights=weights, n_features=n_informative,
                        n_informative=n_informative,
                        n_clusters_per_class=n_clusters_per_class,
                        hypercube=hypercube, random_state=0)

            assert_equal(X.shape, (n_samples, n_informative))
            assert_equal(y.shape, (n_samples,))

            # Cluster by sign, viewed as strings to allow uniquing
            signs = np.sign(X)
            signs = signs.view(dtype='|S{0}'.format(signs.strides[0]))
            unique_signs, cluster_index = np.unique(signs,
                                                    return_inverse=True)

            assert_equal(len(unique_signs), n_clusters,
                         "Wrong number of clusters, or not in distinct "
                         "quadrants")

            clusters_by_class = defaultdict(set)
            for cluster, cls in zip(cluster_index, y):
                clusters_by_class[cls].add(cluster)
            for clusters in clusters_by_class.values():
                assert_equal(len(clusters), n_clusters_per_class,
                             "Wrong number of clusters per class")
            assert_equal(len(clusters_by_class), n_classes,
                         "Wrong number of classes")

            assert_array_almost_equal(np.bincount(y) / len(y) // weights,
                                      [1] * n_classes,
                                      err_msg="Wrong number of samples "
                                              "per class")

            # Ensure on vertices of hypercube
            for cluster in range(len(unique_signs)):
                centroid = X[cluster_index == cluster].mean(axis=0)
                if hypercube:
                    assert_array_almost_equal(np.abs(centroid),
                                              [class_sep] * n_informative,
                                              decimal=0,
                                              err_msg="Clusters are not "
                                                      "centered on hypercube "
                                                      "vertices")
                else:
                    # BUGFIX: error message previously misspelled "cenetered".
                    assert_raises(AssertionError,
                                  assert_array_almost_equal,
                                  np.abs(centroid),
                                  [class_sep] * n_informative,
                                  decimal=0,
                                  err_msg="Clusters should not be centered "
                                          "on hypercube vertices")

    # Invalid parameter combinations must raise.
    assert_raises(ValueError, make, n_features=2, n_informative=2, n_classes=5,
                  n_clusters_per_class=1)
    assert_raises(ValueError, make, n_features=2, n_informative=2, n_classes=3,
                  n_clusters_per_class=2)
def test_make_multilabel_classification_return_sequences():
    """Labels come back as per-sample sequences when return_indicator=False."""
    for allow_unlabeled, min_length in zip((True, False), (0, 1)):
        X, Y = make_multilabel_classification(n_samples=100, n_features=20,
                                              n_classes=3, random_state=0,
                                              return_indicator=False,
                                              allow_unlabeled=allow_unlabeled)
        assert_equal(X.shape, (100, 20), "X shape mismatch")
        if not allow_unlabeled:
            assert_equal(max([max(y) for y in Y]), 2)
        assert_equal(min([len(y) for y in Y]), min_length)
        assert_true(max([len(y) for y in Y]) <= 3)
def test_make_multilabel_classification_return_indicator():
    """Dense indicator output plus the optional distribution return values."""
    for allow_unlabeled, min_length in zip((True, False), (0, 1)):
        X, Y = make_multilabel_classification(n_samples=25, n_features=20,
                                              n_classes=3, random_state=0,
                                              allow_unlabeled=allow_unlabeled)
        assert_equal(X.shape, (25, 20), "X shape mismatch")
        assert_equal(Y.shape, (25, 3), "Y shape mismatch")
        assert_true(np.all(np.sum(Y, axis=0) > min_length))

    # Also test return_distributions and return_indicator with True
    # (allow_unlabeled deliberately leaks from the loop above: last value False)
    X2, Y2, p_c, p_w_c = make_multilabel_classification(
        n_samples=25, n_features=20, n_classes=3, random_state=0,
        allow_unlabeled=allow_unlabeled, return_distributions=True)

    assert_array_equal(X, X2)
    assert_array_equal(Y, Y2)
    assert_equal(p_c.shape, (3,))
    assert_almost_equal(p_c.sum(), 1)
    assert_equal(p_w_c.shape, (20, 3))
    assert_almost_equal(p_w_c.sum(axis=0), [1] * 3)
def test_make_multilabel_classification_return_indicator_sparse():
    """A scipy sparse indicator matrix is returned for return_indicator='sparse'."""
    for allow_unlabeled, min_length in zip((True, False), (0, 1)):
        X, Y = make_multilabel_classification(n_samples=25, n_features=20,
                                              n_classes=3, random_state=0,
                                              return_indicator='sparse',
                                              allow_unlabeled=allow_unlabeled)
        assert_equal(X.shape, (25, 20), "X shape mismatch")
        assert_equal(Y.shape, (25, 3), "Y shape mismatch")
        assert_true(sp.issparse(Y))
def test_make_hastie_10_2():
    """Check shapes and binary targets of the Hastie et al. 10.2 dataset."""
    X, y = make_hastie_10_2(n_samples=100, random_state=0)
    assert_equal(X.shape, (100, 10), "X shape mismatch")
    assert_equal(y.shape, (100,), "y shape mismatch")
    assert_equal(np.unique(y).shape, (2,), "Unexpected number of classes")
def test_make_regression():
    """Check shapes, sparse coefficients and noise level of make_regression."""
    X, y, c = make_regression(n_samples=100, n_features=10, n_informative=3,
                              effective_rank=5, coef=True, bias=0.0,
                              noise=1.0, random_state=0)

    assert_equal(X.shape, (100, 10), "X shape mismatch")
    assert_equal(y.shape, (100,), "y shape mismatch")
    assert_equal(c.shape, (10,), "coef shape mismatch")
    assert_equal(sum(c != 0.0), 3, "Unexpected number of informative features")

    # Test that y ~= np.dot(X, c) + bias + N(0, 1.0).
    assert_almost_equal(np.std(y - np.dot(X, c)), 1.0, decimal=1)

    # Test with small number of features.
    X, y = make_regression(n_samples=100, n_features=1)  # n_informative=3
    assert_equal(X.shape, (100, 1))
def test_make_regression_multitarget():
    """make_regression with n_targets=3 must yield matching y and coef shapes."""
    X, y, c = make_regression(n_samples=100, n_features=10, n_informative=3,
                              n_targets=3, coef=True, noise=1., random_state=0)

    assert_equal(X.shape, (100, 10), "X shape mismatch")
    assert_equal(y.shape, (100, 3), "y shape mismatch")
    assert_equal(c.shape, (10, 3), "coef shape mismatch")
    assert_array_equal(sum(c != 0.0), 3,
                       "Unexpected number of informative features")

    # Test that y ~= np.dot(X, c) + bias + N(0, 1.0)
    assert_almost_equal(np.std(y - np.dot(X, c)), 1.0, decimal=1)
def test_make_blobs():
    """Each blob must match its requested center and standard deviation."""
    cluster_stds = np.array([0.05, 0.2, 0.4])
    cluster_centers = np.array([[0.0, 0.0], [1.0, 1.0], [0.0, 1.0]])
    X, y = make_blobs(random_state=0, n_samples=50, n_features=2,
                      centers=cluster_centers, cluster_std=cluster_stds)

    assert_equal(X.shape, (50, 2), "X shape mismatch")
    assert_equal(y.shape, (50,), "y shape mismatch")
    assert_equal(np.unique(y).shape, (3,), "Unexpected number of blobs")
    for i, (ctr, std) in enumerate(zip(cluster_centers, cluster_stds)):
        assert_almost_equal((X[y == i] - ctr).std(), std, 1, "Unexpected std")
def test_make_friedman1():
    """Noise-free output must equal the Friedman #1 formula exactly."""
    X, y = make_friedman1(n_samples=5, n_features=10, noise=0.0,
                          random_state=0)

    assert_equal(X.shape, (5, 10), "X shape mismatch")
    assert_equal(y.shape, (5,), "y shape mismatch")

    assert_array_almost_equal(y,
                              10 * np.sin(np.pi * X[:, 0] * X[:, 1])
                              + 20 * (X[:, 2] - 0.5) ** 2
                              + 10 * X[:, 3] + 5 * X[:, 4])
def test_make_friedman2():
    """Noise-free output must equal the Friedman #2 formula exactly."""
    X, y = make_friedman2(n_samples=5, noise=0.0, random_state=0)

    assert_equal(X.shape, (5, 4), "X shape mismatch")
    assert_equal(y.shape, (5,), "y shape mismatch")

    assert_array_almost_equal(y,
                              (X[:, 0] ** 2
                               + (X[:, 1] * X[:, 2] - 1
                                  / (X[:, 1] * X[:, 3])) ** 2) ** 0.5)
def test_make_friedman3():
    """Noise-free output must equal the Friedman #3 formula exactly."""
    X, y = make_friedman3(n_samples=5, noise=0.0, random_state=0)

    assert_equal(X.shape, (5, 4), "X shape mismatch")
    assert_equal(y.shape, (5,), "y shape mismatch")

    assert_array_almost_equal(y, np.arctan((X[:, 1] * X[:, 2]
                                            - 1 / (X[:, 1] * X[:, 3]))
                                           / X[:, 0]))
def test_make_low_rank_matrix():
    """The generated matrix must have approximately the requested rank."""
    X = make_low_rank_matrix(n_samples=50, n_features=25, effective_rank=5,
                             tail_strength=0.01, random_state=0)

    assert_equal(X.shape, (50, 25), "X shape mismatch")

    from numpy.linalg import svd
    u, s, v = svd(X)
    # With tail_strength near 0 the singular values should sum to ~5.
    assert_less(sum(s) - 5, 0.1, "X rank is not approximately 5")
def test_make_sparse_coded_signal():
    """Y must decompose as D * X with unit-norm dictionary atoms."""
    Y, D, X = make_sparse_coded_signal(n_samples=5, n_components=8,
                                       n_features=10, n_nonzero_coefs=3,
                                       random_state=0)
    assert_equal(Y.shape, (10, 5), "Y shape mismatch")
    assert_equal(D.shape, (10, 8), "D shape mismatch")
    assert_equal(X.shape, (8, 5), "X shape mismatch")
    for col in X.T:
        assert_equal(len(np.flatnonzero(col)), 3, 'Non-zero coefs mismatch')
    assert_array_almost_equal(np.dot(D, X), Y)
    assert_array_almost_equal(np.sqrt((D ** 2).sum(axis=0)),
                              np.ones(D.shape[1]))
def test_make_sparse_uncorrelated():
    # Smoke test: only the output shapes are checked here.
    X, y = make_sparse_uncorrelated(n_samples=5, n_features=10, random_state=0)
    assert_equal(X.shape, (5, 10), "X shape mismatch")
    assert_equal(y.shape, (5,), "y shape mismatch")
def test_make_spd_matrix():
    """The generated matrix is square, symmetric and positive-definite."""
    X = make_spd_matrix(n_dim=5, random_state=0)
    assert_equal(X.shape, (5, 5), "X shape mismatch")
    # Symmetry: X must equal its own transpose.
    assert_array_almost_equal(X, X.T)
    # Positive definiteness: every eigenvalue is strictly positive.
    eigenvalues = np.linalg.eig(X)[0]
    assert_array_equal(eigenvalues > 0, np.array([True] * 5),
                       "X is not positive-definite")
def test_make_swiss_roll():
    # With noise=0.0 the points must lie exactly on the parametric
    # swiss-roll surface: x = t*cos(t), z = t*sin(t).
    X, t = make_swiss_roll(n_samples=5, noise=0.0, random_state=0)
    assert_equal(X.shape, (5, 3), "X shape mismatch")
    assert_equal(t.shape, (5,), "t shape mismatch")
    assert_array_almost_equal(X[:, 0], t * np.cos(t))
    assert_array_almost_equal(X[:, 2], t * np.sin(t))
def test_make_s_curve():
    # With noise=0.0 the points must lie exactly on the parametric
    # S-curve: x = sin(t), z = sign(t) * (cos(t) - 1).
    X, t = make_s_curve(n_samples=5, noise=0.0, random_state=0)
    assert_equal(X.shape, (5, 3), "X shape mismatch")
    assert_equal(t.shape, (5,), "t shape mismatch")
    assert_array_almost_equal(X[:, 0], np.sin(t))
    assert_array_almost_equal(X[:, 2], np.sign(t) * (np.cos(t) - 1))
def test_make_biclusters():
    """Shapes, finiteness and seed-determinism of make_biclusters."""
    X, rows, cols = make_biclusters(
        shape=(100, 100), n_clusters=4, shuffle=True, random_state=0)
    assert_equal(X.shape, (100, 100), "X shape mismatch")
    assert_equal(rows.shape, (4, 100), "rows shape mismatch")
    assert_equal(cols.shape, (4, 100,), "columns shape mismatch")
    for result in (X, rows, cols):
        assert_all_finite(result)
    # The same seed must reproduce the same data matrix.
    X2, _, _ = make_biclusters(shape=(100, 100), n_clusters=4,
                               shuffle=True, random_state=0)
    assert_array_almost_equal(X, X2)
def test_make_checkerboard():
    """Shapes, finiteness and seed-determinism of make_checkerboard."""
    X, rows, cols = make_checkerboard(
        shape=(100, 100), n_clusters=(20, 5),
        shuffle=True, random_state=0)
    assert_equal(X.shape, (100, 100), "X shape mismatch")
    assert_equal(rows.shape, (100, 100), "rows shape mismatch")
    assert_equal(cols.shape, (100, 100,), "columns shape mismatch")
    X, rows, cols = make_checkerboard(
        shape=(100, 100), n_clusters=2, shuffle=True, random_state=0)
    for result in (X, rows, cols):
        assert_all_finite(result)
    # Two generations with the same seed must agree exactly.
    X1, _, _ = make_checkerboard(shape=(100, 100), n_clusters=2,
                                 shuffle=True, random_state=0)
    X2, _, _ = make_checkerboard(shape=(100, 100), n_clusters=2,
                                 shuffle=True, random_state=0)
    assert_array_equal(X1, X2)
| mit |
alephobjects/Cura2 | tests/TestProfileRequirements.py | 2 | 1327 | import configparser #To read the profiles.
import os #To join paths.
import pytest
## Makes sure that the variants for the Ultimaker 3 Extended are exactly the
# same as for the Ultimaker 3.
#
# Once we have specialised profiles or a mechanism to inherit variants too, we
# may remove this test and have different profiles for the extended where
# needed, but until then we should keep them the same. It's happened all too
# often that we updated the variants for the UM3 but forgot about the UM3E.
@pytest.mark.parametrize("um3_file, um3e_file", [
    #List the corresponding files below.
    ("ultimaker3_aa0.8.inst.cfg", "ultimaker3_extended_aa0.8.inst.cfg"),
    ("ultimaker3_aa04.inst.cfg", "ultimaker3_extended_aa04.inst.cfg"),
    ("ultimaker3_bb0.8.inst.cfg", "ultimaker3_extended_bb0.8.inst.cfg"),
    ("ultimaker3_bb04.inst.cfg", "ultimaker3_extended_bb04.inst.cfg")
])
def test_ultimaker3extended_variants(um3_file, um3e_file):
    """Each UM3 Extended variant must define exactly the same settings
    ("values" section) as the corresponding Ultimaker 3 variant."""
    directory = os.path.join(os.path.dirname(__file__), "..", "resources", "variants") #TODO: Hardcoded path relative to this test file.
    um3 = configparser.ConfigParser()
    um3e = configparser.ConfigParser()
    # Use context managers so both file handles are closed even when parsing
    # raises; the original leaked the handles (open() with no close()).
    with open(os.path.join(directory, um3_file)) as um3_stream:
        um3.read_file(um3_stream)
    with open(os.path.join(directory, um3e_file)) as um3e_stream:
        um3e.read_file(um3e_stream)
    assert um3["values"] == um3e["values"]
cs251-eclipse/EclipseOJ | EclipseOJ/core/models.py | 1 | 2626 | from django.db import models
from django.contrib.auth.models import User
from django_countries.fields import CountryField
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.contrib.staticfiles.templatetags.staticfiles import static
import os
def picture_path(instance, filename):
    """Return the storage path for a user's profile picture.

    The uploaded *filename* is intentionally ignored so that each new
    upload overwrites the user's previous picture.
    """
    return 'users/' + instance.user.username + '/picture'
class Profile(models.Model):
    """
    This is used to extend default :model:`auth.User` to store extra information about the people using the site.
    """
    # One profile per user; deleting the User cascades to the Profile.
    user = models.OneToOneField(
        User,
        on_delete=models.CASCADE,
        help_text="This is a OneToOneField relationship i.e. User and Profile are uniquly related. Profile extends the default User model. on_delete = CASCADE is true which means if User is deleted then the Profile will be deleted automatically",
    )
    # Stored at the path computed by picture_path (one fixed filename per
    # user, so re-uploads overwrite the old picture).
    picture = models.ImageField(
        upload_to=picture_path,
        default=static('default_picture.jpg'),
        blank=True,
        help_text="It is an ImageField and is used to store profile picture of user.",
    )
    first_name = models.CharField(
        max_length=100,
        blank=True,
        help_text="First name of the user",
    )
    last_name = models.CharField(
        max_length=100,
        blank=True,
        help_text="Last name of the user",
    )
    email = models.EmailField(
        max_length=70,
        blank=True,
        help_text="This is the email id of the user. The EmailID validator provided by django is used",
    )
    country = CountryField(
        default='IN',
        blank=True,
        help_text="This is used to store the country of user. A third party package django-countries has been used which access to all countries. Default has been set to India",
    )
    city = models.CharField(
        max_length=100,
        blank=True,
        help_text="This is they city of the user",
    )
    institute = models.CharField(
        max_length=100,
        blank=True,
        help_text="This is the institute of the user",
    )
    # Elo-style skill score; 1500 is the starting rating for new users.
    rating = models.IntegerField(
        default=1500,
        help_text="Users can compete with each other and rating is the parameter used for competing with each other. Rating are updated after each contest through probabilistic analysis of the contest result",
    )
    def __str__(self):
        # Display profiles by the owning user's username in admin/listings.
        return self.user.username
@receiver(post_save, sender=User)
def create_user(sender, instance, created, **kwargs):
    """Create the per-user upload directory when a new User is first saved.

    Runs on every post_save of User but only acts on creation.
    """
    if not created:
        return
    # makedirs(..., exist_ok=True) instead of os.mkdir: it does not crash
    # when the directory already exists (e.g. the username was reused) and
    # it also creates the intermediate 'uploads/users' path if missing.
    os.makedirs(os.path.join(os.getcwd(), 'uploads', 'users',
                             instance.get_username()), exist_ok=True)
| mit |
alexmandujano/django | django/contrib/comments/admin.py | 178 | 3736 | from __future__ import unicode_literals
from django.contrib import admin
from django.contrib.auth import get_user_model
from django.contrib.comments.models import Comment
from django.utils.translation import ugettext_lazy as _, ungettext_lazy
from django.contrib.comments import get_model
from django.contrib.comments.views.moderation import perform_flag, perform_approve, perform_delete
class UsernameSearch(object):
    """The User object may not be auth.User, so we need to provide
    a mechanism for issuing the equivalent of a .filter(user__username=...)
    search in CommentAdmin.
    """
    def __str__(self):
        # Build the lookup dynamically so a custom USERNAME_FIELD works too.
        return 'user__{0}'.format(get_user_model().USERNAME_FIELD)
class CommentsAdmin(admin.ModelAdmin):
    """Admin for django.contrib.comments with bulk moderation actions
    (flag / approve / remove) gated on the 'can_moderate' permission."""
    fieldsets = (
        (None,
           {'fields': ('content_type', 'object_pk', 'site')}
        ),
        (_('Content'),
           {'fields': ('user', 'user_name', 'user_email', 'user_url', 'comment')}
        ),
        (_('Metadata'),
           {'fields': ('submit_date', 'ip_address', 'is_public', 'is_removed')}
        ),
    )
    list_display = ('name', 'content_type', 'object_pk', 'ip_address', 'submit_date', 'is_public', 'is_removed')
    list_filter = ('submit_date', 'site', 'is_public', 'is_removed')
    date_hierarchy = 'submit_date'
    ordering = ('-submit_date',)
    raw_id_fields = ('user',)
    # UsernameSearch resolves to 'user__<USERNAME_FIELD>' for swapped models.
    search_fields = ('comment', UsernameSearch(), 'user_name', 'user_email', 'user_url', 'ip_address')
    actions = ["flag_comments", "approve_comments", "remove_comments"]
    def get_actions(self, request):
        """Filter the bulk actions according to the user's permissions."""
        actions = super(CommentsAdmin, self).get_actions(request)
        # Only superusers should be able to delete the comments from the DB.
        if not request.user.is_superuser and 'delete_selected' in actions:
            actions.pop('delete_selected')
        if not request.user.has_perm('comments.can_moderate'):
            if 'approve_comments' in actions:
                actions.pop('approve_comments')
            if 'remove_comments' in actions:
                actions.pop('remove_comments')
        return actions
    def flag_comments(self, request, queryset):
        """Bulk action: flag every selected comment."""
        self._bulk_flag(request, queryset, perform_flag,
                        ungettext_lazy('%d comment was successfully flagged',
                                       '%d comments were successfully flagged'))
    flag_comments.short_description = _("Flag selected comments")
    def approve_comments(self, request, queryset):
        """Bulk action: approve every selected comment."""
        self._bulk_flag(request, queryset, perform_approve,
                        ungettext_lazy('%d comment was successfully approved',
                                       '%d comments were successfully approved'))
    approve_comments.short_description = _("Approve selected comments")
    def remove_comments(self, request, queryset):
        """Bulk action: mark every selected comment as removed."""
        self._bulk_flag(request, queryset, perform_delete,
                        ungettext_lazy('%d comment was successfully removed',
                                       '%d comments were successfully removed'))
    remove_comments.short_description = _("Remove selected comments")
    def _bulk_flag(self, request, queryset, action, done_message):
        """
        Flag, approve, or remove some comments from an admin action. Actually
        calls the `action` argument to perform the heavy lifting.
        """
        n_comments = 0
        for comment in queryset:
            action(request, comment)
            n_comments += 1
        self.message_user(request, done_message % n_comments)
# Only register the default admin if the model is the built-in comment model
# (this won't be true if there's a custom comment app; in that case the
# custom app is responsible for registering an admin for its own model).
if get_model() is Comment:
    admin.site.register(Comment, CommentsAdmin)
| bsd-3-clause |
carrillo/scikit-learn | sklearn/datasets/svmlight_format.py | 79 | 15976 | """This module implements a loader and dumper for the svmlight format
This format is a text-based format, with one sample per line. It does
not store zero valued features hence is suitable for sparse dataset.
The first element of each line can be used to store a target variable to
predict.
This format is used as the default format for both svmlight and the
libsvm command line programs.
"""
# Authors: Mathieu Blondel <mathieu@mblondel.org>
# Lars Buitinck <L.J.Buitinck@uva.nl>
# Olivier Grisel <olivier.grisel@ensta.org>
# License: BSD 3 clause
from contextlib import closing
import io
import os.path
import numpy as np
import scipy.sparse as sp
from ._svmlight_format import _load_svmlight_file
from .. import __version__
from ..externals import six
from ..externals.six import u, b
from ..externals.six.moves import range, zip
from ..utils import check_array
from ..utils.fixes import frombuffer_empty
def load_svmlight_file(f, n_features=None, dtype=np.float64,
                       multilabel=False, zero_based="auto", query_id=False):
    """Load datasets in the svmlight / libsvm format into sparse CSR matrix
    This format is a text-based format, with one sample per line. It does
    not store zero valued features hence is suitable for sparse dataset.
    The first element of each line can be used to store a target variable
    to predict.
    This format is used as the default format for both svmlight and the
    libsvm command line programs.
    Parsing a text based source can be expensive. When working on
    repeatedly on the same dataset, it is recommended to wrap this
    loader with joblib.Memory.cache to store a memmapped backup of the
    CSR results of the first call and benefit from the near instantaneous
    loading of memmapped structures for the subsequent calls.
    In case the file contains a pairwise preference constraint (known
    as "qid" in the svmlight format) these are ignored unless the
    query_id parameter is set to True. These pairwise preference
    constraints can be used to constraint the combination of samples
    when using pairwise loss functions (as is the case in some
    learning to rank problems) so that only pairs with the same
    query_id value are considered.
    This implementation is written in Cython and is reasonably fast.
    However, a faster API-compatible loader is also available at:
      https://github.com/mblondel/svmlight-loader
    Parameters
    ----------
    f : {str, file-like, int}
        (Path to) a file to load. If a path ends in ".gz" or ".bz2", it will
        be uncompressed on the fly. If an integer is passed, it is assumed to
        be a file descriptor. A file-like or file descriptor will not be closed
        by this function. A file-like object must be opened in binary mode.
    n_features : int or None
        The number of features to use. If None, it will be inferred. This
        argument is useful to load several files that are subsets of a
        bigger sliced dataset: each subset might not have examples of
        every feature, hence the inferred shape might vary from one
        slice to another.
    multilabel : boolean, optional, default False
        Samples may have several labels each (see
        http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/multilabel.html)
    zero_based : boolean or "auto", optional, default "auto"
        Whether column indices in f are zero-based (True) or one-based
        (False). If column indices are one-based, they are transformed to
        zero-based to match Python/NumPy conventions.
        If set to "auto", a heuristic check is applied to determine this from
        the file contents. Both kinds of files occur "in the wild", but they
        are unfortunately not self-identifying. Using "auto" or True should
        always be safe.
    query_id : boolean, default False
        If True, will return the query_id array for each file.
    dtype : numpy data type, default np.float64
        Data type of dataset to be loaded. This will be the data type of the
        output numpy arrays ``X`` and ``y``.
    Returns
    -------
    X: scipy.sparse matrix of shape (n_samples, n_features)
    y: ndarray of shape (n_samples,), or, in the multilabel a list of
        tuples of length n_samples.
    query_id: array of shape (n_samples,)
       query_id for each sample. Only returned when query_id is set to
       True.
    See also
    --------
    load_svmlight_files: similar function for loading multiple files in this
    format, enforcing the same number of features/columns on all of them.
    Examples
    --------
    To use joblib.Memory to cache the svmlight file::
        from sklearn.externals.joblib import Memory
        from sklearn.datasets import load_svmlight_file
        mem = Memory("./mycache")
        @mem.cache
        def get_data():
            data = load_svmlight_file("mysvmlightfile")
            return data[0], data[1]
        X, y = get_data()
    """
    # Delegate to the multi-file loader with a single-element list; its
    # flat [X, y(, query_id)] result is returned as a tuple.
    return tuple(load_svmlight_files([f], n_features, dtype, multilabel,
                                     zero_based, query_id))
def _gen_open(f):
if isinstance(f, int): # file descriptor
return io.open(f, "rb", closefd=False)
elif not isinstance(f, six.string_types):
raise TypeError("expected {str, int, file-like}, got %s" % type(f))
_, ext = os.path.splitext(f)
if ext == ".gz":
import gzip
return gzip.open(f, "rb")
elif ext == ".bz2":
from bz2 import BZ2File
return BZ2File(f, "rb")
else:
return open(f, "rb")
def _open_and_load(f, dtype, multilabel, zero_based, query_id):
    """Open *f* if it is not already file-like and parse it with the Cython
    svmlight reader, converting the raw array.array buffers into properly
    typed ndarrays (data, indices, indptr, labels, query)."""
    if hasattr(f, "read"):
        actual_dtype, data, ind, indptr, labels, query = \
            _load_svmlight_file(f, dtype, multilabel, zero_based, query_id)
    # XXX remove closing when Python 2.7+/3.1+ required
    else:
        with closing(_gen_open(f)) as f:
            actual_dtype, data, ind, indptr, labels, query = \
                _load_svmlight_file(f, dtype, multilabel, zero_based, query_id)
    # convert from array.array, give data the right dtype
    if not multilabel:
        labels = frombuffer_empty(labels, np.float64)
    data = frombuffer_empty(data, actual_dtype)
    indices = frombuffer_empty(ind, np.intc)
    indptr = np.frombuffer(indptr, dtype=np.intc)   # never empty
    query = frombuffer_empty(query, np.intc)
    data = np.asarray(data, dtype=dtype)    # no-op for float{32,64}
    return data, indices, indptr, labels, query
def load_svmlight_files(files, n_features=None, dtype=np.float64,
                        multilabel=False, zero_based="auto", query_id=False):
    """Load dataset from multiple files in SVMlight format
    This function is equivalent to mapping load_svmlight_file over a list of
    files, except that the results are concatenated into a single, flat list
    and the samples vectors are constrained to all have the same number of
    features.
    In case the file contains a pairwise preference constraint (known
    as "qid" in the svmlight format) these are ignored unless the
    query_id parameter is set to True. These pairwise preference
    constraints can be used to constraint the combination of samples
    when using pairwise loss functions (as is the case in some
    learning to rank problems) so that only pairs with the same
    query_id value are considered.
    Parameters
    ----------
    files : iterable over {str, file-like, int}
        (Paths of) files to load. If a path ends in ".gz" or ".bz2", it will
        be uncompressed on the fly. If an integer is passed, it is assumed to
        be a file descriptor. File-likes and file descriptors will not be
        closed by this function. File-like objects must be opened in binary
        mode.
    n_features: int or None
        The number of features to use. If None, it will be inferred from the
        maximum column index occurring in any of the files.
        This can be set to a higher value than the actual number of features
        in any of the input files, but setting it to a lower value will cause
        an exception to be raised.
    multilabel: boolean, optional
        Samples may have several labels each (see
        http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/multilabel.html)
    zero_based: boolean or "auto", optional
        Whether column indices in f are zero-based (True) or one-based
        (False). If column indices are one-based, they are transformed to
        zero-based to match Python/NumPy conventions.
        If set to "auto", a heuristic check is applied to determine this from
        the file contents. Both kinds of files occur "in the wild", but they
        are unfortunately not self-identifying. Using "auto" or True should
        always be safe.
    query_id: boolean, defaults to False
        If True, will return the query_id array for each file.
    dtype : numpy data type, default np.float64
        Data type of dataset to be loaded. This will be the data type of the
        output numpy arrays ``X`` and ``y``.
    Returns
    -------
    [X1, y1, ..., Xn, yn]
    where each (Xi, yi) pair is the result from load_svmlight_file(files[i]).
    If query_id is set to True, this will return instead [X1, y1, q1,
    ..., Xn, yn, qn] where (Xi, yi, qi) is the result from
    load_svmlight_file(files[i])
    Notes
    -----
    When fitting a model to a matrix X_train and evaluating it against a
    matrix X_test, it is essential that X_train and X_test have the same
    number of features (X_train.shape[1] == X_test.shape[1]). This may not
    be the case if you load the files individually with load_svmlight_file.
    See also
    --------
    load_svmlight_file
    """
    # Parse every file first; each entry of r is the 5-tuple
    # (data, indices, indptr, labels, query) returned by _open_and_load.
    r = [_open_and_load(f, dtype, multilabel, bool(zero_based), bool(query_id))
         for f in files]
    # Detect one-based column indices: either requested explicitly, or in
    # "auto" mode inferred when every file's minimum index is > 0. The
    # indices arrays are shifted down in place to become zero-based.
    if (zero_based is False
            or zero_based == "auto" and all(np.min(tmp[1]) > 0 for tmp in r)):
        for ind in r:
            indices = ind[1]
            indices -= 1
    # n_f is the smallest feature count consistent with every file.
    n_f = max(ind[1].max() for ind in r) + 1
    if n_features is None:
        n_features = n_f
    elif n_features < n_f:
        raise ValueError("n_features was set to {},"
                         " but input file contains {} features"
                         .format(n_features, n_f))
    result = []
    for data, indices, indptr, y, query_values in r:
        shape = (indptr.shape[0] - 1, n_features)
        X = sp.csr_matrix((data, indices, indptr), shape)
        X.sort_indices()
        result += X, y
        if query_id:
            result.append(query_values)
    return result
def _dump_svmlight(X, y, f, multilabel, one_based, comment, query_id):
    """Write (X, y) to the binary-mode file object *f* in svmlight format.

    One sample per line: "<label>[ qid:<q>] <col>:<value> ...". *one_based*
    shifts the written column indices by 1; *comment*, if given, is emitted
    as a header of '#'-prefixed lines.
    """
    is_sp = int(hasattr(X, "tocsr"))
    # Integer dtypes are written without a decimal point; floats use %.16g
    # so a round-trip through the text format is lossless for float64.
    if X.dtype.kind == 'i':
        value_pattern = u("%d:%d")
    else:
        value_pattern = u("%d:%.16g")
    if y.dtype.kind == 'i':
        label_pattern = u("%d")
    else:
        label_pattern = u("%.16g")
    line_pattern = u("%s")
    if query_id is not None:
        line_pattern += u(" qid:%d")
    line_pattern += u(" %s\n")
    if comment:
        f.write(b("# Generated by dump_svmlight_file from scikit-learn %s\n"
                  % __version__))
        f.write(b("# Column indices are %s-based\n"
                  % ["zero", "one"][one_based]))
        f.write(b("#\n"))
        f.writelines(b("# %s\n" % line) for line in comment.splitlines())
    for i in range(X.shape[0]):
        # Collect (column, value) pairs for row i; for dense X only the
        # non-zero entries are emitted, matching the sparse path.
        if is_sp:
            span = slice(X.indptr[i], X.indptr[i + 1])
            row = zip(X.indices[span], X.data[span])
        else:
            nz = X[i] != 0
            row = zip(np.where(nz)[0], X[i, nz])
        s = " ".join(value_pattern % (j + one_based, x) for j, x in row)
        if multilabel:
            nz_labels = np.where(y[i] != 0)[0]
            labels_str = ",".join(label_pattern % j for j in nz_labels)
        else:
            labels_str = label_pattern % y[i]
        if query_id is not None:
            feat = (labels_str, query_id[i], s)
        else:
            feat = (labels_str, s)
        f.write((line_pattern % feat).encode('ascii'))
def dump_svmlight_file(X, y, f, zero_based=True, comment=None, query_id=None,
                       multilabel=False):
    """Dump the dataset in svmlight / libsvm file format.
    This format is a text-based format, with one sample per line. It does
    not store zero valued features hence is suitable for sparse dataset.
    The first element of each line can be used to store a target variable
    to predict.
    Parameters
    ----------
    X : {array-like, sparse matrix}, shape = [n_samples, n_features]
        Training vectors, where n_samples is the number of samples and
        n_features is the number of features.
    y : array-like, shape = [n_samples] or [n_samples, n_labels]
        Target values. Class labels must be an integer or float, or array-like
        objects of integer or float for multilabel classifications.
    f : string or file-like in binary mode
        If string, specifies the path that will contain the data.
        If file-like, data will be written to f. f should be opened in binary
        mode.
    zero_based : boolean, optional
        Whether column indices should be written zero-based (True) or one-based
        (False).
    comment : string, optional
        Comment to insert at the top of the file. This should be either a
        Unicode string, which will be encoded as UTF-8, or an ASCII byte
        string.
        If a comment is given, then it will be preceded by one that identifies
        the file as having been dumped by scikit-learn. Note that not all
        tools grok comments in SVMlight files.
    query_id : array-like, shape = [n_samples]
        Array containing pairwise preference constraints (qid in svmlight
        format).
    multilabel: boolean, optional
        Samples may have several labels each (see
        http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/multilabel.html)
    """
    if comment is not None:
        # Convert comment string to list of lines in UTF-8.
        # If a byte string is passed, then check whether it's ASCII;
        # if a user wants to get fancy, they'll have to decode themselves.
        # Avoid mention of str and unicode types for Python 3.x compat.
        if isinstance(comment, bytes):
            comment.decode("ascii")     # just for the exception
        else:
            comment = comment.encode("utf-8")
        if six.b("\0") in comment:
            raise ValueError("comment string contains NUL byte")
    y = np.asarray(y)
    if y.ndim != 1 and not multilabel:
        raise ValueError("expected y of shape (n_samples,), got %r"
                         % (y.shape,))
    Xval = check_array(X, accept_sparse='csr')
    if Xval.shape[0] != y.shape[0]:
        raise ValueError("X.shape[0] and y.shape[0] should be the same, got"
                         " %r and %r instead." % (Xval.shape[0], y.shape[0]))
    # We had some issues with CSR matrices with unsorted indices (e.g. #1501),
    # so sort them here, but first make sure we don't modify the user's X.
    # TODO We can do this cheaper; sorted_indices copies the whole matrix.
    if Xval is X and hasattr(Xval, "sorted_indices"):
        X = Xval.sorted_indices()
    else:
        X = Xval
        if hasattr(X, "sort_indices"):
            X.sort_indices()
    if query_id is not None:
        # qid values must align one-to-one with the samples.
        query_id = np.asarray(query_id)
        if query_id.shape[0] != y.shape[0]:
            raise ValueError("expected query_id of shape (n_samples,), got %r"
                             % (query_id.shape,))
    one_based = not zero_based
    if hasattr(f, "write"):
        _dump_svmlight(X, y, f, multilabel, one_based, comment, query_id)
    else:
        with open(f, "wb") as f:
            _dump_svmlight(X, y, f, multilabel, one_based, comment, query_id)
| bsd-3-clause |
dongguangming/pattern | pattern/web/cache/__init__.py | 21 | 4199 | #### PATTERN | CACHE ###############################################################################
# Copyright (c) 2010 University of Antwerp, Belgium
# Author: Tom De Smedt <tom@organisms.be>
# License: BSD (see LICENSE.txt for details).
# http://www.clips.ua.ac.be/pages/pattern
try:
    # Preferred: hashlib (Python 2.5+).
    import hashlib; md5=hashlib.md5
except ImportError:
    # Fallback for very old interpreters that ship the standalone md5
    # module. Catch only ImportError: the original bare "except:" would
    # also have swallowed KeyboardInterrupt/SystemExit.
    import md5; md5=md5.new
#### UNICODE #######################################################################################
def decode_string(v, encoding="utf-8"):
    """ Returns the given value as a Unicode string (if possible).
        Tries the given encoding, then windows-1252, then a lossy utf-8
        decode; returns v unchanged if every attempt fails.
    """
    if isinstance(encoding, basestring):
        encoding = ((encoding,),) + (("windows-1252",), ("utf-8", "ignore"))
    if isinstance(v, str):
        for e in encoding:
            try:
                return v.decode(*e)
            # Narrowed from a bare "except:", which would also swallow
            # KeyboardInterrupt and SystemExit; only codec failures
            # should be ignored while trying the next encoding.
            except Exception:
                pass
        return v
    return unicode(v)
def encode_string(v, encoding="utf-8"):
    """ Returns the given value as a Python byte string (if possible).
        Tries the given encoding, then windows-1252, then a lossy utf-8
        encode; returns v unchanged if every attempt fails.
    """
    if isinstance(encoding, basestring):
        encoding = ((encoding,),) + (("windows-1252",), ("utf-8", "ignore"))
    if isinstance(v, unicode):
        for e in encoding:
            try:
                return v.encode(*e)
            # Narrowed from a bare "except:", which would also swallow
            # KeyboardInterrupt and SystemExit; only codec failures
            # should be ignored while trying the next encoding.
            except Exception:
                pass
        return v
    return str(v)
# Backwards-compatible aliases: callers throughout pattern.web refer to the
# UTF-8 specific names, which are just the generic codecs with defaults.
decode_utf8 = decode_string
encode_utf8 = encode_string
#### CACHE #########################################################################################
# Caching is implemented in URL.download(), which is used by all other downloaders.
import os
import glob
import tempfile
import codecs
import datetime
try:
    # __file__ is undefined in some frozen/embedded interpreters; fall back
    # to an empty module path in that case. Catch only NameError — the
    # original bare "except:" could hide unrelated failures.
    MODULE = os.path.dirname(os.path.realpath(__file__))
except NameError:
    MODULE = ""
# Default location for temporary caches.
TMP = os.path.join(tempfile.gettempdir(), "pattern_web")
def date_now():
    """Return the current local date and time."""
    return datetime.datetime.now()
def date_modified(path):
    """Return the last-modification time of the file at *path*."""
    mtime = os.stat(path)[8]  # index 8 = st_mtime (whole seconds)
    return datetime.datetime.fromtimestamp(mtime)
class Cache(object):
    def __init__(self, path=os.path.join(MODULE, "tmp")):
        """ Cache with data stored as files with hashed filenames.
            Content retrieved from URLs and search engines are stored in cache for performance.
            The path where the cache is stored can be given. This way you can manage persistent
            sets of downloaded data. If path=TMP, cached items are stored in a temporary folder.
        """
        self.path = path
    def _get_path(self):
        return self._path
    def _set_path(self, path):
        # Setting the path creates the folder on the spot if it is missing.
        if not os.path.isdir(path):
            os.makedirs(path)
        self._path = path
    path = property(_get_path, _set_path)
    def _hash(self, k):
        # Map a cache key to an absolute file path with an MD5 filename.
        k = encode_utf8(k) # MD5 works on Python byte strings.
        return os.path.join(self.path, md5(k).hexdigest())
    def __len__(self):
        # Number of cached items = number of files in the cache folder.
        return len(glob.glob(os.path.join(self.path, "*")))
    def __contains__(self, k):
        # An item is cached if its hashed file exists.
        return os.path.exists(self._hash(k))
    def __getitem__(self, k):
        # cache[k] raises KeyError via get() when the item is missing.
        return self.get(k)
    def __setitem__(self, k, v):
        # Stored as UTF-8 bytes with a BOM marker, stripped again on read.
        f = open(self._hash(k), "wb")
        f.write(codecs.BOM_UTF8)
        f.write(encode_utf8(v))
        f.close()
    def __delitem__(self, k):
        # Deleting a missing item is a no-op (mirrors best-effort cleanup).
        try: os.unlink(self._hash(k))
        except OSError:
            pass
    def get(self, k, unicode=True):
        """ Returns the data stored with the given id.
            With unicode=True, returns a Unicode string.
        """
        if k in self:
            f = open(self._hash(k), "rb"); v=f.read().lstrip(codecs.BOM_UTF8)
            f.close()
            if unicode is True:
                return decode_utf8(v)
            else:
                return v
        raise KeyError(k)
    def age(self, k):
        """ Returns the age of the cached item, in days.
        """
        p = self._hash(k)
        return os.path.exists(p) and (date_now() - date_modified(p)).days or 0
    def clear(self, age=None):
        """ Clears all items from the cache (whose age is the given amount of days or older).
        """
        n = date_now()
        for p in glob.glob(os.path.join(self.path, "*")):
            if age is None or (n - date_modified(p)).days >= age:
                os.unlink(p)
cache = Cache() | bsd-3-clause |
JerryXia/fastgoagent | goagent/server/uploader/yaml/parser.py | 117 | 25554 |
# The following YAML grammar is LL(1) and is parsed by a recursive descent
# parser.
#
# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
# block_node_or_indentless_sequence ::=
# ALIAS
# | properties (block_content | indentless_block_sequence)?
# | block_content
# | indentless_block_sequence
# block_node ::= ALIAS
# | properties block_content?
# | block_content
# flow_node ::= ALIAS
# | properties flow_content?
# | flow_content
# properties ::= TAG ANCHOR? | ANCHOR TAG?
# block_content ::= block_collection | flow_collection | SCALAR
# flow_content ::= flow_collection | SCALAR
# block_collection ::= block_sequence | block_mapping
# flow_collection ::= flow_sequence | flow_mapping
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
# block_mapping ::= BLOCK-MAPPING_START
# ((KEY block_node_or_indentless_sequence?)?
# (VALUE block_node_or_indentless_sequence?)?)*
# BLOCK-END
# flow_sequence ::= FLOW-SEQUENCE-START
# (flow_sequence_entry FLOW-ENTRY)*
# flow_sequence_entry?
# FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
# flow_mapping ::= FLOW-MAPPING-START
# (flow_mapping_entry FLOW-ENTRY)*
# flow_mapping_entry?
# FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# FIRST sets:
#
# stream: { STREAM-START }
# explicit_document: { DIRECTIVE DOCUMENT-START }
# implicit_document: FIRST(block_node)
# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_sequence: { BLOCK-SEQUENCE-START }
# block_mapping: { BLOCK-MAPPING-START }
# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
# indentless_sequence: { ENTRY }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_sequence: { FLOW-SEQUENCE-START }
# flow_mapping: { FLOW-MAPPING-START }
# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
__all__ = ['Parser', 'ParserError']
from error import MarkedYAMLError
from tokens import *
from events import *
from scanner import *
class ParserError(MarkedYAMLError):
    """Raised when the token stream violates the YAML grammar; inherits
    context/problem mark reporting from MarkedYAMLError."""
    pass
class Parser(object):
    """Recursive-descent parser turning the Scanner's token stream into
    parsing events, following the LL(1) grammar in the comment block at the
    top of this module."""
    # Since writing a recursive-descendant parser is a straightforward task, we
    # do not give many comments here.
    # Note that we use Python generators. If you rewrite the parser in another
    # language, you may replace all 'yield'-s with event handler calls.
    # Default tag handle resolutions, used when a document declares no
    # %TAG directives of its own.
    DEFAULT_TAGS = {
        u'!':   u'!',
        u'!!':  u'tag:yaml.org,2002:',
    }
def __init__(self):
self.current_event = None
self.yaml_version = None
self.tag_handles = {}
self.states = []
self.marks = []
self.state = self.parse_stream_start
def check_event(self, *choices):
# Check the type of the next event.
if self.current_event is None:
if self.state:
self.current_event = self.state()
if self.current_event is not None:
if not choices:
return True
for choice in choices:
if isinstance(self.current_event, choice):
return True
return False
def peek_event(self):
# Get the next event.
if self.current_event is None:
if self.state:
self.current_event = self.state()
return self.current_event
def get_event(self):
# Get the next event and proceed further.
if self.current_event is None:
if self.state:
self.current_event = self.state()
value = self.current_event
self.current_event = None
return value
# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
# implicit_document ::= block_node DOCUMENT-END*
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
def parse_stream_start(self):
# Parse the stream start.
token = self.get_token()
event = StreamStartEvent(token.start_mark, token.end_mark,
encoding=token.encoding)
# Prepare the next state.
self.state = self.parse_implicit_document_start
return event
    def parse_implicit_document_start(self):
        # Parse an implicit document.
        # An implicit document has no '---' marker: emit a zero-width
        # DocumentStartEvent and go straight to parsing the root node.
        if not self.check_token(DirectiveToken, DocumentStartToken,
                StreamEndToken):
            self.tag_handles = self.DEFAULT_TAGS
            token = self.peek_token()
            # Zero-width event: start and end marks coincide.
            start_mark = end_mark = token.start_mark
            event = DocumentStartEvent(start_mark, end_mark,
                    explicit=False)
            # Prepare the next state.
            self.states.append(self.parse_document_end)
            self.state = self.parse_block_node
            return event
        else:
            return self.parse_document_start()
    def parse_document_start(self):
        """Parse an explicit document start ('---' possibly preceded by
        directives) or, at stream end, emit the final StreamEndEvent."""
        # Parse any extra document end indicators.
        while self.check_token(DocumentEndToken):
            self.get_token()

        # Parse an explicit document.
        if not self.check_token(StreamEndToken):
            token = self.peek_token()
            start_mark = token.start_mark
            version, tags = self.process_directives()
            if not self.check_token(DocumentStartToken):
                raise ParserError(None, None,
                        "expected '<document start>', but found %r"
                        % self.peek_token().id,
                        self.peek_token().start_mark)
            token = self.get_token()
            end_mark = token.end_mark
            event = DocumentStartEvent(start_mark, end_mark,
                    explicit=True, version=version, tags=tags)
            self.states.append(self.parse_document_end)
            self.state = self.parse_document_content
        else:
            # Parse the end of the stream.
            token = self.get_token()
            event = StreamEndEvent(token.start_mark, token.end_mark)
            # At stream end both stacks must have been fully unwound.
            assert not self.states
            assert not self.marks
            self.state = None
        return event
    def parse_document_end(self):
        """Emit DocumentEndEvent; explicit only when a '...' token is
        actually present."""
        # Parse the document end.
        token = self.peek_token()
        start_mark = end_mark = token.start_mark
        explicit = False
        if self.check_token(DocumentEndToken):
            token = self.get_token()
            end_mark = token.end_mark
            explicit = True
        event = DocumentEndEvent(start_mark, end_mark,
                explicit=explicit)

        # Prepare the next state.
        self.state = self.parse_document_start

        return event
    def parse_document_content(self):
        """Parse the document body; a document with no content yields an
        empty scalar."""
        if self.check_token(DirectiveToken,
                DocumentStartToken, DocumentEndToken, StreamEndToken):
            event = self.process_empty_scalar(self.peek_token().start_mark)
            self.state = self.states.pop()
            return event
        else:
            return self.parse_block_node()
    def process_directives(self):
        """Consume %YAML and %TAG directive tokens.

        Returns a (version, tags) pair for the DocumentStartEvent, where
        ``tags`` is a copy of the explicit handles or None if there were
        none.  Afterwards the default tag handles are merged in for use by
        parse_node().  Raises ParserError on duplicate directives/handles
        or an unsupported YAML major version.
        """
        self.yaml_version = None
        self.tag_handles = {}
        while self.check_token(DirectiveToken):
            token = self.get_token()
            if token.name == u'YAML':
                if self.yaml_version is not None:
                    raise ParserError(None, None,
                            "found duplicate YAML directive", token.start_mark)
                major, minor = token.value
                if major != 1:
                    raise ParserError(None, None,
                            "found incompatible YAML document (version 1.* is required)",
                            token.start_mark)
                self.yaml_version = token.value
            elif token.name == u'TAG':
                handle, prefix = token.value
                if handle in self.tag_handles:
                    raise ParserError(None, None,
                            "duplicate tag handle %r" % handle.encode('utf-8'),
                            token.start_mark)
                self.tag_handles[handle] = prefix
        # Snapshot only the explicitly declared handles for the event.
        if self.tag_handles:
            value = self.yaml_version, self.tag_handles.copy()
        else:
            value = self.yaml_version, None
        # Fill in the defaults without overriding explicit declarations.
        for key in self.DEFAULT_TAGS:
            if key not in self.tag_handles:
                self.tag_handles[key] = self.DEFAULT_TAGS[key]
        return value
# block_node_or_indentless_sequence ::= ALIAS
# | properties (block_content | indentless_block_sequence)?
# | block_content
# | indentless_block_sequence
# block_node ::= ALIAS
# | properties block_content?
# | block_content
# flow_node ::= ALIAS
# | properties flow_content?
# | flow_content
# properties ::= TAG ANCHOR? | ANCHOR TAG?
# block_content ::= block_collection | flow_collection | SCALAR
# flow_content ::= flow_collection | SCALAR
# block_collection ::= block_sequence | block_mapping
# flow_collection ::= flow_sequence | flow_mapping
    def parse_block_node(self):
        """Parse a node in block context."""
        return self.parse_node(block=True)
    def parse_flow_node(self):
        """Parse a node in flow context."""
        return self.parse_node()
    def parse_block_node_or_indentless_sequence(self):
        """Parse a block node, also allowing an indentless '-' sequence
        (used for mapping values)."""
        return self.parse_node(block=True, indentless_sequence=True)
    def parse_node(self, block=False, indentless_sequence=False):
        """Parse a single node: alias, scalar, sequence or mapping.

        ``block`` permits block collections; ``indentless_sequence``
        additionally permits a '-' sequence with no extra indentation.
        Emits the corresponding start event (or AliasEvent/ScalarEvent)
        and sets self.state to continue the chosen production.
        """
        if self.check_token(AliasToken):
            token = self.get_token()
            event = AliasEvent(token.value, token.start_mark, token.end_mark)
            self.state = self.states.pop()
        else:
            anchor = None
            tag = None
            start_mark = end_mark = tag_mark = None
            # Node properties may appear as ANCHOR TAG? or TAG ANCHOR?.
            if self.check_token(AnchorToken):
                token = self.get_token()
                start_mark = token.start_mark
                end_mark = token.end_mark
                anchor = token.value
                if self.check_token(TagToken):
                    token = self.get_token()
                    tag_mark = token.start_mark
                    end_mark = token.end_mark
                    tag = token.value
            elif self.check_token(TagToken):
                token = self.get_token()
                start_mark = tag_mark = token.start_mark
                end_mark = token.end_mark
                tag = token.value
                if self.check_token(AnchorToken):
                    token = self.get_token()
                    end_mark = token.end_mark
                    anchor = token.value
            if tag is not None:
                # Resolve the (handle, suffix) pair against the directives
                # currently in effect.
                handle, suffix = tag
                if handle is not None:
                    if handle not in self.tag_handles:
                        raise ParserError("while parsing a node", start_mark,
                                "found undefined tag handle %r" % handle.encode('utf-8'),
                                tag_mark)
                    tag = self.tag_handles[handle]+suffix
                else:
                    tag = suffix
            #if tag == u'!':
            #    raise ParserError("while parsing a node", start_mark,
            #            "found non-specific tag '!'", tag_mark,
            #            "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.")
            if start_mark is None:
                start_mark = end_mark = self.peek_token().start_mark
            event = None
            implicit = (tag is None or tag == u'!')
            if indentless_sequence and self.check_token(BlockEntryToken):
                end_mark = self.peek_token().end_mark
                event = SequenceStartEvent(anchor, tag, implicit,
                        start_mark, end_mark)
                self.state = self.parse_indentless_sequence_entry
            else:
                if self.check_token(ScalarToken):
                    token = self.get_token()
                    end_mark = token.end_mark
                    # Implicit-resolution flags: (plain-implicit,
                    # quoted-implicit).
                    if (token.plain and tag is None) or tag == u'!':
                        implicit = (True, False)
                    elif tag is None:
                        implicit = (False, True)
                    else:
                        implicit = (False, False)
                    event = ScalarEvent(anchor, tag, implicit, token.value,
                            start_mark, end_mark, style=token.style)
                    self.state = self.states.pop()
                elif self.check_token(FlowSequenceStartToken):
                    end_mark = self.peek_token().end_mark
                    event = SequenceStartEvent(anchor, tag, implicit,
                            start_mark, end_mark, flow_style=True)
                    self.state = self.parse_flow_sequence_first_entry
                elif self.check_token(FlowMappingStartToken):
                    end_mark = self.peek_token().end_mark
                    event = MappingStartEvent(anchor, tag, implicit,
                            start_mark, end_mark, flow_style=True)
                    self.state = self.parse_flow_mapping_first_key
                elif block and self.check_token(BlockSequenceStartToken):
                    end_mark = self.peek_token().start_mark
                    event = SequenceStartEvent(anchor, tag, implicit,
                            start_mark, end_mark, flow_style=False)
                    self.state = self.parse_block_sequence_first_entry
                elif block and self.check_token(BlockMappingStartToken):
                    end_mark = self.peek_token().start_mark
                    event = MappingStartEvent(anchor, tag, implicit,
                            start_mark, end_mark, flow_style=False)
                    self.state = self.parse_block_mapping_first_key
                elif anchor is not None or tag is not None:
                    # Empty scalars are allowed even if a tag or an anchor is
                    # specified.
                    event = ScalarEvent(anchor, tag, (implicit, False), u'',
                            start_mark, end_mark)
                    self.state = self.states.pop()
                else:
                    if block:
                        node = 'block'
                    else:
                        node = 'flow'
                    token = self.peek_token()
                    raise ParserError("while parsing a %s node" % node, start_mark,
                            "expected the node content, but found %r" % token.id,
                            token.start_mark)
        return event
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
    def parse_block_sequence_first_entry(self):
        """Consume BLOCK-SEQUENCE-START, remember its mark for error
        reporting, then parse the first entry."""
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_block_sequence_entry()
    def parse_block_sequence_entry(self):
        """Parse one '-' entry of a block sequence, or emit
        SequenceEndEvent at BLOCK-END."""
        if self.check_token(BlockEntryToken):
            token = self.get_token()
            if not self.check_token(BlockEntryToken, BlockEndToken):
                self.states.append(self.parse_block_sequence_entry)
                return self.parse_block_node()
            else:
                # '-' with no node: the entry is an empty scalar.
                self.state = self.parse_block_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        if not self.check_token(BlockEndToken):
            token = self.peek_token()
            raise ParserError("while parsing a block collection", self.marks[-1],
                    "expected <block end>, but found %r" % token.id, token.start_mark)
        token = self.get_token()
        event = SequenceEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event
# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
    def parse_indentless_sequence_entry(self):
        """Parse one '-' entry of an indentless sequence; the sequence ends
        at the first token that is not a BLOCK-ENTRY (no BLOCK-END token
        exists for indentless sequences)."""
        if self.check_token(BlockEntryToken):
            token = self.get_token()
            if not self.check_token(BlockEntryToken,
                    KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_indentless_sequence_entry)
                return self.parse_block_node()
            else:
                self.state = self.parse_indentless_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        token = self.peek_token()
        # Zero-width end event at the position of the terminating token.
        event = SequenceEndEvent(token.start_mark, token.start_mark)
        self.state = self.states.pop()
        return event
# block_mapping ::= BLOCK-MAPPING_START
# ((KEY block_node_or_indentless_sequence?)?
# (VALUE block_node_or_indentless_sequence?)?)*
# BLOCK-END
    def parse_block_mapping_first_key(self):
        """Consume BLOCK-MAPPING-START, remember its mark for error
        reporting, then parse the first key."""
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_block_mapping_key()
    def parse_block_mapping_key(self):
        """Parse a '?' key of a block mapping, or emit MappingEndEvent at
        BLOCK-END."""
        if self.check_token(KeyToken):
            token = self.get_token()
            if not self.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_value)
                return self.parse_block_node_or_indentless_sequence()
            else:
                # '?' with no node: the key is an empty scalar.
                self.state = self.parse_block_mapping_value
                return self.process_empty_scalar(token.end_mark)
        if not self.check_token(BlockEndToken):
            token = self.peek_token()
            raise ParserError("while parsing a block mapping", self.marks[-1],
                    "expected <block end>, but found %r" % token.id, token.start_mark)
        token = self.get_token()
        event = MappingEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event
    def parse_block_mapping_value(self):
        """Parse the ':' value of a block mapping entry; a missing ':' or
        missing node yields an empty scalar."""
        if self.check_token(ValueToken):
            token = self.get_token()
            if not self.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_key)
                return self.parse_block_node_or_indentless_sequence()
            else:
                self.state = self.parse_block_mapping_key
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_block_mapping_key
            token = self.peek_token()
            return self.process_empty_scalar(token.start_mark)
# flow_sequence ::= FLOW-SEQUENCE-START
# (flow_sequence_entry FLOW-ENTRY)*
# flow_sequence_entry?
# FLOW-SEQUENCE-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# Note that while production rules for both flow_sequence_entry and
# flow_mapping_entry are equal, their interpretations are different.
# For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
# generate an inline mapping (set syntax).
    def parse_flow_sequence_first_entry(self):
        """Consume FLOW-SEQUENCE-START ('['), remember its mark, then
        parse the first entry."""
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_flow_sequence_entry(first=True)
    def parse_flow_sequence_entry(self, first=False):
        """Parse one entry of a flow sequence, or emit SequenceEndEvent
        at ']'.  A KEY token starts an inline single-pair mapping."""
        if not self.check_token(FlowSequenceEndToken):
            if not first:
                # Entries after the first must be preceded by ','.
                if self.check_token(FlowEntryToken):
                    self.get_token()
                else:
                    token = self.peek_token()
                    raise ParserError("while parsing a flow sequence", self.marks[-1],
                            "expected ',' or ']', but got %r" % token.id, token.start_mark)

            if self.check_token(KeyToken):
                token = self.peek_token()
                event = MappingStartEvent(None, None, True,
                        token.start_mark, token.end_mark,
                        flow_style=True)
                self.state = self.parse_flow_sequence_entry_mapping_key
                return event
            elif not self.check_token(FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry)
                return self.parse_flow_node()
        token = self.get_token()
        event = SequenceEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event
    def parse_flow_sequence_entry_mapping_key(self):
        """Parse the key of an inline mapping inside a flow sequence;
        a missing key node yields an empty scalar."""
        token = self.get_token()
        if not self.check_token(ValueToken,
                FlowEntryToken, FlowSequenceEndToken):
            self.states.append(self.parse_flow_sequence_entry_mapping_value)
            return self.parse_flow_node()
        else:
            self.state = self.parse_flow_sequence_entry_mapping_value
            return self.process_empty_scalar(token.end_mark)
    def parse_flow_sequence_entry_mapping_value(self):
        """Parse the value of an inline mapping inside a flow sequence;
        a missing ':' or missing node yields an empty scalar."""
        if self.check_token(ValueToken):
            token = self.get_token()
            if not self.check_token(FlowEntryToken, FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry_mapping_end)
                return self.parse_flow_node()
            else:
                self.state = self.parse_flow_sequence_entry_mapping_end
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_flow_sequence_entry_mapping_end
            token = self.peek_token()
            return self.process_empty_scalar(token.start_mark)
    def parse_flow_sequence_entry_mapping_end(self):
        """Close the single-pair inline mapping (zero-width end event)
        and resume parsing flow sequence entries."""
        self.state = self.parse_flow_sequence_entry
        token = self.peek_token()
        return MappingEndEvent(token.start_mark, token.start_mark)
# flow_mapping ::= FLOW-MAPPING-START
# (flow_mapping_entry FLOW-ENTRY)*
# flow_mapping_entry?
# FLOW-MAPPING-END
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
    def parse_flow_mapping_first_key(self):
        """Consume FLOW-MAPPING-START ('{'), remember its mark, then
        parse the first key."""
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_flow_mapping_key(first=True)
    def parse_flow_mapping_key(self, first=False):
        """Parse one key of a flow mapping, or emit MappingEndEvent at
        '}'.  A bare node (no '?') is treated as a key with empty value."""
        if not self.check_token(FlowMappingEndToken):
            if not first:
                # Entries after the first must be preceded by ','.
                if self.check_token(FlowEntryToken):
                    self.get_token()
                else:
                    token = self.peek_token()
                    raise ParserError("while parsing a flow mapping", self.marks[-1],
                            "expected ',' or '}', but got %r" % token.id, token.start_mark)
            if self.check_token(KeyToken):
                token = self.get_token()
                if not self.check_token(ValueToken,
                        FlowEntryToken, FlowMappingEndToken):
                    self.states.append(self.parse_flow_mapping_value)
                    return self.parse_flow_node()
                else:
                    self.state = self.parse_flow_mapping_value
                    return self.process_empty_scalar(token.end_mark)
            elif not self.check_token(FlowMappingEndToken):
                self.states.append(self.parse_flow_mapping_empty_value)
                return self.parse_flow_node()
        token = self.get_token()
        event = MappingEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event
    def parse_flow_mapping_value(self):
        """Parse the ':' value of a flow mapping entry; a missing ':' or
        missing node yields an empty scalar."""
        if self.check_token(ValueToken):
            token = self.get_token()
            if not self.check_token(FlowEntryToken, FlowMappingEndToken):
                self.states.append(self.parse_flow_mapping_key)
                return self.parse_flow_node()
            else:
                self.state = self.parse_flow_mapping_key
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_flow_mapping_key
            token = self.peek_token()
            return self.process_empty_scalar(token.start_mark)
    def parse_flow_mapping_empty_value(self):
        """Emit the empty-scalar value for a key given without ':'."""
        self.state = self.parse_flow_mapping_key
        return self.process_empty_scalar(self.peek_token().start_mark)
    def process_empty_scalar(self, mark):
        """Return a zero-length plain ScalarEvent at *mark*, used wherever
        the grammar allows a node to be omitted."""
        return ScalarEvent(None, None, (True, False), u'', mark, mark)
| mit |
jvenezia/heroku-buildpack-python | vendor/pip-pop/pip/_vendor/html5lib/filters/inject_meta_charset.py | 1730 | 2746 | from __future__ import absolute_import, division, unicode_literals
from . import _base
class Filter(_base.Filter):
    """Token-stream filter that injects or rewrites a <meta charset> tag so
    the serialized document declares ``encoding``.

    If ``encoding`` is None the stream is passed through unchanged.
    """
    def __init__(self, source, encoding):
        _base.Filter.__init__(self, source)
        self.encoding = encoding

    def __iter__(self):
        # States: "pre_head" -> "in_head" -> "post_head".  While in_head,
        # tokens are buffered in ``pending`` and flushed at </head> so a
        # meta tag can be inserted first if none was found.
        state = "pre_head"
        meta_found = (self.encoding is None)
        pending = []

        for token in _base.Filter.__iter__(self):
            type = token["type"]
            if type == "StartTag":
                if token["name"].lower() == "head":
                    state = "in_head"

            elif type == "EmptyTag":
                if token["name"].lower() == "meta":
                    # replace charset with actual encoding
                    has_http_equiv_content_type = False
                    for (namespace, name), value in token["data"].items():
                        if namespace is not None:
                            continue
                        elif name.lower() == 'charset':
                            token["data"][(namespace, name)] = self.encoding
                            meta_found = True
                            break
                        elif name == 'http-equiv' and value.lower() == 'content-type':
                            has_http_equiv_content_type = True
                    else:
                        # No charset attribute: rewrite the content attribute
                        # of a http-equiv="content-type" meta instead.
                        if has_http_equiv_content_type and (None, "content") in token["data"]:
                            token["data"][(None, "content")] = 'text/html; charset=%s' % self.encoding
                            meta_found = True

                elif token["name"].lower() == "head" and not meta_found:
                    # insert meta into empty head
                    yield {"type": "StartTag", "name": "head",
                           "data": token["data"]}
                    yield {"type": "EmptyTag", "name": "meta",
                           "data": {(None, "charset"): self.encoding}}
                    yield {"type": "EndTag", "name": "head"}
                    meta_found = True
                    continue

            elif type == "EndTag":
                if token["name"].lower() == "head" and pending:
                    # insert meta into head (if necessary) and flush pending queue
                    yield pending.pop(0)
                    if not meta_found:
                        yield {"type": "EmptyTag", "name": "meta",
                               "data": {(None, "charset"): self.encoding}}
                    while pending:
                        yield pending.pop(0)
                    meta_found = True
                    state = "post_head"

            if state == "in_head":
                pending.append(token)
            else:
                yield token
| mit |
dennisobrien/bokeh | bokeh/io/__init__.py | 2 | 2362 | #-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2018, Anaconda, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
'''
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
# External Imports
# Bokeh imports
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
# Public API of bokeh.io.  ``reset_output`` is imported below as part of the
# public API, so it must be listed here as well (it was previously missing,
# which broke ``from bokeh.io import *`` and API introspection for it).
__all__ = (
    'curdoc',
    'export_png',
    'export_svgs',
    'install_notebook_hook',
    'push_notebook',
    'output_file',
    'output_notebook',
    'reset_output',
    'save',
    'show',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
from .doc import curdoc
from .export import export_png
from .export import export_svgs
from .notebook import install_notebook_hook
from .notebook import push_notebook
from .output import output_file
from .output import output_notebook
from .output import reset_output
from .saving import save
from .showing import show
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
# Install the Jupyter notebook hooks once at import time, then remove the
# name so it does not leak into the public module namespace.
from .notebook import install_jupyter_hooks
install_jupyter_hooks()
del install_jupyter_hooks
| bsd-3-clause |
Ayi-/tornado_blog | views/index.py | 1 | 2211 | #-*- coding:utf-8 -*-
import sys
from views.flash.flash import FlashHandler
reload(sys)
sys.setdefaultencoding("utf8")
from tornado.options import define,options
import tornado.web
from tornado import ioloop, gen
from views.util import dictMerge
from settings import settings as config
__author__ = 'AE'
class BaseHandler(tornado.web.RequestHandler,FlashHandler):
    """Common base handler: loads the flash-message state, the logged-in
    user from the secure cookie, and keeps the DB connection alive."""
    # def get_current_user(self):
    #     return self.get_secure_cookie("user")
    # http://www.tornadoweb.org/en/stable/web.html#tornado.web.RequestHandler.current_user
    @gen.coroutine
    def prepare(self):
        self._flashes=None
        # The flash cookie is cleared after the request is served; it stays
        # readable for the remainder of this request.
        self.clear_cookie('flashes_cookies')
        user = self.get_secure_cookie("user")
        self.current_user =user
        # Re-establish the DB connection if it has gone away.
        self.application.conn.ping(True)
class Index(BaseHandler):
    """Render the landing page."""
    def get(self):
        self.render("resource.html")
class Login(BaseHandler):
    """Log a user in by checking the supplied credentials against the
    ``user`` table and setting the secure auth cookie on success."""
    def get(self):
        if not self.current_user:
            username = self.get_argument('username', None)
            password = self.get_argument('password', None)
            # Bug fix: the original condition tested ``password`` twice and
            # never validated ``username``, so an empty username reached the
            # database query.
            if username not in (None, "") and password not in (None, ""):
                with self.application.conn.cursor() as cursor:
                    # Parameterized query; BINARY makes the comparison
                    # case-sensitive.
                    cursor.execute("select username,email from user where binary username = %s and binary password = %s", (username,password))
                    result = cursor.fetchone()
                    if result:
                        self.set_secure_cookie("user", result.get("username"))
                        self.redirect(self.get_argument("next", "/"))
                        return
                    self.flash(u"用户名或密码错误!")
            else:
                self.flash(u"用户名或密码不能为空!")
        else:
            self.flash(u"不要重复登陆!",'error')
        self.redirect("/")
class Logout(BaseHandler):
    """Log the current user out by clearing the auth cookie."""
    @tornado.web.authenticated
    def get(self):
        self.clear_cookie("user")
        # Removed a leftover debug ``print self.request.uri`` statement.
        self.redirect(self.get_argument("next", "/"))
| mit |
wanglei828/apollo | modules/tools/plot_planning/angular_velocity.py | 1 | 4080 | #!/usr/bin/env python
###############################################################################
# Copyright 2019 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import math
from record_reader import RecordItemReader
class AngularVelocity:
    """Accumulates z-axis angular-velocity samples from localization pose
    messages and keeps a spike-damped ("corrected") copy of the signal.
    """

    def __init__(self):
        # Parallel lists, one entry per call to add().
        self.timestamp_list = []
        self.angular_velocity_list = []
        self.corrected_angular_velocity_list = []
        self.last_corrected_angular_velocity = None
        self.last_timestamp = None

    def add(self, location_est):
        """Record one localization estimate.

        ``location_est`` is a localization message exposing
        ``header.timestamp_sec`` and ``pose.angular_velocity.z``.
        """
        timestamp_sec = location_est.header.timestamp_sec
        angular_velocity = location_est.pose.angular_velocity.z
        # correct_angular_velocity() handles the "no previous sample" case
        # itself, so no separate first-sample branch is needed here.
        corrected = self.correct_angular_velocity(
            angular_velocity, timestamp_sec)

        self.timestamp_list.append(timestamp_sec)
        self.angular_velocity_list.append(angular_velocity)
        self.corrected_angular_velocity_list.append(corrected)

        self.last_corrected_angular_velocity = corrected
        self.last_timestamp = timestamp_sec

    def correct_angular_velocity(self, angular_velocity, timestamp_sec):
        """Damp sudden spikes in the signal.

        If the new value differs from the previous corrected value by more
        than 25% (relative), halve it; otherwise keep it unchanged.
        """
        last = self.last_corrected_angular_velocity
        if last is None:
            return angular_velocity
        if last == 0.0:
            # Bug fix: the original code divided by abs(last) and raised
            # ZeroDivisionError whenever the previous corrected value was
            # exactly 0.  With no non-zero baseline to compare against,
            # accept the new value as-is.
            return angular_velocity
        delta = abs(angular_velocity - last) / abs(last)
        if delta > 0.25:
            return angular_velocity / 2.0
        return angular_velocity

    def get_anglular_velocity_list(self):
        """Return the raw angular-velocity samples."""
        return self.angular_velocity_list

    def get_corrected_anglular_velocity_list(self):
        """Return the spike-damped angular-velocity samples."""
        return self.corrected_angular_velocity_list

    def get_timestamp_list(self):
        """Return the sample timestamps (seconds)."""
        return self.timestamp_list

    def get_latest_angular_velocity(self):
        """Return the most recent raw sample, or None if empty."""
        if not self.angular_velocity_list:
            return None
        return self.angular_velocity_list[-1]

    def get_latest_corrected_angular_velocity(self):
        """Return the most recent corrected sample, or None if empty."""
        if not self.corrected_angular_velocity_list:
            return None
        return self.corrected_angular_velocity_list[-1]

    def get_latest_timestamp(self):
        """Return the most recent timestamp, or None if empty."""
        if not self.timestamp_list:
            return None
        return self.timestamp_list[-1]
if __name__ == "__main__":
    # Plot raw (black '+') vs. spike-corrected (colored 'o') angular
    # velocity for every record file in each folder given on the
    # command line.
    import sys
    import matplotlib.pyplot as plt
    from os import listdir
    from os.path import isfile, join

    folders = sys.argv[1:]
    fig, ax = plt.subplots()
    colors = ["g", "b", "r", "m", "y"]
    markers = ["o", "o", "o", "o"]
    for i in range(len(folders)):
        folder = folders[i]
        color = colors[i % len(colors)]
        marker = markers[i % len(markers)]
        fns = [f for f in listdir(folder) if isfile(join(folder, f))]
        for fn in fns:
            reader = RecordItemReader(folder+"/"+fn)
            processor = AngularVelocity()
            for data in reader.read(["/apollo/localization/pose"]):
                processor.add(data["pose"])

            data_x = processor.get_timestamp_list()
            data_y = processor.get_corrected_anglular_velocity_list()
            ax.scatter(data_x, data_y, c=color, marker=marker, alpha=0.4)

            data_y = processor.get_anglular_velocity_list()
            ax.scatter(data_x, data_y, c='k', marker="+", alpha=0.8)

    plt.show()
nerzhul/ansible | lib/ansible/modules/cloud/rackspace/rax_meta.py | 50 | 5305 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: rax_meta
short_description: Manipulate metadata for Rackspace Cloud Servers
description:
- Manipulate metadata for Rackspace Cloud Servers
version_added: 1.7
options:
address:
description:
- Server IP address to modify metadata for, will match any IP assigned to
the server
id:
description:
- Server ID to modify metadata for
name:
description:
- Server name to modify metadata for
default: null
meta:
description:
- A hash of metadata to associate with the instance
default: null
author: "Matt Martz (@sivel)"
extends_documentation_fragment: rackspace.openstack
'''
EXAMPLES = '''
- name: Set metadata for a server
hosts: all
gather_facts: False
tasks:
- name: Set metadata
local_action:
module: rax_meta
credentials: ~/.raxpub
name: "{{ inventory_hostname }}"
region: DFW
meta:
group: primary_group
groups:
- group_two
- group_three
app: my_app
- name: Clear metadata
local_action:
module: rax_meta
credentials: ~/.raxpub
name: "{{ inventory_hostname }}"
region: DFW
'''
try:
import pyrax
HAS_PYRAX = True
except ImportError:
HAS_PYRAX = False
def rax_meta(module, address, name, server_id, meta):
    """Locate exactly one cloud server (by name regex, IP address, or id)
    and replace its metadata with ``meta``; exits the module via
    ``exit_json``/``fail_json``."""
    changed = False

    cs = pyrax.cloudservers

    if cs is None:
        module.fail_json(msg='Failed to instantiate client. This '
                             'typically indicates an invalid region or an '
                             'incorrectly capitalized region name.')

    search_opts = {}
    if name:
        # Anchored regex so only exact name matches are returned.
        search_opts = dict(name='^%s$' % name)
        try:
            servers = cs.servers.list(search_opts=search_opts)
        except Exception as e:
            module.fail_json(msg='%s' % e.message)
    elif address:
        servers = []
        try:
            # An address matches if it appears on any of the server's networks.
            for server in cs.servers.list():
                for addresses in server.networks.values():
                    if address in addresses:
                        servers.append(server)
                        break
        except Exception as e:
            module.fail_json(msg='%s' % e.message)
    elif server_id:
        servers = []
        try:
            servers.append(cs.servers.get(server_id))
        except Exception as e:
            # Best-effort lookup: an unknown id falls through to the
            # "Failed to find a server" error below.
            pass

    if len(servers) > 1:
        module.fail_json(msg='Multiple servers found matching provided '
                             'search parameters')
    elif not servers:
        module.fail_json(msg='Failed to find a server matching provided '
                             'search parameters')

    # Normalize and ensure all metadata values are strings
    for k, v in meta.items():
        if isinstance(v, list):
            meta[k] = ','.join(['%s' % i for i in v])
        elif isinstance(v, dict):
            meta[k] = json.dumps(v)
        elif not isinstance(v, basestring):
            meta[k] = '%s' % v

    server = servers[0]
    if server.metadata == meta:
        changed = False
    else:
        changed = True
        # Delete keys that are no longer wanted, then set the new mapping.
        removed = set(server.metadata.keys()).difference(meta.keys())
        cs.servers.delete_meta(server, list(removed))
        cs.servers.set_meta(server, meta)
        server.get()

    module.exit_json(changed=changed, meta=server.metadata)
def main():
    """Ansible module entry point: validate arguments (exactly one of
    address/id/name) and delegate to rax_meta()."""
    argument_spec = rax_argument_spec()
    argument_spec.update(
        dict(
            address=dict(),
            id=dict(),
            name=dict(),
            meta=dict(type='dict', default=dict()),
        )
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=rax_required_together(),
        mutually_exclusive=[['address', 'id', 'name']],
        required_one_of=[['address', 'id', 'name']],
    )

    if not HAS_PYRAX:
        module.fail_json(msg='pyrax is required for this module')

    address = module.params.get('address')
    server_id = module.params.get('id')
    name = module.params.get('name')
    meta = module.params.get('meta')

    setup_rax_module(module, pyrax)

    rax_meta(module, address, name, server_id, meta)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *
### invoke the module
if __name__ == '__main__':
main()
| gpl-3.0 |
martindale/cjdns | node_build/dependencies/libuv/build/gyp/test/win/gyptest-link-ordering.py | 225 | 3058 | #!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure the link order of object files is the same between msvs and ninja.
"""
import TestGyp
import sys
if sys.platform == 'win32':
  test = TestGyp.TestGyp(formats=['msvs', 'ninja'])

  CHDIR = 'linker-flags'
  test.run_gyp('link-ordering.gyp', chdir=CHDIR)
  test.build('link-ordering.gyp', test.ALL, chdir=CHDIR)

  def GetDisasm(exe):
    # Return the dumpbin disassembly of the built binary, with int3 (CC)
    # padding lines between functions stripped so the comparison is stable.
    full_path = test.built_file_path(exe, chdir=CHDIR)
    # Get disassembly and drop int3 padding between functions.
    return '\n'.join(
        x for x in test.run_dumpbin('/disasm', full_path).splitlines()
          if 'CC' not in x)

  # This is the full dump that we expect. The source files in the .gyp match
  # this order which is what determines the ordering in the binary.

  expected_disasm_basic = '''
_mainCRTStartup:
  00401000: B8 05 00 00 00     mov         eax,5
  00401005: C3                 ret
?z@@YAHXZ:
  00401010: B8 03 00 00 00     mov         eax,3
  00401015: C3                 ret
?x@@YAHXZ:
  00401020: B8 01 00 00 00     mov         eax,1
  00401025: C3                 ret
?y@@YAHXZ:
  00401030: B8 02 00 00 00     mov         eax,2
  00401035: C3                 ret
_main:
  00401040: 33 C0              xor         eax,eax
  00401042: C3                 ret
'''

  if expected_disasm_basic not in GetDisasm('test_ordering_exe.exe'):
    print GetDisasm('test_ordering_exe.exe')
    test.fail_test()

  # Similar to above. The VS generator handles subdirectories differently.

  expected_disasm_subdirs = '''
_mainCRTStartup:
  00401000: B8 05 00 00 00     mov         eax,5
  00401005: C3                 ret
_main:
  00401010: 33 C0              xor         eax,eax
  00401012: C3                 ret
?y@@YAHXZ:
  00401020: B8 02 00 00 00     mov         eax,2
  00401025: C3                 ret
?z@@YAHXZ:
  00401030: B8 03 00 00 00     mov         eax,3
  00401035: C3                 ret
'''

  if expected_disasm_subdirs not in GetDisasm('test_ordering_subdirs.exe'):
    print GetDisasm('test_ordering_subdirs.exe')
    test.fail_test()

  # Similar, but with directories mixed into folders (crt and main at the same
  # level, but with a subdir in the middle).

  expected_disasm_subdirs_mixed = '''
_mainCRTStartup:
  00401000: B8 05 00 00 00     mov         eax,5
  00401005: C3                 ret
?x@@YAHXZ:
  00401010: B8 01 00 00 00     mov         eax,1
  00401015: C3                 ret
_main:
  00401020: 33 C0              xor         eax,eax
  00401022: C3                 ret
?z@@YAHXZ:
  00401030: B8 03 00 00 00     mov         eax,3
  00401035: C3                 ret
?y@@YAHXZ:
  00401040: B8 02 00 00 00     mov         eax,2
  00401045: C3                 ret
'''

  if (expected_disasm_subdirs_mixed not in
      GetDisasm('test_ordering_subdirs_mixed.exe')):
    print GetDisasm('test_ordering_subdirs_mixed.exe')
    test.fail_test()

  test.pass_test()
| gpl-3.0 |
wkschwartz/django | django/contrib/postgres/forms/hstore.py | 94 | 1766 | import json
from django import forms
from django.core.exceptions import ValidationError
from django.utils.translation import gettext_lazy as _
__all__ = ['HStoreField']
class HStoreField(forms.CharField):
    """Form field for HStore data; accepts a JSON object (dictionary)."""
    widget = forms.Textarea
    default_error_messages = {
        'invalid_json': _('Could not load JSON data.'),
        'invalid_format': _('Input must be a JSON dictionary.'),
    }

    def prepare_value(self, value):
        # Dictionaries are rendered back into the widget as JSON text;
        # anything else is passed through untouched.
        if not isinstance(value, dict):
            return value
        return json.dumps(value)

    def to_python(self, value):
        if not value:
            return {}
        if not isinstance(value, dict):
            try:
                value = json.loads(value)
            except json.JSONDecodeError:
                raise ValidationError(
                    self.error_messages['invalid_json'],
                    code='invalid_json',
                )
        if not isinstance(value, dict):
            raise ValidationError(
                self.error_messages['invalid_format'],
                code='invalid_format',
            )
        # Normalize every non-None value to a string for ease of use.
        for key in value:
            current = value[key]
            value[key] = str(current) if current is not None else None
        return value

    def has_changed(self, initial, data):
        """Return True if data differs from initial, treating None and an
        empty dict as equivalent."""
        return super().has_changed(self.to_python(initial), data)
SUSE/azure-sdk-for-python | azure-mgmt-compute/azure/mgmt/compute/compute/v2016_03_30/models/key_vault_key_reference.py | 2 | 1264 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class KeyVaultKeyReference(Model):
    """Describes a reference to Key Vault Key.

    :param key_url: The URL referencing a key in a Key Vault.
    :type key_url: str
    :param source_vault: The relative URL of the Key Vault containing the key.
    :type source_vault: :class:`SubResource
     <azure.mgmt.compute.compute.v2016_03_30.models.SubResource>`
    """

    # Both fields are mandatory for a valid key reference.
    _validation = {
        'key_url': {'required': True},
        'source_vault': {'required': True},
    }

    # Maps Python attribute names to wire-format keys and types for
    # msrest (de)serialization.
    _attribute_map = {
        'key_url': {'key': 'keyUrl', 'type': 'str'},
        'source_vault': {'key': 'sourceVault', 'type': 'SubResource'},
    }

    def __init__(self, key_url, source_vault):
        # Initialize the msrest Model machinery before assigning our own
        # fields; the original generated code skipped this call.
        super(KeyVaultKeyReference, self).__init__()
        self.key_url = key_url
        self.source_vault = source_vault
| mit |
yipenggao/moose | python/chigger/misc/VolumeAxes.py | 6 | 3779 | #pylint: disable=missing-docstring
#################################################################
# DO NOT MODIFY THIS HEADER #
# MOOSE - Multiphysics Object Oriented Simulation Environment #
# #
# (c) 2010 Battelle Energy Alliance, LLC #
# ALL RIGHTS RESERVED #
# #
# Prepared by Battelle Energy Alliance, LLC #
# Under Contract No. DE-AC07-05ID14517 #
# With the U. S. Department of Energy #
# #
# See COPYRIGHT for full restrictions #
#################################################################
import vtk
import mooseutils
from .. import utils
from .. import base
class VolumeAxes(base.ChiggerResultBase):
    """
    A class for displaying the 3D axis around a volume
    """

    @staticmethod
    def getOptions():
        # NOTE(review): this calls base.ChiggerResult.getOptions() although the
        # class derives from base.ChiggerResultBase -- confirm this is intended.
        opt = base.ChiggerResult.getOptions()
        opt.add('xaxis', VolumeAxes.getAxisActorOptions(), "The x-axis options.")
        opt.add('yaxis', VolumeAxes.getAxisActorOptions(), "The y-axis options.")
        opt.add('zaxis', VolumeAxes.getAxisActorOptions(), "The z-axis options.")
        return opt

    @staticmethod
    def getAxisActorOptions():
        """
        Return the options for a vtkAxis object.
        """
        opt = utils.Options()
        opt.add('color', [1, 1, 1], "The color of the title, text, ticks, and axis line.")
        opt.add('minor_ticks', False, "Enable/disable the minor tick marks.")
        return opt

    def __init__(self, result, **kwargs):
        # Share the renderer of the supplied result so the axes overlay it.
        super(VolumeAxes, self).__init__(renderer=result.getVTKRenderer(), **kwargs)
        # The VTK actor that draws the cube axes; added/removed from the
        # renderer in initialize()/reset().
        self._vtkactor = vtk.vtkCubeAxesActor()
        self._result = result

    def reset(self):
        """
        Remove the vtkCubeAxesActor.
        """
        super(VolumeAxes, self).reset()
        self._vtkrenderer.RemoveViewProp(self._vtkactor)

    def initialize(self):
        """
        Add the actor to renderer.
        """
        super(VolumeAxes, self).initialize()
        self._vtkrenderer.AddViewProp(self._vtkactor)

    def update(self, **kwargs):
        """
        Update vtkCubeAxesActor object to cover extents of result.
        """
        super(VolumeAxes, self).update(**kwargs)

        # Make sure the wrapped result is current before reading its bounds.
        if self._result.needsUpdate():
            self._result.update()

        # Convert the (min-corner, max-corner) tuple into VTK's flat
        # [xmin, xmax, ymin, ymax, zmin, zmax] bounds layout.
        xmin, xmax = utils.get_bounds(*self._result.getSources())
        bnds = [xmin[0], xmax[0], xmin[1], xmax[1], xmin[2], xmax[2]]
        self._vtkactor.SetBounds(*bnds)
        self._vtkactor.SetCamera(self._vtkrenderer.GetActiveCamera())

        # Apply per-axis color/tick options.
        self.__updateAxisOptions('x')
        self.__updateAxisOptions('y')
        self.__updateAxisOptions('z')
        self._vtkactor.SetGridLineLocation(vtk.vtkCubeAxesActor.VTK_GRID_LINES_FURTHEST)

    def __updateAxisOptions(self, axis):
        """
        Helper for updating Axis level settings.

        Inputs:
            axis[str]: One of 'x', 'y', or 'z'.
        """
        if axis not in ['x', 'y', 'z']:
            mooseutils.mooseError("Must provide 'x', 'y', or 'z'.")
            return

        opt = self.getOption(axis + 'axis')
        color = opt['color']
        # Title/label properties are indexed by axis component (0, 1, 2).
        comp = ['x', 'y', 'z'].index(axis)
        self._vtkactor.GetTitleTextProperty(comp).SetColor(*color)
        self._vtkactor.GetLabelTextProperty(comp).SetColor(*color)

        # Minor-tick visibility and axis-line color are exposed via
        # per-axis named methods, so look them up dynamically.
        func = getattr(self._vtkactor, 'Set{}AxisMinorTickVisibility'.format(axis.upper()))
        func(opt['minor_ticks'])

        func = getattr(self._vtkactor, 'Get{}AxesLinesProperty'.format(axis.upper()))
        func().SetColor(*color)
| lgpl-2.1 |
killswitch-GUI/SimplyTemplate | Modules/Internal/Agency/GovBudgetReduction.py | 1 | 5820 | #!/usr/bin/python
import os
# Class funcs will have:
# -Name
# -Author
# -Sophistication
# -Info
# Required Options for the class
class TemplateModule:
    """Phishing email template: government budget reduction notice.

    Exposes template metadata plus two entry points:
      * Generate(filename, location) -- render and write the email to disk.
      * Render()                     -- return the rendered email as a string.

    Both share the same substitution pipeline (text blocks first, then
    option placeholders, so placeholders embedded in text blocks are
    also resolved).
    """

    def __init__(self):
        # Meta tags for file name and such:
        self.OutputName = "GovBudgetReduction.html"
        self.RenderName = "GovBudgetReduction.html"
        self.CoreOptions = "[Html, Link]"
        # Required for each class:
        self.Name = "Management email on sequestration of the Gov."
        self.Author = "Killswitch-GUI"
        self.Type = "Html"
        self.Info = """A template focusing on long term financial issues and how to avoid sequestration. This template will be a good fit for must State/Gov orgs."""
        self.Sophistication = "Medium"
        self.SampleImage = 'Modules/Sample/GovBudgetReduction.png'
        self.TemplatePath = 'Modules/EmailTemplates/GovBudgetReduction.email'
        # Required options for itself:
        # value format: [current value, help text]
        self.RequiredOptions = {
            "FromEmail": ["noreply@agency.com", "From Email"],
            "FromName": ["Alex Jason", "From Full Name"],
            "TodaysDate": ["Jan 1, 2016", "Set the proper phish date"],
            "TargetCompany": ["Cyber Power", "Set the Target Company Full Name"],
            "TargetLink": ["%URL%", "The full path link"],
        }
        self.RequiredText = {
            "TextBlock1": ["""In the coming weeks our nations leadership will be working to draft a plan to prevent long term financial issues and how to avoid the "sequestration" that we have all heard about for the past few months. All departments within the US Government are being directed to draft plans to help meet the projected budget shortfalls and find ways to reducing spending within the US Government.""", "Paragraph one"],
            "TextBlock2": ["As a result the TARGET_COMP_NAME has developed a plan that will reduce the Information Technology budget by 25% over the next 18 months. This budget will include a reduction of current staff levels and also place a hiring freeze on new hires for the next 2 years. Current contractor staff will also be reduced by 20% to help drive a more lean workforce.", "Paragraph two"],
            "TextBlock3": ["The country has asked us all to learn to work more efficiently and do more with less. As a result many budgets and programs are also facing significant reductions. The senior management within TARGET_COMP_NAME will work with their teams to ensure a smooth transition process and will do all they can to help reduce the stress and uncertainties that come with any significant changes such as this.", "Paragraph three"],
            "TextBlock4": ["To read more about the budget plan for TODAYS_DATE please see the following page which outlines how this plan will be implemented and the projected time frames for the transitions.", "Ending Paragraph"],
            "TextBlock5": ["TARGET_COMP_NAME Management Team", "Signature Block"]
        }

    def _apply_substitutions(self, text):
        """Apply text-block substitutions, then option substitutions.

        The order matters: the text blocks may themselves contain option
        placeholders (e.g. TARGET_COMP_NAME), which are resolved by the
        second pass.
        """
        text_blocks = {
            'TEXT_BLOCK_1': self.RequiredText["TextBlock1"][0],
            'TEXT_BLOCK_2': self.RequiredText["TextBlock2"][0],
            'TEXT_BLOCK_3': self.RequiredText["TextBlock3"][0],
            'TEXT_BLOCK_4': self.RequiredText["TextBlock4"][0],
            'TEXT_BLOCK_5': self.RequiredText["TextBlock5"][0],
        }
        options = {
            'FROM_EMAIL': self.RequiredOptions["FromEmail"][0],
            'FROM_NAME': self.RequiredOptions["FromName"][0],
            'TARGET_LINK': self.RequiredOptions["TargetLink"][0],
            'TARGET_COMP_NAME': self.RequiredOptions["TargetCompany"][0],
            'TODAYS_DATE': self.RequiredOptions["TodaysDate"][0],
        }
        for placeholder, value in text_blocks.items():
            text = text.replace(placeholder, value)
        for placeholder, value in options.items():
            text = text.replace(placeholder, value)
        return text

    def _read_template(self):
        """Read the raw email template from disk."""
        with open(self.TemplatePath, 'r') as template_email:
            return template_email.read()

    def Generate(self, filename, location, Verbose=False):
        """Render the template and write it to ``filename + location``.

        Inputs:
            filename -- output file name prefix.
            location -- output path suffix (concatenated to filename,
                        preserving the original calling convention).
            Verbose  -- unused; kept for interface compatibility.
        """
        output_email = self._apply_substitutions(self._read_template())
        write_path = str(filename) + str(location)
        try:
            # `with` guarantees the file is flushed and closed; the
            # original code called `f.close` without parentheses, so the
            # file was never explicitly closed.
            with open(write_path, 'w') as out_file:
                out_file.write(output_email)
        except Exception as e:
            print(e)

    def Render(self, Verbose=False):
        """Return the fully rendered email body as a string.

        Inputs:
            Verbose -- unused; kept for interface compatibility.
        """
        return self._apply_substitutions(self._read_template())
| gpl-2.0 |
MiLk/ansible | lib/ansible/galaxy/role.py | 33 | 15332 | ########################################################################
#
# (C) 2015, Brian Coca <bcoca@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
########################################################################
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import datetime
import os
import tarfile
import tempfile
import yaml
from distutils.version import LooseVersion
from shutil import rmtree
from ansible.errors import AnsibleError
from ansible.module_utils.urls import open_url
from ansible.playbook.role.requirement import RoleRequirement
from ansible.galaxy.api import GalaxyAPI
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class GalaxyRole(object):
    # Represents a single Ansible Galaxy role: where it lives on disk, its
    # metadata, and how to fetch/install/remove it.

    SUPPORTED_SCMS = set(['git', 'hg'])
    META_MAIN = os.path.join('meta', 'main.yml')
    META_INSTALL = os.path.join('meta', '.galaxy_install_info')
    ROLE_DIRS = ('defaults', 'files', 'handlers', 'meta', 'tasks', 'templates', 'vars', 'tests')

    def __init__(self, galaxy, name, src=None, version=None, scm=None, path=None):
        # galaxy: the Galaxy context object (carries options and roles_paths).
        # name: role name; src defaults to name when not given.

        self._metadata = None
        self._install_info = None
        self._validate_certs = not galaxy.options.ignore_certs

        display.debug('Validate TLS certificates: %s' % self._validate_certs)

        self.options = galaxy.options
        self.galaxy = galaxy

        self.name = name
        self.version = version
        self.src = src or name
        self.scm = scm

        if path is not None:
            # An explicit path was supplied; append the role name if the
            # caller gave only a parent directory.
            if self.name not in path:
                path = os.path.join(path, self.name)
            self.path = path
        else:
            # Search the configured roles paths for an existing install.
            for role_path_dir in galaxy.roles_paths:
                role_path = os.path.join(role_path_dir, self.name)
                if os.path.exists(role_path):
                    self.path = role_path
                    break
            else:
                # use the first path by default
                self.path = os.path.join(galaxy.roles_paths[0], self.name)
            # create list of possible paths
            self.paths = [x for x in galaxy.roles_paths]
            self.paths = [os.path.join(x, self.name) for x in self.paths]
            # NOTE(review): self.paths is only defined on this branch; install()
            # references self.paths in its OSError fallback, which would raise
            # AttributeError when an explicit `path` was supplied -- confirm.

    def __repr__(self):
        """
        Returns "rolename (version)" if version is not null
        Returns "rolename" otherwise
        """
        if self.version:
            return "%s (%s)" % (self.name, self.version)
        else:
            return self.name

    def __eq__(self, other):
        # Roles compare by name only; version/src are ignored.
        return self.name == other.name

    @property
    def metadata(self):
        """
        Returns role metadata
        """
        # Lazily loaded from meta/main.yml and cached on first access.
        if self._metadata is None:
            meta_path = os.path.join(self.path, self.META_MAIN)
            if os.path.isfile(meta_path):
                try:
                    f = open(meta_path, 'r')
                    self._metadata = yaml.safe_load(f)
                # NOTE(review): bare except hides real errors; also, if open()
                # itself fails, `f` is unbound and the finally clause raises.
                except:
                    display.vvvvv("Unable to load metadata for %s" % self.name)
                    return False
                finally:
                    f.close()

        return self._metadata

    @property
    def install_info(self):
        """
        Returns role install info
        """
        # Lazily loaded from meta/.galaxy_install_info and cached.
        if self._install_info is None:
            info_path = os.path.join(self.path, self.META_INSTALL)
            if os.path.isfile(info_path):
                try:
                    f = open(info_path, 'r')
                    self._install_info = yaml.safe_load(f)
                # NOTE(review): same bare-except / unbound-f caveat as metadata.
                except:
                    display.vvvvv("Unable to load Galaxy install info for %s" % self.name)
                    return False
                finally:
                    f.close()

        return self._install_info

    def _write_galaxy_install_info(self):
        """
        Writes a YAML-formatted file to the role's meta/ directory
        (named .galaxy_install_info) which contains some information
        we can use later for commands like 'list' and 'info'.
        """
        info = dict(
            version=self.version,
            install_date=datetime.datetime.utcnow().strftime("%c"),
        )
        if not os.path.exists(os.path.join(self.path, 'meta')):
            os.makedirs(os.path.join(self.path, 'meta'))
        info_path = os.path.join(self.path, self.META_INSTALL)
        with open(info_path, 'w+') as f:
            try:
                self._install_info = yaml.safe_dump(info, f)
            except:
                return False

        return True

    def remove(self):
        """
        Removes the specified role from the roles path.
        There is a sanity check to make sure there's a meta/main.yml file at this
        path so the user doesn't blow away random directories.
        """
        if self.metadata:
            try:
                rmtree(self.path)
                return True
            except:
                pass

        return False

    def fetch(self, role_data):
        """
        Downloads the archived role from github to a temp location
        """
        # Returns the temp file path on success, False on failure.
        if role_data:

            # first grab the file and save it to a temp location
            if "github_user" in role_data and "github_repo" in role_data:
                archive_url = 'https://github.com/%s/%s/archive/%s.tar.gz' % (role_data["github_user"], role_data["github_repo"], self.version)
            else:
                archive_url = self.src

            display.display("- downloading role from %s" % archive_url)

            try:
                url_file = open_url(archive_url, validate_certs=self._validate_certs)
                temp_file = tempfile.NamedTemporaryFile(delete=False)
                # Stream the download to disk in chunks.
                data = url_file.read()
                while data:
                    temp_file.write(data)
                    data = url_file.read()
                temp_file.close()
                return temp_file.name
            except Exception as e:
                display.error("failed to download the file: %s" % str(e))

        return False

    def install(self):
        # the file is a tar, so open it that way and extract it
        # to the specified (or default) roles directory
        local_file = False

        if self.scm:
            # create tar file from scm url
            tmp_file = RoleRequirement.scm_archive_role(**self.spec)
        elif self.src:
            if os.path.isfile(self.src):
                # installing a local tar.gz
                local_file = True
                tmp_file = self.src
            elif '://' in self.src:
                role_data = self.src
                tmp_file = self.fetch(role_data)
            else:
                # Look the role up on the Galaxy server by name.
                api = GalaxyAPI(self.galaxy)
                role_data = api.lookup_role_by_name(self.src)
                if not role_data:
                    raise AnsibleError("- sorry, %s was not found on %s." % (self.src, api.api_server))

                if role_data.get('role_type') == 'CON' and not os.environ.get('ANSIBLE_CONTAINER'):
                    # Container Enabled, running outside of a container
                    display.warning("%s is a Container Enabled role and should only be installed using "
                                    "Ansible Container" % self.name)

                if role_data.get('role_type') == 'APP':
                    # Container Role
                    display.warning("%s is a Container App role and should only be installed using Ansible "
                                    "Container" % self.name)

                role_versions = api.fetch_role_related('versions', role_data['id'])
                if not self.version:
                    # convert the version names to LooseVersion objects
                    # and sort them to get the latest version. If there
                    # are no versions in the list, we'll grab the head
                    # of the master branch
                    if len(role_versions) > 0:
                        loose_versions = [LooseVersion(a.get('name', None)) for a in role_versions]
                        loose_versions.sort()
                        self.version = str(loose_versions[-1])
                    elif role_data.get('github_branch', None):
                        self.version = role_data['github_branch']
                    else:
                        self.version = 'master'
                elif self.version != 'master':
                    # Validate an explicitly requested version against the
                    # versions the server knows about.
                    if role_versions and str(self.version) not in [a.get('name', None) for a in role_versions]:
                        raise AnsibleError("- the specified version (%s) of %s was not found in the list of available versions (%s)." % (self.version,
                                                                                                                                        self.name,
                                                                                                                                        role_versions))

                tmp_file = self.fetch(role_data)

        else:
            raise AnsibleError("No valid role data found")

        if tmp_file:

            display.debug("installing from %s" % tmp_file)

            if not tarfile.is_tarfile(tmp_file):
                raise AnsibleError("the file downloaded was not a tar.gz")
            else:
                if tmp_file.endswith('.gz'):
                    role_tar_file = tarfile.open(tmp_file, "r:gz")
                else:
                    role_tar_file = tarfile.open(tmp_file, "r")
                # verify the role's meta file
                meta_file = None
                members = role_tar_file.getmembers()
                # next find the metadata file
                for member in members:
                    if self.META_MAIN in member.name:
                        # Look for parent of meta/main.yml
                        # Due to possibility of sub roles each containing meta/main.yml
                        # look for shortest length parent
                        meta_parent_dir = os.path.dirname(os.path.dirname(member.name))
                        if not meta_file:
                            archive_parent_dir = meta_parent_dir
                            meta_file = member
                        else:
                            if len(meta_parent_dir) < len(archive_parent_dir):
                                archive_parent_dir = meta_parent_dir
                                meta_file = member
                if not meta_file:
                    raise AnsibleError("this role does not appear to have a meta/main.yml file.")
                else:
                    try:
                        self._metadata = yaml.safe_load(role_tar_file.extractfile(meta_file))
                    except:
                        raise AnsibleError("this role does not appear to have a valid meta/main.yml file.")

                # we strip off any higher-level directories for all of the files contained within
                # the tar file here. The default is 'github_repo-target'. Gerrit instances, on the other
                # hand, does not have a parent directory at all.
                installed = False
                while not installed:
                    display.display("- extracting %s to %s" % (self.name, self.path))
                    try:
                        if os.path.exists(self.path):
                            if not os.path.isdir(self.path):
                                raise AnsibleError("the specified roles path exists and is not a directory.")
                            elif not getattr(self.options, "force", False):
                                raise AnsibleError("the specified role %s appears to already exist. Use --force to replace it." % self.name)
                            else:
                                # using --force, remove the old path
                                if not self.remove():
                                    raise AnsibleError("%s doesn't appear to contain a role.\n please remove this directory manually if you really "
                                                       "want to put the role here." % self.path)
                        else:
                            os.makedirs(self.path)

                        # now we do the actual extraction to the path
                        for member in members:
                            # we only extract files, and remove any relative path
                            # bits that might be in the file for security purposes
                            # and drop any containing directory, as mentioned above
                            if member.isreg() or member.issym():
                                parts = member.name.replace(archive_parent_dir, "", 1).split(os.sep)
                                final_parts = []
                                for part in parts:
                                    if part != '..' and '~' not in part and '$' not in part:
                                        final_parts.append(part)
                                member.name = os.path.join(*final_parts)
                                role_tar_file.extract(member, self.path)

                        # write out the install info file for later use
                        self._write_galaxy_install_info()
                        installed = True
                    except OSError as e:
                        # On a permission error (errno 13), fall through to the
                        # next configured roles path and retry the extraction.
                        error = True
                        # NOTE(review): `e[0]` raises TypeError on Python 3
                        # (OSError is not indexable) -- should be `e.errno`.
                        # Also `len(self.paths) >= current` looks like it should
                        # be `nextidx < len(self.paths)` to guard the index.
                        if e[0] == 13 and len(self.paths) > 1:
                            current = self.paths.index(self.path)
                            nextidx = current + 1
                            if len(self.paths) >= current:
                                self.path = self.paths[nextidx]
                                error = False
                        if error:
                            raise AnsibleError("Could not update files in %s: %s" % (self.path, str(e)))

                # return the parsed yaml metadata
                display.display("- %s was installed successfully" % str(self))
                if not local_file:
                    try:
                        os.unlink(tmp_file)
                    except (OSError, IOError) as e:
                        display.warning("Unable to remove tmp file (%s): %s" % (tmp_file, str(e)))
                return True

        return False

    @property
    def spec(self):
        """
        Returns role spec info
        {
            'scm': 'git',
            'src': 'http://git.example.com/repos/repo.git',
            'version': 'v1.0',
            'name': 'repo'
        }
        """
        return dict(scm=self.scm, src=self.src, version=self.version, name=self.name)
| gpl-3.0 |
Lektorium-LLC/edx-platform | common/djangoapps/track/tracker.py | 21 | 2372 | """
Module that tracks analytics events by sending them to different
configurable backends.
The backends can be configured using Django settings as the example
below::
TRACKING_BACKENDS = {
'tracker_name': {
'ENGINE': 'class.name.for.backend',
'OPTIONS': {
'host': ... ,
'port': ... ,
...
}
}
}
"""
import inspect
from importlib import import_module
from django.conf import settings
from dogapi import dog_stats_api
from track.backends import BaseBackend
# Public API of this module.
__all__ = ['send']

# Registry of initialized tracker backends, keyed by configured name.
# Populated by _initialize_backends_from_django_settings() at import time.
backends = {}
def _initialize_backends_from_django_settings():
    """
    Initialize the event tracking backends according to the
    configuration in django settings
    """
    backends.clear()

    config = getattr(settings, 'TRACKING_BACKENDS', {})
    for backend_name, backend_conf in config.iteritems():
        # An empty/falsy value disables that default tracker backend.
        if not backend_conf:
            continue
        backends[backend_name] = _instantiate_backend_from_name(
            backend_conf['ENGINE'],
            backend_conf.get('OPTIONS', {}),
        )
def _instantiate_backend_from_name(name, options):
"""
Instantiate an event tracker backend from the full module path to
the backend class. Useful when setting backends from configuration
files.
"""
# Parse backend name
try:
parts = name.split('.')
module_name = '.'.join(parts[:-1])
class_name = parts[-1]
except IndexError:
raise ValueError('Invalid event track backend %s' % name)
# Get and verify the backend class
try:
module = import_module(module_name)
cls = getattr(module, class_name)
if not inspect.isclass(cls) or not issubclass(cls, BaseBackend):
raise TypeError
except (ValueError, AttributeError, TypeError, ImportError):
raise ValueError('Cannot find event track backend %s' % name)
backend = cls(**options)
return backend
@dog_stats_api.timed('track.send')
def send(event):
    """
    Deliver an event object to every initialized backend, timing each
    backend's send individually.
    """
    dog_stats_api.increment('track.send.count')

    for backend_name, backend in backends.iteritems():
        with dog_stats_api.timer('track.send.backend.{0}'.format(backend_name)):
            backend.send(event)


# Build the backend registry as soon as this module is imported.
_initialize_backends_from_django_settings()
| agpl-3.0 |
MebiusHKU/flask-web | flask/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/cymysql.py | 80 | 2349 | # mysql/cymysql.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: mysql+cymysql
:name: CyMySQL
:dbapi: cymysql
:connectstring: mysql+cymysql://<username>:<password>@<host>/<dbname>\
[?<options>]
:url: https://github.com/nakagami/CyMySQL
"""
import re
from .mysqldb import MySQLDialect_mysqldb
from .base import (BIT, MySQLDialect)
from ... import util
class _cymysqlBIT(BIT):
    def result_processor(self, dialect, coltype):
        """Convert a MySQL's 64 bit, variable length binary string to a long.
        """
        def process(value):
            # NULL columns pass through unchanged.
            if value is None:
                return value
            # Fold the big-endian byte string into a single integer.
            result = 0
            for byte in util.iterbytes(value):
                result = (result << 8) | byte
            return result
        return process
class MySQLDialect_cymysql(MySQLDialect_mysqldb):
    driver = 'cymysql'

    description_encoding = None
    supports_sane_rowcount = True
    supports_sane_multi_rowcount = False
    supports_unicode_statements = True

    colspecs = util.update_copy(
        MySQLDialect.colspecs,
        {
            BIT: _cymysqlBIT,
        }
    )

    @classmethod
    def dbapi(cls):
        return __import__('cymysql')

    def _get_server_version_info(self, connection):
        # Version strings look like "5.5.41-0ubuntu0.14.04.1": split on
        # dots and dashes, keeping numeric components as ints and the
        # rest as strings.
        raw_version = connection.connection.server_version
        parts = []
        for token in re.split(r'[.\-]', raw_version):
            try:
                parts.append(int(token))
            except ValueError:
                parts.append(token)
        return tuple(parts)

    def _detect_charset(self, connection):
        return connection.connection.charset

    def _extract_error_code(self, exception):
        return exception.errno

    def is_disconnect(self, e, connection, cursor):
        if isinstance(e, self.dbapi.OperationalError):
            return self._extract_error_code(e) in (2006, 2013, 2014, 2045, 2055)
        if isinstance(e, self.dbapi.InterfaceError):
            # if underlying connection is closed,
            # this is the error you get
            return True
        return False


dialect = MySQLDialect_cymysql
| bsd-3-clause |
viniciusgama/blog_gae | django/db/backends/__init__.py | 77 | 31617 | import decimal
try:
import thread
except ImportError:
import dummy_thread as thread
from threading import local
from django.conf import settings
from django.db import DEFAULT_DB_ALIAS
from django.db.backends import util
from django.db.transaction import TransactionManagementError
from django.utils import datetime_safe
from django.utils.importlib import import_module
class BaseDatabaseWrapper(local):
    """
    Represents a database connection.
    """
    # Subclasses assign a BaseDatabaseOperations instance here.
    ops = None
    vendor = 'unknown'

    def __init__(self, settings_dict, alias=DEFAULT_DB_ALIAS):
        # `settings_dict` should be a dictionary containing keys such as
        # NAME, USER, etc. It's called `settings_dict` instead of `settings`
        # to disambiguate it from Django settings modules.
        self.connection = None
        self.queries = []
        self.settings_dict = settings_dict
        self.alias = alias
        # None means "defer to settings.DEBUG" when choosing a cursor type.
        self.use_debug_cursor = None

        # Transaction related attributes
        # transaction_state is a stack of managed/auto flags; savepoint_state
        # counts savepoints; _dirty tracks pending changes (None = not under
        # transaction management).
        self.transaction_state = []
        self.savepoint_state = 0
        self._dirty = None

    def __eq__(self, other):
        # Connections compare by alias only.
        # NOTE(review): no __hash__ is defined; this relies on Python 2
        # retaining the default hash when __eq__ is overridden.
        return self.alias == other.alias

    def __ne__(self, other):
        return not self == other

    def _commit(self):
        # Low-level commit on the underlying DB-API connection.
        if self.connection is not None:
            return self.connection.commit()

    def _rollback(self):
        # Low-level rollback on the underlying DB-API connection.
        if self.connection is not None:
            return self.connection.rollback()

    def _enter_transaction_management(self, managed):
        """
        A hook for backend-specific changes required when entering manual
        transaction handling.
        """
        pass

    def _leave_transaction_management(self, managed):
        """
        A hook for backend-specific changes required when leaving manual
        transaction handling. Will usually be implemented only when
        _enter_transaction_management() is also required.
        """
        pass

    def _savepoint(self, sid):
        if not self.features.uses_savepoints:
            return
        self.cursor().execute(self.ops.savepoint_create_sql(sid))

    def _savepoint_rollback(self, sid):
        if not self.features.uses_savepoints:
            return
        self.cursor().execute(self.ops.savepoint_rollback_sql(sid))

    def _savepoint_commit(self, sid):
        if not self.features.uses_savepoints:
            return
        self.cursor().execute(self.ops.savepoint_commit_sql(sid))

    def enter_transaction_management(self, managed=True):
        """
        Enters transaction management for a running thread. It must be balanced with
        the appropriate leave_transaction_management call, since the actual state is
        managed as a stack.

        The state and dirty flag are carried over from the surrounding block or
        from the settings, if there is no surrounding block (dirty is always false
        when no current block is running).
        """
        if self.transaction_state:
            self.transaction_state.append(self.transaction_state[-1])
        else:
            self.transaction_state.append(settings.TRANSACTIONS_MANAGED)
        if self._dirty is None:
            self._dirty = False
        self._enter_transaction_management(managed)

    def leave_transaction_management(self):
        """
        Leaves transaction management for a running thread. A dirty flag is carried
        over to the surrounding block, as a commit will commit all changes, even
        those from outside. (Commits are on connection level.)
        """
        self._leave_transaction_management(self.is_managed())
        if self.transaction_state:
            del self.transaction_state[-1]
        else:
            raise TransactionManagementError("This code isn't under transaction "
                "management")
        if self._dirty:
            self.rollback()
            raise TransactionManagementError("Transaction managed block ended with "
                "pending COMMIT/ROLLBACK")
        self._dirty = False

    def is_dirty(self):
        """
        Returns True if the current transaction requires a commit for changes to
        happen.
        """
        return self._dirty

    def set_dirty(self):
        """
        Sets a dirty flag for the current thread and code streak. This can be used
        to decide in a managed block of code to decide whether there are open
        changes waiting for commit.
        """
        if self._dirty is not None:
            self._dirty = True
        else:
            raise TransactionManagementError("This code isn't under transaction "
                "management")

    def set_clean(self):
        """
        Resets a dirty flag for the current thread and code streak. This can be used
        to decide in a managed block of code to decide whether a commit or rollback
        should happen.
        """
        if self._dirty is not None:
            self._dirty = False
        else:
            raise TransactionManagementError("This code isn't under transaction management")
        self.clean_savepoints()

    def clean_savepoints(self):
        # Reset the savepoint counter; previous savepoint ids become invalid.
        self.savepoint_state = 0

    def is_managed(self):
        """
        Checks whether the transaction manager is in manual or in auto state.
        """
        if self.transaction_state:
            return self.transaction_state[-1]
        return settings.TRANSACTIONS_MANAGED

    def managed(self, flag=True):
        """
        Puts the transaction manager into a manual state: managed transactions have
        to be committed explicitly by the user. If you switch off transaction
        management and there is a pending commit/rollback, the data will be
        commited.
        """
        top = self.transaction_state
        if top:
            top[-1] = flag
            # Leaving managed mode with pending changes commits them.
            if not flag and self.is_dirty():
                self._commit()
                self.set_clean()
        else:
            raise TransactionManagementError("This code isn't under transaction "
                "management")

    def commit_unless_managed(self):
        """
        Commits changes if the system is not in managed transaction mode.
        """
        if not self.is_managed():
            self._commit()
            self.clean_savepoints()
        else:
            self.set_dirty()

    def rollback_unless_managed(self):
        """
        Rolls back changes if the system is not in managed transaction mode.
        """
        if not self.is_managed():
            self._rollback()
        else:
            self.set_dirty()

    def commit(self):
        """
        Does the commit itself and resets the dirty flag.
        """
        self._commit()
        self.set_clean()

    def rollback(self):
        """
        This function does the rollback itself and resets the dirty flag.
        """
        self._rollback()
        self.set_clean()

    def savepoint(self):
        """
        Creates a savepoint (if supported and required by the backend) inside the
        current transaction. Returns an identifier for the savepoint that will be
        used for the subsequent rollback or commit.
        """
        thread_ident = thread.get_ident()

        self.savepoint_state += 1

        # Build a per-thread unique savepoint id ('-' stripped because
        # thread idents can be negative and '-' is invalid in SQL names).
        tid = str(thread_ident).replace('-', '')
        sid = "s%s_x%d" % (tid, self.savepoint_state)
        self._savepoint(sid)
        return sid

    def savepoint_rollback(self, sid):
        """
        Rolls back the most recent savepoint (if one exists). Does nothing if
        savepoints are not supported.
        """
        if self.savepoint_state:
            self._savepoint_rollback(sid)

    def savepoint_commit(self, sid):
        """
        Commits the most recent savepoint (if one exists). Does nothing if
        savepoints are not supported.
        """
        if self.savepoint_state:
            self._savepoint_commit(sid)

    def close(self):
        # Close and forget the underlying DB-API connection, if any.
        if self.connection is not None:
            self.connection.close()
            self.connection = None

    def cursor(self):
        # Wrap the raw cursor; a debug cursor (which records queries) is
        # used when explicitly requested or when settings.DEBUG is on.
        if (self.use_debug_cursor or
            (self.use_debug_cursor is None and settings.DEBUG)):
            cursor = self.make_debug_cursor(self._cursor())
        else:
            cursor = util.CursorWrapper(self._cursor(), self)
        return cursor

    def make_debug_cursor(self, cursor):
        # Debug cursors log executed queries into self.queries.
        return util.CursorDebugWrapper(cursor, self)
class BaseDatabaseFeatures(object):
allows_group_by_pk = False
# True if django.db.backend.utils.typecast_timestamp is used on values
# returned from dates() calls.
needs_datetime_string_cast = True
empty_fetchmany_value = []
update_can_self_select = True
# Does the backend distinguish between '' and None?
interprets_empty_strings_as_nulls = False
# Does the backend allow inserting duplicate rows when a unique_together
# constraint exists, but one of the unique_together columns is NULL?
ignores_nulls_in_unique_constraints = True
can_use_chunked_reads = True
can_return_id_from_insert = False
uses_autocommit = False
uses_savepoints = False
# If True, don't use integer foreign keys referring to, e.g., positive
# integer primary keys.
related_fields_match_type = False
allow_sliced_subqueries = True
supports_joins = True
distinguishes_insert_from_update = True
supports_deleting_related_objects = True
supports_select_related = True
# Does the default test database allow multiple connections?
# Usually an indication that the test database is in-memory
test_db_allows_multiple_connections = True
# Can an object be saved without an explicit primary key?
supports_unspecified_pk = False
# Can a fixture contain forward references? i.e., are
# FK constraints checked at the end of transaction, or
# at the end of each save operation?
supports_forward_references = True
# Does a dirty transaction need to be rolled back
# before the cursor can be used again?
requires_rollback_on_dirty_transaction = False
# Does the backend allow very long model names without error?
supports_long_model_names = True
# Is there a REAL datatype in addition to floats/doubles?
has_real_datatype = False
supports_subqueries_in_group_by = True
supports_bitwise_or = True
# Do time/datetime fields have microsecond precision?
supports_microsecond_precision = True
# Does the __regex lookup support backreferencing and grouping?
supports_regex_backreferencing = True
# Can date/datetime lookups be performed using a string?
supports_date_lookup_using_string = True
# Can datetimes with timezones be used?
supports_timezones = True
# When performing a GROUP BY, is an ORDER BY NULL required
# to remove any ordering?
requires_explicit_null_ordering_when_grouping = False
# Is there a 1000 item limit on query parameters?
supports_1000_query_parameters = True
# Can an object have a primary key of 0? MySQL says No.
allows_primary_key_0 = True
# Do we need to NULL a ForeignKey out, or can the constraint check be
# deferred
can_defer_constraint_checks = False
# date_interval_sql can properly handle mixed Date/DateTime fields and timedeltas
supports_mixed_date_datetime_comparisons = True
# Features that need to be confirmed at runtime
# Cache whether the confirmation has been performed.
_confirmed = False
supports_transactions = None
supports_stddev = None
can_introspect_foreign_keys = None
def __init__(self, connection):
    # connection is the BaseDatabaseWrapper whose features are described.
    self.connection = connection
def confirm(self):
    "Perform manual checks of any database features that might vary between installs"
    # Results are cached on the instance; _confirmed marks that the
    # (potentially expensive) runtime probes have already been run.
    self._confirmed = True
    self.supports_transactions = self._supports_transactions()
    self.supports_stddev = self._supports_stddev()
    self.can_introspect_foreign_keys = self._can_introspect_foreign_keys()
def _supports_transactions(self):
    "Confirm support for transactions"
    # Probe the live connection: create a table, commit, insert one row,
    # then roll back.  If the row is gone afterwards, the backend honours
    # transactions.  The scratch table is dropped either way.
    cursor = self.connection.cursor()
    cursor.execute('CREATE TABLE ROLLBACK_TEST (X INT)')
    self.connection._commit()
    cursor.execute('INSERT INTO ROLLBACK_TEST (X) VALUES (8)')
    self.connection._rollback()
    cursor.execute('SELECT COUNT(X) FROM ROLLBACK_TEST')
    count, = cursor.fetchone()
    cursor.execute('DROP TABLE ROLLBACK_TEST')
    self.connection._commit()
    return count == 0
def _supports_stddev(self):
"Confirm support for STDDEV and related stats functions"
class StdDevPop(object):
sql_function = 'STDDEV_POP'
try:
self.connection.ops.check_aggregate_support(StdDevPop())
except NotImplementedError:
self.supports_stddev = False
def _can_introspect_foreign_keys(self):
"Confirm support for introspected foreign keys"
# Every database can do this reliably, except MySQL,
# which can't do it for MyISAM tables
return True
class BaseDatabaseOperations(object):
    """
    This class encapsulates all backend-specific differences, such as the way
    a backend performs ordering or calculates the ID of a recently-inserted
    row.
    """
    # Dotted path of the module containing the SQLCompiler classes;
    # imported lazily by compiler() below.
    compiler_module = "django.db.models.sql.compiler"

    def __init__(self):
        # Cache slot for the lazily-imported compiler module.
        self._cache = None

    def autoinc_sql(self, table, column):
        """
        Returns any SQL needed to support auto-incrementing primary keys, or
        None if no SQL is necessary.
        This SQL is executed when a table is created.
        """
        return None

    def date_extract_sql(self, lookup_type, field_name):
        """
        Given a lookup_type of 'year', 'month' or 'day', returns the SQL that
        extracts a value from the given date field field_name.
        """
        raise NotImplementedError()

    def date_interval_sql(self, sql, connector, timedelta):
        """
        Implements the date interval functionality for expressions
        """
        raise NotImplementedError()

    def date_trunc_sql(self, lookup_type, field_name):
        """
        Given a lookup_type of 'year', 'month' or 'day', returns the SQL that
        truncates the given date field field_name to a DATE object with only
        the given specificity.
        """
        raise NotImplementedError()

    def datetime_cast_sql(self):
        """
        Returns the SQL necessary to cast a datetime value so that it will be
        retrieved as a Python datetime object instead of a string.
        This SQL should include a '%s' in place of the field's name.
        """
        return "%s"

    def deferrable_sql(self):
        """
        Returns the SQL necessary to make a constraint "initially deferred"
        during a CREATE TABLE statement.
        """
        return ''

    def drop_foreignkey_sql(self):
        """
        Returns the SQL command that drops a foreign key.
        """
        return "DROP CONSTRAINT"

    def drop_sequence_sql(self, table):
        """
        Returns any SQL necessary to drop the sequence for the given table.
        Returns None if no SQL is necessary.
        """
        return None

    def fetch_returned_insert_id(self, cursor):
        """
        Given a cursor object that has just performed an INSERT...RETURNING
        statement into a table that has an auto-incrementing ID, returns the
        newly created ID.
        """
        return cursor.fetchone()[0]

    def field_cast_sql(self, db_type):
        """
        Given a column type (e.g. 'BLOB', 'VARCHAR'), returns the SQL necessary
        to cast it before using it in a WHERE statement. Note that the
        resulting string should contain a '%s' placeholder for the column being
        searched against.
        """
        return '%s'

    def force_no_ordering(self):
        """
        Returns a list used in the "ORDER BY" clause to force no ordering at
        all. Returning an empty list means that nothing will be included in the
        ordering.
        """
        return []

    def fulltext_search_sql(self, field_name):
        """
        Returns the SQL WHERE clause to use in order to perform a full-text
        search of the given field_name. Note that the resulting string should
        contain a '%s' placeholder for the value being searched against.
        """
        raise NotImplementedError('Full-text search is not implemented for this database backend')

    def last_executed_query(self, cursor, sql, params):
        """
        Returns a string of the query last executed by the given cursor, with
        placeholders replaced with actual values.
        `sql` is the raw query containing placeholders, and `params` is the
        sequence of parameters. These are used by default, but this method
        exists for database backends to provide a better implementation
        according to their own quoting schemes.
        """
        from django.utils.encoding import smart_unicode, force_unicode

        # Convert params to contain Unicode values.
        to_unicode = lambda s: force_unicode(s, strings_only=True, errors='replace')
        if isinstance(params, (list, tuple)):
            u_params = tuple([to_unicode(val) for val in params])
        else:
            # Mapping-style params: convert both keys and values.
            u_params = dict([(to_unicode(k), to_unicode(v)) for k, v in params.items()])

        return smart_unicode(sql) % u_params

    def last_insert_id(self, cursor, table_name, pk_name):
        """
        Given a cursor object that has just performed an INSERT statement into
        a table that has an auto-incrementing ID, returns the newly created ID.
        This method also receives the table name and the name of the primary-key
        column.
        """
        return cursor.lastrowid

    def lookup_cast(self, lookup_type):
        """
        Returns the string to use in a query when performing lookups
        ("contains", "like", etc). The resulting string should contain a '%s'
        placeholder for the column being searched against.
        """
        return "%s"

    def max_in_list_size(self):
        """
        Returns the maximum number of items that can be passed in a single 'IN'
        list condition, or None if the backend does not impose a limit.
        """
        return None

    def max_name_length(self):
        """
        Returns the maximum length of table and column names, or None if there
        is no limit.
        """
        return None

    def no_limit_value(self):
        """
        Returns the value to use for the LIMIT when we are wanting "LIMIT
        infinity". Returns None if the limit clause can be omitted in this case.
        """
        raise NotImplementedError

    def pk_default_value(self):
        """
        Returns the value to use during an INSERT statement to specify that
        the field should use its default value.
        """
        return 'DEFAULT'

    def process_clob(self, value):
        """
        Returns the value of a CLOB column, for backends that return a locator
        object that requires additional processing.
        """
        return value

    def return_insert_id(self):
        """
        For backends that support returning the last insert ID as part
        of an insert query, this method returns the SQL and params to
        append to the INSERT query. The returned fragment should
        contain a format string to hold the appropriate column.
        """
        pass

    def compiler(self, compiler_name):
        """
        Returns the SQLCompiler class corresponding to the given name,
        in the namespace corresponding to the `compiler_module` attribute
        on this backend.
        """
        # Import the compiler module once and memoize it on the instance.
        if self._cache is None:
            self._cache = import_module(self.compiler_module)
        return getattr(self._cache, compiler_name)

    def quote_name(self, name):
        """
        Returns a quoted version of the given table, index or column name. Does
        not quote the given name if it's already been quoted.
        """
        raise NotImplementedError()

    def random_function_sql(self):
        """
        Returns a SQL expression that returns a random value.
        """
        return 'RANDOM()'

    def regex_lookup(self, lookup_type):
        """
        Returns the string to use in a query when performing regular expression
        lookups (using "regex" or "iregex"). The resulting string should
        contain a '%s' placeholder for the column being searched against.
        If the feature is not supported (or part of it is not supported), a
        NotImplementedError exception can be raised.
        """
        raise NotImplementedError

    def savepoint_create_sql(self, sid):
        """
        Returns the SQL for starting a new savepoint. Only required if the
        "uses_savepoints" feature is True. The "sid" parameter is a string
        for the savepoint id.
        """
        raise NotImplementedError

    def savepoint_commit_sql(self, sid):
        """
        Returns the SQL for committing the given savepoint.
        """
        raise NotImplementedError

    def savepoint_rollback_sql(self, sid):
        """
        Returns the SQL for rolling back the given savepoint.
        """
        raise NotImplementedError

    def sql_flush(self, style, tables, sequences):
        """
        Returns a list of SQL statements required to remove all data from
        the given database tables (without actually removing the tables
        themselves).
        The `style` argument is a Style object as returned by either
        color_style() or no_style() in django.core.management.color.
        """
        raise NotImplementedError()

    def sequence_reset_sql(self, style, model_list):
        """
        Returns a list of the SQL statements required to reset sequences for
        the given models.
        The `style` argument is a Style object as returned by either
        color_style() or no_style() in django.core.management.color.
        """
        return []  # No sequence reset required by default.

    def start_transaction_sql(self):
        """
        Returns the SQL statement required to start a transaction.
        """
        return "BEGIN;"

    def end_transaction_sql(self, success=True):
        # COMMIT on success, ROLLBACK otherwise.
        if not success:
            return "ROLLBACK;"
        return "COMMIT;"

    def tablespace_sql(self, tablespace, inline=False):
        """
        Returns the SQL that will be appended to tables or rows to define
        a tablespace. Returns '' if the backend doesn't use tablespaces.
        """
        return ''

    def prep_for_like_query(self, x):
        """Prepares a value for use in a LIKE query."""
        from django.utils.encoding import smart_unicode
        # Escape backslash first, then the LIKE wildcards % and _.
        return smart_unicode(x).replace("\\", "\\\\").replace("%", "\%").replace("_", "\_")

    # Same as prep_for_like_query(), but called for "iexact" matches, which
    # need not necessarily be implemented using "LIKE" in the backend.
    prep_for_iexact_query = prep_for_like_query

    def value_to_db_auto(self, value):
        """
        Transform a value to an object compatible with the auto field required
        by the backend driver for auto columns.
        """
        if value is None:
            return None
        return int(value)

    def value_to_db_date(self, value):
        """
        Transform a date value to an object compatible with what is expected
        by the backend driver for date columns.
        """
        if value is None:
            return None
        # datetime_safe handles dates before 1900 that strftime rejects.
        return datetime_safe.new_date(value).strftime('%Y-%m-%d')

    def value_to_db_datetime(self, value):
        """
        Transform a datetime value to an object compatible with what is expected
        by the backend driver for datetime columns.
        """
        if value is None:
            return None
        return unicode(value)

    def value_to_db_time(self, value):
        """
        Transform a datetime value to an object compatible with what is expected
        by the backend driver for time columns.
        """
        if value is None:
            return None
        return unicode(value)

    def value_to_db_decimal(self, value, max_digits, decimal_places):
        """
        Transform a decimal.Decimal value to an object compatible with what is
        expected by the backend driver for decimal (numeric) columns.
        """
        if value is None:
            return None
        return util.format_number(value, max_digits, decimal_places)

    def year_lookup_bounds(self, value):
        """
        Returns a two-elements list with the lower and upper bound to be used
        with a BETWEEN operator to query a field value using a year lookup
        `value` is an int, containing the looked-up year.
        """
        first = '%s-01-01 00:00:00'
        second = '%s-12-31 23:59:59.999999'
        return [first % value, second % value]

    def year_lookup_bounds_for_date_field(self, value):
        """
        Returns a two-elements list with the lower and upper bound to be used
        with a BETWEEN operator to query a DateField value using a year lookup
        `value` is an int, containing the looked-up year.
        By default, it just calls `self.year_lookup_bounds`. Some backends need
        this hook because on their DB date fields can't be compared to values
        which include a time part.
        """
        return self.year_lookup_bounds(value)

    def convert_values(self, value, field):
        """Coerce the value returned by the database backend into a consistent type that
        is compatible with the field type.
        """
        internal_type = field.get_internal_type()
        if internal_type == 'DecimalField':
            return value
        elif internal_type and internal_type.endswith('IntegerField') or internal_type == 'AutoField':
            return int(value)
        elif internal_type in ('DateField', 'DateTimeField', 'TimeField'):
            return value
        # No field, or the field isn't known to be a decimal or integer
        # Default to a float
        return float(value)

    def check_aggregate_support(self, aggregate_func):
        """Check that the backend supports the provided aggregate
        This is used on specific backends to rule out known aggregates
        that are known to have faulty implementations. If the named
        aggregate function has a known problem, the backend should
        raise NotImplemented.
        """
        pass

    def combine_expression(self, connector, sub_expressions):
        """Combine a list of subexpressions into a single expression, using
        the provided connecting operator. This is required because operators
        can vary between backends (e.g., Oracle with %% and &) and between
        subexpression types (e.g., date expressions)
        """
        conn = ' %s ' % connector
        return conn.join(sub_expressions)
class BaseDatabaseIntrospection(object):
    """
    This class encapsulates all backend-specific introspection utilities
    """
    # Maps backend column types to Django field class names; filled in by
    # each backend's subclass.
    data_types_reverse = {}

    def __init__(self, connection):
        # connection is the BaseDatabaseWrapper being introspected.
        self.connection = connection

    def get_field_type(self, data_type, description):
        """Hook for a database backend to use the cursor description to
        match a Django field type to a database column.
        For Oracle, the column data_type on its own is insufficient to
        distinguish between a FloatField and IntegerField, for example."""
        return self.data_types_reverse[data_type]

    def table_name_converter(self, name):
        """Apply a conversion to the name for the purposes of comparison.
        The default table name converter is for case sensitive comparison.
        """
        return name

    def table_names(self):
        "Returns a list of names of all tables that exist in the database."
        # get_table_list() is provided by the backend-specific subclass.
        cursor = self.connection.cursor()
        return self.get_table_list(cursor)

    def django_table_names(self, only_existing=False):
        """
        Returns a list of all table names that have associated Django models and
        are in INSTALLED_APPS.
        If only_existing is True, the resulting list will only include the tables
        that actually exist in the database.
        """
        from django.db import models, router
        tables = set()
        for app in models.get_apps():
            for model in models.get_models(app):
                if not model._meta.managed:
                    continue
                if not router.allow_syncdb(self.connection.alias, model):
                    continue
                tables.add(model._meta.db_table)
                # Include auto-created m2m join tables as well.
                tables.update([f.m2m_db_table() for f in model._meta.local_many_to_many])
        if only_existing:
            existing_tables = self.table_names()
            tables = [
                t
                for t in tables
                if self.table_name_converter(t) in existing_tables
            ]
        return tables

    def installed_models(self, tables):
        "Returns a set of all models represented by the provided list of table names."
        from django.db import models, router
        all_models = []
        for app in models.get_apps():
            for model in models.get_models(app):
                if router.allow_syncdb(self.connection.alias, model):
                    all_models.append(model)
        tables = map(self.table_name_converter, tables)
        return set([
            m for m in all_models
            if self.table_name_converter(m._meta.db_table) in tables
        ])

    def sequence_list(self):
        "Returns a list of information about all DB sequences for all models in all apps."
        from django.db import models, router

        apps = models.get_apps()
        sequence_list = []

        for app in apps:
            for model in models.get_models(app):
                if not model._meta.managed:
                    continue
                if not router.allow_syncdb(self.connection.alias, model):
                    continue
                for f in model._meta.local_fields:
                    if isinstance(f, models.AutoField):
                        sequence_list.append({'table': model._meta.db_table, 'column': f.column})
                        break  # Only one AutoField is allowed per model, so don't bother continuing.

                for f in model._meta.local_many_to_many:
                    # If this is an m2m using an intermediate table,
                    # we don't need to reset the sequence.
                    if f.rel.through is None:
                        sequence_list.append({'table': f.m2m_db_table(), 'column': None})

        return sequence_list
class BaseDatabaseClient(object):
    """
    This class encapsulates all backend-specific methods for opening a
    client shell.
    """
    # This should be a string representing the name of the executable
    # (e.g., "psql"). Subclasses must override this.
    executable_name = None

    def __init__(self, connection):
        # connection is an instance of BaseDatabaseWrapper.
        self.connection = connection

    def runshell(self):
        # Launch the backend's interactive shell; subclasses must implement.
        raise NotImplementedError()
class BaseDatabaseValidation(object):
    """
    This class encapsulates all backend-specific model validation.
    """
    def __init__(self, connection):
        # connection is the BaseDatabaseWrapper being validated against.
        self.connection = connection

    def validate_field(self, errors, opts, f):
        "By default, there is no backend-specific validation"
        pass
| bsd-3-clause |
KetsuN/kafka-python | example.py | 29 | 1092 | #!/usr/bin/env python
import threading, logging, time
from kafka.client import KafkaClient
from kafka.consumer import SimpleConsumer
from kafka.producer import SimpleProducer
class Producer(threading.Thread):
    """Daemon thread that publishes two test messages to 'my-topic' every second."""
    daemon = True  # don't block interpreter exit while the loop runs forever

    def run(self):
        client = KafkaClient("localhost:9092")
        producer = SimpleProducer(client)

        while True:
            producer.send_messages('my-topic', "test")
            producer.send_messages('my-topic', "\xc2Hola, mundo!")
            time.sleep(1)
class Consumer(threading.Thread):
    """Daemon thread that prints every message read from 'my-topic'."""
    daemon = True

    def run(self):
        client = KafkaClient("localhost:9092")
        consumer = SimpleConsumer(client, "test-group", "my-topic")

        # SimpleConsumer iteration blocks until messages arrive.
        for message in consumer:
            print(message)
def main():
    """Start one producer and one consumer thread, then let them run for 5s."""
    workers = [
        Producer(),
        Consumer()
    ]
    for worker in workers:
        worker.start()

    # Both threads are daemons, so returning here ends the demo.
    time.sleep(5)
if __name__ == "__main__":
    # Fix: %(msecs)03d is the documented way to format the LogRecord's
    # millisecond field (a float); the original %(msecs)s printed the raw
    # float, producing timestamps like "12:00:00.123.45678".
    logging.basicConfig(
        format='%(asctime)s.%(msecs)03d:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
        level=logging.DEBUG
    )
    main()
| apache-2.0 |
effigies/mne-python | examples/realtime/rt_feedback_server.py | 2 | 4956 | """
==============================================
Real-time feedback for decoding :: Server Side
==============================================
This example demonstrates how to setup a real-time feedback
mechanism using StimServer and StimClient.
The idea here is to display future stimuli for the class which
is predicted less accurately. This allows on-demand adaptation
of the stimuli depending on the needs of the classifier.
To run this example, open ipython in two separate terminals.
In the first, run rt_feedback_server.py and then wait for the
message
RtServer: Start
Once that appears, run rt_feedback_client.py in the other terminal
and the feedback script should start.
All brain responses are simulated from a fiff file to make it easy
to test. However, it should be possible to adapt this script
for a real experiment.
"""
print(__doc__)
# Author: Mainak Jas <mainak@neuro.hut.fi>
#
# License: BSD (3-clause)
import time
import mne
import numpy as np
import matplotlib.pyplot as plt
from mne.datasets import sample
from mne.realtime import StimServer
from mne.realtime import MockRtClient
from mne.decoding import ConcatenateChannels, FilterEstimator
from sklearn import preprocessing
from sklearn.svm import SVC
from sklearn.pipeline import Pipeline
from sklearn.cross_validation import train_test_split
from sklearn.metrics import confusion_matrix
# Load fiff file to simulate data
data_path = sample.data_path()
raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif'
raw = mne.io.Raw(raw_fname, preload=True)
# Instantiating stimulation server
# The with statement is necessary to ensure a clean exit
with StimServer('localhost', port=4218) as stim_server:

    # The channels to be used while decoding
    picks = mne.pick_types(raw.info, meg='grad', eeg=False, eog=True,
                           stim=True, exclude=raw.info['bads'])

    rt_client = MockRtClient(raw)

    # Constructing the pipeline for classification
    filt = FilterEstimator(raw.info, 1, 40)
    scaler = preprocessing.StandardScaler()
    concatenator = ConcatenateChannels()
    clf = SVC(C=1, kernel='linear')

    concat_classifier = Pipeline([('filter', filt), ('concat', concatenator),
                                  ('scaler', scaler), ('svm', clf)])

    stim_server.start(verbose=True)

    # Just some initially decided events to be simulated
    # Rest will decided on the fly
    ev_list = [4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4]

    score_c1, score_c2, score_x = [], [], []

    for ii in range(50):

        # Tell the stim_client about the next stimuli
        stim_server.add_trigger(ev_list[ii])

        # Collecting data
        if ii == 0:
            X = rt_client.get_event_data(event_id=ev_list[ii], tmin=-0.2,
                                         tmax=0.5, picks=picks,
                                         stim_channel='STI 014')[None, ...]
            y = ev_list[ii]
        else:
            X_temp = rt_client.get_event_data(event_id=ev_list[ii], tmin=-0.2,
                                              tmax=0.5, picks=picks,
                                              stim_channel='STI 014')
            X_temp = X_temp[np.newaxis, ...]

            X = np.concatenate((X, X_temp), axis=0)

            time.sleep(1)  # simulating the isi
            y = np.append(y, ev_list[ii])

        # Start decoding after collecting sufficient data
        if ii >= 10:
            # Now start doing rtfeedback
            X_train, X_test, y_train, y_test = train_test_split(X, y,
                                                                test_size=0.2,
                                                                random_state=7)

            y_pred = concat_classifier.fit(X_train, y_train).predict(X_test)

            cm = confusion_matrix(y_test, y_pred)

            # NOTE(review): builtin sum(cm, 1) sums the *rows* element-wise
            # with a start value of 1 (i.e. column totals + 1); presumably
            # np.sum(cm, axis=1) (per-class row totals) was intended -- confirm
            # before changing, as it alters the reported percentages.
            score_c1.append(float(cm[0, 0]) / sum(cm, 1)[0] * 100)
            score_c2.append(float(cm[1, 1]) / sum(cm, 1)[1] * 100)

            # do something if one class is decoded better than the other
            if score_c1[-1] < score_c2[-1]:
                print("We decoded class RV better than class LV")
                ev_list.append(3)  # adding more LV to future simulated data
            else:
                print("We decoded class LV better than class RV")
                ev_list.append(4)  # adding more RV to future simulated data

            # Clear the figure
            plt.clf()

            # The x-axis for the plot
            score_x.append(ii)

            # Now plot the accuracy
            plt.plot(score_x[-5:], score_c1[-5:])
            # NOTE(review): plt.hold() was deprecated and later removed from
            # matplotlib; harmless here only on old versions.
            plt.hold(True)
            plt.plot(score_x[-5:], score_c2[-5:])
            plt.xlabel('Trials')
            plt.ylabel('Classification score (% correct)')
            plt.title('Real-time feedback')
            plt.ylim([0, 100])
            plt.xticks(score_x[-5:])
            plt.legend(('LV', 'RV'), loc='upper left')
            plt.show()
| bsd-3-clause |
mozilla/stoneridge | python/src/Lib/test/test_deque.py | 29 | 24677 | from collections import deque
import unittest
from test import test_support, seq_tests
import gc
import weakref
import copy
import cPickle as pickle
import random
BIG = 100000
def fail():
    # The unreachable `yield` makes this a *generator function*: calling
    # fail() succeeds and returns a generator, and the SyntaxError is only
    # raised when that generator is first iterated (exploited by
    # test_extendleft to check exception propagation from an iterable).
    raise SyntaxError
    yield 1
class BadCmp:
    # Equality comparison always raises; used to verify that deque methods
    # propagate exceptions raised while comparing elements.
    def __eq__(self, other):
        raise RuntimeError
class MutateCmp:
    # Equality comparison that empties the deque it was given, to exercise
    # mutation-during-search handling in deque.remove().
    def __init__(self, deque, result):
        # `deque` here is the container instance, not the class.
        self.deque = deque
        self.result = result

    def __eq__(self, other):
        self.deque.clear()
        return self.result
class TestBasic(unittest.TestCase):
def test_basics(self):
    # Grow the deque on both ends past block boundaries, then shrink it
    # from both ends, checking contents at each stage.
    d = deque(xrange(-5125, -5000))
    d.__init__(xrange(200))
    for i in xrange(200, 400):
        d.append(i)
    for i in reversed(xrange(-200, 0)):
        d.appendleft(i)
    self.assertEqual(list(d), range(-200, 400))
    self.assertEqual(len(d), 600)

    left = [d.popleft() for i in xrange(250)]
    self.assertEqual(left, range(-200, 50))
    self.assertEqual(list(d), range(50, 400))

    right = [d.pop() for i in xrange(250)]
    right.reverse()
    self.assertEqual(right, range(150, 400))
    self.assertEqual(list(d), range(50, 150))
def test_maxlen(self):
self.assertRaises(ValueError, deque, 'abc', -1)
self.assertRaises(ValueError, deque, 'abc', -2)
it = iter(range(10))
d = deque(it, maxlen=3)
self.assertEqual(list(it), [])
self.assertEqual(repr(d), 'deque([7, 8, 9], maxlen=3)')
self.assertEqual(list(d), range(7, 10))
self.assertEqual(d, deque(range(10), 3))
d.append(10)
self.assertEqual(list(d), range(8, 11))
d.appendleft(7)
self.assertEqual(list(d), range(7, 10))
d.extend([10, 11])
self.assertEqual(list(d), range(9, 12))
d.extendleft([8, 7])
self.assertEqual(list(d), range(7, 10))
d = deque(xrange(200), maxlen=10)
d.append(d)
test_support.unlink(test_support.TESTFN)
fo = open(test_support.TESTFN, "wb")
try:
print >> fo, d,
fo.close()
fo = open(test_support.TESTFN, "rb")
self.assertEqual(fo.read(), repr(d))
finally:
fo.close()
test_support.unlink(test_support.TESTFN)
d = deque(range(10), maxlen=None)
self.assertEqual(repr(d), 'deque([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])')
fo = open(test_support.TESTFN, "wb")
try:
print >> fo, d,
fo.close()
fo = open(test_support.TESTFN, "rb")
self.assertEqual(fo.read(), repr(d))
finally:
fo.close()
test_support.unlink(test_support.TESTFN)
def test_maxlen_zero(self):
it = iter(range(100))
deque(it, maxlen=0)
self.assertEqual(list(it), [])
it = iter(range(100))
d = deque(maxlen=0)
d.extend(it)
self.assertEqual(list(it), [])
it = iter(range(100))
d = deque(maxlen=0)
d.extendleft(it)
self.assertEqual(list(it), [])
def test_maxlen_attribute(self):
self.assertEqual(deque().maxlen, None)
self.assertEqual(deque('abc').maxlen, None)
self.assertEqual(deque('abc', maxlen=4).maxlen, 4)
self.assertEqual(deque('abc', maxlen=2).maxlen, 2)
self.assertEqual(deque('abc', maxlen=0).maxlen, 0)
with self.assertRaises(AttributeError):
d = deque('abc')
d.maxlen = 10
def test_count(self):
for s in ('', 'abracadabra', 'simsalabim'*500+'abc'):
s = list(s)
d = deque(s)
for letter in 'abcdefghijklmnopqrstuvwxyz':
self.assertEqual(s.count(letter), d.count(letter), (s, d, letter))
self.assertRaises(TypeError, d.count) # too few args
self.assertRaises(TypeError, d.count, 1, 2) # too many args
class BadCompare:
def __eq__(self, other):
raise ArithmeticError
d = deque([1, 2, BadCompare(), 3])
self.assertRaises(ArithmeticError, d.count, 2)
d = deque([1, 2, 3])
self.assertRaises(ArithmeticError, d.count, BadCompare())
class MutatingCompare:
def __eq__(self, other):
self.d.pop()
return True
m = MutatingCompare()
d = deque([1, 2, 3, m, 4, 5])
m.d = d
self.assertRaises(RuntimeError, d.count, 3)
# test issue11004
# block advance failed after rotation aligned elements on right side of block
d = deque([None]*16)
for i in range(len(d)):
d.rotate(-1)
d.rotate(1)
self.assertEqual(d.count(1), 0)
self.assertEqual(d.count(None), 16)
def test_comparisons(self):
d = deque('xabc'); d.popleft()
for e in [d, deque('abc'), deque('ab'), deque(), list(d)]:
self.assertEqual(d==e, type(d)==type(e) and list(d)==list(e))
self.assertEqual(d!=e, not(type(d)==type(e) and list(d)==list(e)))
args = map(deque, ('', 'a', 'b', 'ab', 'ba', 'abc', 'xba', 'xabc', 'cba'))
for x in args:
for y in args:
self.assertEqual(x == y, list(x) == list(y), (x,y))
self.assertEqual(x != y, list(x) != list(y), (x,y))
self.assertEqual(x < y, list(x) < list(y), (x,y))
self.assertEqual(x <= y, list(x) <= list(y), (x,y))
self.assertEqual(x > y, list(x) > list(y), (x,y))
self.assertEqual(x >= y, list(x) >= list(y), (x,y))
self.assertEqual(cmp(x,y), cmp(list(x),list(y)), (x,y))
def test_extend(self):
d = deque('a')
self.assertRaises(TypeError, d.extend, 1)
d.extend('bcd')
self.assertEqual(list(d), list('abcd'))
d.extend(d)
self.assertEqual(list(d), list('abcdabcd'))
def test_iadd(self):
d = deque('a')
d += 'bcd'
self.assertEqual(list(d), list('abcd'))
d += d
self.assertEqual(list(d), list('abcdabcd'))
def test_extendleft(self):
d = deque('a')
self.assertRaises(TypeError, d.extendleft, 1)
d.extendleft('bcd')
self.assertEqual(list(d), list(reversed('abcd')))
d.extendleft(d)
self.assertEqual(list(d), list('abcddcba'))
d = deque()
d.extendleft(range(1000))
self.assertEqual(list(d), list(reversed(range(1000))))
self.assertRaises(SyntaxError, d.extendleft, fail())
def test_getitem(self):
n = 200
d = deque(xrange(n))
l = range(n)
for i in xrange(n):
d.popleft()
l.pop(0)
if random.random() < 0.5:
d.append(i)
l.append(i)
for j in xrange(1-len(l), len(l)):
assert d[j] == l[j]
d = deque('superman')
self.assertEqual(d[0], 's')
self.assertEqual(d[-1], 'n')
d = deque()
self.assertRaises(IndexError, d.__getitem__, 0)
self.assertRaises(IndexError, d.__getitem__, -1)
def test_setitem(self):
n = 200
d = deque(xrange(n))
for i in xrange(n):
d[i] = 10 * i
self.assertEqual(list(d), [10*i for i in xrange(n)])
l = list(d)
for i in xrange(1-n, 0, -1):
d[i] = 7*i
l[i] = 7*i
self.assertEqual(list(d), l)
def test_delitem(self):
n = 500 # O(n**2) test, don't make this too big
d = deque(xrange(n))
self.assertRaises(IndexError, d.__delitem__, -n-1)
self.assertRaises(IndexError, d.__delitem__, n)
for i in xrange(n):
self.assertEqual(len(d), n-i)
j = random.randrange(-len(d), len(d))
val = d[j]
self.assertIn(val, d)
del d[j]
self.assertNotIn(val, d)
self.assertEqual(len(d), 0)
def test_reverse(self):
n = 500 # O(n**2) test, don't make this too big
data = [random.random() for i in range(n)]
for i in range(n):
d = deque(data[:i])
r = d.reverse()
self.assertEqual(list(d), list(reversed(data[:i])))
self.assertIs(r, None)
d.reverse()
self.assertEqual(list(d), data[:i])
self.assertRaises(TypeError, d.reverse, 1) # Arity is zero
def test_rotate(self):
s = tuple('abcde')
n = len(s)
d = deque(s)
d.rotate(1) # verify rot(1)
self.assertEqual(''.join(d), 'eabcd')
d = deque(s)
d.rotate(-1) # verify rot(-1)
self.assertEqual(''.join(d), 'bcdea')
d.rotate() # check default to 1
self.assertEqual(tuple(d), s)
for i in xrange(n*3):
d = deque(s)
e = deque(d)
d.rotate(i) # check vs. rot(1) n times
for j in xrange(i):
e.rotate(1)
self.assertEqual(tuple(d), tuple(e))
d.rotate(-i) # check that it works in reverse
self.assertEqual(tuple(d), s)
e.rotate(n-i) # check that it wraps forward
self.assertEqual(tuple(e), s)
for i in xrange(n*3):
d = deque(s)
e = deque(d)
d.rotate(-i)
for j in xrange(i):
e.rotate(-1) # check vs. rot(-1) n times
self.assertEqual(tuple(d), tuple(e))
d.rotate(i) # check that it works in reverse
self.assertEqual(tuple(d), s)
e.rotate(i-n) # check that it wraps backaround
self.assertEqual(tuple(e), s)
d = deque(s)
e = deque(s)
e.rotate(BIG+17) # verify on long series of rotates
dr = d.rotate
for i in xrange(BIG+17):
dr()
self.assertEqual(tuple(d), tuple(e))
self.assertRaises(TypeError, d.rotate, 'x') # Wrong arg type
self.assertRaises(TypeError, d.rotate, 1, 10) # Too many args
d = deque()
d.rotate() # rotate an empty deque
self.assertEqual(d, deque())
def test_len(self):
d = deque('ab')
self.assertEqual(len(d), 2)
d.popleft()
self.assertEqual(len(d), 1)
d.pop()
self.assertEqual(len(d), 0)
self.assertRaises(IndexError, d.pop)
self.assertEqual(len(d), 0)
d.append('c')
self.assertEqual(len(d), 1)
d.appendleft('d')
self.assertEqual(len(d), 2)
d.clear()
self.assertEqual(len(d), 0)
def test_underflow(self):
    # pop/popleft on an empty deque must raise IndexError.
    d = deque()
    self.assertRaises(IndexError, d.pop)
    self.assertRaises(IndexError, d.popleft)
def test_clear(self):
    d = deque(xrange(100))
    self.assertEqual(len(d), 100)
    d.clear()
    self.assertEqual(len(d), 0)
    self.assertEqual(list(d), [])
    d.clear()  # clear an empty deque
    self.assertEqual(list(d), [])
def test_remove(self):
d = deque('abcdefghcij')
d.remove('c')
self.assertEqual(d, deque('abdefghcij'))
d.remove('c')
self.assertEqual(d, deque('abdefghij'))
self.assertRaises(ValueError, d.remove, 'c')
self.assertEqual(d, deque('abdefghij'))
# Handle comparison errors
d = deque(['a', 'b', BadCmp(), 'c'])
e = deque(d)
self.assertRaises(RuntimeError, d.remove, 'c')
for x, y in zip(d, e):
# verify that original order and values are retained.
self.assertTrue(x is y)
# Handle evil mutator
for match in (True, False):
d = deque(['ab'])
d.extend([MutateCmp(d, match), 'c'])
self.assertRaises(IndexError, d.remove, 'c')
self.assertEqual(d, deque())
def test_repr(self):
d = deque(xrange(200))
e = eval(repr(d))
self.assertEqual(list(d), list(e))
d.append(d)
self.assertIn('...', repr(d))
def test_print(self):
d = deque(xrange(200))
d.append(d)
test_support.unlink(test_support.TESTFN)
fo = open(test_support.TESTFN, "wb")
try:
print >> fo, d,
fo.close()
fo = open(test_support.TESTFN, "rb")
self.assertEqual(fo.read(), repr(d))
finally:
fo.close()
test_support.unlink(test_support.TESTFN)
def test_init(self):
    # deque() accepts at most (iterable, maxlen); other arities/types fail.
    self.assertRaises(TypeError, deque, 'abc', 2, 3);
    self.assertRaises(TypeError, deque, 1);
def test_hash(self):
    # deques are mutable and therefore unhashable.
    self.assertRaises(TypeError, hash, deque('abc'))
def test_long_steadystate_queue_popleft(self):
for size in (0, 1, 2, 100, 1000):
d = deque(xrange(size))
append, pop = d.append, d.popleft
for i in xrange(size, BIG):
append(i)
x = pop()
if x != i - size:
self.assertEqual(x, i-size)
self.assertEqual(list(d), range(BIG-size, BIG))
def test_long_steadystate_queue_popright(self):
for size in (0, 1, 2, 100, 1000):
d = deque(reversed(xrange(size)))
append, pop = d.appendleft, d.pop
for i in xrange(size, BIG):
append(i)
x = pop()
if x != i - size:
self.assertEqual(x, i-size)
self.assertEqual(list(reversed(list(d))), range(BIG-size, BIG))
def test_big_queue_popleft(self):
    # FIFO behaviour over a large number of elements.
    # (Removed a stray `pass` statement that preceded the body -- dead code.)
    d = deque()
    append, pop = d.append, d.popleft
    for i in xrange(BIG):
        append(i)
    for i in xrange(BIG):
        x = pop()
        if x != i:
            self.assertEqual(x, i)
def test_big_queue_popright(self):
d = deque()
append, pop = d.appendleft, d.pop
for i in xrange(BIG):
append(i)
for i in xrange(BIG):
x = pop()
if x != i:
self.assertEqual(x, i)
def test_big_stack_right(self):
d = deque()
append, pop = d.append, d.pop
for i in xrange(BIG):
append(i)
for i in reversed(xrange(BIG)):
x = pop()
if x != i:
self.assertEqual(x, i)
self.assertEqual(len(d), 0)
def test_big_stack_left(self):
d = deque()
append, pop = d.appendleft, d.popleft
for i in xrange(BIG):
append(i)
for i in reversed(xrange(BIG)):
x = pop()
if x != i:
self.assertEqual(x, i)
self.assertEqual(len(d), 0)
def test_roundtrip_iter_init(self):
d = deque(xrange(200))
e = deque(d)
self.assertNotEqual(id(d), id(e))
self.assertEqual(list(d), list(e))
def test_pickle(self):
d = deque(xrange(200))
for i in range(pickle.HIGHEST_PROTOCOL + 1):
s = pickle.dumps(d, i)
e = pickle.loads(s)
self.assertNotEqual(id(d), id(e))
self.assertEqual(list(d), list(e))
## def test_pickle_recursive(self):
## d = deque('abc')
## d.append(d)
## for i in range(pickle.HIGHEST_PROTOCOL + 1):
## e = pickle.loads(pickle.dumps(d, i))
## self.assertNotEqual(id(d), id(e))
## self.assertEqual(id(e), id(e[-1]))
def test_deepcopy(self):
mut = [10]
d = deque([mut])
e = copy.deepcopy(d)
self.assertEqual(list(d), list(e))
mut[0] = 11
self.assertNotEqual(id(d), id(e))
self.assertNotEqual(list(d), list(e))
def test_copy(self):
mut = [10]
d = deque([mut])
e = copy.copy(d)
self.assertEqual(list(d), list(e))
mut[0] = 11
self.assertNotEqual(id(d), id(e))
self.assertEqual(list(d), list(e))
def test_reversed(self):
for s in ('abcd', xrange(2000)):
self.assertEqual(list(reversed(deque(s))), list(reversed(s)))
def test_gc_doesnt_blowup(self):
import gc
# This used to assert-fail in deque_traverse() under a debug
# build, or run wild with a NULL pointer in a release build.
d = deque()
for i in xrange(100):
d.append(1)
gc.collect()
    def test_container_iterator(self):
        # Bug #3680: tp_traverse was not implemented for deque iterator objects
        class C(object):
            pass
        # Check both the forward and the reverse iterator.
        for i in range(2):
            obj = C()
            ref = weakref.ref(obj)
            if i == 0:
                container = deque([obj, 1])
            else:
                container = reversed(deque([obj, 1]))
            # Build a reference cycle: obj -> iterator -> deque -> obj.
            obj.x = iter(container)
            # Drop the direct references; only the cycle keeps obj alive now.
            del obj, container
            # Without tp_traverse on the iterator, gc could not see (and
            # therefore could not break) the cycle.
            gc.collect()
            self.assertTrue(ref() is None, "Cycle was not collected")
class TestVariousIteratorArgs(unittest.TestCase):
    """Feed deque() well-behaved and misbehaving iterables."""
    def test_constructor(self):
        samples = ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5))
        good_wrappers = (seq_tests.Sequence, seq_tests.IterFunc,
                         seq_tests.IterGen, seq_tests.IterFuncStop,
                         seq_tests.itermulti, seq_tests.iterfunc)
        for seq in samples:
            # Well-behaved iterables must produce an equivalent deque.
            for wrap in good_wrappers:
                self.assertEqual(list(deque(wrap(seq))), list(wrap(seq)))
            # Broken iteration protocols must surface as the right errors.
            self.assertRaises(TypeError, deque, seq_tests.IterNextOnly(seq))
            self.assertRaises(TypeError, deque, seq_tests.IterNoNext(seq))
            self.assertRaises(ZeroDivisionError, deque, seq_tests.IterGenExc(seq))
    def test_iter_with_altered_data(self):
        # Mutating a deque invalidates its live iterators.
        dq = deque('abcdefg')
        walker = iter(dq)
        dq.pop()
        self.assertRaises(RuntimeError, walker.next)
    def test_runtime_error_on_empty_deque(self):
        # Even an iterator taken from an empty deque notices later growth.
        dq = deque()
        walker = iter(dq)
        dq.append(10)
        self.assertRaises(RuntimeError, walker.next)
# Trivial subclass used to verify that deque subclasses keep their type
# through construction, copy, and pickle (see TestSubclass).
class Deque(deque):
    pass
# Subclass whose __iter__ always fails; referenced by the (currently
# disabled) recursive-pickle test to check error propagation.
class DequeWithBadIter(deque):
    def __iter__(self):
        raise TypeError
class TestSubclass(unittest.TestCase):
    """Exercise deque subclasses: construction, copy, pickle, weakref."""
    def test_basics(self):
        # __init__ on an existing instance re-initializes in place.
        d = Deque(xrange(25))
        d.__init__(xrange(200))
        for i in xrange(200, 400):
            d.append(i)
        for i in reversed(xrange(-200, 0)):
            d.appendleft(i)
        self.assertEqual(list(d), range(-200, 400))
        self.assertEqual(len(d), 600)
        left = [d.popleft() for i in xrange(250)]
        self.assertEqual(left, range(-200, 50))
        self.assertEqual(list(d), range(50, 400))
        right = [d.pop() for i in xrange(250)]
        right.reverse()
        self.assertEqual(right, range(150, 400))
        self.assertEqual(list(d), range(50, 150))
        d.clear()
        self.assertEqual(len(d), 0)
    def test_copy_pickle(self):
        # Copying and pickling must preserve both contents and subclass type.
        d = Deque('abc')
        e = d.__copy__()
        self.assertEqual(type(d), type(e))
        self.assertEqual(list(d), list(e))
        e = Deque(d)
        self.assertEqual(type(d), type(e))
        self.assertEqual(list(d), list(e))
        s = pickle.dumps(d)
        e = pickle.loads(s)
        self.assertNotEqual(id(d), id(e))
        self.assertEqual(type(d), type(e))
        self.assertEqual(list(d), list(e))
        # Repeat with a bounded deque: maxlen must survive copy and pickle.
        d = Deque('abcde', maxlen=4)
        e = d.__copy__()
        self.assertEqual(type(d), type(e))
        self.assertEqual(list(d), list(e))
        e = Deque(d)
        self.assertEqual(type(d), type(e))
        self.assertEqual(list(d), list(e))
        s = pickle.dumps(d)
        e = pickle.loads(s)
        self.assertNotEqual(id(d), id(e))
        self.assertEqual(type(d), type(e))
        self.assertEqual(list(d), list(e))
##    def test_pickle(self):
##        d = Deque('abc')
##        d.append(d)
##
##        e = pickle.loads(pickle.dumps(d))
##        self.assertNotEqual(id(d), id(e))
##        self.assertEqual(type(d), type(e))
##        dd = d.pop()
##        ee = e.pop()
##        self.assertEqual(id(e), id(ee))
##        self.assertEqual(d, e)
##
##        d.x = d
##        e = pickle.loads(pickle.dumps(d))
##        self.assertEqual(id(e), id(e.x))
##
##        d = DequeWithBadIter('abc')
##        self.assertRaises(TypeError, pickle.dumps, d)
    def test_weakref(self):
        d = deque('gallahad')
        p = weakref.proxy(d)
        self.assertEqual(str(p), str(d))
        # Dropping the last reference kills the proxy target (relies on
        # prompt, refcount-based reclamation).
        d = None
        self.assertRaises(ReferenceError, str, p)
    def test_strange_subclass(self):
        class X(deque):
            def __iter__(self):
                return iter([])
        d1 = X([1,2,3])
        d2 = X([4,5,6])
        d1 == d2   # not clear if this is supposed to be True or False,
                   # but it used to give a SystemError
# Subclass whose __init__ accepts only a keyword argument; exercised by
# TestSubclassWithKwargs.
class SubclassWithKwargs(deque):
    def __init__(self, newarg=1):
        deque.__init__(self)
class TestSubclassWithKwargs(unittest.TestCase):
    """Regression test for subclasses with keyword-only constructors."""
    def test_subclass_with_kwargs(self):
        # SF bug #1486663 -- this used to erroneously raise a TypeError
        SubclassWithKwargs(newarg=1)
#==============================================================================
libreftest = """
Example from the Library Reference: Doc/lib/libcollections.tex
>>> from collections import deque
>>> d = deque('ghi') # make a new deque with three items
>>> for elem in d: # iterate over the deque's elements
... print elem.upper()
G
H
I
>>> d.append('j') # add a new entry to the right side
>>> d.appendleft('f') # add a new entry to the left side
>>> d # show the representation of the deque
deque(['f', 'g', 'h', 'i', 'j'])
>>> d.pop() # return and remove the rightmost item
'j'
>>> d.popleft() # return and remove the leftmost item
'f'
>>> list(d) # list the contents of the deque
['g', 'h', 'i']
>>> d[0] # peek at leftmost item
'g'
>>> d[-1] # peek at rightmost item
'i'
>>> list(reversed(d)) # list the contents of a deque in reverse
['i', 'h', 'g']
>>> 'h' in d # search the deque
True
>>> d.extend('jkl') # add multiple elements at once
>>> d
deque(['g', 'h', 'i', 'j', 'k', 'l'])
>>> d.rotate(1) # right rotation
>>> d
deque(['l', 'g', 'h', 'i', 'j', 'k'])
>>> d.rotate(-1) # left rotation
>>> d
deque(['g', 'h', 'i', 'j', 'k', 'l'])
>>> deque(reversed(d)) # make a new deque in reverse order
deque(['l', 'k', 'j', 'i', 'h', 'g'])
>>> d.clear() # empty the deque
>>> d.pop() # cannot pop from an empty deque
Traceback (most recent call last):
File "<pyshell#6>", line 1, in -toplevel-
d.pop()
IndexError: pop from an empty deque
>>> d.extendleft('abc') # extendleft() reverses the input order
>>> d
deque(['c', 'b', 'a'])
>>> def delete_nth(d, n):
... d.rotate(-n)
... d.popleft()
... d.rotate(n)
...
>>> d = deque('abcdef')
>>> delete_nth(d, 2) # remove the entry at d[2]
>>> d
deque(['a', 'b', 'd', 'e', 'f'])
>>> def roundrobin(*iterables):
... pending = deque(iter(i) for i in iterables)
... while pending:
... task = pending.popleft()
... try:
... yield task.next()
... except StopIteration:
... continue
... pending.append(task)
...
>>> for value in roundrobin('abc', 'd', 'efgh'):
... print value
...
a
d
e
b
f
c
g
h
>>> def maketree(iterable):
... d = deque(iterable)
... while len(d) > 1:
... pair = [d.popleft(), d.popleft()]
... d.append(pair)
... return list(d)
...
>>> print maketree('abcdefgh')
[[[['a', 'b'], ['c', 'd']], [['e', 'f'], ['g', 'h']]]]
"""
#==============================================================================
# Expose the doctest text above to regrtest's doctest machinery.
__test__ = {'libreftest' : libreftest}
def test_main(verbose=None):
import sys
test_classes = (
TestBasic,
TestVariousIteratorArgs,
TestSubclass,
TestSubclassWithKwargs,
)
test_support.run_unittest(*test_classes)
# verify reference counting
if verbose and hasattr(sys, "gettotalrefcount"):
import gc
counts = [None] * 5
for i in xrange(len(counts)):
test_support.run_unittest(*test_classes)
gc.collect()
counts[i] = sys.gettotalrefcount()
print counts
# doctests
from test import test_deque
test_support.run_doctest(test_deque, verbose)
# Self-test entry point; verbose=True enables the refcount check above.
if __name__ == "__main__":
    test_main(verbose=True)
| mpl-2.0 |
heeraj123/oh-mainline | vendor/packages/Pygments/pygments/lexers/math.py | 71 | 76438 | # -*- coding: utf-8 -*-
"""
pygments.lexers.math
~~~~~~~~~~~~~~~~~~~~
Lexers for math languages.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.util import shebang_matches
from pygments.lexer import Lexer, RegexLexer, bygroups, include, \
combined, do_insertions
from pygments.token import Comment, String, Punctuation, Keyword, Name, \
Operator, Number, Text, Generic
from pygments.lexers.agile import PythonLexer
from pygments.lexers import _scilab_builtins
from pygments.lexers import _stan_builtins
__all__ = ['JuliaLexer', 'JuliaConsoleLexer', 'MuPADLexer', 'MatlabLexer',
'MatlabSessionLexer', 'OctaveLexer', 'ScilabLexer', 'NumPyLexer',
'RConsoleLexer', 'SLexer', 'JagsLexer', 'BugsLexer', 'StanLexer',
'IDLLexer', 'RdLexer']
class JuliaLexer(RegexLexer):
    """
    For `Julia <http://julialang.org/>`_ source code.
    *New in Pygments 1.6.*
    """
    name = 'Julia'
    aliases = ['julia','jl']
    filenames = ['*.jl']
    mimetypes = ['text/x-julia','application/x-julia']
    # Names highlighted as builtins.
    # NOTE(review): 'Nan' below looks like a typo for Julia's 'NaN'
    # constant -- confirm before changing the highlighting behavior.
    builtins = [
        'exit','whos','edit','load','is','isa','isequal','typeof','tuple',
        'ntuple','uid','hash','finalizer','convert','promote','subtype',
        'typemin','typemax','realmin','realmax','sizeof','eps','promote_type',
        'method_exists','applicable','invoke','dlopen','dlsym','system',
        'error','throw','assert','new','Inf','Nan','pi','im',
    ]
    # Rule order matters: earlier patterns win within each state.
    tokens = {
        'root': [
            (r'\n', Text),
            (r'[^\S\n]+', Text),
            (r'#.*$', Comment),
            (r'[]{}:(),;[@]', Punctuation),
            (r'\\\n', Text),
            (r'\\', Text),
            # keywords
            (r'(begin|while|for|in|return|break|continue|'
             r'macro|quote|let|if|elseif|else|try|catch|end|'
             r'bitstype|ccall|do|using|module|import|export|'
             r'importall|baremodule)\b', Keyword),
            (r'(local|global|const)\b', Keyword.Declaration),
            (r'(Bool|Int|Int8|Int16|Int32|Int64|Uint|Uint8|Uint16|Uint32|Uint64'
             r'|Float32|Float64|Complex64|Complex128|Any|Nothing|None)\b',
                Keyword.Type),
            # functions
            (r'(function)((?:\s|\\\s)+)',
                bygroups(Keyword,Name.Function), 'funcname'),
            # types
            (r'(type|typealias|abstract)((?:\s|\\\s)+)',
                bygroups(Keyword,Name.Class), 'typename'),
            # operators
            (r'==|!=|<=|>=|->|&&|\|\||::|<:|[-~+/*%=<>&^|.?!$]', Operator),
            (r'\.\*|\.\^|\.\\|\.\/|\\', Operator),
            # builtins
            ('(' + '|'.join(builtins) + r')\b', Name.Builtin),
            # backticks
            (r'`(?s).*?`', String.Backtick),
            # chars
            (r"'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,3}|\\u[a-fA-F0-9]{1,4}|"
             r"\\U[a-fA-F0-9]{1,6}|[^\\\'\n])'", String.Char),
            # try to match trailing transpose
            (r'(?<=[.\w\)\]])\'+', Operator),
            # strings
            (r'(?:[IL])"', String, 'string'),
            (r'[E]?"', String, combined('stringescape', 'string')),
            # names
            (r'@[a-zA-Z0-9_.]+', Name.Decorator),
            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name),
            # numbers
            (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
            (r'\d+[eEf][+-]?[0-9]+', Number.Float),
            (r'0b[01]+', Number.Binary),
            (r'0o[0-7]+', Number.Oct),
            (r'0x[a-fA-F0-9]+', Number.Hex),
            (r'\d+', Number.Integer)
        ],
        'funcname': [
            ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop'),
            ('\([^\s\w{]{1,2}\)', Operator, '#pop'),
            ('[^\s\w{]{1,2}', Operator, '#pop'),
        ],
        'typename': [
            ('[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
        ],
        'stringescape': [
            (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|'
             r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
        ],
        'string': [
            (r'"', String, '#pop'),
            (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings
            (r'\$(\([a-zA-Z0-9_]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?',
                String.Interpol),
            (r'[^\\"$]+', String),
            # quotes, dollar signs, and backslashes must be parsed one at a time
            (r'["\\]', String),
            # unhandled string formatting sign
            (r'\$', String)
        ],
    }
    def analyse_text(text):
        # Claim files whose shebang invokes julia.
        return shebang_matches(text, r'julia')
# Matches a single line including its trailing newline; used by the
# console-session lexers to walk input line by line.
line_re = re.compile('.*?\n')
class JuliaConsoleLexer(Lexer):
    """
    For Julia console sessions. Modeled after MatlabSessionLexer.
    *New in Pygments 1.6.*
    """
    name = 'Julia console'
    aliases = ['jlcon']
    def get_tokens_unprocessed(self, text):
        jllexer = JuliaLexer(**self.options)
        curcode = ''
        insertions = []
        for match in line_re.finditer(text):
            line = match.group()
            if line.startswith('julia>'):
                # The 'julia>' prompt is six characters wide.  The original
                # code sliced at 3 (copied from the two-character '>>'
                # Matlab prompt), which split the prompt in half and fed
                # 'ia> ...' to the Julia lexer as source code.
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, line[:6])]))
                curcode += line[6:]
            elif line.startswith(' '):
                # Indented continuation / traceback text under the prompt.
                idx = len(curcode)
                # without is showing error on same line as before...?
                line = "\n" + line
                token = (0, Generic.Traceback, line)
                insertions.append((idx, [token]))
            else:
                # Plain program output: flush any buffered source first so
                # prompts/tracebacks are interleaved at the right offsets.
                if curcode:
                    for item in do_insertions(
                        insertions, jllexer.get_tokens_unprocessed(curcode)):
                        yield item
                    curcode = ''
                    insertions = []
                yield match.start(), Generic.Output, line
        if curcode: # or item:
            for item in do_insertions(
                insertions, jllexer.get_tokens_unprocessed(curcode)):
                yield item
class MuPADLexer(RegexLexer):
    """
    A `MuPAD <http://www.mupad.com>`_ lexer.
    Contributed by Christopher Creutzig <christopher@creutzig.de>.
    *New in Pygments 0.8.*
    """
    name = 'MuPAD'
    aliases = ['mupad']
    filenames = ['*.mu']
    tokens = {
        'root' : [
            (r'//.*?$', Comment.Single),
            # '/*' pushes the 'comment' state, which handles nesting below.
            (r'/\*', Comment.Multiline, 'comment'),
            (r'"(?:[^"\\]|\\.)*"', String),
            (r'\(|\)|\[|\]|\{|\}', Punctuation),
            (r'''(?x)\b(?:
                next|break|end|
                axiom|end_axiom|category|end_category|domain|end_domain|inherits|
                if|%if|then|elif|else|end_if|
                case|of|do|otherwise|end_case|
                while|end_while|
                repeat|until|end_repeat|
                for|from|to|downto|step|end_for|
                proc|local|option|save|begin|end_proc|
                delete|frame
            )\b''', Keyword),
            (r'''(?x)\b(?:
                DOM_ARRAY|DOM_BOOL|DOM_COMPLEX|DOM_DOMAIN|DOM_EXEC|DOM_EXPR|
                DOM_FAIL|DOM_FLOAT|DOM_FRAME|DOM_FUNC_ENV|DOM_HFARRAY|DOM_IDENT|
                DOM_INT|DOM_INTERVAL|DOM_LIST|DOM_NIL|DOM_NULL|DOM_POLY|DOM_PROC|
                DOM_PROC_ENV|DOM_RAT|DOM_SET|DOM_STRING|DOM_TABLE|DOM_VAR
            )\b''', Name.Class),
            (r'''(?x)\b(?:
                PI|EULER|E|CATALAN|
                NIL|FAIL|undefined|infinity|
                TRUE|FALSE|UNKNOWN
            )\b''',
             Name.Constant),
            (r'\b(?:dom|procname)\b', Name.Builtin.Pseudo),
            (r'\.|,|:|;|=|\+|-|\*|/|\^|@|>|<|\$|\||!|\'|%|~=', Operator),
            (r'''(?x)\b(?:
                and|or|not|xor|
                assuming|
                div|mod|
                union|minus|intersect|in|subset
            )\b''',
             Operator.Word),
            (r'\b(?:I|RDN_INF|RD_NINF|RD_NAN)\b', Number),
            #(r'\b(?:adt|linalg|newDomain|hold)\b', Name.Builtin),
            (r'''(?x)
              ((?:[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)
              (?:::[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)*)(\s*)([(])''',
             bygroups(Name.Function, Text, Punctuation)),
            (r'''(?x)
              (?:[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)
              (?:::[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)*''', Name.Variable),
            (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number),
            (r'\.[0-9]+(?:e[0-9]+)?', Number),
            (r'.', Text)
        ],
        # MuPAD block comments nest: '#push'/'#pop' track the depth.
        'comment' : [
            (r'[^*/]', Comment.Multiline),
            (r'/\*', Comment.Multiline, '#push'),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'[*/]', Comment.Multiline)
        ]
    }
class MatlabLexer(RegexLexer):
    """
    For Matlab source code.
    *New in Pygments 0.10.*
    """
    name = 'Matlab'
    aliases = ['matlab']
    filenames = ['*.m']
    mimetypes = ['text/matlab']
    #
    # These lists are generated automatically.
    # Run the following in bash shell:
    #
    # for f in elfun specfun elmat; do
    # echo -n "$f = "
    # matlab -nojvm -r "help $f;exit;" | perl -ne \
    # 'push(@c,$1) if /^ (\w+)\s+-/; END {print q{["}.join(q{","},@c).qq{"]\n};}'
    # done
    #
    # elfun: Elementary math functions
    # specfun: Special Math functions
    # elmat: Elementary matrices and matrix manipulation
    #
    # taken from Matlab version 7.4.0.336 (R2007a)
    #
    elfun = ["sin","sind","sinh","asin","asind","asinh","cos","cosd","cosh",
             "acos","acosd","acosh","tan","tand","tanh","atan","atand","atan2",
             "atanh","sec","secd","sech","asec","asecd","asech","csc","cscd",
             "csch","acsc","acscd","acsch","cot","cotd","coth","acot","acotd",
             "acoth","hypot","exp","expm1","log","log1p","log10","log2","pow2",
             "realpow","reallog","realsqrt","sqrt","nthroot","nextpow2","abs",
             "angle","complex","conj","imag","real","unwrap","isreal","cplxpair",
             "fix","floor","ceil","round","mod","rem","sign"]
    specfun = ["airy","besselj","bessely","besselh","besseli","besselk","beta",
               "betainc","betaln","ellipj","ellipke","erf","erfc","erfcx",
               "erfinv","expint","gamma","gammainc","gammaln","psi","legendre",
               "cross","dot","factor","isprime","primes","gcd","lcm","rat",
               "rats","perms","nchoosek","factorial","cart2sph","cart2pol",
               "pol2cart","sph2cart","hsv2rgb","rgb2hsv"]
    elmat = ["zeros","ones","eye","repmat","rand","randn","linspace","logspace",
             "freqspace","meshgrid","accumarray","size","length","ndims","numel",
             "disp","isempty","isequal","isequalwithequalnans","cat","reshape",
             "diag","blkdiag","tril","triu","fliplr","flipud","flipdim","rot90",
             "find","end","sub2ind","ind2sub","bsxfun","ndgrid","permute",
             "ipermute","shiftdim","circshift","squeeze","isscalar","isvector",
             "ans","eps","realmax","realmin","pi","i","inf","nan","isnan",
             "isinf","isfinite","j","why","compan","gallery","hadamard","hankel",
             "hilb","invhilb","magic","pascal","rosser","toeplitz","vander",
             "wilkinson"]
    # Rule order matters: earlier patterns win within each state.
    tokens = {
        'root': [
            # line starting with '!' is sent as a system command. not sure what
            # label to use...
            (r'^!.*', String.Other),
            (r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
            (r'%.*$', Comment),
            (r'^\s*function', Keyword, 'deffunc'),
            # from 'iskeyword' on version 7.11 (R2010):
            (r'(break|case|catch|classdef|continue|else|elseif|end|enumerated|'
             r'events|for|function|global|if|methods|otherwise|parfor|'
             r'persistent|properties|return|spmd|switch|try|while)\b', Keyword),
            ("(" + "|".join(elfun+specfun+elmat) + r')\b', Name.Builtin),
            # line continuation with following comment:
            (r'\.\.\..*$', Comment),
            # operators:
            (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
            # operators requiring escape for re:
            (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
            # punctuation:
            (r'\[|\]|\(|\)|\{|\}|:|@|\.|,', Punctuation),
            (r'=|:|;', Punctuation),
            # quote can be transpose, instead of string:
            # (not great, but handles common cases...)
            (r'(?<=[\w\)\]])\'', Operator),
            (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
            (r'\d+[eEf][+-]?[0-9]+', Number.Float),
            (r'\d+', Number.Integer),
            (r'(?<![\w\)\]])\'', String, 'string'),
            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
            (r'.', Text),
        ],
        'string': [
            (r'[^\']*\'', String, '#pop')
        ],
        'blockcomment': [
            (r'^\s*%\}', Comment.Multiline, '#pop'),
            (r'^.*\n', Comment.Multiline),
            (r'.', Comment.Multiline),
        ],
        # Entered right after the 'function' keyword; consumes the whole
        # "[out] = name(args)" header line, then pops back to 'root'.
        'deffunc': [
            (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
             bygroups(Text.Whitespace, Text, Text.Whitespace, Punctuation,
                      Text.Whitespace, Name.Function, Punctuation, Text,
                      Punctuation, Text.Whitespace), '#pop'),
        ],
    }
    def analyse_text(text):
        # Heuristic: '%' comments or '!' system commands strongly suggest
        # Matlab (shared .m extension with Octave/Objective-C).
        if re.match('^\s*%', text, re.M): # comment
            return 0.9
        elif re.match('^!\w+', text, re.M): # system cmd
            return 0.9
        return 0.1
# Redefinition of line_re (identical to the one defined above); used by
# the session lexer that follows.
line_re = re.compile('.*?\n')
class MatlabSessionLexer(Lexer):
    """
    For Matlab sessions. Modeled after PythonConsoleLexer.
    Contributed by Ken Schutte <kschutte@csail.mit.edu>.
    *New in Pygments 0.10.*
    """
    name = 'Matlab session'
    aliases = ['matlabsession']
    def get_tokens_unprocessed(self, text):
        mlexer = MatlabLexer(**self.options)
        curcode = ''
        insertions = []
        for match in line_re.finditer(text):
            line = match.group()
            if line.startswith('>>'):
                # line[:3] covers '>>' plus the following character
                # (presumably a space -- TODO confirm); the rest is source.
                insertions.append((len(curcode),
                                   [(0, Generic.Prompt, line[:3])]))
                curcode += line[3:]
            elif line.startswith('???'):
                # Matlab error output ('??? ...') becomes a traceback token.
                idx = len(curcode)
                # without is showing error on same line as before...?
                line = "\n" + line
                token = (0, Generic.Traceback, line)
                insertions.append((idx, [token]))
            else:
                # Plain output: flush buffered source through the Matlab
                # lexer before emitting the output line.
                if curcode:
                    for item in do_insertions(
                        insertions, mlexer.get_tokens_unprocessed(curcode)):
                        yield item
                    curcode = ''
                    insertions = []
                yield match.start(), Generic.Output, line
        if curcode: # or item:
            for item in do_insertions(
                insertions, mlexer.get_tokens_unprocessed(curcode)):
                yield item
class OctaveLexer(RegexLexer):
    """
    For GNU Octave source code.
    *New in Pygments 1.5.*
    """
    name = 'Octave'
    aliases = ['octave']
    filenames = ['*.m']
    mimetypes = ['text/octave']
    # These lists are generated automatically.
    # Run the following in bash shell:
    #
    # First dump all of the Octave manual into a plain text file:
    #
    # $ info octave --subnodes -o octave-manual
    #
    # Now grep through it:
    # for i in \
    # "Built-in Function" "Command" "Function File" \
    # "Loadable Function" "Mapping Function";
    # do
    # perl -e '@name = qw('"$i"');
    # print lc($name[0]),"_kw = [\n"';
    #
    # perl -n -e 'print "\"$1\",\n" if /-- '"$i"': .* (\w*) \(/;' \
    # octave-manual | sort | uniq ;
    # echo "]" ;
    # echo;
    # done
    # taken from Octave Mercurial changeset 8cc154f45e37 (30-jan-2011)
    builtin_kw = [ "addlistener", "addpath", "addproperty", "all",
                   "and", "any", "argnames", "argv", "assignin",
                   "atexit", "autoload",
                   "available_graphics_toolkits", "beep_on_error",
                   "bitand", "bitmax", "bitor", "bitshift", "bitxor",
                   "cat", "cell", "cellstr", "char", "class", "clc",
                   "columns", "command_line_path",
                   "completion_append_char", "completion_matches",
                   "complex", "confirm_recursive_rmdir", "cputime",
                   "crash_dumps_octave_core", "ctranspose", "cumprod",
                   "cumsum", "debug_on_error", "debug_on_interrupt",
                   "debug_on_warning", "default_save_options",
                   "dellistener", "diag", "diff", "disp",
                   "doc_cache_file", "do_string_escapes", "double",
                   "drawnow", "e", "echo_executing_commands", "eps",
                   "eq", "errno", "errno_list", "error", "eval",
                   "evalin", "exec", "exist", "exit", "eye", "false",
                   "fclear", "fclose", "fcntl", "fdisp", "feof",
                   "ferror", "feval", "fflush", "fgetl", "fgets",
                   "fieldnames", "file_in_loadpath", "file_in_path",
                   "filemarker", "filesep", "find_dir_in_path",
                   "fixed_point_format", "fnmatch", "fopen", "fork",
                   "formula", "fprintf", "fputs", "fread", "freport",
                   "frewind", "fscanf", "fseek", "fskipl", "ftell",
                   "functions", "fwrite", "ge", "genpath", "get",
                   "getegid", "getenv", "geteuid", "getgid",
                   "getpgrp", "getpid", "getppid", "getuid", "glob",
                   "gt", "gui_mode", "history_control",
                   "history_file", "history_size",
                   "history_timestamp_format_string", "home",
                   "horzcat", "hypot", "ifelse",
                   "ignore_function_time_stamp", "inferiorto",
                   "info_file", "info_program", "inline", "input",
                   "intmax", "intmin", "ipermute",
                   "is_absolute_filename", "isargout", "isbool",
                   "iscell", "iscellstr", "ischar", "iscomplex",
                   "isempty", "isfield", "isfloat", "isglobal",
                   "ishandle", "isieee", "isindex", "isinteger",
                   "islogical", "ismatrix", "ismethod", "isnull",
                   "isnumeric", "isobject", "isreal",
                   "is_rooted_relative_filename", "issorted",
                   "isstruct", "isvarname", "kbhit", "keyboard",
                   "kill", "lasterr", "lasterror", "lastwarn",
                   "ldivide", "le", "length", "link", "linspace",
                   "logical", "lstat", "lt", "make_absolute_filename",
                   "makeinfo_program", "max_recursion_depth", "merge",
                   "methods", "mfilename", "minus", "mislocked",
                   "mkdir", "mkfifo", "mkstemp", "mldivide", "mlock",
                   "mouse_wheel_zoom", "mpower", "mrdivide", "mtimes",
                   "munlock", "nargin", "nargout",
                   "native_float_format", "ndims", "ne", "nfields",
                   "nnz", "norm", "not", "numel", "nzmax",
                   "octave_config_info", "octave_core_file_limit",
                   "octave_core_file_name",
                   "octave_core_file_options", "ones", "or",
                   "output_max_field_width", "output_precision",
                   "page_output_immediately", "page_screen_output",
                   "path", "pathsep", "pause", "pclose", "permute",
                   "pi", "pipe", "plus", "popen", "power",
                   "print_empty_dimensions", "printf",
                   "print_struct_array_contents", "prod",
                   "program_invocation_name", "program_name",
                   "putenv", "puts", "pwd", "quit", "rats", "rdivide",
                   "readdir", "readlink", "read_readline_init_file",
                   "realmax", "realmin", "rehash", "rename",
                   "repelems", "re_read_readline_init_file", "reset",
                   "reshape", "resize", "restoredefaultpath",
                   "rethrow", "rmdir", "rmfield", "rmpath", "rows",
                   "save_header_format_string", "save_precision",
                   "saving_history", "scanf", "set", "setenv",
                   "shell_cmd", "sighup_dumps_octave_core",
                   "sigterm_dumps_octave_core", "silent_functions",
                   "single", "size", "size_equal", "sizemax",
                   "sizeof", "sleep", "source", "sparse_auto_mutate",
                   "split_long_rows", "sprintf", "squeeze", "sscanf",
                   "stat", "stderr", "stdin", "stdout", "strcmp",
                   "strcmpi", "string_fill_char", "strncmp",
                   "strncmpi", "struct", "struct_levels_to_print",
                   "strvcat", "subsasgn", "subsref", "sum", "sumsq",
                   "superiorto", "suppress_verbose_help_message",
                   "symlink", "system", "tic", "tilde_expand",
                   "times", "tmpfile", "tmpnam", "toc", "toupper",
                   "transpose", "true", "typeinfo", "umask", "uminus",
                   "uname", "undo_string_escapes", "unlink", "uplus",
                   "upper", "usage", "usleep", "vec", "vectorize",
                   "vertcat", "waitpid", "warning", "warranty",
                   "whos_line_format", "yes_or_no", "zeros",
                   "inf", "Inf", "nan", "NaN"]
    command_kw = [ "close", "load", "who", "whos", ]
    function_kw = [ "accumarray", "accumdim", "acosd", "acotd",
                    "acscd", "addtodate", "allchild", "ancestor",
                    "anova", "arch_fit", "arch_rnd", "arch_test",
                    "area", "arma_rnd", "arrayfun", "ascii", "asctime",
                    "asecd", "asind", "assert", "atand",
                    "autoreg_matrix", "autumn", "axes", "axis", "bar",
                    "barh", "bartlett", "bartlett_test", "beep",
                    "betacdf", "betainv", "betapdf", "betarnd",
                    "bicgstab", "bicubic", "binary", "binocdf",
                    "binoinv", "binopdf", "binornd", "bitcmp",
                    "bitget", "bitset", "blackman", "blanks",
                    "blkdiag", "bone", "box", "brighten", "calendar",
                    "cast", "cauchy_cdf", "cauchy_inv", "cauchy_pdf",
                    "cauchy_rnd", "caxis", "celldisp", "center", "cgs",
                    "chisquare_test_homogeneity",
                    "chisquare_test_independence", "circshift", "cla",
                    "clabel", "clf", "clock", "cloglog", "closereq",
                    "colon", "colorbar", "colormap", "colperm",
                    "comet", "common_size", "commutation_matrix",
                    "compan", "compare_versions", "compass",
                    "computer", "cond", "condest", "contour",
                    "contourc", "contourf", "contrast", "conv",
                    "convhull", "cool", "copper", "copyfile", "cor",
                    "corrcoef", "cor_test", "cosd", "cotd", "cov",
                    "cplxpair", "cross", "cscd", "cstrcat", "csvread",
                    "csvwrite", "ctime", "cumtrapz", "curl", "cut",
                    "cylinder", "date", "datenum", "datestr",
                    "datetick", "datevec", "dblquad", "deal",
                    "deblank", "deconv", "delaunay", "delaunayn",
                    "delete", "demo", "detrend", "diffpara", "diffuse",
                    "dir", "discrete_cdf", "discrete_inv",
                    "discrete_pdf", "discrete_rnd", "display",
                    "divergence", "dlmwrite", "dos", "dsearch",
                    "dsearchn", "duplication_matrix", "durbinlevinson",
                    "ellipsoid", "empirical_cdf", "empirical_inv",
                    "empirical_pdf", "empirical_rnd", "eomday",
                    "errorbar", "etime", "etreeplot", "example",
                    "expcdf", "expinv", "expm", "exppdf", "exprnd",
                    "ezcontour", "ezcontourf", "ezmesh", "ezmeshc",
                    "ezplot", "ezpolar", "ezsurf", "ezsurfc", "factor",
                    "factorial", "fail", "fcdf", "feather", "fftconv",
                    "fftfilt", "fftshift", "figure", "fileattrib",
                    "fileparts", "fill", "findall", "findobj",
                    "findstr", "finv", "flag", "flipdim", "fliplr",
                    "flipud", "fpdf", "fplot", "fractdiff", "freqz",
                    "freqz_plot", "frnd", "fsolve",
                    "f_test_regression", "ftp", "fullfile", "fzero",
                    "gamcdf", "gaminv", "gampdf", "gamrnd", "gca",
                    "gcbf", "gcbo", "gcf", "genvarname", "geocdf",
                    "geoinv", "geopdf", "geornd", "getfield", "ginput",
                    "glpk", "gls", "gplot", "gradient",
                    "graphics_toolkit", "gray", "grid", "griddata",
                    "griddatan", "gtext", "gunzip", "gzip", "hadamard",
                    "hamming", "hankel", "hanning", "hggroup",
                    "hidden", "hilb", "hist", "histc", "hold", "hot",
                    "hotelling_test", "housh", "hsv", "hurst",
                    "hygecdf", "hygeinv", "hygepdf", "hygernd",
                    "idivide", "ifftshift", "image", "imagesc",
                    "imfinfo", "imread", "imshow", "imwrite", "index",
                    "info", "inpolygon", "inputname", "interpft",
                    "interpn", "intersect", "invhilb", "iqr", "isa",
                    "isdefinite", "isdir", "is_duplicate_entry",
                    "isequal", "isequalwithequalnans", "isfigure",
                    "ishermitian", "ishghandle", "is_leap_year",
                    "isletter", "ismac", "ismember", "ispc", "isprime",
                    "isprop", "isscalar", "issquare", "isstrprop",
                    "issymmetric", "isunix", "is_valid_file_id",
                    "isvector", "jet", "kendall",
                    "kolmogorov_smirnov_cdf",
                    "kolmogorov_smirnov_test", "kruskal_wallis_test",
                    "krylov", "kurtosis", "laplace_cdf", "laplace_inv",
                    "laplace_pdf", "laplace_rnd", "legend", "legendre",
                    "license", "line", "linkprop", "list_primes",
                    "loadaudio", "loadobj", "logistic_cdf",
                    "logistic_inv", "logistic_pdf", "logistic_rnd",
                    "logit", "loglog", "loglogerr", "logm", "logncdf",
                    "logninv", "lognpdf", "lognrnd", "logspace",
                    "lookfor", "ls_command", "lsqnonneg", "magic",
                    "mahalanobis", "manova", "matlabroot",
                    "mcnemar_test", "mean", "meansq", "median", "menu",
                    "mesh", "meshc", "meshgrid", "meshz", "mexext",
                    "mget", "mkpp", "mode", "moment", "movefile",
                    "mpoles", "mput", "namelengthmax", "nargchk",
                    "nargoutchk", "nbincdf", "nbininv", "nbinpdf",
                    "nbinrnd", "nchoosek", "ndgrid", "newplot", "news",
                    "nonzeros", "normcdf", "normest", "norminv",
                    "normpdf", "normrnd", "now", "nthroot", "null",
                    "ocean", "ols", "onenormest", "optimget",
                    "optimset", "orderfields", "orient", "orth",
                    "pack", "pareto", "parseparams", "pascal", "patch",
                    "pathdef", "pcg", "pchip", "pcolor", "pcr",
                    "peaks", "periodogram", "perl", "perms", "pie",
                    "pink", "planerot", "playaudio", "plot",
                    "plotmatrix", "plotyy", "poisscdf", "poissinv",
                    "poisspdf", "poissrnd", "polar", "poly",
                    "polyaffine", "polyarea", "polyderiv", "polyfit",
                    "polygcd", "polyint", "polyout", "polyreduce",
                    "polyval", "polyvalm", "postpad", "powerset",
                    "ppder", "ppint", "ppjumps", "ppplot", "ppval",
                    "pqpnonneg", "prepad", "primes", "print",
                    "print_usage", "prism", "probit", "qp", "qqplot",
                    "quadcc", "quadgk", "quadl", "quadv", "quiver",
                    "qzhess", "rainbow", "randi", "range", "rank",
                    "ranks", "rat", "reallog", "realpow", "realsqrt",
                    "record", "rectangle_lw", "rectangle_sw",
                    "rectint", "refresh", "refreshdata",
                    "regexptranslate", "repmat", "residue", "ribbon",
                    "rindex", "roots", "rose", "rosser", "rotdim",
                    "rref", "run", "run_count", "rundemos", "run_test",
                    "runtests", "saveas", "saveaudio", "saveobj",
                    "savepath", "scatter", "secd", "semilogx",
                    "semilogxerr", "semilogy", "semilogyerr",
                    "setaudio", "setdiff", "setfield", "setxor",
                    "shading", "shift", "shiftdim", "sign_test",
                    "sinc", "sind", "sinetone", "sinewave", "skewness",
                    "slice", "sombrero", "sortrows", "spaugment",
                    "spconvert", "spdiags", "spearman", "spectral_adf",
                    "spectral_xdf", "specular", "speed", "spencer",
                    "speye", "spfun", "sphere", "spinmap", "spline",
                    "spones", "sprand", "sprandn", "sprandsym",
                    "spring", "spstats", "spy", "sqp", "stairs",
                    "statistics", "std", "stdnormal_cdf",
                    "stdnormal_inv", "stdnormal_pdf", "stdnormal_rnd",
                    "stem", "stft", "strcat", "strchr", "strjust",
                    "strmatch", "strread", "strsplit", "strtok",
                    "strtrim", "strtrunc", "structfun", "studentize",
                    "subplot", "subsindex", "subspace", "substr",
                    "substruct", "summer", "surf", "surface", "surfc",
                    "surfl", "surfnorm", "svds", "swapbytes",
                    "sylvester_matrix", "symvar", "synthesis", "table",
                    "tand", "tar", "tcdf", "tempdir", "tempname",
                    "test", "text", "textread", "textscan", "tinv",
                    "title", "toeplitz", "tpdf", "trace", "trapz",
                    "treelayout", "treeplot", "triangle_lw",
                    "triangle_sw", "tril", "trimesh", "triplequad",
                    "triplot", "trisurf", "triu", "trnd", "tsearchn",
                    "t_test", "t_test_regression", "type", "unidcdf",
                    "unidinv", "unidpdf", "unidrnd", "unifcdf",
                    "unifinv", "unifpdf", "unifrnd", "union", "unique",
                    "unix", "unmkpp", "unpack", "untabify", "untar",
                    "unwrap", "unzip", "u_test", "validatestring",
                    "vander", "var", "var_test", "vech", "ver",
                    "version", "view", "voronoi", "voronoin",
                    "waitforbuttonpress", "wavread", "wavwrite",
                    "wblcdf", "wblinv", "wblpdf", "wblrnd", "weekday",
                    "welch_test", "what", "white", "whitebg",
                    "wienrnd", "wilcoxon_test", "wilkinson", "winter",
                    "xlabel", "xlim", "ylabel", "yulewalker", "zip",
                    "zlabel", "z_test", ]
    loadable_kw = [ "airy", "amd", "balance", "besselh", "besseli",
                    "besselj", "besselk", "bessely", "bitpack",
                    "bsxfun", "builtin", "ccolamd", "cellfun",
                    "cellslices", "chol", "choldelete", "cholinsert",
                    "cholinv", "cholshift", "cholupdate", "colamd",
                    "colloc", "convhulln", "convn", "csymamd",
                    "cummax", "cummin", "daspk", "daspk_options",
                    "dasrt", "dasrt_options", "dassl", "dassl_options",
                    "dbclear", "dbdown", "dbstack", "dbstatus",
                    "dbstop", "dbtype", "dbup", "dbwhere", "det",
                    "dlmread", "dmperm", "dot", "eig", "eigs",
                    "endgrent", "endpwent", "etree", "fft", "fftn",
                    "fftw", "filter", "find", "full", "gcd",
                    "getgrent", "getgrgid", "getgrnam", "getpwent",
                    "getpwnam", "getpwuid", "getrusage", "givens",
                    "gmtime", "gnuplot_binary", "hess", "ifft",
                    "ifftn", "inv", "isdebugmode", "issparse", "kron",
                    "localtime", "lookup", "lsode", "lsode_options",
                    "lu", "luinc", "luupdate", "matrix_type", "max",
                    "min", "mktime", "pinv", "qr", "qrdelete",
                    "qrinsert", "qrshift", "qrupdate", "quad",
                    "quad_options", "qz", "rand", "rande", "randg",
                    "randn", "randp", "randperm", "rcond", "regexp",
                    "regexpi", "regexprep", "schur", "setgrent",
                    "setpwent", "sort", "spalloc", "sparse", "spparms",
                    "sprank", "sqrtm", "strfind", "strftime",
                    "strptime", "strrep", "svd", "svd_driver", "syl",
                    "symamd", "symbfact", "symrcm", "time", "tsearch",
                    "typecast", "urlread", "urlwrite", ]
    mapping_kw = [ "abs", "acos", "acosh", "acot", "acoth", "acsc",
                   "acsch", "angle", "arg", "asec", "asech", "asin",
                   "asinh", "atan", "atanh", "beta", "betainc",
                   "betaln", "bincoeff", "cbrt", "ceil", "conj", "cos",
                   "cosh", "cot", "coth", "csc", "csch", "erf", "erfc",
                   "erfcx", "erfinv", "exp", "finite", "fix", "floor",
                   "fmod", "gamma", "gammainc", "gammaln", "imag",
                   "isalnum", "isalpha", "isascii", "iscntrl",
                   "isdigit", "isfinite", "isgraph", "isinf",
                   "islower", "isna", "isnan", "isprint", "ispunct",
                   "isspace", "isupper", "isxdigit", "lcm", "lgamma",
                   "log", "lower", "mod", "real", "rem", "round",
                   "roundb", "sec", "sech", "sign", "sin", "sinh",
                   "sqrt", "tan", "tanh", "toascii", "tolower", "xor",
                   ]
    builtin_consts = [ "EDITOR", "EXEC_PATH", "I", "IMAGE_PATH", "NA",
                       "OCTAVE_HOME", "OCTAVE_VERSION", "PAGER",
                       "PAGER_FLAGS", "SEEK_CUR", "SEEK_END", "SEEK_SET",
                       "SIG", "S_ISBLK", "S_ISCHR", "S_ISDIR", "S_ISFIFO",
                       "S_ISLNK", "S_ISREG", "S_ISSOCK", "WCONTINUE",
                       "WCOREDUMP", "WEXITSTATUS", "WIFCONTINUED",
                       "WIFEXITED", "WIFSIGNALED", "WIFSTOPPED", "WNOHANG",
                       "WSTOPSIG", "WTERMSIG", "WUNTRACED", ]
    # Rule order matters: earlier patterns win within each state.
    tokens = {
        'root': [
            #We should look into multiline comments
            (r'[%#].*$', Comment),
            (r'^\s*function', Keyword, 'deffunc'),
            # from 'iskeyword' on hg changeset 8cc154f45e37
            (r'(__FILE__|__LINE__|break|case|catch|classdef|continue|do|else|'
             r'elseif|end|end_try_catch|end_unwind_protect|endclassdef|'
             r'endevents|endfor|endfunction|endif|endmethods|endproperties|'
             r'endswitch|endwhile|events|for|function|get|global|if|methods|'
             r'otherwise|persistent|properties|return|set|static|switch|try|'
             r'until|unwind_protect|unwind_protect_cleanup|while)\b', Keyword),
            ("(" + "|".join( builtin_kw + command_kw
                             + function_kw + loadable_kw
                             + mapping_kw) + r')\b', Name.Builtin),
            ("(" + "|".join(builtin_consts) + r')\b', Name.Constant),
            # operators in Octave but not Matlab:
            (r'-=|!=|!|/=|--', Operator),
            # operators:
            (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
            # operators in Octave but not Matlab requiring escape for re:
            (r'\*=|\+=|\^=|\/=|\\=|\*\*|\+\+|\.\*\*',Operator),
            # operators requiring escape for re:
            (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
            # punctuation:
            (r'\[|\]|\(|\)|\{|\}|:|@|\.|,', Punctuation),
            (r'=|:|;', Punctuation),
            (r'"[^"]*"', String),
            (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
            (r'\d+[eEf][+-]?[0-9]+', Number.Float),
            (r'\d+', Number.Integer),
            # quote can be transpose, instead of string:
            # (not great, but handles common cases...)
            (r'(?<=[\w\)\]])\'', Operator),
            (r'(?<![\w\)\]])\'', String, 'string'),
            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
            (r'.', Text),
        ],
        'string': [
            (r"[^']*'", String, '#pop'),
        ],
        # Entered right after the 'function' keyword; consumes the whole
        # "[out] = name(args)" header line, then pops back to 'root'.
        'deffunc': [
            (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
             bygroups(Text.Whitespace, Text, Text.Whitespace, Punctuation,
                      Text.Whitespace, Name.Function, Punctuation, Text,
                      Punctuation, Text.Whitespace), '#pop'),
        ],
    }
    def analyse_text(text):
        # Weak heuristic (returns None when no comment is found, so
        # MatlabLexer usually wins for plain .m files).
        if re.match('^\s*[%#]', text, re.M): #Comment
            return 0.1
class ScilabLexer(RegexLexer):
    """
    For Scilab source code.

    Builtin function/command/macro names come from the
    ``_scilab_builtins`` data module; keyword handling mirrors the
    Octave lexer above.

    *New in Pygments 1.5.*
    """
    name = 'Scilab'
    aliases = ['scilab']
    filenames = ['*.sci', '*.sce', '*.tst']
    mimetypes = ['text/scilab']
    # States: 'root' (ordinary code), 'string' (single-quoted string),
    # 'deffunc' (rest of a line starting with `function`).
    tokens = {
        'root': [
            (r'//.*?$', Comment.Single),
            (r'^\s*function', Keyword, 'deffunc'),
            (r'(__FILE__|__LINE__|break|case|catch|classdef|continue|do|else|'
             r'elseif|end|end_try_catch|end_unwind_protect|endclassdef|'
             r'endevents|endfor|endfunction|endif|endmethods|endproperties|'
             r'endswitch|endwhile|events|for|function|get|global|if|methods|'
             r'otherwise|persistent|properties|return|set|static|switch|try|'
             r'until|unwind_protect|unwind_protect_cleanup|while)\b', Keyword),
            ("(" + "|".join(_scilab_builtins.functions_kw +
                            _scilab_builtins.commands_kw +
                            _scilab_builtins.macros_kw
                            ) + r')\b', Name.Builtin),
            # Builtin constants are escaped because some contain
            # regex metacharacters (e.g. '%').
            (r'(%s)\b' % "|".join(map(re.escape, _scilab_builtins.builtin_consts)),
             Name.Constant),
            # operators:
            (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
            # operators requiring escape for re:
            (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
            # punctuation:
            (r'[\[\](){}@.,=:;]', Punctuation),
            (r'"[^"]*"', String),
            # quote can be transpose, instead of string:
            # (not great, but handles common cases...)
            (r'(?<=[\w\)\]])\'', Operator),
            (r'(?<![\w\)\]])\'', String, 'string'),
            (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
            (r'\d+[eEf][+-]?[0-9]+', Number.Float),
            (r'\d+', Number.Integer),
            ('[a-zA-Z_][a-zA-Z0-9_]*', Name),
            (r'.', Text),
        ],
        'string': [
            (r"[^']*'", String, '#pop'),
            # Catch-all: an unterminated string still pops back to 'root'.
            (r'.', String, '#pop'),
        ],
        'deffunc': [
            # Optional "retvals =", then function name, argument list and
            # trailing whitespace.
            (r'(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
             bygroups(Text.Whitespace, Text, Text.Whitespace, Punctuation,
                      Text.Whitespace, Name.Function, Punctuation, Text,
                      Punctuation, Text.Whitespace), '#pop'),
        ],
    }
class NumPyLexer(PythonLexer):
    """
    A Python lexer recognizing Numerical Python builtins.

    Works by post-processing the PythonLexer token stream: plain ``Name``
    tokens that appear in ``EXTRA_KEYWORDS`` are retagged as
    ``Keyword.Pseudo``.

    *New in Pygments 0.10.*
    """

    name = 'NumPy'
    aliases = ['numpy']

    # override the mimetypes to not inherit them from python
    mimetypes = []
    filenames = []

    # NumPy function/attribute names to highlight specially.
    EXTRA_KEYWORDS = set([
        'abs', 'absolute', 'accumulate', 'add', 'alen', 'all', 'allclose',
        'alltrue', 'alterdot', 'amax', 'amin', 'angle', 'any', 'append',
        'apply_along_axis', 'apply_over_axes', 'arange', 'arccos', 'arccosh',
        'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'argmax', 'argmin',
        'argsort', 'argwhere', 'around', 'array', 'array2string', 'array_equal',
        'array_equiv', 'array_repr', 'array_split', 'array_str', 'arrayrange',
        'asanyarray', 'asarray', 'asarray_chkfinite', 'ascontiguousarray',
        'asfarray', 'asfortranarray', 'asmatrix', 'asscalar', 'astype',
        'atleast_1d', 'atleast_2d', 'atleast_3d', 'average', 'bartlett',
        'base_repr', 'beta', 'binary_repr', 'bincount', 'binomial',
        'bitwise_and', 'bitwise_not', 'bitwise_or', 'bitwise_xor', 'blackman',
        'bmat', 'broadcast', 'byte_bounds', 'bytes', 'byteswap', 'c_',
        'can_cast', 'ceil', 'choose', 'clip', 'column_stack', 'common_type',
        'compare_chararrays', 'compress', 'concatenate', 'conj', 'conjugate',
        'convolve', 'copy', 'corrcoef', 'correlate', 'cos', 'cosh', 'cov',
        'cross', 'cumprod', 'cumproduct', 'cumsum', 'delete', 'deprecate',
        'diag', 'diagflat', 'diagonal', 'diff', 'digitize', 'disp', 'divide',
        'dot', 'dsplit', 'dstack', 'dtype', 'dump', 'dumps', 'ediff1d', 'empty',
        'empty_like', 'equal', 'exp', 'expand_dims', 'expm1', 'extract', 'eye',
        'fabs', 'fastCopyAndTranspose', 'fft', 'fftfreq', 'fftshift', 'fill',
        'finfo', 'fix', 'flat', 'flatnonzero', 'flatten', 'fliplr', 'flipud',
        'floor', 'floor_divide', 'fmod', 'frexp', 'fromarrays', 'frombuffer',
        'fromfile', 'fromfunction', 'fromiter', 'frompyfunc', 'fromstring',
        'generic', 'get_array_wrap', 'get_include', 'get_numarray_include',
        'get_numpy_include', 'get_printoptions', 'getbuffer', 'getbufsize',
        'geterr', 'geterrcall', 'geterrobj', 'getfield', 'gradient', 'greater',
        'greater_equal', 'gumbel', 'hamming', 'hanning', 'histogram',
        'histogram2d', 'histogramdd', 'hsplit', 'hstack', 'hypot', 'i0',
        'identity', 'ifft', 'imag', 'index_exp', 'indices', 'inf', 'info',
        'inner', 'insert', 'int_asbuffer', 'interp', 'intersect1d',
        'intersect1d_nu', 'inv', 'invert', 'iscomplex', 'iscomplexobj',
        'isfinite', 'isfortran', 'isinf', 'isnan', 'isneginf', 'isposinf',
        'isreal', 'isrealobj', 'isscalar', 'issctype', 'issubclass_',
        'issubdtype', 'issubsctype', 'item', 'itemset', 'iterable', 'ix_',
        'kaiser', 'kron', 'ldexp', 'left_shift', 'less', 'less_equal', 'lexsort',
        'linspace', 'load', 'loads', 'loadtxt', 'log', 'log10', 'log1p', 'log2',
        'logical_and', 'logical_not', 'logical_or', 'logical_xor', 'logspace',
        'lstsq', 'mat', 'matrix', 'max', 'maximum', 'maximum_sctype',
        'may_share_memory', 'mean', 'median', 'meshgrid', 'mgrid', 'min',
        'minimum', 'mintypecode', 'mod', 'modf', 'msort', 'multiply', 'nan',
        'nan_to_num', 'nanargmax', 'nanargmin', 'nanmax', 'nanmin', 'nansum',
        'ndenumerate', 'ndim', 'ndindex', 'negative', 'newaxis', 'newbuffer',
        'newbyteorder', 'nonzero', 'not_equal', 'obj2sctype', 'ogrid', 'ones',
        'ones_like', 'outer', 'permutation', 'piecewise', 'pinv', 'pkgload',
        'place', 'poisson', 'poly', 'poly1d', 'polyadd', 'polyder', 'polydiv',
        'polyfit', 'polyint', 'polymul', 'polysub', 'polyval', 'power', 'prod',
        'product', 'ptp', 'put', 'putmask', 'r_', 'randint', 'random_integers',
        'random_sample', 'ranf', 'rank', 'ravel', 'real', 'real_if_close',
        'recarray', 'reciprocal', 'reduce', 'remainder', 'repeat', 'require',
        'reshape', 'resize', 'restoredot', 'right_shift', 'rint', 'roll',
        'rollaxis', 'roots', 'rot90', 'round', 'round_', 'row_stack', 's_',
        'sample', 'savetxt', 'sctype2char', 'searchsorted', 'seed', 'select',
        'set_numeric_ops', 'set_printoptions', 'set_string_function',
        'setbufsize', 'setdiff1d', 'seterr', 'seterrcall', 'seterrobj',
        'setfield', 'setflags', 'setmember1d', 'setxor1d', 'shape',
        'show_config', 'shuffle', 'sign', 'signbit', 'sin', 'sinc', 'sinh',
        'size', 'slice', 'solve', 'sometrue', 'sort', 'sort_complex', 'source',
        'split', 'sqrt', 'square', 'squeeze', 'standard_normal', 'std',
        'subtract', 'sum', 'svd', 'swapaxes', 'take', 'tan', 'tanh', 'tensordot',
        'test', 'tile', 'tofile', 'tolist', 'tostring', 'trace', 'transpose',
        'trapz', 'tri', 'tril', 'trim_zeros', 'triu', 'true_divide', 'typeDict',
        'typename', 'uniform', 'union1d', 'unique', 'unique1d', 'unravel_index',
        'unwrap', 'vander', 'var', 'vdot', 'vectorize', 'view', 'vonmises',
        'vsplit', 'vstack', 'weibull', 'where', 'who', 'zeros', 'zeros_like'
    ])

    def get_tokens_unprocessed(self, text):
        # Delegate to the Python lexer and retag the names we recognize;
        # positions and all other tokens pass through unchanged.
        for index, token, value in \
                PythonLexer.get_tokens_unprocessed(self, text):
            if token is Name and value in self.EXTRA_KEYWORDS:
                yield index, Keyword.Pseudo, value
            else:
                yield index, token, value
class RConsoleLexer(Lexer):
    """
    For R console transcripts or R CMD BATCH output files.

    Lines beginning with ``>`` or ``+`` are treated as prompts followed by
    R input (highlighted via :class:`SLexer`); all other lines are emitted
    as program output.
    """

    name = 'RConsole'
    aliases = ['rconsole', 'rout']
    filenames = ['*.Rout']

    def get_tokens_unprocessed(self, text):
        # Prompted input is accumulated in current_code_block and lexed as
        # one unit so multi-line statements highlight correctly; the prompt
        # tokens themselves are re-woven in with do_insertions().
        slexer = SLexer(**self.options)

        current_code_block = ''
        insertions = []

        for match in line_re.finditer(text):
            line = match.group()
            if line.startswith('>') or line.startswith('+'):
                # Colorize the prompt as such,
                # then put rest of line into current_code_block
                insertions.append((len(current_code_block),
                                   [(0, Generic.Prompt, line[:2])]))
                current_code_block += line[2:]
            else:
                # We have reached a non-prompt line!
                # If we have stored prompt lines, need to process them first.
                if current_code_block:
                    # Weave together the prompts and highlight code.
                    for item in do_insertions(insertions,
                          slexer.get_tokens_unprocessed(current_code_block)):
                        yield item
                    # Reset vars for next code block.
                    current_code_block = ''
                    insertions = []
                # Now process the actual line itself, this is output from R.
                yield match.start(), Generic.Output, line

        # If we happen to end on a code block with nothing after it, need to
        # process the last code block. This is neither elegant nor DRY so
        # should be changed.
        if current_code_block:
            for item in do_insertions(insertions,
                    slexer.get_tokens_unprocessed(current_code_block)):
                yield item
class SLexer(RegexLexer):
    """
    For S, S-plus, and R source code.

    *New in Pygments 0.10.*
    """

    name = 'S'
    aliases = ['splus', 's', 'r']
    filenames = ['*.S', '*.R', '.Rhistory', '.Rprofile']
    mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
                 'text/x-R', 'text/x-r-history', 'text/x-r-profile']

    # Token families are kept in separate sub-states and composed into
    # 'statements' / 'root' via include() so each group is easy to maintain.
    tokens = {
        'comments': [
            (r'#.*$', Comment.Single),
        ],
        'valid_name': [
            (r'[a-zA-Z][0-9a-zA-Z\._]*', Text),
            # can begin with ., but not if that is followed by a digit
            (r'\.[a-zA-Z_][0-9a-zA-Z\._]*', Text),
        ],
        'punctuation': [
            (r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation),
        ],
        'keywords': [
            (r'(if|else|for|while|repeat|in|next|break|return|switch|function)'
             r'(?![0-9a-zA-Z\._])',
             Keyword.Reserved)
        ],
        'operators': [
            (r'<<?-|->>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?', Operator),
            (r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator)
        ],
        'builtin_symbols': [
            (r'(NULL|NA(_(integer|real|complex|character)_)?|'
             r'Inf|TRUE|FALSE|NaN|\.\.(\.|[0-9]+))'
             r'(?![0-9a-zA-Z\._])',
             Keyword.Constant),
            (r'(T|F)\b', Keyword.Variable),
        ],
        'numbers': [
            # hex number
            (r'0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?', Number.Hex),
            # decimal number
            (r'[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+)([eE][+-]?[0-9]+)?[Li]?',
             Number),
        ],
        'statements': [
            include('comments'),
            # whitespaces
            (r'\s+', Text),
            (r'`.*?`', String.Backtick),
            (r'\'', String, 'string_squote'),
            (r'\"', String, 'string_dquote'),
            include('builtin_symbols'),
            include('numbers'),
            include('keywords'),
            include('punctuation'),
            include('operators'),
            include('valid_name'),
        ],
        'root': [
            include('statements'),
            # blocks:
            (r'\{|\}', Punctuation),
            #(r'\{', Punctuation, 'block'),
            (r'.', Text),
        ],
        #'block': [
        #    include('statements'),
        #    ('\{', Punctuation, '#push'),
        #    ('\}', Punctuation, '#pop')
        #],
        'string_squote': [
            (r'([^\'\\]|\\.)*\'', String, '#pop'),
        ],
        'string_dquote': [
            (r'([^"\\]|\\.)*"', String, '#pop'),
        ],
    }

    def analyse_text(text):
        # The assignment arrow is a strong hint of R/S source.
        # NOTE(review): this returns a bool rather than a 0..1 float;
        # Pygments coerces it, but a float would be more conventional.
        return '<-' in text
class BugsLexer(RegexLexer):
    """
    Pygments Lexer for `OpenBugs <http://www.openbugs.info/w/>`_ and WinBugs
    models.

    *New in Pygments 1.6.*
    """

    name = 'BUGS'
    aliases = ['bugs', 'winbugs', 'openbugs']
    filenames = ['*.bug']

    # Builtin function names; highlighted only when followed by '(' (see
    # the lookahead rule in 'root').
    _FUNCTIONS = [
        # Scalar functions
        'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
        'cloglog', 'cos', 'cosh', 'cumulative', 'cut', 'density', 'deviance',
        'equals', 'expr', 'gammap', 'ilogit', 'icloglog', 'integral', 'log',
        'logfact', 'loggam', 'logit', 'max', 'min', 'phi', 'post.p.value',
        'pow', 'prior.p.value', 'probit', 'replicate.post', 'replicate.prior',
        'round', 'sin', 'sinh', 'solution', 'sqrt', 'step', 'tan', 'tanh',
        'trunc',
        # Vector functions
        'inprod', 'interp.lin', 'inverse', 'logdet', 'mean', 'eigen.vals',
        'ode', 'prod', 'p.valueM', 'rank', 'ranked', 'replicate.postM',
        'sd', 'sort', 'sum',
        ## Special
        'D', 'I', 'F', 'T', 'C']
    """ OpenBUGS built-in functions

    From http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAII

    This also includes

    - T, C, I : Truncation and censoring.
      ``T`` and ``C`` are in OpenBUGS. ``I`` in WinBUGS.
    - D : ODE
    - F : Functional http://www.openbugs.info/Examples/Functionals.html
    """

    _DISTRIBUTIONS = ['dbern', 'dbin', 'dcat', 'dnegbin', 'dpois',
                      'dhyper', 'dbeta', 'dchisqr', 'ddexp', 'dexp',
                      'dflat', 'dgamma', 'dgev', 'df', 'dggamma', 'dgpar',
                      'dloglik', 'dlnorm', 'dlogis', 'dnorm', 'dpar',
                      'dt', 'dunif', 'dweib', 'dmulti', 'ddirch', 'dmnorm',
                      'dmt', 'dwish']
    """ OpenBUGS built-in distributions

    Functions from
    http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAI
    """

    tokens = {
        'whitespace' : [
            (r"\s+", Text),
        ],
        'comments' : [
            # Comments
            (r'#.*$', Comment.Single),
        ],
        'root': [
            # Comments
            include('comments'),
            include('whitespace'),
            # Block start
            (r'(model)(\s+)({)',
             bygroups(Keyword.Namespace, Text, Punctuation)),
            # Reserved Words
            (r'(for|in)(?![0-9a-zA-Z\._])', Keyword.Reserved),
            # Built-in Functions
            (r'(%s)(?=\s*\()'
             % r'|'.join(_FUNCTIONS + _DISTRIBUTIONS),
             Name.Builtin),
            # Regular variable names
            (r'[A-Za-z][A-Za-z0-9_.]*', Name),
            # Number Literals
            (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
            # Punctuation
            (r'\[|\]|\(|\)|:|,|;', Punctuation),
            # Assignment operators
            # SLexer makes these tokens Operators.
            (r'<-|~', Operator),
            # Infix and prefix operators
            (r'\+|-|\*|/', Operator),
            # Block
            (r'[{}]', Punctuation),
        ]
    }

    def analyse_text(text):
        # A top-level "model {" block is the BUGS signature.
        if re.search(r"^\s*model\s*{", text, re.M):
            return 0.7
        else:
            return 0.0
class JagsLexer(RegexLexer):
    """
    Pygments Lexer for JAGS.

    *New in Pygments 1.6.*
    """

    name = 'JAGS'
    aliases = ['jags']
    filenames = ['*.jag', '*.bug']

    ## JAGS
    # Builtin function names; highlighted only when followed by '('.
    _FUNCTIONS = [
        'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
        'cos', 'cosh', 'cloglog',
        'equals', 'exp', 'icloglog', 'ifelse', 'ilogit', 'log', 'logfact',
        'loggam', 'logit', 'phi', 'pow', 'probit', 'round', 'sin', 'sinh',
        'sqrt', 'step', 'tan', 'tanh', 'trunc', 'inprod', 'interp.lin',
        'logdet', 'max', 'mean', 'min', 'prod', 'sum', 'sd', 'inverse',
        'rank', 'sort', 't', 'acos', 'acosh', 'asin', 'asinh', 'atan',
        # Truncation/Censoring (should I include)
        'T', 'I']
    # Distributions with density, probability and quartile functions
    _DISTRIBUTIONS = ['[dpq]%s' % x for x in
                      ['bern', 'beta', 'dchiqsqr', 'ddexp', 'dexp',
                       'df', 'gamma', 'gen.gamma', 'logis', 'lnorm',
                       'negbin', 'nchisqr', 'norm', 'par', 'pois', 'weib']]
    # NOTE(review): several base names above already carry a 'd' prefix
    # ('dchiqsqr', 'ddexp', 'dexp', 'df'), so the [dpq] expansion yields
    # e.g. 'ddchiqsqr' -- looks suspect; verify against the JAGS manual.
    # Other distributions without density and probability
    _OTHER_DISTRIBUTIONS = [
        'dt', 'dunif', 'dbetabin', 'dbern', 'dbin', 'dcat', 'dhyper',
        'ddirch', 'dmnorm', 'dwish', 'dmt', 'dmulti', 'dbinom', 'dchisq',
        'dnbinom', 'dweibull', 'ddirich']

    tokens = {
        'whitespace' : [
            (r"\s+", Text),
        ],
        'names' : [
            # Regular variable names
            (r'[a-zA-Z][a-zA-Z0-9_.]*\b', Name),
        ],
        'comments' : [
            # do not use stateful comments
            (r'(?s)/\*.*?\*/', Comment.Multiline),
            # Comments
            (r'#.*$', Comment.Single),
        ],
        'root': [
            # Comments
            include('comments'),
            include('whitespace'),
            # Block start
            (r'(model|data)(\s+)({)',
             bygroups(Keyword.Namespace, Text, Punctuation)),
            (r'var(?![0-9a-zA-Z\._])', Keyword.Declaration),
            # Reserved Words
            (r'(for|in)(?![0-9a-zA-Z\._])', Keyword.Reserved),
            # Builtins
            # Need to use lookahead because . is a valid char
            (r'(%s)(?=\s*\()' % r'|'.join(_FUNCTIONS
                                          + _DISTRIBUTIONS
                                          + _OTHER_DISTRIBUTIONS),
             Name.Builtin),
            # Names
            include('names'),
            # Number Literals
            (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
            (r'\[|\]|\(|\)|:|,|;', Punctuation),
            # Assignment operators
            (r'<-|~', Operator),
            # Infix and prefix operators.  JAGS includes many more than
            # OpenBUGS.  Fixed: the original pattern contained
            # '\|\|[&]{2}', which only matched the literal text "||&&";
            # '||' and '&&' are now alternated separately.
            (r'\+|-|\*|\/|\|\||&&|[<>=]=?|\^|%.*?%', Operator),
            (r'[{}]', Punctuation),
        ]
    }

    def analyse_text(text):
        # 'model {' plus either a 'data {' block or a 'var' declaration
        # distinguishes JAGS from plain BUGS.
        if re.search(r'^\s*model\s*\{', text, re.M):
            if re.search(r'^\s*data\s*\{', text, re.M):
                return 0.9
            elif re.search(r'^\s*var', text, re.M):
                return 0.9
            else:
                return 0.3
        else:
            return 0
class StanLexer(RegexLexer):
    """
    Pygments Lexer for Stan models.

    Builtin function, distribution and constant names come from the
    ``_stan_builtins`` data module.

    *New in Pygments 1.6.*
    """

    name = 'Stan'
    aliases = ['stan']
    filenames = ['*.stan']

    _RESERVED = ('for', 'in', 'while', 'repeat', 'until', 'if',
                 'then', 'else', 'true', 'false', 'T',
                 'lower', 'upper', 'print')

    _TYPES = ('int', 'real', 'vector', 'simplex', 'ordered', 'row_vector',
              'matrix', 'corr_matrix', 'cov_matrix', 'positive_ordered')

    tokens = {
        'whitespace' : [
            (r"\s+", Text),
        ],
        'comments' : [
            (r'(?s)/\*.*?\*/', Comment.Multiline),
            # Comments
            (r'(//|#).*$', Comment.Single),
        ],
        'root': [
            # Stan is more restrictive on strings than this regex
            (r'"[^"]*"', String),
            # Comments
            include('comments'),
            # block start
            include('whitespace'),
            # Block start
            (r'(%s)(\s*)({)' %
             r'|'.join(('data', r'transformed\s+?data',
                        'parameters', r'transformed\s+parameters',
                        'model', r'generated\s+quantities')),
             bygroups(Keyword.Namespace, Text, Punctuation)),
            # Reserved Words
            (r'(%s)\b' % r'|'.join(_RESERVED), Keyword.Reserved),
            # Data types
            (r'(%s)\b' % r'|'.join(_TYPES), Keyword.Type),
            # Punctuation
            (r"[;:,\[\]()<>]", Punctuation),
            # Builtin
            (r'(%s)(?=\s*\()'
             % r'|'.join(_stan_builtins.FUNCTIONS
                         + _stan_builtins.DISTRIBUTIONS),
             Name.Builtin),
            (r'(%s)(?=\s*\()'
             % r'|'.join(_stan_builtins.CONSTANTS), Keyword.Constant),
            # Special names ending in __, like lp__
            (r'[A-Za-z][A-Za-z0-9_]*__\b', Name.Builtin.Pseudo),
            # Regular variable names
            (r'[A-Za-z][A-Za-z0-9_]*\b', Name),
            # Real Literals
            (r'-?[0-9]+(\.[0-9]+)?[eE]-?[0-9]+', Number.Float),
            (r'-?[0-9]*\.[0-9]*', Number.Float),
            # Integer Literals
            (r'-?[0-9]+', Number.Integer),
            # Assignment operators
            # SLexer makes these tokens Operators.
            (r'<-|~', Operator),
            # Infix and prefix operators (and = )
            (r"\+|-|\.?\*|\.?/|\\|'|=", Operator),
            # Block delimiters
            (r'[{}]', Punctuation),
        ]
    }

    def analyse_text(text):
        # A top-level "parameters {" block is unique to Stan models.
        if re.search(r'^\s*parameters\s*\{', text, re.M):
            return 1.0
        else:
            return 0.0
class IDLLexer(RegexLexer):
    """
    Pygments Lexer for IDL (Interactive Data Language).

    *New in Pygments 1.6.*
    """

    name = 'IDL'
    aliases = ['idl']
    filenames = ['*.pro']
    mimetypes = ['text/idl']

    # Fixed 'elseelse' -> 'endelse': IDL's block terminator keyword is
    # 'endelse' (per the reserved-words reference below); 'elseelse' is
    # not an IDL reserved word.
    _RESERVED = ['and', 'begin', 'break', 'case', 'common', 'compile_opt',
                 'continue', 'do', 'else', 'end', 'endcase', 'endelse',
                 'endfor', 'endforeach', 'endif', 'endrep', 'endswitch',
                 'endwhile', 'eq', 'for', 'foreach', 'forward_function',
                 'function', 'ge', 'goto', 'gt', 'if', 'inherits', 'le',
                 'lt', 'mod', 'ne', 'not', 'of', 'on_ioerror', 'or', 'pro',
                 'repeat', 'switch', 'then', 'until', 'while', 'xor']
    """Reserved words from: http://www.exelisvis.com/docs/reswords.html"""

    # Library routine names; some entries are regex fragments (e.g.
    # 'cdf_[0-9a-za-z_]*') that are joined directly into the builtin rule.
    _BUILTIN_LIB = ['abs', 'acos', 'adapt_hist_equal', 'alog', 'alog10',
                    'amoeba', 'annotate', 'app_user_dir', 'app_user_dir_query',
                    'arg_present', 'array_equal', 'array_indices', 'arrow',
                    'ascii_template', 'asin', 'assoc', 'atan', 'axis',
                    'a_correlate', 'bandpass_filter', 'bandreject_filter',
                    'barplot', 'bar_plot', 'beseli', 'beselj', 'beselk',
                    'besely', 'beta', 'bilinear', 'binary_template', 'bindgen',
                    'binomial', 'bin_date', 'bit_ffs', 'bit_population',
                    'blas_axpy', 'blk_con', 'box_cursor', 'breakpoint',
                    'broyden', 'butterworth', 'bytarr', 'byte', 'byteorder',
                    'bytscl', 'caldat', 'calendar', 'call_external',
                    'call_function', 'call_method', 'call_procedure', 'canny',
                    'catch', 'cd', 'cdf_[0-9a-za-z_]*', 'ceil', 'chebyshev',
                    'check_math',
                    'chisqr_cvf', 'chisqr_pdf', 'choldc', 'cholsol', 'cindgen',
                    'cir_3pnt', 'close', 'cluster', 'cluster_tree', 'clust_wts',
                    'cmyk_convert', 'colorbar', 'colorize_sample',
                    'colormap_applicable', 'colormap_gradient',
                    'colormap_rotation', 'colortable', 'color_convert',
                    'color_exchange', 'color_quan', 'color_range_map', 'comfit',
                    'command_line_args', 'complex', 'complexarr', 'complexround',
                    'compute_mesh_normals', 'cond', 'congrid', 'conj',
                    'constrained_min', 'contour', 'convert_coord', 'convol',
                    'convol_fft', 'coord2to3', 'copy_lun', 'correlate', 'cos',
                    'cosh', 'cpu', 'cramer', 'create_cursor', 'create_struct',
                    'create_view', 'crossp', 'crvlength', 'cti_test',
                    'ct_luminance', 'cursor', 'curvefit', 'cvttobm', 'cv_coord',
                    'cw_animate', 'cw_animate_getp', 'cw_animate_load',
                    'cw_animate_run', 'cw_arcball', 'cw_bgroup', 'cw_clr_index',
                    'cw_colorsel', 'cw_defroi', 'cw_field', 'cw_filesel',
                    'cw_form', 'cw_fslider', 'cw_light_editor',
                    'cw_light_editor_get', 'cw_light_editor_set', 'cw_orient',
                    'cw_palette_editor', 'cw_palette_editor_get',
                    'cw_palette_editor_set', 'cw_pdmenu', 'cw_rgbslider',
                    'cw_tmpl', 'cw_zoom', 'c_correlate', 'dblarr', 'db_exists',
                    'dcindgen', 'dcomplex', 'dcomplexarr', 'define_key',
                    'define_msgblk', 'define_msgblk_from_file', 'defroi',
                    'defsysv', 'delvar', 'dendrogram', 'dendro_plot', 'deriv',
                    'derivsig', 'determ', 'device', 'dfpmin', 'diag_matrix',
                    'dialog_dbconnect', 'dialog_message', 'dialog_pickfile',
                    'dialog_printersetup', 'dialog_printjob',
                    'dialog_read_image', 'dialog_write_image', 'digital_filter',
                    'dilate', 'dindgen', 'dissolve', 'dist', 'distance_measure',
                    'dlm_load', 'dlm_register', 'doc_library', 'double',
                    'draw_roi', 'edge_dog', 'efont', 'eigenql', 'eigenvec',
                    'ellipse', 'elmhes', 'emboss', 'empty', 'enable_sysrtn',
                    'eof', 'eos_[0-9a-za-z_]*', 'erase', 'erf', 'erfc', 'erfcx',
                    'erode', 'errorplot', 'errplot', 'estimator_filter',
                    'execute', 'exit', 'exp', 'expand', 'expand_path', 'expint',
                    'extrac', 'extract_slice', 'factorial', 'fft', 'filepath',
                    'file_basename', 'file_chmod', 'file_copy', 'file_delete',
                    'file_dirname', 'file_expand_path', 'file_info',
                    'file_lines', 'file_link', 'file_mkdir', 'file_move',
                    'file_poll_input', 'file_readlink', 'file_same',
                    'file_search', 'file_test', 'file_which', 'findgen',
                    'finite', 'fix', 'flick', 'float', 'floor', 'flow3',
                    'fltarr', 'flush', 'format_axis_values', 'free_lun',
                    'fstat', 'fulstr', 'funct', 'fv_test', 'fx_root',
                    'fz_roots', 'f_cvf', 'f_pdf', 'gamma', 'gamma_ct',
                    'gauss2dfit', 'gaussfit', 'gaussian_function', 'gaussint',
                    'gauss_cvf', 'gauss_pdf', 'gauss_smooth', 'getenv',
                    'getwindows', 'get_drive_list', 'get_dxf_objects',
                    'get_kbrd', 'get_login_info', 'get_lun', 'get_screen_size',
                    'greg2jul', 'grib_[0-9a-za-z_]*', 'grid3', 'griddata',
                    'grid_input', 'grid_tps', 'gs_iter',
                    'h5[adfgirst]_[0-9a-za-z_]*', 'h5_browser', 'h5_close',
                    'h5_create', 'h5_get_libversion', 'h5_open', 'h5_parse',
                    'hanning', 'hash', 'hdf_[0-9a-za-z_]*', 'heap_free',
                    'heap_gc', 'heap_nosave', 'heap_refcount', 'heap_save',
                    'help', 'hilbert', 'histogram', 'hist_2d', 'hist_equal',
                    'hls', 'hough', 'hqr', 'hsv', 'h_eq_ct', 'h_eq_int',
                    'i18n_multibytetoutf8', 'i18n_multibytetowidechar',
                    'i18n_utf8tomultibyte', 'i18n_widechartomultibyte',
                    'ibeta', 'icontour', 'iconvertcoord', 'idelete', 'identity',
                    'idlexbr_assistant', 'idlitsys_createtool', 'idl_base64',
                    'idl_validname', 'iellipse', 'igamma', 'igetcurrent',
                    'igetdata', 'igetid', 'igetproperty', 'iimage', 'image',
                    'image_cont', 'image_statistics', 'imaginary', 'imap',
                    'indgen', 'intarr', 'interpol', 'interpolate',
                    'interval_volume', 'int_2d', 'int_3d', 'int_tabulated',
                    'invert', 'ioctl', 'iopen', 'iplot', 'ipolygon',
                    'ipolyline', 'iputdata', 'iregister', 'ireset', 'iresolve',
                    'irotate', 'ir_filter', 'isa', 'isave', 'iscale',
                    'isetcurrent', 'isetproperty', 'ishft', 'isocontour',
                    'isosurface', 'isurface', 'itext', 'itranslate', 'ivector',
                    'ivolume', 'izoom', 'i_beta', 'journal', 'json_parse',
                    'json_serialize', 'jul2greg', 'julday', 'keyword_set',
                    'krig2d', 'kurtosis', 'kw_test', 'l64indgen', 'label_date',
                    'label_region', 'ladfit', 'laguerre', 'laplacian',
                    'la_choldc', 'la_cholmprove', 'la_cholsol', 'la_determ',
                    'la_eigenproblem', 'la_eigenql', 'la_eigenvec', 'la_elmhes',
                    'la_gm_linear_model', 'la_hqr', 'la_invert',
                    'la_least_squares', 'la_least_square_equality',
                    'la_linear_equation', 'la_ludc', 'la_lumprove', 'la_lusol',
                    'la_svd', 'la_tridc', 'la_trimprove', 'la_triql',
                    'la_trired', 'la_trisol', 'least_squares_filter', 'leefilt',
                    'legend', 'legendre', 'linbcg', 'lindgen', 'linfit',
                    'linkimage', 'list', 'll_arc_distance', 'lmfit', 'lmgr',
                    'lngamma', 'lnp_test', 'loadct', 'locale_get',
                    'logical_and', 'logical_or', 'logical_true', 'lon64arr',
                    'lonarr', 'long', 'long64', 'lsode', 'ludc', 'lumprove',
                    'lusol', 'lu_complex', 'machar', 'make_array', 'make_dll',
                    'make_rt', 'map', 'mapcontinents', 'mapgrid', 'map_2points',
                    'map_continents', 'map_grid', 'map_image', 'map_patch',
                    'map_proj_forward', 'map_proj_image', 'map_proj_info',
                    'map_proj_init', 'map_proj_inverse', 'map_set',
                    'matrix_multiply', 'matrix_power', 'max', 'md_test',
                    'mean', 'meanabsdev', 'mean_filter', 'median', 'memory',
                    'mesh_clip', 'mesh_decimate', 'mesh_issolid', 'mesh_merge',
                    'mesh_numtriangles', 'mesh_obj', 'mesh_smooth',
                    'mesh_surfacearea', 'mesh_validate', 'mesh_volume',
                    'message', 'min', 'min_curve_surf', 'mk_html_help',
                    'modifyct', 'moment', 'morph_close', 'morph_distance',
                    'morph_gradient', 'morph_hitormiss', 'morph_open',
                    'morph_thin', 'morph_tophat', 'multi', 'm_correlate',
                    'ncdf_[0-9a-za-z_]*', 'newton', 'noise_hurl', 'noise_pick',
                    'noise_scatter', 'noise_slur', 'norm', 'n_elements',
                    'n_params', 'n_tags', 'objarr', 'obj_class', 'obj_destroy',
                    'obj_hasmethod', 'obj_isa', 'obj_new', 'obj_valid',
                    'online_help', 'on_error', 'open', 'oplot', 'oploterr',
                    'parse_url', 'particle_trace', 'path_cache', 'path_sep',
                    'pcomp', 'plot', 'plot3d', 'ploterr', 'plots', 'plot_3dbox',
                    'plot_field', 'pnt_line', 'point_lun', 'polarplot',
                    'polar_contour', 'polar_surface', 'poly', 'polyfill',
                    'polyfillv', 'polygon', 'polyline', 'polyshade', 'polywarp',
                    'poly_2d', 'poly_area', 'poly_fit', 'popd', 'powell',
                    'pref_commit', 'pref_get', 'pref_set', 'prewitt', 'primes',
                    'print', 'printd', 'product', 'profile', 'profiler',
                    'profiles', 'project_vol', 'psafm', 'pseudo',
                    'ps_show_fonts', 'ptrarr', 'ptr_free', 'ptr_new',
                    'ptr_valid', 'pushd', 'p_correlate', 'qgrid3', 'qhull',
                    'qromb', 'qromo', 'qsimp', 'query_ascii', 'query_bmp',
                    'query_csv', 'query_dicom', 'query_gif', 'query_image',
                    'query_jpeg', 'query_jpeg2000', 'query_mrsid', 'query_pict',
                    'query_png', 'query_ppm', 'query_srf', 'query_tiff',
                    'query_wav', 'radon', 'randomn', 'randomu', 'ranks',
                    'rdpix', 'read', 'reads', 'readu', 'read_ascii',
                    'read_binary', 'read_bmp', 'read_csv', 'read_dicom',
                    'read_gif', 'read_image', 'read_interfile', 'read_jpeg',
                    'read_jpeg2000', 'read_mrsid', 'read_pict', 'read_png',
                    'read_ppm', 'read_spr', 'read_srf', 'read_sylk',
                    'read_tiff', 'read_wav', 'read_wave', 'read_x11_bitmap',
                    'read_xwd', 'real_part', 'rebin', 'recall_commands',
                    'recon3', 'reduce_colors', 'reform', 'region_grow',
                    'register_cursor', 'regress', 'replicate',
                    'replicate_inplace', 'resolve_all', 'resolve_routine',
                    'restore', 'retall', 'return', 'reverse', 'rk4', 'roberts',
                    'rot', 'rotate', 'round', 'routine_filepath',
                    'routine_info', 'rs_test', 'r_correlate', 'r_test',
                    'save', 'savgol', 'scale3', 'scale3d', 'scope_level',
                    'scope_traceback', 'scope_varfetch', 'scope_varname',
                    'search2d', 'search3d', 'sem_create', 'sem_delete',
                    'sem_lock', 'sem_release', 'setenv', 'set_plot',
                    'set_shading', 'sfit', 'shade_surf', 'shade_surf_irr',
                    'shade_volume', 'shift', 'shift_diff', 'shmdebug', 'shmmap',
                    'shmunmap', 'shmvar', 'show3', 'showfont', 'simplex', 'sin',
                    'sindgen', 'sinh', 'size', 'skewness', 'skip_lun',
                    'slicer3', 'slide_image', 'smooth', 'sobel', 'socket',
                    'sort', 'spawn', 'spher_harm', 'sph_4pnt', 'sph_scat',
                    'spline', 'spline_p', 'spl_init', 'spl_interp', 'sprsab',
                    'sprsax', 'sprsin', 'sprstp', 'sqrt', 'standardize',
                    'stddev', 'stop', 'strarr', 'strcmp', 'strcompress',
                    'streamline', 'stregex', 'stretch', 'string', 'strjoin',
                    'strlen', 'strlowcase', 'strmatch', 'strmessage', 'strmid',
                    'strpos', 'strput', 'strsplit', 'strtrim', 'struct_assign',
                    'struct_hide', 'strupcase', 'surface', 'surfr', 'svdc',
                    'svdfit', 'svsol', 'swap_endian', 'swap_endian_inplace',
                    'symbol', 'systime', 's_test', 't3d', 'tag_names', 'tan',
                    'tanh', 'tek_color', 'temporary', 'tetra_clip',
                    'tetra_surface', 'tetra_volume', 'text', 'thin', 'threed',
                    'timegen', 'time_test2', 'tm_test', 'total', 'trace',
                    'transpose', 'triangulate', 'trigrid', 'triql', 'trired',
                    'trisol', 'tri_surf', 'truncate_lun', 'ts_coef', 'ts_diff',
                    'ts_fcast', 'ts_smooth', 'tv', 'tvcrs', 'tvlct', 'tvrd',
                    'tvscl', 'typename', 't_cvt', 't_pdf', 'uindgen', 'uint',
                    'uintarr', 'ul64indgen', 'ulindgen', 'ulon64arr', 'ulonarr',
                    'ulong', 'ulong64', 'uniq', 'unsharp_mask', 'usersym',
                    'value_locate', 'variance', 'vector', 'vector_field', 'vel',
                    'velovect', 'vert_t3d', 'voigt', 'voronoi', 'voxel_proj',
                    'wait', 'warp_tri', 'watershed', 'wdelete', 'wf_draw',
                    'where', 'widget_base', 'widget_button', 'widget_combobox',
                    'widget_control', 'widget_displaycontextmen', 'widget_draw',
                    'widget_droplist', 'widget_event', 'widget_info',
                    'widget_label', 'widget_list', 'widget_propertysheet',
                    'widget_slider', 'widget_tab', 'widget_table',
                    'widget_text', 'widget_tree', 'widget_tree_move',
                    'widget_window', 'wiener_filter', 'window', 'writeu',
                    'write_bmp', 'write_csv', 'write_gif', 'write_image',
                    'write_jpeg', 'write_jpeg2000', 'write_nrif', 'write_pict',
                    'write_png', 'write_ppm', 'write_spr', 'write_srf',
                    'write_sylk', 'write_tiff', 'write_wav', 'write_wave',
                    'wset', 'wshow', 'wtn', 'wv_applet', 'wv_cwt',
                    'wv_cw_wavelet', 'wv_denoise', 'wv_dwt', 'wv_fn_coiflet',
                    'wv_fn_daubechies', 'wv_fn_gaussian', 'wv_fn_haar',
                    'wv_fn_morlet', 'wv_fn_paul', 'wv_fn_symlet',
                    'wv_import_data', 'wv_import_wavelet', 'wv_plot3d_wps',
                    'wv_plot_multires', 'wv_pwt', 'wv_tool_denoise',
                    'xbm_edit', 'xdisplayfile', 'xdxf', 'xfont',
                    'xinteranimate', 'xloadct', 'xmanager', 'xmng_tmpl',
                    'xmtool', 'xobjview', 'xobjview_rotate',
                    'xobjview_write_image', 'xpalette', 'xpcolor', 'xplot3d',
                    'xregistered', 'xroi', 'xsq_test', 'xsurface', 'xvaredit',
                    'xvolume', 'xvolume_rotate', 'xvolume_write_image',
                    'xyouts', 'zoom', 'zoom_24']
    """Functions from: http://www.exelisvis.com/docs/routines-1.html"""

    tokens = {
        'root': [
            # ';' starts a line comment.  Fixed Comment.Singleline ->
            # Comment.Single: the former is not a predefined Pygments
            # token and received no dedicated styling.
            (r'^\s*;.*?\n', Comment.Single),
            (r'\b(' + '|'.join(_RESERVED) + r')\b', Keyword),
            (r'\b(' + '|'.join(_BUILTIN_LIB) + r')\b', Name.Builtin),
            # Compound assignment / relational-assign operators.
            (r'\+=|-=|\^=|\*=|/=|#=|##=|<=|>=|=', Operator),
            # Arithmetic, relational and logical operators.  Fixed
            # '\|\|\?' (matched only the literal text "||?") to
            # '\|\||\?' so '||' and the ternary '?' each match.
            (r'\+\+|--|->|\+|-|##|#|\*|/|<|>|&&|\^|~|\|\||\?|:', Operator),
            (r'\b(mod=|lt=|le=|eq=|ne=|ge=|gt=|not=|and=|or=|xor=)', Operator),
            (r'\b(mod|lt|le|eq|ne|ge|gt|not|and|or|xor)\b', Operator),
            # Numeric literal with optional type suffix.  Fixed '[0-9]'
            # -> '[0-9]+': the single-digit form could never match a
            # multi-digit number (no word boundary between digits).
            (r'\b[0-9]+(L|B|S|UL|ULL|LL)?\b', Number),
            (r'.', Text),
        ]
    }
class RdLexer(RegexLexer):
    """
    Pygments Lexer for R documentation (Rd) files

    This is a very minimal implementation, highlighting little more
    than the macros. A description of Rd syntax is found in `Writing R
    Extensions <http://cran.r-project.org/doc/manuals/R-exts.html>`_
    and `Parsing Rd files <developer.r-project.org/parseRd.pdf>`_.

    *New in Pygments 1.6.*
    """
    name = 'Rd'
    aliases = ['rd']
    filenames = ['*.Rd']
    mimetypes = ['text/x-r-doc']

    # To account for verbatim / LaTeX-like / and R-like areas
    # would require parsing.
    tokens = {
        'root' : [
            # catch escaped brackets and percent sign
            (r'\\[\\{}%]', String.Escape),
            # comments
            (r'%.*$', Comment),
            # special macros with no arguments
            (r'\\(?:cr|l?dots|R|tab)\b', Keyword.Constant),
            # macros
            (r'\\[a-zA-Z]+\b', Keyword),
            # special preprocessor macros
            (r'^\s*#(?:ifn?def|endif).*\b', Comment.Preproc),
            # non-escaped brackets
            (r'[{}]', Name.Builtin),
            # everything else
            (r'[^\\%\n{}]+', Text),
            (r'.', Text),
        ]
    }
| agpl-3.0 |
ashellwig/Ethereum-Mutual-Fund | tests/scenarios/fuel/run.py | 4 | 1429 | import random
from utils import constrained_sum_sample_pos, arr_str
# Human-readable description of this test scenario.
scenario_description = (
    "During the fueling period of the DAO, send enough ether from all "
    "accounts to create tokens and then assert that the user's balance is "
    "indeed correct and that the minimum fueling goal has been reached"
)
def run(ctx):
ctx.assert_scenario_ran('deploy')
creation_secs = ctx.remaining_time()
ctx.total_supply = (
ctx.args.deploy_min_tokens_to_create + random.randint(1, 100)
)
ctx.token_amounts = constrained_sum_sample_pos(
len(ctx.accounts), ctx.total_supply
)
ctx.create_js_file(substitutions={
"dao_abi": ctx.dao_abi,
"dao_address": ctx.dao_address,
"wait_ms": (creation_secs-3)*1000,
"amounts": arr_str(ctx.token_amounts)
}
)
print(
"Notice: Fueling period is {} seconds so the test will wait "
"as much".format(creation_secs)
)
adjusted_amounts = (
[x/1.5 for x in ctx.token_amounts]
if ctx.scenario_uses_extrabalance() else ctx.token_amounts
)
adjusted_supply = (
ctx.total_supply / 1.5
if ctx.scenario_uses_extrabalance() else ctx.total_supply
)
ctx.execute(expected={
"dao_fueled": True,
"total_supply": adjusted_supply,
"balances": adjusted_amounts,
"user0_after": adjusted_amounts[0]
})
| lgpl-3.0 |
wxiang7/airflow | airflow/hooks/oracle_hook.py | 3 | 3932 | import cx_Oracle
from airflow.hooks.dbapi_hook import DbApiHook
from builtins import str
from past.builtins import basestring
from datetime import datetime
import numpy
import logging
class OracleHook(DbApiHook):
    """
    Interact with Oracle SQL via the cx_Oracle client library.
    """
    conn_name_attr = 'oracle_conn_id'
    default_conn_name = 'oracle_default'
    # cx_Oracle never autocommits; insert_rows() commits explicitly.
    supports_autocommit = False

    def get_conn(self):
        """
        Returns a oracle connection object
        Optional parameters for using a custom DSN connection (instead of using a server alias from tnsnames.ora)
        The dsn (data source name) is the TNS entry (from the Oracle names server or tnsnames.ora file)
        or is a string like the one returned from makedsn().
        :param dsn: the host address for the Oracle server
        :param service_name: the db_unique_name of the database that you are connecting to (CONNECT_DATA part of TNS)
        You can set these parameters in the extra fields of your connection
        as in ``{ "dsn":"some.host.address" , "service_name":"some.service.name" }``
        """
        conn = self.get_connection(self.oracle_conn_id)
        dsn = conn.extra_dejson.get('dsn', None)
        sid = conn.extra_dejson.get('sid', None)
        service_name = conn.extra_dejson.get('service_name', None)
        if dsn and sid and not service_name:
            # Build a full DSN from host + port + SID.
            dsn = cx_Oracle.makedsn(dsn, conn.port, sid)
            conn = cx_Oracle.connect(conn.login, conn.password, dsn=dsn)
        elif dsn and service_name and not sid:
            # Build a full DSN from host + port + service name.
            dsn = cx_Oracle.makedsn(dsn, conn.port, service_name=service_name)
            conn = cx_Oracle.connect(conn.login, conn.password, dsn=dsn)
        else:
            # Fall back to a TNS alias / EZConnect string stored in host.
            conn = cx_Oracle.connect(conn.login, conn.password, conn.host)
        return conn

    def insert_rows(self, table, rows, target_fields=None, commit_every=1000):
        """
        A generic way to insert a set of tuples into a table;
        the whole set of inserts is treated as one transaction.
        Changes from standard DbApiHook implementation:
        - Oracle SQL queries in cx_Oracle can not be terminated with a semicolon (';')
        - Replace NaN values with NULL
        - Coerce datetime cells to Oracle DATETIME format during insert

        :param table: target table name
        :param rows: iterable of tuples, one tuple per row to insert
        :param target_fields: optional list of column names matching the
            tuples' order; all columns are targeted when omitted
        :param commit_every: number of rows between intermediate commits
        """
        if target_fields:
            target_fields = ', '.join(target_fields)
            target_fields = '({})'.format(target_fields)
        else:
            target_fields = ''
        conn = self.get_conn()
        cur = conn.cursor()
        if self.supports_autocommit:
            # Dead branch for this hook (supports_autocommit is False);
            # kept for parity with other DbApiHook subclasses.
            cur.execute('SET autocommit = 0')
        conn.commit()
        i = 0
        # NOTE(review): cell values are inlined into the SQL text and only
        # quote-escaped for strings — do not feed untrusted input through
        # this method.
        for row in rows:
            i += 1
            cell_literals = []
            for cell in row:
                if isinstance(cell, basestring):
                    # Escape embedded single quotes for the SQL literal.
                    cell_literals.append("'" + str(cell).replace("'", "''") + "'")
                elif cell is None:
                    cell_literals.append('NULL')
                elif type(cell) == float and numpy.isnan(cell):  # coerce numpy NaN to NULL
                    cell_literals.append('NULL')
                elif isinstance(cell, numpy.datetime64):
                    cell_literals.append("'" + str(cell) + "'")
                elif isinstance(cell, datetime):
                    # Spell out the format so it is independent of NLS settings.
                    cell_literals.append(
                        "to_date('" + cell.strftime('%Y-%m-%d %H:%M:%S') +
                        "','YYYY-MM-DD HH24:MI:SS')")
                else:
                    cell_literals.append(str(cell))
            sql = 'INSERT /*+ APPEND */ INTO {0} {1} VALUES ({2})'.format(
                table, target_fields, ','.join(cell_literals))
            cur.execute(sql)
            if i % commit_every == 0:
                conn.commit()
                # BUG FIX: message previously read 'Loaded {i} into {table}
                # rows so far', transposing the row count and table name.
                logging.info('Loaded {i} rows into {table} so far'.format(**locals()))
        conn.commit()
        cur.close()
        conn.close()
        logging.info('Done loading. Loaded a total of {i} rows'.format(**locals()))
| apache-2.0 |
loopCM/chromium | build/linux/unbundle/replace_gyp_files.py | 1 | 2163 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Replaces gyp files in tree with files from here that
make the build use system libraries.
"""
import os.path
import shutil
import sys
# Maps a gyp define ('use_system_*') to the in-tree gyp file, relative to the
# source root, that is overwritten with the system-library variant kept next
# to this script.
REPLACEMENTS = {
  'use_system_expat': 'third_party/expat/expat.gyp',
  'use_system_ffmpeg': 'third_party/ffmpeg/ffmpeg.gyp',
  'use_system_flac': 'third_party/flac/flac.gyp',
  'use_system_harfbuzz': 'third_party/harfbuzz-ng/harfbuzz.gyp',
  'use_system_icu': 'third_party/icu/icu.gyp',
  'use_system_jsoncpp': 'third_party/jsoncpp/jsoncpp.gyp',
  'use_system_libevent': 'third_party/libevent/libevent.gyp',
  'use_system_libjpeg': 'third_party/libjpeg/libjpeg.gyp',
  'use_system_libpng': 'third_party/libpng/libpng.gyp',
  'use_system_libusb': 'third_party/libusb/libusb.gyp',
  'use_system_libvpx': 'third_party/libvpx/libvpx.gyp',
  'use_system_libwebp': 'third_party/libwebp/libwebp.gyp',
  'use_system_libxml': 'third_party/libxml/libxml.gyp',
  'use_system_libxslt': 'third_party/libxslt/libxslt.gyp',
  'use_system_opus': 'third_party/opus/opus.gyp',
  'use_system_re2': 'third_party/re2/re2.gyp',
  'use_system_snappy': 'third_party/snappy/snappy.gyp',
  'use_system_speex': 'third_party/speex/speex.gyp',
  'use_system_sqlite': 'third_party/sqlite/sqlite.gyp',
  'use_system_v8': 'v8/tools/gyp/v8.gyp',
  'use_system_zlib': 'third_party/zlib/zlib.gyp',
}


def DoMain(argv):
  """Copies the replacement gyp file for every -Duse_system_*=1 flag in argv.

  Flags are accepted in gyp command-line syntax and everything else in argv
  is ignored, so the caller can feed the same command line to this script
  and to gyp. Always returns 0.
  """
  here = os.path.dirname(__file__)
  source_tree_root = os.path.abspath(os.path.join(here, '..', '..', '..'))
  for flag, rel_path in REPLACEMENTS.items():
    if '-D%s=1' % flag in argv:
      # Overwrite the in-tree gyp file with the copy kept beside this script.
      shutil.copyfile(os.path.join(here, os.path.basename(rel_path)),
                      os.path.join(source_tree_root, rel_path))
  return 0
if __name__ == '__main__':
sys.exit(DoMain(sys.argv))
| bsd-3-clause |
atosatto/ansible | lib/ansible/modules/identity/ipa/ipa_role.py | 42 | 11140 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ipa_role
author: Thomas Krahn (@Nosmoht)
short_description: Manage FreeIPA role
description:
- Add, modify and delete a role within FreeIPA server using FreeIPA API
options:
cn:
description:
- Role name.
- Can not be changed as it is the unique identifier.
required: true
aliases: ['name']
description:
description:
- A description of this role-group.
required: false
group:
description:
- List of group names assign to this role.
- If an empty list is passed all assigned groups will be unassigned from the role.
- If option is omitted groups will not be checked or changed.
- If option is passed all assigned groups that are not passed will be unassigned from the role.
host:
description:
- List of host names to assign.
- If an empty list is passed all assigned hosts will be unassigned from the role.
- If option is omitted hosts will not be checked or changed.
- If option is passed all assigned hosts that are not passed will be unassigned from the role.
required: false
hostgroup:
description:
- List of host group names to assign.
- If an empty list is passed all assigned host groups will be removed from the role.
- If option is omitted host groups will not be checked or changed.
- If option is passed all assigned hostgroups that are not passed will be unassigned from the role.
required: false
service:
description:
- List of service names to assign.
- If an empty list is passed all assigned services will be removed from the role.
- If option is omitted services will not be checked or changed.
- If option is passed all assigned services that are not passed will be removed from the role.
required: false
state:
description: State to ensure
required: false
default: "present"
choices: ["present", "absent"]
user:
description:
- List of user names to assign.
- If an empty list is passed all assigned users will be removed from the role.
- If option is omitted users will not be checked or changed.
required: false
ipa_port:
description: Port of IPA server
required: false
default: 443
ipa_host:
description: IP or hostname of IPA server
required: false
default: "ipa.example.com"
ipa_user:
description: Administrative account used on IPA server
required: false
default: "admin"
ipa_pass:
description: Password of administrative user
required: true
ipa_prot:
description: Protocol used by IPA server
required: false
default: "https"
choices: ["http", "https"]
validate_certs:
description:
- This only applies if C(ipa_prot) is I(https).
- If set to C(no), the SSL certificates will not be validated.
    - This should only be set to C(no) on personally controlled sites using self-signed certificates.
required: false
default: true
version_added: "2.3"
'''
EXAMPLES = '''
# Ensure role is present
- ipa_role:
name: dba
description: Database Administrators
state: present
user:
- pinky
- brain
ipa_host: ipa.example.com
ipa_user: admin
ipa_pass: topsecret
# Ensure role with certain details
- ipa_role:
name: another-role
description: Just another role
group:
- editors
host:
- host01.example.com
hostgroup:
- hostgroup01
service:
- service01
# Ensure role is absent
- ipa_role:
name: dba
state: absent
ipa_host: ipa.example.com
ipa_user: admin
ipa_pass: topsecret
'''
RETURN = '''
role:
description: Role as returned by IPA API.
returned: always
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.ipa import IPAClient
class RoleIPAClient(IPAClient):
    """Thin wrapper around IPAClient exposing the FreeIPA ``role_*``
    JSON-RPC endpoints used by this module. Every method delegates to
    ``_post_json`` from the base class."""
    def __init__(self, module, host, port, protocol):
        super(RoleIPAClient, self).__init__(module, host, port, protocol)
    # --- basic role CRUD -------------------------------------------------
    def role_find(self, name):
        # 'all': True asks the server to return every attribute of the role.
        return self._post_json(method='role_find', name=None, item={'all': True, 'cn': name})
    def role_add(self, name, item):
        return self._post_json(method='role_add', name=name, item=item)
    def role_mod(self, name, item):
        return self._post_json(method='role_mod', name=name, item=item)
    def role_del(self, name):
        return self._post_json(method='role_del', name=name)
    # --- membership management (add) ------------------------------------
    def role_add_member(self, name, item):
        return self._post_json(method='role_add_member', name=name, item=item)
    def role_add_group(self, name, item):
        return self.role_add_member(name=name, item={'group': item})
    def role_add_host(self, name, item):
        return self.role_add_member(name=name, item={'host': item})
    def role_add_hostgroup(self, name, item):
        return self.role_add_member(name=name, item={'hostgroup': item})
    def role_add_service(self, name, item):
        return self.role_add_member(name=name, item={'service': item})
    def role_add_user(self, name, item):
        return self.role_add_member(name=name, item={'user': item})
    # --- membership management (remove) ---------------------------------
    def role_remove_member(self, name, item):
        return self._post_json(method='role_remove_member', name=name, item=item)
    def role_remove_group(self, name, item):
        return self.role_remove_member(name=name, item={'group': item})
    def role_remove_host(self, name, item):
        return self.role_remove_member(name=name, item={'host': item})
    def role_remove_hostgroup(self, name, item):
        return self.role_remove_member(name=name, item={'hostgroup': item})
    def role_remove_service(self, name, item):
        return self.role_remove_member(name=name, item={'service': item})
    def role_remove_user(self, name, item):
        return self.role_remove_member(name=name, item={'user': item})
def get_role_dict(description=None):
    """Build the role attribute dict sent to the IPA API.

    Unset (None) fields are omitted entirely so the server does not
    clear them.
    """
    if description is None:
        return {}
    return {'description': description}
def get_role_diff(client, ipa_role, module_role):
    """Return the keys of ``module_role`` whose values differ from the
    server-side ``ipa_role`` (delegates to ``IPAClient.get_diff``)."""
    return client.get_diff(ipa_data=ipa_role, module_data=module_role)
def ensure(module, client):
    """Converge the FreeIPA role towards the requested module parameters.

    Returns a ``(changed, role)`` tuple, where ``role`` is the final
    state as re-read from the server.
    """
    state = module.params['state']
    name = module.params['name']
    group = module.params['group']
    host = module.params['host']
    hostgroup = module.params['hostgroup']
    service = module.params['service']
    user = module.params['user']
    module_role = get_role_dict(description=module.params['description'])
    ipa_role = client.role_find(name=name)
    changed = False
    if state == 'present':
        if not ipa_role:
            # Role does not exist yet -> create it (unless in check mode).
            changed = True
            if not module.check_mode:
                ipa_role = client.role_add(name=name, item=module_role)
        else:
            # Role exists -> push only the attributes that actually differ.
            diff = get_role_diff(client, ipa_role, module_role)
            if len(diff) > 0:
                changed = True
                if not module.check_mode:
                    data = {}
                    for key in diff:
                        data[key] = module_role.get(key)
                    client.role_mod(name=name, item=data)
        # For each membership list, None means "leave unmanaged"; a list
        # (possibly empty) is converged exactly via add/remove callbacks.
        if group is not None:
            changed = client.modify_if_diff(name, ipa_role.get('member_group', []), group,
                                            client.role_add_group,
                                            client.role_remove_group) or changed
        if host is not None:
            changed = client.modify_if_diff(name, ipa_role.get('member_host', []), host,
                                            client.role_add_host,
                                            client.role_remove_host) or changed
        if hostgroup is not None:
            changed = client.modify_if_diff(name, ipa_role.get('member_hostgroup', []), hostgroup,
                                            client.role_add_hostgroup,
                                            client.role_remove_hostgroup) or changed
        if service is not None:
            changed = client.modify_if_diff(name, ipa_role.get('member_service', []), service,
                                            client.role_add_service,
                                            client.role_remove_service) or changed
        if user is not None:
            changed = client.modify_if_diff(name, ipa_role.get('member_user', []), user,
                                            client.role_add_user,
                                            client.role_remove_user) or changed
    else:
        # state == 'absent': delete the role if it exists.
        if ipa_role:
            changed = True
            if not module.check_mode:
                client.role_del(name)
    return changed, client.role_find(name=name)
def main():
    """Module entry point: parse arguments, log in and converge the role."""
    module = AnsibleModule(
        argument_spec=dict(
            cn=dict(type='str', required=True, aliases=['name']),
            description=dict(type='str', required=False),
            group=dict(type='list', required=False),
            host=dict(type='list', required=False),
            hostgroup=dict(type='list', required=False),
            service=dict(type='list', required=False),
            state=dict(type='str', required=False, default='present', choices=['present', 'absent']),
            user=dict(type='list', required=False),
            ipa_prot=dict(type='str', required=False, default='https', choices=['http', 'https']),
            ipa_host=dict(type='str', required=False, default='ipa.example.com'),
            ipa_port=dict(type='int', required=False, default=443),
            ipa_user=dict(type='str', required=False, default='admin'),
            # no_log keeps the password out of Ansible's logs.
            ipa_pass=dict(type='str', required=True, no_log=True),
            validate_certs=dict(type='bool', required=False, default=True),
        ),
        supports_check_mode=True,
    )
    client = RoleIPAClient(module=module,
                           host=module.params['ipa_host'],
                           port=module.params['ipa_port'],
                           protocol=module.params['ipa_prot'])
    try:
        client.login(username=module.params['ipa_user'],
                     password=module.params['ipa_pass'])
        changed, role = ensure(module, client)
        module.exit_json(changed=changed, role=role)
    except Exception:
        # get_exception() is the py2/py3-compatible way to fetch the
        # active exception in older Ansible module code.
        e = get_exception()
        module.fail_json(msg=str(e))
if __name__ == '__main__':
    main()
| gpl-3.0 |
airanmehr/bio | Scripts/TimeSeriesPaper/Plot/ThetaH.py | 1 | 2742 | '''
Copyleft Dec 13, 2015 Arya Iranmehr, PhD Student, Bafna Lab, UC San Diego, Email: airanmehr@gmail.com
'''
import Utils.Estimate as e
from Utils import Simulation
reload(e)
import numpy as np; np.set_printoptions(linewidth=200, precision=5, suppress=True)
import pandas as pd; pd.options.display.max_rows=50;pd.options.display.expand_frame_repr=False
import pylab as plt;
import os; home=os.path.expanduser('~') +'/'
# Figure comparing several theta_H-style statistics over time for one
# simulated experiment.
plt.figure(figsize=(12,10))
s=0.0
# Load a precomputed simulation (selection coefficient s, theta=200).
sim = Simulation.Simulation.load(s=s, theta=200, L=50000, experimentID=0)
# Squared L2 norm of each column of each generation's matrix — presumably
# one value per replicate per generation; TODO confirm against Simulation docs.
y=pd.DataFrame(np.array([[Xrt.dot(Xrt) for Xrt in Xt.T] for Xt in sim.X0t]),index=sim.getGenerationTimes())
# Fay & Wu's theta_H with a fine 200-bin histogram, scaled by (n-1)/(2n).
n=200;bins=200;k=bins;alpha=(n-1.)/(2*n)
a=e.Estimate.getEstimate(sim.X0t, n=n, method='faywu',bins=bins);a.index=sim.getGenerationTimes()
y.mean(1).plot(ax=plt.gca(),label=r'||x||2');((alpha)*a).mean(1).plot(ax=plt.gca())
# Same estimate again with a coarse 10-bin histogram for comparison.
n=200;bins=10;k=bins;alpha=(n-1.)/(2*n)
a=e.Estimate.getEstimate(sim.X0t, n=n, method='faywu',bins=bins);a.index=sim.getGenerationTimes();((alpha)*a).mean(1).plot(ax=plt.gca());
sim.getAverageHAF().mean(1).plot(ax=plt.gca())
plt.grid();plt.legend([r'$\|x\|^2$',r'$\theta_H$ (using 100-bin-histogram)',r'$\theta_H$ (using 10-bin-histogram)',r'$\frac{1}{n}$AverageHAF'],loc='best')
plt.xlabel('Generations')
plt.title('s={}'.format(s))
# plt.savefig(Simulation.paperFiguresPath+'thetaHs{:02.0f}.png'.format(s*100))
plt.show()
# Print a summary of all estimators on the initial haplotype matrix.
print e.Estimate.getAllEstimates(sim.H0.values)
bins=10
print 'bins={} watterson={} pi={} td={} faywu={}'.format(bins,e.Estimate.getEstimate(sim.X0, n=n, method='watterson',bins=bins),e.Estimate.getEstimate(sim.X0, n=n, method='pi',bins=bins),e.Estimate.getEstimate(sim.X0, n=n, method='tajimaD',bins=bins), e.Estimate.getEstimate(sim.X0, n=n, method='faywu',bins=bins))
bins=20
print 'bins={} watterson={} pi={} td={} faywu={}'.format(bins,e.Estimate.getEstimate(sim.X0, n=n, method='watterson',bins=bins),e.Estimate.getEstimate(sim.X0, n=n, method='pi',bins=bins),e.Estimate.getEstimate(sim.X0, n=n, method='tajimaD',bins=bins), e.Estimate.getEstimate(sim.X0, n=n, method='faywu',bins=bins))
bins=-1
print 'bins={} watterson={} pi={} td={} faywu={}'.format(bins,e.Estimate.getEstimate(sim.X0, n=n, method='watterson',bins=bins),e.Estimate.getEstimate(sim.X0, n=n, method='pi',bins=bins),e.Estimate.getEstimate(sim.X0, n=n, method='tajimaD',bins=bins), e.Estimate.getEstimate(sim.X0, n=n, method='faywu',bins=bins))
bins=200
print 'bins={} watterson={} pi={} td={} faywu={}'.format(bins,e.Estimate.getEstimate(sim.X0, n=n, method='watterson',bins=bins),e.Estimate.getEstimate(sim.X0, n=n, method='pi',bins=bins),e.Estimate.getEstimate(sim.X0, n=n, method='tajimaD',bins=bins), e.Estimate.getEstimate(sim.X0, n=n, method='faywu',bins=bins)) | mit |
tkstman/lab4 | lib/werkzeug/testsuite/internal.py | 102 | 2940 | # -*- coding: utf-8 -*-
"""
werkzeug.testsuite.internal
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Internal tests.
:copyright: (c) 2013 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import unittest
from datetime import datetime
from warnings import filterwarnings, resetwarnings
from werkzeug.testsuite import WerkzeugTestCase
from werkzeug.wrappers import Request, Response
from werkzeug import _internal as internal
from werkzeug.test import create_environ
class InternalTestCase(WerkzeugTestCase):
    """Tests for werkzeug._internal helpers and a few wrapper internals."""
    def test_date_to_unix(self):
        # The epoch maps to 0 and hour/minute/second offsets add up.
        assert internal._date_to_unix(datetime(1970, 1, 1)) == 0
        assert internal._date_to_unix(datetime(1970, 1, 1, 1, 0, 0)) == 3600
        assert internal._date_to_unix(datetime(1970, 1, 1, 1, 1, 1)) == 3661
        x = datetime(2010, 2, 15, 16, 15, 39)
        assert internal._date_to_unix(x) == 1266250539
    def test_easteregg(self):
        # The magic query string triggers the hidden easter-egg response.
        req = Request.from_values('/?macgybarchakku')
        resp = Response.force_type(internal._easteregg(None), req)
        assert b'About Werkzeug' in resp.get_data()
        assert b'the Swiss Army knife of Python web development' in resp.get_data()
    def test_wrapper_internals(self):
        req = Request.from_values(data={'foo': 'bar'}, method='POST')
        req._load_form_data()
        assert req.form.to_dict() == {'foo': 'bar'}
        # second call does not break
        req._load_form_data()
        assert req.form.to_dict() == {'foo': 'bar'}
        # check reprs
        assert repr(req) == "<Request 'http://localhost/' [POST]>"
        resp = Response()
        assert repr(resp) == '<Response 0 bytes [200 OK]>'
        resp.set_data('Hello World!')
        assert repr(resp) == '<Response 12 bytes [200 OK]>'
        resp.response = iter(['Test'])
        assert repr(resp) == '<Response streamed [200 OK]>'
        # unicode data does not set content length
        response = Response([u'Hällo Wörld'])
        headers = response.get_wsgi_headers(create_environ())
        assert u'Content-Length' not in headers
        # ... but encoded bytes do
        response = Response([u'Hällo Wörld'.encode('utf-8')])
        headers = response.get_wsgi_headers(create_environ())
        assert u'Content-Length' in headers
        # check for internal warnings
        filterwarnings('error', category=Warning)
        response = Response()
        environ = create_environ()
        # a bare string as the response iterable is invalid and must warn
        response.response = 'What the...?'
        self.assert_raises(Warning, lambda: list(response.iter_encoded()))
        self.assert_raises(Warning, lambda: list(response.get_app_iter(environ)))
        response.direct_passthrough = True
        self.assert_raises(Warning, lambda: list(response.iter_encoded()))
        self.assert_raises(Warning, lambda: list(response.get_app_iter(environ)))
        resetwarnings()
def suite():
    """Build the unittest suite for this module."""
    # Use a local name that does not shadow this function.
    tests = unittest.TestSuite()
    tests.addTest(unittest.makeSuite(InternalTestCase))
    return tests
| apache-2.0 |
Liftboard-Rider/volatility | volatility/plugins/connections.py | 45 | 3051 | # Volatility
#
# Authors:
# Mike Auty <mike.auty@gmail.com>
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
#pylint: disable-msg=C0111
import volatility.plugins.common as common
import volatility.win32.network as network
import volatility.cache as cache
import volatility.utils as utils
import volatility.debug as debug
class Connections(common.AbstractWindowsCommand):
    """
    Print list of open connections [Windows XP and 2003 Only]
    ---------------------------------------------
    This module follows the handle table in tcpip.sys and prints
    current connections.
    Note that if you are using a hibernated image this might not work
    because Windows closes all connections before hibernating. You might
    find it more effective to do connscan instead.
    """
    def __init__(self, config, *args, **kwargs):
        common.AbstractWindowsCommand.__init__(self, config, *args, **kwargs)
        # -P switches the Offset column from virtual to physical addresses.
        config.add_option("PHYSICAL-OFFSET", short_option = 'P', default = False,
                          cache_invalidator = False,
                          help = "Physical Offset", action = "store_true")
    @staticmethod
    def is_valid_profile(profile):
        # Windows 5.x only, i.e. XP / Server 2003.
        return (profile.metadata.get('os', 'unknown') == 'windows' and
                profile.metadata.get('major', 0) == 5)
    def render_text(self, outfd, data):
        offsettype = "(V)" if not self._config.PHYSICAL_OFFSET else "(P)"
        self.table_header(outfd,
                          [("Offset{0}".format(offsettype), "[addrpad]"),
                           ("Local Address", "25"),
                           ("Remote Address", "25"),
                           ("Pid", "")
                           ])
        for conn in data:
            if not self._config.PHYSICAL_OFFSET:
                offset = conn.obj_offset
            else:
                # Translate the virtual offset to a physical one.
                offset = conn.obj_vm.vtop(conn.obj_offset)
            local = "{0}:{1}".format(conn.LocalIpAddress, conn.LocalPort)
            remote = "{0}:{1}".format(conn.RemoteIpAddress, conn.RemotePort)
            self.table_row(outfd, offset, local, remote, conn.Pid)
    @cache.CacheDecorator("tests/connections")
    def calculate(self):
        addr_space = utils.load_as(self._config)
        if not self.is_valid_profile(addr_space.profile):
            # debug.error() aborts the plugin with the given message.
            debug.error("This command does not support the selected profile.")
        return network.determine_connections(addr_space)
| gpl-2.0 |
nitzmahone/ansible | lib/ansible/modules/cloud/vmware/vmware_local_role_facts.py | 42 | 4176 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2018, Abhijeet Kasurde <akasurde@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: vmware_local_role_facts
short_description: Gather facts about local roles on an ESXi host
description:
- This module can be used to gather facts about local role facts on an ESXi host
version_added: 2.7
author:
- Abhijeet Kasurde (@Akasurde)
notes:
- Tested on ESXi 6.5
- Be sure that the ESXi user used for login, has the appropriate rights to view roles
- The module returns a list of dict in version 2.8 and above.
requirements:
- "python >= 2.6"
- PyVmomi
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
- name: Gather facts about local role from an ESXi
vmware_local_role_facts:
hostname: '{{ esxi_hostname }}'
username: '{{ esxi_username }}'
password: '{{ esxi_password }}'
register: fact_details
delegate_to: localhost
- name: Get Admin privileges
set_fact:
admin_priv: "{{ fact_details.local_role_facts['Admin']['privileges'] }}"
- debug:
msg: "{{ admin_priv }}"
'''
RETURN = r'''
local_role_facts:
description: Facts about role present on ESXi host
returned: always
    type: list
sample: [
{
"privileges": [
"Alarm.Acknowledge",
"Alarm.Create",
"Alarm.Delete",
"Alarm.DisableActions",
],
"role_id": -12,
"role_info_label": "Ansible User",
"role_info_summary": "Ansible Automation user",
"role_name": "AnsiUser1",
"role_system": true
},
{
"privileges": [],
"role_id": -5,
"role_info_label": "No access",
"role_info_summary": "Used for restricting granted access",
"role_name": "NoAccess",
"role_system": true
},
{
"privileges": [
"System.Anonymous",
"System.View"
],
"role_id": -3,
"role_info_label": "View",
"role_info_summary": "Visibility access (cannot be granted)",
"role_name": "View",
"role_system": true
}
]
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import PyVmomi, vmware_argument_spec
class VMwareLocalRoleFacts(PyVmomi):
    """Gather facts about the local roles defined on an ESXi host."""
    def __init__(self, module):
        super(VMwareLocalRoleFacts, self).__init__(module)
        self.module = module
        self.params = module.params
        if self.content.authorizationManager is None:
            # vCenter exposes no local authorization manager, so a missing
            # one indicates the target is not an ESXi host.
            self.module.fail_json(
                msg="Failed to get local authorization manager settings.",
                details="It seems that '%s' is a vCenter server instead of an ESXi server" % self.params['hostname']
            )
    def gather_local_role_facts(self):
        """Gather facts about local roles and exit the module with them."""
        results = list()
        # Flatten each vim.AuthorizationManager role into a plain dict.
        for role in self.content.authorizationManager.roleList:
            results.append(
                dict(
                    role_name=role.name,
                    role_id=role.roleId,
                    privileges=[priv_name for priv_name in role.privilege],
                    role_system=role.system,
                    role_info_label=role.info.label,
                    role_info_summary=role.info.summary,
                )
            )
        # Read-only module: never reports a change.
        self.module.exit_json(changed=False, local_role_facts=results)
def main():
    """Module entry point: build the AnsibleModule and emit role facts."""
    module = AnsibleModule(
        argument_spec=vmware_argument_spec(),
        supports_check_mode=True,
    )
    VMwareLocalRoleFacts(module).gather_local_role_facts()


if __name__ == '__main__':
    main()
| gpl-3.0 |
theshadowx/enigma2 | lib/python/Screens/Scart.py | 30 | 1774 | from Screen import Screen
from MessageBox import MessageBox
from Components.AVSwitch import AVSwitch
from Tools import Notifications
class Scart(Screen):
def __init__(self, session, start_visible=True):
Screen.__init__(self, session)
self.msgBox = None
self.notificationVisible = None
self.avswitch = AVSwitch()
if start_visible:
self.onExecBegin.append(self.showMessageBox)
self.msgVisible = None
else:
self.msgVisible = False
def showMessageBox(self):
if self.msgVisible is None:
self.onExecBegin.remove(self.showMessageBox)
self.msgVisible = False
if not self.msgVisible:
self.msgVisible = True
self.avswitch.setInput("SCART")
if not self.session.in_exec:
self.notificationVisible = True
Notifications.AddNotificationWithCallback(self.MsgBoxClosed, MessageBox, _("If you see this, something is wrong with\nyour scart connection. Press OK to return."), MessageBox.TYPE_ERROR, msgBoxID = "scart_msgbox")
else:
self.msgBox = self.session.openWithCallback(self.MsgBoxClosed, MessageBox, _("If you see this, something is wrong with\nyour scart connection. Press OK to return."), MessageBox.TYPE_ERROR)
def MsgBoxClosed(self, *val):
self.msgBox = None
self.switchToTV()
def switchToTV(self, *val):
if self.msgVisible:
if self.msgBox:
self.msgBox.close() # ... MsgBoxClosed -> switchToTV again..
return
self.avswitch.setInput("ENCODER")
self.msgVisible = False
if self.notificationVisible:
self.avswitch.setInput("ENCODER")
self.notificationVisible = False
for notification in Notifications.current_notifications:
try:
if notification[1].msgBoxID == "scart_msgbox":
notification[1].close()
except:
print "other notification is open. try another one."
| gpl-2.0 |
peterbarker/ardupilot-1 | Tools/LogAnalyzer/tests/TestVibration.py | 261 | 3069 | from LogAnalyzer import Test,TestResult
import DataflashLog
import numpy
class TestVibration(Test):
    '''test for accelerometer vibration (accX/accY/accZ) within recommendations'''
    def __init__(self):
        Test.__init__(self)
        self.name = "Vibration"
    def run(self, logdata, verbose):
        """Analyse a stable LOITER segment and set self.result accordingly."""
        self.result = TestResult()
        # Only meaningful for multicopter logs.
        if logdata.vehicleType != "ArduCopter":
            self.result.status = TestResult.StatusType.NA
            return
        # constants
        # NOTE(review): 'gravity' is currently unused in the checks below.
        gravity = -9.81
        aimRangeWarnXY = 1.5
        aimRangeFailXY = 3.0
        aimRangeWarnZ = 2.0 # gravity +/- aim range
        aimRangeFailZ = 5.0 # gravity +/- aim range
        if not "IMU" in logdata.channels:
            self.result.status = TestResult.StatusType.UNKNOWN
            self.result.statusMessage = "No IMU log data"
            return
        # find some stable LOITER data to analyze, at least 10 seconds
        chunks = DataflashLog.DataflashLogHelper.findLoiterChunks(logdata, minLengthSeconds=10, noRCInputs=True)
        if not chunks:
            self.result.status = TestResult.StatusType.UNKNOWN
            self.result.statusMessage = "No stable LOITER log data found"
            return
        # for now we'll just use the first (largest) chunk of LOITER data
        # TODO: ignore the first couple of secs to avoid bad data during transition - or can we check more analytically that we're stable?
        # TODO: accumulate all LOITER chunks over min size, or just use the largest one?
        startLine = chunks[0][0]
        endLine = chunks[0][1]
        #print "TestVibration using LOITER chunk from lines %s to %s" % (`startLine`, `endLine`)
        def getStdDevIMU(logdata, channelName, startLine,endLine):
            # Standard deviation of one IMU channel over the chosen segment.
            loiterData = logdata.channels["IMU"][channelName].getSegment(startLine,endLine)
            numpyData = numpy.array(loiterData.dictData.values())
            return numpy.std(numpyData)
        # use 2x standard deviations as the metric, so if 95% of samples lie within the aim range we're good
        stdDevX = abs(2 * getStdDevIMU(logdata,"AccX",startLine,endLine))
        stdDevY = abs(2 * getStdDevIMU(logdata,"AccY",startLine,endLine))
        stdDevZ = abs(2 * getStdDevIMU(logdata,"AccZ",startLine,endLine))
        # Classify against the fail thresholds first, then the warn ones.
        if (stdDevX > aimRangeFailXY) or (stdDevY > aimRangeFailXY) or (stdDevZ > aimRangeFailZ):
            self.result.status = TestResult.StatusType.FAIL
            self.result.statusMessage = "Vibration too high (X:%.2fg, Y:%.2fg, Z:%.2fg)" % (stdDevX,stdDevY,stdDevZ)
        elif (stdDevX > aimRangeWarnXY) or (stdDevY > aimRangeWarnXY) or (stdDevZ > aimRangeWarnZ):
            self.result.status = TestResult.StatusType.WARN
            self.result.statusMessage = "Vibration slightly high (X:%.2fg, Y:%.2fg, Z:%.2fg)" % (stdDevX,stdDevY,stdDevZ)
        else:
            self.result.status = TestResult.StatusType.GOOD
            self.result.statusMessage = "Good vibration values (X:%.2fg, Y:%.2fg, Z:%.2fg)" % (stdDevX,stdDevY,stdDevZ)
| gpl-3.0 |
louiskun/flaskGIT | venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/gb2312prober.py | 2994 | 1681 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import GB2312DistributionAnalysis
from .mbcssm import GB2312SMModel
class GB2312Prober(MultiByteCharSetProber):
    """Multi-byte charset prober specialised for GB2312 (simplified Chinese).

    Wires the generic :class:`MultiByteCharSetProber` machinery to the
    GB2312 state-machine model and character-distribution analyser.
    """

    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        # The distribution analyser scores character frequencies; the coding
        # state machine validates GB2312 byte sequences.
        self._mDistributionAnalyzer = GB2312DistributionAnalysis()
        self._mCodingSM = CodingStateMachine(GB2312SMModel)
        self.reset()

    def get_charset_name(self):
        """Return the canonical name of the charset this prober detects."""
        return "GB2312"
| mit |
l04m33/moses | moses/staff/__init__.py | 1 | 1245 | #
# Moses' Staff: A transparent proxy that works with moses.py, for Linux systems.
#
# Use the `-h` option to see usage info.
#
#
# The MIT License (MIT)
# Copyright (c) 2015 Kay Z.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
| mit |
alexzoo/python | selenium_tests/env/lib/python3.6/site-packages/pip/_vendor/webencodings/__init__.py | 340 | 10576 | # coding: utf8
"""
webencodings
~~~~~~~~~~~~
This is a Python implementation of the `WHATWG Encoding standard
<http://encoding.spec.whatwg.org/>`. See README for details.
:copyright: Copyright 2012 by Simon Sapin
:license: BSD, see LICENSE for details.
"""
from __future__ import unicode_literals
import codecs
from .labels import LABELS
# Version of this implementation of the WHATWG Encoding standard wrapper.
VERSION = '0.5'

# Some names in Encoding are not valid Python aliases. Remap these.
PYTHON_NAMES = {
    'iso-8859-8-i': 'iso-8859-8',
    'x-mac-cyrillic': 'mac-cyrillic',
    'macintosh': 'mac-roman',
    'windows-874': 'cp874'}

# Lazily filled by lookup(): maps canonical encoding names to Encoding objects.
CACHE = {}
def ascii_lower(string):
    r"""Transform (only) ASCII letters to lower case: A-Z is mapped to a-z.

    :param string: An Unicode string.
    :returns: A new Unicode string.

    This implements `ASCII case-insensitive
    <http://encoding.spec.whatwg.org/#ascii-case-insensitive>`_ matching of
    encoding labels (also used for CSS keywords). Unlike
    :meth:`~py:str.lower`, non-ASCII characters are left untouched:

    >>> keyword = u'Bac\N{KELVIN SIGN}ground'
    >>> assert keyword.lower() == u'background'
    >>> assert ascii_lower(keyword) != keyword.lower()
    >>> assert ascii_lower(keyword) == u'bac\N{KELVIN SIGN}ground'
    """
    # bytes.lower() only touches the ASCII range, so a UTF-8 round-trip
    # lowers exactly A-Z. (Faster than unicode.translate().)
    as_bytes = string.encode('utf8')
    return as_bytes.lower().decode('utf8')
def lookup(label):
    """Look for an encoding by its label.

    This is the spec's `get an encoding
    <http://encoding.spec.whatwg.org/#concept-encoding-get>`_ algorithm;
    supported labels are listed there.

    :param label: A string.
    :returns:
        An :class:`Encoding` object, or :obj:`None` for an unknown label.
    """
    # Only strip ASCII whitespace: U+0009, U+000A, U+000C, U+000D, and U+0020.
    normalized = ascii_lower(label.strip('\t\n\f\r '))
    name = LABELS.get(normalized)
    if name is None:
        return None
    cached = CACHE.get(name)
    if cached is not None:
        return cached
    # Not cached yet: build the Encoding object once and memoize it.
    if name == 'x-user-defined':
        from .x_user_defined import codec_info
    else:
        # Any python_name value that gets to here should be valid.
        codec_info = codecs.lookup(PYTHON_NAMES.get(name, name))
    encoding = Encoding(name, codec_info)
    CACHE[name] = encoding
    return encoding
def _get_encoding(encoding_or_label):
    """Coerce an encoding object or a label string to an :class:`Encoding`.

    :param encoding_or_label: An :class:`Encoding` object or a label string.
    :returns: An :class:`Encoding` object.
    :raises: :exc:`~exceptions.LookupError` for an unknown label.
    """
    # Anything carrying codec_info is already an Encoding-like object.
    if hasattr(encoding_or_label, 'codec_info'):
        return encoding_or_label

    found = lookup(encoding_or_label)
    if found is None:
        raise LookupError('Unknown encoding label: %r' % encoding_or_label)
    return found
class Encoding(object):
    """Represents a character encoding such as UTF-8,
    usable for decoding or encoding.

    .. attribute:: name

        Canonical name of the encoding.

    .. attribute:: codec_info

        The actual implementation of the encoding,
        a stdlib :class:`~codecs.CodecInfo` object.
        See :func:`codecs.register`.
    """

    def __init__(self, name, codec_info):
        self.name = name
        self.codec_info = codec_info

    def __repr__(self):
        return '<Encoding {0}>'.format(self.name)
#: The UTF-8 encoding. Should be used for new content and formats.
UTF8 = lookup('utf-8')

# UTF-16 variants are private: they are only ever selected through their
# byte-order marks (see _detect_bom), never via a caller-supplied label.
_UTF16LE = lookup('utf-16le')
_UTF16BE = lookup('utf-16be')
def decode(input, fallback_encoding, errors='replace'):
    """Decode a single string.

    :param input: A byte string.
    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if :obj:`input` does not have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :return:
        A ``(output, encoding)`` tuple of an Unicode string
        and an :obj:`Encoding`.
    """
    # Resolve the label first so an invalid one fails before any decoding.
    fallback = _get_encoding(fallback_encoding)
    bom_encoding, remainder = _detect_bom(input)
    encoding = fallback if bom_encoding is None else bom_encoding
    output, _consumed = encoding.codec_info.decode(remainder, errors)
    return output, encoding
def _detect_bom(input):
    """Return (bom_encoding, input), with any BOM removed from the input."""
    # Check the two-byte UTF-16 marks before the three-byte UTF-8 one;
    # the prefixes are mutually exclusive so order does not change results.
    bom_table = (
        (b'\xFF\xFE', _UTF16LE),
        (b'\xFE\xFF', _UTF16BE),
        (b'\xEF\xBB\xBF', UTF8),
    )
    for bom, encoding in bom_table:
        if input.startswith(bom):
            return encoding, input[len(bom):]
    return None, input
def encode(input, encoding=UTF8, errors='strict'):
    """Encode a single string.

    :param input: An Unicode string.
    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :return: A byte string.
    """
    codec = _get_encoding(encoding).codec_info
    encoded, _length = codec.encode(input, errors)
    return encoded
def iter_decode(input, fallback_encoding, errors='replace'):
    """"Pull"-based decoder.

    :param input:
        An iterable of byte strings.
        It is first consumed just enough to determine the encoding from any
        BOM, then consumed on demand as the returned iterable is used.
    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if :obj:`input` does not have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :returns:
        An ``(output, encoding)`` tuple.
        :obj:`output` is an iterable of Unicode strings,
        :obj:`encoding` is the :obj:`Encoding` that is being used.
    """
    decoder = IncrementalDecoder(fallback_encoding, errors)
    output = _iter_decode_generator(input, decoder)
    # The generator yields the resolved Encoding before any text chunks.
    resolved_encoding = next(output)
    return output, resolved_encoding
def _iter_decode_generator(input, decoder):
"""Return a generator that first yields the :obj:`Encoding`,
then yields output chukns as Unicode strings.
"""
decode = decoder.decode
input = iter(input)
for chunck in input:
output = decode(chunck)
if output:
assert decoder.encoding is not None
yield decoder.encoding
yield output
break
else:
# Input exhausted without determining the encoding
output = decode(b'', final=True)
assert decoder.encoding is not None
yield decoder.encoding
if output:
yield output
return
for chunck in input:
output = decode(chunck)
if output:
yield output
output = decode(b'', final=True)
if output:
yield output
def iter_encode(input, encoding=UTF8, errors='strict'):
    """"Pull"-based encoder.

    :param input: An iterable of Unicode strings.
    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    :returns: An iterable of byte strings.
    """
    # Building the encoder here (not inside the generator) makes an invalid
    # label fail immediately rather than on first iteration.
    encoder = IncrementalEncoder(encoding, errors)
    return _iter_encode_generator(input, encoder.encode)
def _iter_encode_generator(input, encode):
for chunck in input:
output = encode(chunck)
if output:
yield output
output = encode('', final=True)
if output:
yield output
class IncrementalDecoder(object):
    """"Push"-based decoder.

    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if the input does not have a BOM.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
    """

    def __init__(self, fallback_encoding, errors='replace'):
        # Resolve the label now so that a bad one fails fast.
        self._fallback_encoding = _get_encoding(fallback_encoding)
        self._errors = errors
        self._buffer = b''
        self._decoder = None
        #: The actual :class:`Encoding` in use, or :obj:`None` while there is
        #: not yet enough input to rule a BOM in or out.
        self.encoding = None

    def decode(self, input, final=False):
        """Decode one chunk of the input.

        :param input: A byte string.
        :param final:
            Indicate that no more input is available.
            Must be :obj:`True` if this is the last call.
        :returns: An Unicode string.
        """
        # Once an underlying decoder exists, just delegate to it.
        if self._decoder is not None:
            return self._decoder(input, final)

        data = self._buffer + input
        encoding, data = _detect_bom(data)
        if encoding is None:
            # A 3-byte BOM could still be split across chunks: keep buffering
            # until we have 3 bytes or the stream ends.
            if len(data) < 3 and not final:
                self._buffer = data
                return ''
            # No BOM: fall back to the caller-supplied encoding.
            encoding = self._fallback_encoding
        decoder = encoding.codec_info.incrementaldecoder(self._errors).decode
        self._decoder = decoder
        self.encoding = encoding
        return decoder(data, final)
class IncrementalEncoder(object):
    """"Push"-based encoder.

    :param encoding: An :class:`Encoding` object or a label string.
    :param errors: Type of error handling. See :func:`codecs.register`.
    :raises: :exc:`~exceptions.LookupError` for an unknown encoding label.

    .. method:: encode(input, final=False)

        :param input: An Unicode string.
        :param final:
            Indicate that no more input is available.
            Must be :obj:`True` if this is the last call.
        :returns: A byte string.
    """

    def __init__(self, encoding=UTF8, errors='strict'):
        # Resolving the label here means a bad one fails at construction time.
        resolved = _get_encoding(encoding)
        self.encode = resolved.codec_info.incrementalencoder(errors).encode
| apache-2.0 |
chdecultot/frappe | frappe/tests/test_db.py | 8 | 1343 | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import unittest
import frappe
class TestDB(unittest.TestCase):
	"""Tests for frappe.db helpers: filter-operator handling and escaping."""

	def test_get_value(self):
		"""get_value must honour every supported comparison operator."""
		self.assertEqual(frappe.db.get_value("User", {"name": ["=", "Administrator"]}), "Administrator")
		self.assertEqual(frappe.db.get_value("User", {"name": ["like", "Admin%"]}), "Administrator")
		# assertNotEquals is a deprecated alias (removed in Python 3.12);
		# use the canonical assertNotEqual instead.
		self.assertNotEqual(frappe.db.get_value("User", {"name": ["!=", "Guest"]}), "Guest")
		self.assertEqual(frappe.db.get_value("User", {"name": ["<", "B"]}), "Administrator")
		self.assertEqual(frappe.db.get_value("User", {"name": ["<=", "Administrator"]}), "Administrator")
		# The ">" / ">=" operators are cross-checked against raw SQL results.
		self.assertEqual(frappe.db.sql("""select name from `tabUser` where name > "s" order by modified desc""")[0][0],
			frappe.db.get_value("User", {"name": [">", "s"]}))
		self.assertEqual(frappe.db.sql("""select name from `tabUser` where name >= "t" order by modified desc""")[0][0],
			frappe.db.get_value("User", {"name": [">=", "t"]}))

	def test_escape(self):
		"""escape() must accept non-ASCII byte strings without raising."""
		frappe.db.escape("香港濟生堂製藥有限公司 - IT".encode("utf-8"))

	# def test_multiple_queries(self):
	# 	# implicit commit
	# 	self.assertRaises(frappe.SQLError, frappe.db.sql, """select name from `tabUser`; truncate `tabEmail Queue`""")
| mit |
unho/virtaal | virtaal/plugins/tm/tmcontroller.py | 3 | 10411 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2008-2011 Zuza Software Foundation
# Copyright 2016 F Wolff
#
# This file is part of Virtaal.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import gobject
import os.path
from translate.lang.data import forceunicode, normalize
from virtaal.controllers.basecontroller import BaseController
class TMController(BaseController):
    """The logic-filled glue between the TM view and -model."""

    __gtype_name__ = 'TMController'
    __gsignals__ = {
        'start-query': (gobject.SIGNAL_RUN_FIRST, None, (object,))
    }

    QUERY_DELAY = 300
    """The delay after a unit is selected (C{Cursor}'s "cursor-changed" event)
    before the TM is queried."""


    # INITIALIZERS #
    def __init__(self, main_controller, config=None):
        """Create the controller, load the TM model plug-ins and hook up to
        the currently open store (if any).

        @param main_controller: the application's main controller.
        @param config: optional configuration dict; recognised keys are
            C{disabled_models}, C{max_matches} and C{min_quality}."""
        from virtaal.common import GObjectWrapper
        GObjectWrapper.__init__(self)

        # A mutable default argument (config={}) would be shared between all
        # instances; normalise None to a fresh dict instead.
        if config is None:
            config = {}
        self.config = config
        self.main_controller = main_controller
        self.disabled_model_names = ['basetmmodel'] + self.config.get('disabled_models', [])
        self.max_matches = self.config.get('max_matches', 5)
        self.min_quality = self.config.get('min_quality', 75)
        # plug-in signals:
        self._signal_ids = {}
        # tm query delay:
        self._delay_id = None

        from tmview import TMView
        self.storecursor = None
        self.view = TMView(self, self.max_matches)

        self._load_models()
        self._connect_plugin()

    def _connect_plugin(self):
        """Connect to store- and mode-change signals on the main controller."""
        self._store_loaded_id = self.main_controller.store_controller.connect('store-loaded', self._on_store_loaded)
        self._store_closed_id = self.main_controller.store_controller.connect('store-closed', self._on_store_closed)
        if self.main_controller.store_controller.get_store() is not None:
            # A store is already open: behave as if it was just loaded.
            self._on_store_loaded(self.main_controller.store_controller)
            self.view._should_show_tmwindow = True

        if self.main_controller.mode_controller is not None:
            self._mode_selected_id = self.main_controller.mode_controller.connect('mode-selected', self._on_mode_selected)

    def _load_models(self):
        """Discover and load all TM model plug-ins via a PluginController."""
        from virtaal.controllers.plugincontroller import PluginController
        self.plugin_controller = PluginController(self, 'TMModel')
        self.plugin_controller.PLUGIN_CLASS_INFO_ATTRIBS = ['display_name', 'description']
        # Plug-ins live in the "tm/models" sub-directory of each plug-in dir.
        new_dirs = []
        for dir in self.plugin_controller.PLUGIN_DIRS:
            new_dirs.append(os.path.join(dir, 'tm', 'models'))
        self.plugin_controller.PLUGIN_DIRS = new_dirs

        from models.basetmmodel import BaseTMModel
        self.plugin_controller.PLUGIN_INTERFACE = BaseTMModel
        self.plugin_controller.PLUGIN_MODULES = ['virtaal_plugins.tm.models', 'virtaal.plugins.tm.models']
        self.plugin_controller.get_disabled_plugins = lambda *args: self.disabled_model_names

        self._model_signal_ids = {}
        def on_plugin_enabled(plugin_ctrlr, plugin):
            self._model_signal_ids[plugin] = plugin.connect('match-found', self.accept_response)
        def on_plugin_disabled(plugin_ctrlr, plugin):
            plugin.disconnect(self._model_signal_ids[plugin])

        self._signal_ids['plugin-enabled'] = self.plugin_controller.connect('plugin-enabled', on_plugin_enabled)
        self._signal_ids['plugin-disabled'] = self.plugin_controller.connect('plugin-disabled', on_plugin_disabled)

        self.plugin_controller.load_plugins()


    # METHODS #
    def accept_response(self, tmmodel, query_str, matches):
        """Accept a query-response from the model.
        (This method is used as Model-Controller communications)"""
        if not self.storecursor:
            # File closed since the query was started
            return
        query_str = forceunicode(query_str)
        if query_str != self.current_query or not matches:
            # Stale response for a superseded query, or nothing to show.
            return
        # Perform some sanity checks on matches first
        for match in matches:
            if not isinstance(match.get('quality', 0), int):
                match['quality'] = int(match['quality'] or 0)
            if 'tmsource' not in match or match['tmsource'] is None:
                match['tmsource'] = tmmodel.display_name
            match['query_str'] = query_str

        anything_new = False
        for match in matches:
            curr_targets = [normalize(m['target']) for m in self.matches]
            if normalize(match['target']) not in curr_targets:
                # Let's insert at the end to prioritise existing matches over
                # new ones. We rely on the guarantee of sort stability. This
                # way an existing 100% will be above a new 100%.
                self.matches.append(match)
                anything_new = True
            else:
                norm_match_target = normalize(match['target'])
                prevmatch = [m for m in self.matches if normalize(m['target']) == norm_match_target][0]
                if 'quality' not in prevmatch or not prevmatch['quality']:
                    # Matches without quality are assumed to be less appropriate
                    # (ie. MT matches) than matches with an associated quality.
                    self.matches.remove(prevmatch)
                    self.matches.append(match)
                    anything_new = True
        if not anything_new:
            return
        self.matches.sort(key=lambda x: 'quality' in x and x['quality'] or 0, reverse=True)
        self.matches = self.matches[:self.max_matches]

        # Only call display_matches if necessary:
        if self.matches:
            self.view.display_matches(self.matches)

    def destroy(self):
        """Tear down the view, disconnect all signals and unload plug-ins."""
        # Destroy the view
        self.view.hide()
        self.view.destroy()

        # Disconnect signals
        self.main_controller.store_controller.disconnect(self._store_loaded_id)
        # Fix: _store_closed_id was connected in _connect_plugin() but never
        # disconnected here, leaking a handler on every destroy.
        self.main_controller.store_controller.disconnect(self._store_closed_id)
        if getattr(self, '_cursor_changed_id', None):
            self.main_controller.store_controller.cursor.disconnect(self._cursor_changed_id)
        if getattr(self, '_mode_selected_id', None):
            self.main_controller.mode_controller.disconnect(self._mode_selected_id)
        if getattr(self, '_target_focused_id', None):
            self.main_controller.unit_controller.view.disconnect(self._target_focused_id)

        self.plugin_controller.shutdown()

    def select_match(self, match_data):
        """Handle a match-selection event.
        (This method is used as View-Controller communications)"""
        unit_controller = self.main_controller.unit_controller
        target_n = unit_controller.view.focused_target_n
        old_text = unit_controller.view.get_target_n(target_n)
        textbox = unit_controller.view.targets[target_n]
        # Make the replacement undoable before overwriting the target text.
        self.main_controller.undo_controller.push_current_text(textbox)
        unit_controller.set_unit_target(target_n, forceunicode(match_data['target']))

    def send_tm_query(self, unit=None):
        """Send a new query to the TM engine.
        (This method is used as Controller-Model communications)"""
        if unit is not None:
            self.unit = unit

        self.current_query = self.unit.source
        self.matches = []
        self.view.clear()
        self.emit('start-query', self.unit)

    def start_query(self):
        """Start a TM query after C{self.QUERY_DELAY} milliseconds."""
        if not self.storecursor:
            return
        if not hasattr(self, 'unit'):
            self.unit = self.storecursor.deref()
        self.unit_view = self.main_controller.unit_controller.view
        # Re-hook the "target-focused" handler onto the (possibly new) view.
        if getattr(self, '_target_focused_id', None) and getattr(self, 'unit_view', None):
            self.unit_view.disconnect(self._target_focused_id)
        self._target_focused_id = self.unit_view.connect('target-focused', self._on_target_focused)
        self.view.hide()

        def start_query():
            self._delay_id = None
            self.send_tm_query()
            return False

        # Restart the delay timer so rapid cursor moves issue only one query.
        if self._delay_id:
            gobject.source_remove(self._delay_id)
        self._delay_id = gobject.timeout_add(self.QUERY_DELAY, start_query)


    # EVENT HANDLERS #
    def _on_cursor_changed(self, cursor):
        self.storecursor = cursor
        if cursor is None:
            # this can happen if we close a big file before it finished loading
            return
        self.unit = cursor.deref()
        if self.unit is None:
            return

        # Auto-toggle the suggestions window based on translation state.
        if self.view.active and self.unit.istranslated():
            self.view.mnu_suggestions.set_active(False)
        elif not self.view.active and not self.unit.istranslated():
            self.view.mnu_suggestions.set_active(True)

        return self.start_query()

    def _on_mode_selected(self, modecontroller, mode):
        self.view.update_geometry()

    def _on_store_closed(self, storecontroller):
        if hasattr(self, '_cursor_changed_id') and self.storecursor:
            self.storecursor.disconnect(self._cursor_changed_id)
        self.storecursor = None
        self._cursor_changed_id = 0
        self.view.hide()

    def _on_store_loaded(self, storecontroller):
        """Disconnect from the previous store's cursor and connect to the new one."""
        if getattr(self, '_cursor_changed_id', None) and self.storecursor:
            self.storecursor.disconnect(self._cursor_changed_id)
        self.storecursor = storecontroller.cursor
        self._cursor_changed_id = self.storecursor.connect('cursor-changed', self._on_cursor_changed)

        def handle_first_unit():
            self._on_cursor_changed(self.storecursor)
            return False
        gobject.idle_add(handle_first_unit)

    def _on_target_focused(self, unitcontroller, target_n):
        #import logging
        #logging.debug('target_n: %d' % (target_n))
        self.view.update_geometry()
| gpl-2.0 |
googleads/google-ads-python | google/ads/googleads/v7/errors/__init__.py | 1 | 4326 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Public API of this package, listed alphabetically.
# Fix: "AccessInvitationErrorEnum" was appended at the end of an otherwise
# sorted tuple; it is moved to its alphabetical position for consistency.
__all__ = (
    "AccessInvitationErrorEnum",
    "AccountBudgetProposalErrorEnum",
    "AccountLinkErrorEnum",
    "AdCustomizerErrorEnum",
    "AdErrorEnum",
    "AdGroupAdErrorEnum",
    "AdGroupBidModifierErrorEnum",
    "AdGroupCriterionErrorEnum",
    "AdGroupErrorEnum",
    "AdGroupFeedErrorEnum",
    "AdParameterErrorEnum",
    "AdSharingErrorEnum",
    "AdxErrorEnum",
    "AssetErrorEnum",
    "AssetLinkErrorEnum",
    "AuthenticationErrorEnum",
    "AuthorizationErrorEnum",
    "BatchJobErrorEnum",
    "BiddingErrorEnum",
    "BiddingStrategyErrorEnum",
    "BillingSetupErrorEnum",
    "CampaignBudgetErrorEnum",
    "CampaignCriterionErrorEnum",
    "CampaignDraftErrorEnum",
    "CampaignErrorEnum",
    "CampaignExperimentErrorEnum",
    "CampaignFeedErrorEnum",
    "CampaignSharedSetErrorEnum",
    "ChangeEventErrorEnum",
    "ChangeStatusErrorEnum",
    "CollectionSizeErrorEnum",
    "ContextErrorEnum",
    "ConversionActionErrorEnum",
    "ConversionAdjustmentUploadErrorEnum",
    "ConversionCustomVariableErrorEnum",
    "ConversionUploadErrorEnum",
    "CountryCodeErrorEnum",
    "CriterionErrorEnum",
    "CurrencyCodeErrorEnum",
    "CustomAudienceErrorEnum",
    "CustomInterestErrorEnum",
    "CustomerClientLinkErrorEnum",
    "CustomerErrorEnum",
    "CustomerFeedErrorEnum",
    "CustomerManagerLinkErrorEnum",
    "CustomerUserAccessErrorEnum",
    "DatabaseErrorEnum",
    "DateErrorEnum",
    "DateRangeErrorEnum",
    "DistinctErrorEnum",
    "EnumErrorEnum",
    "ErrorCode",
    "ErrorDetails",
    "ErrorLocation",
    "ExtensionFeedItemErrorEnum",
    "ExtensionSettingErrorEnum",
    "FeedAttributeReferenceErrorEnum",
    "FeedErrorEnum",
    "FeedItemErrorEnum",
    "FeedItemSetErrorEnum",
    "FeedItemSetLinkErrorEnum",
    "FeedItemTargetErrorEnum",
    "FeedItemValidationErrorEnum",
    "FeedMappingErrorEnum",
    "FieldErrorEnum",
    "FieldMaskErrorEnum",
    "FunctionErrorEnum",
    "FunctionParsingErrorEnum",
    "GeoTargetConstantSuggestionErrorEnum",
    "GoogleAdsError",
    "GoogleAdsFailure",
    "HeaderErrorEnum",
    "IdErrorEnum",
    "ImageErrorEnum",
    "InternalErrorEnum",
    "InvoiceErrorEnum",
    "KeywordPlanAdGroupErrorEnum",
    "KeywordPlanAdGroupKeywordErrorEnum",
    "KeywordPlanCampaignErrorEnum",
    "KeywordPlanCampaignKeywordErrorEnum",
    "KeywordPlanErrorEnum",
    "KeywordPlanIdeaErrorEnum",
    "LabelErrorEnum",
    "LanguageCodeErrorEnum",
    "ListOperationErrorEnum",
    "ManagerLinkErrorEnum",
    "MediaBundleErrorEnum",
    "MediaFileErrorEnum",
    "MediaUploadErrorEnum",
    "MultiplierErrorEnum",
    "MutateErrorEnum",
    "NewResourceCreationErrorEnum",
    "NotAllowlistedErrorEnum",
    "NotEmptyErrorEnum",
    "NullErrorEnum",
    "OfflineUserDataJobErrorEnum",
    "OperationAccessDeniedErrorEnum",
    "OperatorErrorEnum",
    "PartialFailureErrorEnum",
    "PaymentsAccountErrorEnum",
    "PolicyFindingDetails",
    "PolicyFindingErrorEnum",
    "PolicyValidationParameterErrorEnum",
    "PolicyViolationDetails",
    "PolicyViolationErrorEnum",
    "QueryErrorEnum",
    "QuotaErrorDetails",
    "QuotaErrorEnum",
    "RangeErrorEnum",
    "ReachPlanErrorEnum",
    "RecommendationErrorEnum",
    "RegionCodeErrorEnum",
    "RequestErrorEnum",
    "ResourceAccessDeniedErrorEnum",
    "ResourceCountDetails",
    "ResourceCountLimitExceededErrorEnum",
    "SettingErrorEnum",
    "SharedCriterionErrorEnum",
    "SharedSetErrorEnum",
    "SizeLimitErrorEnum",
    "StringFormatErrorEnum",
    "StringLengthErrorEnum",
    "ThirdPartyAppAnalyticsLinkErrorEnum",
    "TimeZoneErrorEnum",
    "UrlFieldErrorEnum",
    "UserDataErrorEnum",
    "UserListErrorEnum",
    "YoutubeVideoRegistrationErrorEnum",
)
| apache-2.0 |
kyleam/pymc3 | pymc/distributions/discrete.py | 1 | 8683 | from .dist_math import *
# Public API of this module: the discrete distribution classes defined below.
__all__ = ['Binomial', 'BetaBin', 'Bernoulli', 'Poisson', 'NegativeBinomial',
           'ConstantDist', 'ZeroInflatedPoisson', 'DiscreteUniform', 'Geometric',
           'Categorical']
class Binomial(Discrete):
    r"""
    Binomial log-likelihood. The discrete probability distribution
    of the number of successes in a sequence of n independent yes/no
    experiments, each of which yields success with probability p.

    .. math::
        f(x \mid n, p) = \frac{n!}{x!(n-x)!} p^x (1-p)^{n-x}

    Parameters
    ----------
    n : int
        Number of Bernoulli trials, n > x
    p : float
        Probability of success in each trial, :math:`p \in [0,1]`

    .. note::
    - :math:`E(X)=np`
    - :math:`Var(X)=np(1-p)`
    """
    # NOTE: the docstring is a raw string so LaTeX escapes like \frac are not
    # interpreted as string escapes (\f is a form feed in a plain string).
    def __init__(self, n, p, *args, **kwargs):
        super(Binomial, self).__init__(*args, **kwargs)
        self.n = n
        self.p = p
        # NOTE(review): casting to 'int8' overflows when n * p > 127 —
        # confirm whether a wider integer type is needed here.
        self.mode = cast(round(n * p), 'int8')

    def logp(self, value):
        # Bound the binomial log-pmf to the valid support 0 <= value <= n
        # and the valid parameter range 0 <= p <= 1.
        n = self.n
        p = self.p

        return bound(
            logpow(p, value) + logpow(
                1 - p, n - value) + factln(
                n) - factln(value) - factln(n - value),
            0 <= value, value <= n,
            0 <= p, p <= 1)
class BetaBin(Discrete):
    r"""
    Beta-binomial log-likelihood. Equivalent to binomial random
    variables with probabilities drawn from a
    :math:`\texttt{Beta}(\alpha,\beta)` distribution.

    .. math::
        f(x \mid \alpha, \beta, n) = \frac{\Gamma(\alpha + \beta)}{\Gamma(\alpha)} \frac{\Gamma(n+1)}{\Gamma(x+1)\Gamma(n-x+1)} \frac{\Gamma(\alpha + x)\Gamma(n+\beta-x)}{\Gamma(\alpha+\beta+n)}

    Parameters
    ----------
    alpha : float
        alpha > 0
    beta : float
        beta > 0
    n : int
        n=x,x+1,...

    .. note::
    - :math:`E(X)=n\frac{\alpha}{\alpha+\beta}`
    - :math:`Var(X)=n\frac{\alpha \beta}{(\alpha+\beta)^2(\alpha+\beta+1)}`
    """
    # NOTE: raw docstring so \texttt, \frac etc. are not treated as escapes.
    def __init__(self, alpha, beta, n, *args, **kwargs):
        super(BetaBin, self).__init__(*args, **kwargs)
        self.alpha = alpha
        self.beta = beta
        self.n = n
        # NOTE(review): alpha / (alpha + beta) lies in (0, 1), so round()
        # yields only 0 or 1; for a distribution over 0..n this looks like a
        # missing factor of n — confirm against upstream before changing.
        self.mode = cast(round(alpha / (alpha + beta)), 'int8')

    def logp(self, value):
        # Log-pmf via gamma functions, bounded to the support 0..n and to
        # positive shape parameters.
        alpha = self.alpha
        beta = self.beta
        n = self.n

        return bound(gammaln(alpha + beta) - gammaln(alpha) - gammaln(beta) +
                     gammaln(n + 1) - gammaln(value + 1) - gammaln(n - value + 1) +
                     gammaln(alpha + value) + gammaln(n + beta - value) - gammaln(beta + alpha + n),
                     0 <= value, value <= n,
                     alpha > 0,
                     beta > 0)
class Bernoulli(Discrete):
    r"""Bernoulli log-likelihood

    The Bernoulli distribution describes the probability of successes (x=1) and
    failures (x=0).

    .. math:: f(x \mid p) = p^{x} (1-p)^{1-x}

    Parameters
    ----------
    p : float
        Probability of success. :math:`0 < p < 1`.

    .. note::
    - :math:`E(x)= p`
    - :math:`Var(x)= p(1-p)`
    """
    def __init__(self, p, *args, **kwargs):
        super(Bernoulli, self).__init__(*args, **kwargs)
        self.p = p
        # Mode is 1 when p >= 0.5, else 0.
        self.mode = cast(round(p), 'int8')

    def logp(self, value):
        # log(p) for successes (value truthy), log(1-p) for failures.
        p = self.p
        return bound(
            switch(value, log(p), log(1 - p)),
            0 <= p, p <= 1)
class Poisson(Discrete):
    r"""
    Poisson log-likelihood.

    The Poisson is a discrete probability
    distribution. It is often used to model the number of events
    occurring in a fixed period of time when the times at which events
    occur are independent. The Poisson distribution can be derived as
    a limiting case of the binomial distribution.

    .. math::
        f(x \mid \mu) = \frac{e^{-\mu}\mu^x}{x!}

    Parameters
    ----------
    mu : float
        Expected number of occurrences during the given interval, :math:`\mu \geq 0`.

    .. note::
    - :math:`E(x)=\mu`
    - :math:`Var(x)=\mu`
    """
    def __init__(self, mu, *args, **kwargs):
        super(Poisson, self).__init__(*args, **kwargs)
        self.mu = mu
        self.mode = floor(mu).astype('int32')

    def logp(self, value):
        # Poisson log-pmf, valid for mu >= 0 and non-negative counts.
        mu = self.mu
        return bound(
            logpow(mu, value) - factln(value) - mu,
            mu >= 0, value >= 0)
class NegativeBinomial(Discrete):
    r"""
    Negative binomial log-likelihood.

    The negative binomial distribution describes a Poisson random variable
    whose rate parameter is gamma distributed. PyMC's chosen parameterization
    is based on this mixture interpretation.

    .. math::
        f(x \mid \mu, \alpha) = \frac{\Gamma(x+\alpha)}{x! \Gamma(\alpha)} (\alpha/(\mu+\alpha))^\alpha (\mu/(\mu+\alpha))^x

    Parameters
    ----------
    mu : float
        mu > 0
    alpha : float
        alpha > 0

    .. note::
    - :math:`E[x]=\mu`
    """
    def __init__(self, mu, alpha, *args, **kwargs):
        super(NegativeBinomial, self).__init__(*args, **kwargs)
        self.mu = mu
        self.alpha = alpha
        self.mode = floor(mu).astype('int32')

    def logp(self, value):
        mu = self.mu
        alpha = self.alpha
        # Return Poisson when alpha gets very large
        # (the gamma mixture degenerates and the NB gammaln terms lose
        # numerical accuracy).
        pois = bound(logpow(mu, value) - factln(value) - mu,
                     mu > 0,
                     value >= 0)
        negbinom = bound(gammaln(value + alpha) - factln(value) - gammaln(alpha) +
                         logpow(mu / (mu + alpha), value) + logpow(alpha / (mu + alpha), alpha),
                         mu > 0, alpha > 0, value >= 0)
        return switch(alpha > 1e10,
                      pois,
                      negbinom)
class Geometric(Discrete):
    r"""
    Geometric log-likelihood. The probability that the first success in a
    sequence of Bernoulli trials occurs on the x'th trial.

    .. math::
        f(x \mid p) = p(1-p)^{x-1}

    Parameters
    ----------
    p : float
        Probability of success on an individual trial, :math:`p \in [0,1]`

    .. note::
    - :math:`E(X)=1/p`
    - :math:`Var(X)=\frac{1-p}{p^2}`
    """
    def __init__(self, p, *args, **kwargs):
        super(Geometric, self).__init__(*args, **kwargs)
        self.p = p
        # The pmf is maximised at the first trial for any valid p.
        self.mode = 1

    def logp(self, value):
        # Support starts at 1 (first trial), not 0.
        p = self.p
        return bound(log(p) + logpow(1 - p, value - 1),
                     0 <= p, p <= 1, value >= 1)
class DiscreteUniform(Discrete):
    r"""
    Discrete uniform distribution.

    .. math::
        f(x \mid lower, upper) = \frac{1}{upper-lower}

    Parameters
    ----------
    lower : int
        Lower limit.
    upper : int
        Upper limit (upper > lower).
    """
    def __init__(self, lower, upper, *args, **kwargs):
        super(DiscreteUniform, self).__init__(*args, **kwargs)
        # Coerce bounds to integers symbolically.
        self.lower, self.upper = floor(lower).astype('int32'), floor(upper).astype('int32')
        # Any point in the support is a mode; the midpoint is a convenient pick.
        self.mode = floor((upper - lower) / 2.).astype('int32')

    def logp(self, value):
        # Constant density over the inclusive integer range [lower, upper].
        upper = self.upper
        lower = self.lower
        return bound(
            -log(upper - lower + 1),
            lower <= value, value <= upper)
class Categorical(Discrete):
    r"""
    Categorical log-likelihood. The most general discrete distribution.

    .. math:: f(x=i \mid p) = p_i

    for :math:`i \in 0 \ldots k-1`.

    Parameters
    ----------
    p : float
        :math:`p > 0`, :math:`\sum p = 1`
    """
    def __init__(self, p, *args, **kwargs):
        super(Categorical, self).__init__(*args, **kwargs)
        # Number of categories is the leading dimension of p.
        self.k = p.shape[0]
        self.p = p
        self.mode = argmax(p)

    def logp(self, value):
        p = self.p
        k = self.k
        # The last bound term tolerates small numerical error in sum(p) == 1.
        return bound(log(p[value]),
                     value >= 0,
                     value <= (k - 1),
                     le(abs(sum(p) - 1), 1e-5))
class ConstantDist(Discrete):
    """
    Constant log-likelihood with parameter c={0}.

    All probability mass sits on the single value ``c``; logp is 0 at ``c``
    and -inf elsewhere (via bound).

    Parameters
    ----------
    value : float or int
        Data value(s)
    """
    def __init__(self, c, *args, **kwargs):
        super(ConstantDist, self).__init__(*args, **kwargs)
        # Mean, median and mode all coincide with the constant itself.
        self.mean = self.median = self.mode = self.c = c

    def logp(self, value):
        c = self.c
        return bound(0, eq(value, c))
class ZeroInflatedPoisson(Discrete):
    """Zero-inflated Poisson log-likelihood.

    Mixture of a Poisson(theta) component and a point mass at zero; the
    indicator ``z`` selects, per value, the Poisson component when true and
    the constant-zero component otherwise.
    """
    def __init__(self, theta, z, *args, **kwargs):
        super(ZeroInflatedPoisson, self).__init__(*args, **kwargs)
        self.theta = theta
        self.z = z
        # Pre-build the two mixture components once.
        self.pois = Poisson.dist(theta)
        self.const = ConstantDist.dist(0)
        self.mode = self.pois.mode

    def logp(self, value):
        z = self.z
        return switch(z,
                      self.pois.logp(value),
                      self.const.logp(value))
| apache-2.0 |
sysalexis/kbengine | kbe/src/lib/python/Lib/ssl.py | 67 | 34420 | # Wrapper module for _ssl, providing some additional facilities
# implemented in Python. Written by Bill Janssen.
"""This module provides some more Pythonic support for SSL.
Object types:
SSLSocket -- subtype of socket.socket which does SSL over the socket
Exceptions:
SSLError -- exception raised for I/O errors
Functions:
cert_time_to_seconds -- convert time string used for certificate
notBefore and notAfter functions to integer
seconds past the Epoch (the time values
returned from time.time())
fetch_server_certificate (HOST, PORT) -- fetch the certificate provided
by the server running on HOST at port PORT. No
validation of the certificate is performed.
Integer constants:
SSL_ERROR_ZERO_RETURN
SSL_ERROR_WANT_READ
SSL_ERROR_WANT_WRITE
SSL_ERROR_WANT_X509_LOOKUP
SSL_ERROR_SYSCALL
SSL_ERROR_SSL
SSL_ERROR_WANT_CONNECT
SSL_ERROR_EOF
SSL_ERROR_INVALID_ERROR_CODE
The following group define certificate requirements that one side is
allowing/requiring from the other side:
CERT_NONE - no certificates from the other side are required (or will
be looked at if provided)
CERT_OPTIONAL - certificates are not required, but if provided will be
validated, and if validation fails, the connection will
also fail
CERT_REQUIRED - certificates are required, and will be validated, and
if validation fails, the connection will also fail
The following constants identify various SSL protocol variants:
PROTOCOL_SSLv2
PROTOCOL_SSLv3
PROTOCOL_SSLv23
PROTOCOL_TLSv1
PROTOCOL_TLSv1_1
PROTOCOL_TLSv1_2
The following constants identify various SSL alert message descriptions as per
http://www.iana.org/assignments/tls-parameters/tls-parameters.xml#tls-parameters-6
ALERT_DESCRIPTION_CLOSE_NOTIFY
ALERT_DESCRIPTION_UNEXPECTED_MESSAGE
ALERT_DESCRIPTION_BAD_RECORD_MAC
ALERT_DESCRIPTION_RECORD_OVERFLOW
ALERT_DESCRIPTION_DECOMPRESSION_FAILURE
ALERT_DESCRIPTION_HANDSHAKE_FAILURE
ALERT_DESCRIPTION_BAD_CERTIFICATE
ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE
ALERT_DESCRIPTION_CERTIFICATE_REVOKED
ALERT_DESCRIPTION_CERTIFICATE_EXPIRED
ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN
ALERT_DESCRIPTION_ILLEGAL_PARAMETER
ALERT_DESCRIPTION_UNKNOWN_CA
ALERT_DESCRIPTION_ACCESS_DENIED
ALERT_DESCRIPTION_DECODE_ERROR
ALERT_DESCRIPTION_DECRYPT_ERROR
ALERT_DESCRIPTION_PROTOCOL_VERSION
ALERT_DESCRIPTION_INSUFFICIENT_SECURITY
ALERT_DESCRIPTION_INTERNAL_ERROR
ALERT_DESCRIPTION_USER_CANCELLED
ALERT_DESCRIPTION_NO_RENEGOTIATION
ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION
ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE
ALERT_DESCRIPTION_UNRECOGNIZED_NAME
ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE
ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE
ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY
"""
import textwrap
import re
import sys
import os
from collections import namedtuple
from enum import Enum as _Enum
import _ssl # if we can't import it, let the error propagate
from _ssl import OPENSSL_VERSION_NUMBER, OPENSSL_VERSION_INFO, OPENSSL_VERSION
from _ssl import _SSLContext
from _ssl import (
SSLError, SSLZeroReturnError, SSLWantReadError, SSLWantWriteError,
SSLSyscallError, SSLEOFError,
)
from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
from _ssl import (VERIFY_DEFAULT, VERIFY_CRL_CHECK_LEAF, VERIFY_CRL_CHECK_CHAIN,
VERIFY_X509_STRICT)
from _ssl import txt2obj as _txt2obj, nid2obj as _nid2obj
from _ssl import RAND_status, RAND_egd, RAND_add, RAND_bytes, RAND_pseudo_bytes
def _import_symbols(prefix):
for n in dir(_ssl):
if n.startswith(prefix):
globals()[n] = getattr(_ssl, n)
_import_symbols('OP_')
_import_symbols('ALERT_DESCRIPTION_')
_import_symbols('SSL_ERROR_')
from _ssl import HAS_SNI, HAS_ECDH, HAS_NPN
from _ssl import PROTOCOL_SSLv3, PROTOCOL_SSLv23, PROTOCOL_TLSv1
from _ssl import _OPENSSL_API_VERSION
_PROTOCOL_NAMES = {
PROTOCOL_TLSv1: "TLSv1",
PROTOCOL_SSLv23: "SSLv23",
PROTOCOL_SSLv3: "SSLv3",
}
try:
from _ssl import PROTOCOL_SSLv2
_SSLv2_IF_EXISTS = PROTOCOL_SSLv2
except ImportError:
_SSLv2_IF_EXISTS = None
else:
_PROTOCOL_NAMES[PROTOCOL_SSLv2] = "SSLv2"
try:
from _ssl import PROTOCOL_TLSv1_1, PROTOCOL_TLSv1_2
except ImportError:
pass
else:
_PROTOCOL_NAMES[PROTOCOL_TLSv1_1] = "TLSv1.1"
_PROTOCOL_NAMES[PROTOCOL_TLSv1_2] = "TLSv1.2"
if sys.platform == "win32":
from _ssl import enum_certificates, enum_crls
from socket import socket, AF_INET, SOCK_STREAM, create_connection
from socket import SOL_SOCKET, SO_TYPE
import base64 # for DER-to-PEM translation
import errno
socket_error = OSError # keep that public name in module namespace
if _ssl.HAS_TLS_UNIQUE:
CHANNEL_BINDING_TYPES = ['tls-unique']
else:
CHANNEL_BINDING_TYPES = []
# Disable weak or insecure ciphers by default
# (OpenSSL's default setting is 'DEFAULT:!aNULL:!eNULL')
# Enable a better set of ciphers by default
# This list has been explicitly chosen to:
# * Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE)
# * Prefer ECDHE over DHE for better performance
# * Prefer any AES-GCM over any AES-CBC for better performance and security
# * Then Use HIGH cipher suites as a fallback
# * Then Use 3DES as fallback which is secure but slow
# * Finally use RC4 as a fallback which is problematic but needed for
# compatibility some times.
# * Disable NULL authentication, NULL encryption, and MD5 MACs for security
# reasons
_DEFAULT_CIPHERS = (
'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:ECDH+RC4:'
'DH+RC4:RSA+RC4:!aNULL:!eNULL:!MD5'
)
# Restricted and more secure ciphers for the server side
# This list has been explicitly chosen to:
# * Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE)
# * Prefer ECDHE over DHE for better performance
# * Prefer any AES-GCM over any AES-CBC for better performance and security
# * Then Use HIGH cipher suites as a fallback
# * Then Use 3DES as fallback which is secure but slow
# * Disable NULL authentication, NULL encryption, MD5 MACs, DSS, and RC4 for
# security reasons
_RESTRICTED_SERVER_CIPHERS = (
'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
'!eNULL:!MD5:!DSS:!RC4'
)
class CertificateError(ValueError):
pass
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
http://tools.ietf.org/html/rfc6125#section-6.4.3
"""
pats = []
if not dn:
return False
leftmost, *remainder = dn.split(r'.')
wildcards = leftmost.count('*')
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
# than one wildcard per fragment. A survery of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
"too many wildcards in certificate DNS name: " + repr(dn))
# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()
# RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which
# the wildcard character comprises a label other than the left-most label.
if leftmost == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
# RFC 6125, section 6.4.3, subitem 3.
# The client SHOULD NOT attempt to match a presented identifier
# where the wildcard character is embedded within an A-label or
# U-label of an internationalized domain name.
pats.append(re.escape(leftmost))
else:
# Otherwise, '*' matches any dotless string, e.g. www*
pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
# add the remaining fragments, ignore any wildcards
for frag in remainder:
pats.append(re.escape(frag))
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
return pat.match(hostname)
def match_hostname(cert, hostname):
    """Verify that *cert* (in decoded format as returned by
    SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
    rules are followed, but IP addresses are not accepted for *hostname*.
    CertificateError is raised on failure. On success, the function
    returns nothing.
    """
    if not cert:
        # An empty peer cert means verification was disabled (CERT_NONE),
        # in which case there is nothing meaningful to match against.
        raise ValueError("empty or no certificate, match_hostname needs a "
                         "SSL socket or SSL context with either "
                         "CERT_OPTIONAL or CERT_REQUIRED")
    # Names that were checked but did not match, kept for the error message.
    dnsnames = []
    san = cert.get('subjectAltName', ())
    for key, value in san:
        if key == 'DNS':
            if _dnsname_match(value, hostname):
                return
            dnsnames.append(value)
    if not dnsnames:
        # The subject is only checked when there is no dNSName entry
        # in subjectAltName
        for sub in cert.get('subject', ()):
            for key, value in sub:
                # XXX according to RFC 2818, the most specific Common Name
                # must be used.
                if key == 'commonName':
                    if _dnsname_match(value, hostname):
                        return
                    dnsnames.append(value)
    # Reaching this point means no candidate matched; report what was seen.
    if len(dnsnames) > 1:
        raise CertificateError("hostname %r "
            "doesn't match either of %s"
            % (hostname, ', '.join(map(repr, dnsnames))))
    elif len(dnsnames) == 1:
        raise CertificateError("hostname %r "
            "doesn't match %r"
            % (hostname, dnsnames[0]))
    else:
        raise CertificateError("no appropriate commonName or "
            "subjectAltName fields were found")
DefaultVerifyPaths = namedtuple("DefaultVerifyPaths",
    "cafile capath openssl_cafile_env openssl_cafile openssl_capath_env "
    "openssl_capath")

def get_default_verify_paths():
    """Return paths to default cafile and capath.
    """
    cafile_env, cafile_default, capath_env, capath_default = \
        _ssl.get_default_verify_paths()
    # Environment variables shadow the compiled-in OpenSSL locations.
    cafile = os.environ.get(cafile_env, cafile_default)
    capath = os.environ.get(capath_env, capath_default)
    if not os.path.isfile(cafile):
        cafile = None
    if not os.path.isdir(capath):
        capath = None
    return DefaultVerifyPaths(cafile, capath, cafile_env, cafile_default,
                              capath_env, capath_default)
class _ASN1Object(namedtuple("_ASN1Object", "nid shortname longname oid")):
"""ASN.1 object identifier lookup
"""
__slots__ = ()
def __new__(cls, oid):
return super().__new__(cls, *_txt2obj(oid, name=False))
@classmethod
def fromnid(cls, nid):
"""Create _ASN1Object from OpenSSL numeric ID
"""
return super().__new__(cls, *_nid2obj(nid))
@classmethod
def fromname(cls, name):
"""Create _ASN1Object from short name, long name or OID
"""
return super().__new__(cls, *_txt2obj(name, name=True))
class Purpose(_ASN1Object, _Enum):
    """SSLContext purpose flags with X509v3 Extended Key Usage objects
    """
    # serverAuth: the context authenticates servers (i.e. client-side use).
    SERVER_AUTH = '1.3.6.1.5.5.7.3.1'
    # clientAuth: the context authenticates clients (i.e. server-side use).
    CLIENT_AUTH = '1.3.6.1.5.5.7.3.2'
class SSLContext(_SSLContext):
    """An SSLContext holds various SSL-related configuration options and
    data, such as certificates and possibly a private key."""
    __slots__ = ('protocol', '__weakref__')
    # Windows system certificate stores scanned by load_default_certs().
    _windows_cert_stores = ("CA", "ROOT")
    def __new__(cls, protocol, *args, **kwargs):
        # The C base class takes the protocol in __new__; the cipher set
        # is applied here so every context starts from a sane default.
        self = _SSLContext.__new__(cls, protocol)
        if protocol != _SSLv2_IF_EXISTS:
            self.set_ciphers(_DEFAULT_CIPHERS)
        return self
    def __init__(self, protocol):
        # Remember the protocol for introspection (the C layer doesn't
        # expose it).
        self.protocol = protocol
    def wrap_socket(self, sock, server_side=False,
                    do_handshake_on_connect=True,
                    suppress_ragged_eofs=True,
                    server_hostname=None):
        """Wrap *sock* in an SSLSocket bound to this context."""
        return SSLSocket(sock=sock, server_side=server_side,
                         do_handshake_on_connect=do_handshake_on_connect,
                         suppress_ragged_eofs=suppress_ragged_eofs,
                         server_hostname=server_hostname,
                         _context=self)
    def set_npn_protocols(self, npn_protocols):
        """Set the NPN protocol list; each entry is wire-encoded as a
        one-byte length followed by the ASCII protocol name."""
        protos = bytearray()
        for protocol in npn_protocols:
            b = bytes(protocol, 'ascii')
            if len(b) == 0 or len(b) > 255:
                raise SSLError('NPN protocols must be 1 to 255 in length')
            protos.append(len(b))
            protos.extend(b)
        self._set_npn_protocols(protos)
    def _load_windows_store_certs(self, storename, purpose):
        """Load trusted DER certs for *purpose* from a Windows cert store."""
        certs = bytearray()
        for cert, encoding, trust in enum_certificates(storename):
            # CA certs are never PKCS#7 encoded
            if encoding == "x509_asn":
                # trust is True (trust everything) or a set of EKU OIDs.
                if trust is True or purpose.oid in trust:
                    certs.extend(cert)
        self.load_verify_locations(cadata=certs)
        return certs
    def load_default_certs(self, purpose=Purpose.SERVER_AUTH):
        """Load the system's default CA certificates for *purpose*."""
        if not isinstance(purpose, _ASN1Object):
            raise TypeError(purpose)
        if sys.platform == "win32":
            for storename in self._windows_cert_stores:
                self._load_windows_store_certs(storename, purpose)
        else:
            self.set_default_verify_paths()
def create_default_context(purpose=Purpose.SERVER_AUTH, *, cafile=None,
                           capath=None, cadata=None):
    """Create a SSLContext object with default settings.

    *purpose* selects client-side (SERVER_AUTH, the default) or
    server-side (CLIENT_AUTH) hardening; *cafile*/*capath*/*cadata*
    optionally supply the trust anchors.

    NOTE: The protocol and settings may change anytime without prior
          deprecation. The values represent a fair balance between maximum
          compatibility and security.
    """
    if not isinstance(purpose, _ASN1Object):
        raise TypeError(purpose)
    context = SSLContext(PROTOCOL_SSLv23)
    # SSLv2 considered harmful.
    context.options |= OP_NO_SSLv2
    # SSLv3 has problematic security and is only required for really old
    # clients such as IE6 on Windows XP
    context.options |= OP_NO_SSLv3
    # disable compression to prevent CRIME attacks (OpenSSL 1.0+)
    context.options |= getattr(_ssl, "OP_NO_COMPRESSION", 0)
    if purpose == Purpose.SERVER_AUTH:
        # verify certs and host name in client mode
        context.verify_mode = CERT_REQUIRED
        context.check_hostname = True
    elif purpose == Purpose.CLIENT_AUTH:
        # Prefer the server's ciphers by default so that we get stronger
        # encryption
        context.options |= getattr(_ssl, "OP_CIPHER_SERVER_PREFERENCE", 0)
        # Use single use keys in order to improve forward secrecy
        context.options |= getattr(_ssl, "OP_SINGLE_DH_USE", 0)
        context.options |= getattr(_ssl, "OP_SINGLE_ECDH_USE", 0)
        # disallow ciphers with known vulnerabilities
        context.set_ciphers(_RESTRICTED_SERVER_CIPHERS)
    if cafile or capath or cadata:
        context.load_verify_locations(cafile, capath, cadata)
    elif context.verify_mode != CERT_NONE:
        # no explicit cafile, capath or cadata but the verify mode is
        # CERT_OPTIONAL or CERT_REQUIRED. Let's try to load default system
        # root CA certificates for the given purpose. This may fail silently.
        context.load_default_certs(purpose)
    return context
def _create_stdlib_context(protocol=PROTOCOL_SSLv23, *, cert_reqs=None,
                           check_hostname=False, purpose=Purpose.SERVER_AUTH,
                           certfile=None, keyfile=None,
                           cafile=None, capath=None, cadata=None):
    """Create a SSLContext object for Python stdlib modules

    All Python stdlib modules shall use this function to create SSLContext
    objects in order to keep common settings in one place. The configuration
    is less strict than create_default_context()'s to increase backward
    compatibility.
    """
    if not isinstance(purpose, _ASN1Object):
        raise TypeError(purpose)
    context = SSLContext(protocol)
    # SSLv2 considered harmful.
    context.options |= OP_NO_SSLv2
    if cert_reqs is not None:
        context.verify_mode = cert_reqs
    context.check_hostname = check_hostname
    # A private key without a certificate is useless; catch it early.
    if keyfile and not certfile:
        raise ValueError("certfile must be specified")
    if certfile or keyfile:
        context.load_cert_chain(certfile, keyfile)
    # load CA root certs
    if cafile or capath or cadata:
        context.load_verify_locations(cafile, capath, cadata)
    elif context.verify_mode != CERT_NONE:
        # no explicit cafile, capath or cadata but the verify mode is
        # CERT_OPTIONAL or CERT_REQUIRED. Let's try to load default system
        # root CA certificates for the given purpose. This may fail silently.
        context.load_default_certs(purpose)
    return context
class SSLSocket(socket):
    """This class implements a subtype of socket.socket that wraps
    the underlying OS socket in an SSL context when necessary, and
    provides read and write methods over that channel."""
    def __init__(self, sock=None, keyfile=None, certfile=None,
                 server_side=False, cert_reqs=CERT_NONE,
                 ssl_version=PROTOCOL_SSLv23, ca_certs=None,
                 do_handshake_on_connect=True,
                 family=AF_INET, type=SOCK_STREAM, proto=0, fileno=None,
                 suppress_ragged_eofs=True, npn_protocols=None, ciphers=None,
                 server_hostname=None,
                 _context=None):
        if _context:
            self._context = _context
        else:
            # Legacy path: build an implicit context from the individual
            # keyword arguments.
            if server_side and not certfile:
                raise ValueError("certfile must be specified for server-side "
                                 "operations")
            if keyfile and not certfile:
                raise ValueError("certfile must be specified")
            if certfile and not keyfile:
                keyfile = certfile
            self._context = SSLContext(ssl_version)
            self._context.verify_mode = cert_reqs
            if ca_certs:
                self._context.load_verify_locations(ca_certs)
            if certfile:
                self._context.load_cert_chain(certfile, keyfile)
            if npn_protocols:
                self._context.set_npn_protocols(npn_protocols)
            if ciphers:
                self._context.set_ciphers(ciphers)
            self.keyfile = keyfile
            self.certfile = certfile
            self.cert_reqs = cert_reqs
            self.ssl_version = ssl_version
            self.ca_certs = ca_certs
            self.ciphers = ciphers
        # Can't use sock.type as other flags (such as SOCK_NONBLOCK) get
        # mixed in.
        if sock.getsockopt(SOL_SOCKET, SO_TYPE) != SOCK_STREAM:
            raise NotImplementedError("only stream sockets are supported")
        if server_side and server_hostname:
            raise ValueError("server_hostname can only be specified "
                             "in client mode")
        if self._context.check_hostname and not server_hostname:
            if HAS_SNI:
                raise ValueError("check_hostname requires server_hostname")
            else:
                raise ValueError("check_hostname requires server_hostname, "
                                 "but it's not supported by your OpenSSL "
                                 "library")
        self.server_side = server_side
        self.server_hostname = server_hostname
        self.do_handshake_on_connect = do_handshake_on_connect
        self.suppress_ragged_eofs = suppress_ragged_eofs
        if sock is not None:
            # Take over the OS-level socket from *sock* and detach it.
            socket.__init__(self,
                            family=sock.family,
                            type=sock.type,
                            proto=sock.proto,
                            fileno=sock.fileno())
            self.settimeout(sock.gettimeout())
            sock.detach()
        elif fileno is not None:
            socket.__init__(self, fileno=fileno)
        else:
            socket.__init__(self, family=family, type=type, proto=proto)
        # See if we are connected
        try:
            self.getpeername()
        except OSError as e:
            if e.errno != errno.ENOTCONN:
                raise
            connected = False
        else:
            connected = True
        self._closed = False
        self._sslobj = None
        self._connected = connected
        if connected:
            # create the SSL object
            try:
                self._sslobj = self._context._wrap_socket(self, server_side,
                                                          server_hostname)
                if do_handshake_on_connect:
                    timeout = self.gettimeout()
                    if timeout == 0.0:
                        # non-blocking
                        raise ValueError("do_handshake_on_connect should not be specified for non-blocking sockets")
                    self.do_handshake()
            except (OSError, ValueError):
                self.close()
                raise
    @property
    def context(self):
        """The SSLContext this socket is bound to."""
        return self._context
    @context.setter
    def context(self, ctx):
        self._context = ctx
        self._sslobj.context = ctx
    def dup(self):
        """Duplicating an SSLSocket is not supported."""
        # BUGFIX: the original raised NotImplemented(...), but
        # NotImplemented is a constant, not an exception type, so calling
        # it produced a confusing TypeError instead of the intended error.
        raise NotImplementedError("Can't dup() %s instances" %
                                  self.__class__.__name__)
    def _checkClosed(self, msg=None):
        # raise an exception here if you wish to check for spurious closes
        pass
    def _check_connected(self):
        if not self._connected:
            # getpeername() will raise ENOTCONN if the socket is really
            # not connected; note that we can be connected even without
            # _connected being set, e.g. if connect() first returned
            # EAGAIN.
            self.getpeername()
    def read(self, len=0, buffer=None):
        """Read up to LEN bytes and return them.
        Return zero-length string on EOF."""
        self._checkClosed()
        if not self._sslobj:
            raise ValueError("Read on closed or unwrapped SSL socket.")
        try:
            if buffer is not None:
                v = self._sslobj.read(len, buffer)
            else:
                v = self._sslobj.read(len or 1024)
            return v
        except SSLError as x:
            if x.args[0] == SSL_ERROR_EOF and self.suppress_ragged_eofs:
                # Treat a ragged EOF like a clean one when requested.
                if buffer is not None:
                    return 0
                else:
                    return b''
            else:
                raise
    def write(self, data):
        """Write DATA to the underlying SSL channel.  Returns
        number of bytes of DATA actually transmitted."""
        self._checkClosed()
        if not self._sslobj:
            raise ValueError("Write on closed or unwrapped SSL socket.")
        return self._sslobj.write(data)
    def getpeercert(self, binary_form=False):
        """Returns a formatted version of the data in the
        certificate provided by the other end of the SSL channel.
        Return None if no certificate was provided, {} if a
        certificate was provided, but not validated."""
        self._checkClosed()
        self._check_connected()
        return self._sslobj.peer_certificate(binary_form)
    def selected_npn_protocol(self):
        """Return the protocol negotiated via NPN, or None."""
        self._checkClosed()
        if not self._sslobj or not _ssl.HAS_NPN:
            return None
        else:
            return self._sslobj.selected_npn_protocol()
    def cipher(self):
        """Return a (name, protocol, secret_bits) tuple for the active
        cipher, or None before the handshake."""
        self._checkClosed()
        if not self._sslobj:
            return None
        else:
            return self._sslobj.cipher()
    def compression(self):
        """Return the compression algorithm in use, or None."""
        self._checkClosed()
        if not self._sslobj:
            return None
        else:
            return self._sslobj.compression()
    def send(self, data, flags=0):
        """Send *data*; flags are rejected once the socket is wrapped."""
        self._checkClosed()
        if self._sslobj:
            if flags != 0:
                raise ValueError(
                    "non-zero flags not allowed in calls to send() on %s" %
                    self.__class__)
            try:
                v = self._sslobj.write(data)
            except SSLError as x:
                # Non-blocking sockets report "want read/write" as 0 sent.
                if x.args[0] == SSL_ERROR_WANT_READ:
                    return 0
                elif x.args[0] == SSL_ERROR_WANT_WRITE:
                    return 0
                else:
                    raise
            else:
                return v
        else:
            return socket.send(self, data, flags)
    def sendto(self, data, flags_or_addr, addr=None):
        self._checkClosed()
        if self._sslobj:
            raise ValueError("sendto not allowed on instances of %s" %
                             self.__class__)
        elif addr is None:
            return socket.sendto(self, data, flags_or_addr)
        else:
            return socket.sendto(self, data, flags_or_addr, addr)
    def sendmsg(self, *args, **kwargs):
        # Ensure programs don't send data unencrypted if they try to
        # use this method.
        raise NotImplementedError("sendmsg not allowed on instances of %s" %
                                  self.__class__)
    def sendall(self, data, flags=0):
        """Send all of *data*, looping over send() as needed."""
        self._checkClosed()
        if self._sslobj:
            if flags != 0:
                raise ValueError(
                    "non-zero flags not allowed in calls to sendall() on %s" %
                    self.__class__)
            amount = len(data)
            count = 0
            while (count < amount):
                v = self.send(data[count:])
                count += v
            return amount
        else:
            return socket.sendall(self, data, flags)
    def recv(self, buflen=1024, flags=0):
        self._checkClosed()
        if self._sslobj:
            if flags != 0:
                raise ValueError(
                    "non-zero flags not allowed in calls to recv() on %s" %
                    self.__class__)
            return self.read(buflen)
        else:
            return socket.recv(self, buflen, flags)
    def recv_into(self, buffer, nbytes=None, flags=0):
        self._checkClosed()
        if buffer and (nbytes is None):
            nbytes = len(buffer)
        elif nbytes is None:
            nbytes = 1024
        if self._sslobj:
            if flags != 0:
                raise ValueError(
                    "non-zero flags not allowed in calls to recv_into() on %s" %
                    self.__class__)
            return self.read(nbytes, buffer)
        else:
            return socket.recv_into(self, buffer, nbytes, flags)
    def recvfrom(self, buflen=1024, flags=0):
        self._checkClosed()
        if self._sslobj:
            raise ValueError("recvfrom not allowed on instances of %s" %
                             self.__class__)
        else:
            return socket.recvfrom(self, buflen, flags)
    def recvfrom_into(self, buffer, nbytes=None, flags=0):
        self._checkClosed()
        if self._sslobj:
            raise ValueError("recvfrom_into not allowed on instances of %s" %
                             self.__class__)
        else:
            return socket.recvfrom_into(self, buffer, nbytes, flags)
    def recvmsg(self, *args, **kwargs):
        raise NotImplementedError("recvmsg not allowed on instances of %s" %
                                  self.__class__)
    def recvmsg_into(self, *args, **kwargs):
        raise NotImplementedError("recvmsg_into not allowed on instances of "
                                  "%s" % self.__class__)
    def pending(self):
        """Return the number of already-decrypted bytes buffered."""
        self._checkClosed()
        if self._sslobj:
            return self._sslobj.pending()
        else:
            return 0
    def shutdown(self, how):
        self._checkClosed()
        self._sslobj = None
        socket.shutdown(self, how)
    def unwrap(self):
        """Shut the SSL layer down and return the bare socket."""
        if self._sslobj:
            s = self._sslobj.shutdown()
            self._sslobj = None
            return s
        else:
            raise ValueError("No SSL wrapper around " + str(self))
    def _real_close(self):
        self._sslobj = None
        socket._real_close(self)
    def do_handshake(self, block=False):
        """Perform a TLS/SSL handshake."""
        self._check_connected()
        timeout = self.gettimeout()
        try:
            # block=True temporarily switches a non-blocking socket to
            # blocking mode for the duration of the handshake.
            if timeout == 0.0 and block:
                self.settimeout(None)
            self._sslobj.do_handshake()
        finally:
            self.settimeout(timeout)
        if self.context.check_hostname:
            if not self.server_hostname:
                raise ValueError("check_hostname needs server_hostname "
                                 "argument")
            match_hostname(self.getpeercert(), self.server_hostname)
    def _real_connect(self, addr, connect_ex):
        if self.server_side:
            raise ValueError("can't connect in server-side mode")
        # Here we assume that the socket is client-side, and not
        # connected at the time of the call.  We connect it, then wrap it.
        if self._connected:
            raise ValueError("attempt to connect already-connected SSLSocket!")
        self._sslobj = self.context._wrap_socket(self, False, self.server_hostname)
        try:
            if connect_ex:
                rc = socket.connect_ex(self, addr)
            else:
                rc = None
                socket.connect(self, addr)
            if not rc:
                self._connected = True
                if self.do_handshake_on_connect:
                    self.do_handshake()
            return rc
        except (OSError, ValueError):
            self._sslobj = None
            raise
    def connect(self, addr):
        """Connects to remote ADDR, and then wraps the connection in
        an SSL channel."""
        self._real_connect(addr, False)
    def connect_ex(self, addr):
        """Connects to remote ADDR, and then wraps the connection in
        an SSL channel."""
        return self._real_connect(addr, True)
    def accept(self):
        """Accepts a new connection from a remote client, and returns
        a tuple containing that new connection wrapped with a server-side
        SSL channel, and the address of the remote client."""
        newsock, addr = socket.accept(self)
        newsock = self.context.wrap_socket(newsock,
                    do_handshake_on_connect=self.do_handshake_on_connect,
                    suppress_ragged_eofs=self.suppress_ragged_eofs,
                    server_side=True)
        return newsock, addr
    def get_channel_binding(self, cb_type="tls-unique"):
        """Get channel binding data for current connection.  Raise ValueError
        if the requested `cb_type` is not supported.  Return bytes of the data
        or None if the data is not available (e.g. before the handshake).
        """
        if cb_type not in CHANNEL_BINDING_TYPES:
            raise ValueError("Unsupported channel binding type")
        if cb_type != "tls-unique":
            raise NotImplementedError(
                            "{0} channel binding type not implemented"
                            .format(cb_type))
        if self._sslobj is None:
            return None
        return self._sslobj.tls_unique_cb()
def wrap_socket(sock, keyfile=None, certfile=None,
                server_side=False, cert_reqs=CERT_NONE,
                ssl_version=PROTOCOL_SSLv23, ca_certs=None,
                do_handshake_on_connect=True,
                suppress_ragged_eofs=True,
                ciphers=None):
    """Convert a plain socket into an SSLSocket.

    Module-level convenience wrapper; SSLSocket.__init__ builds an
    implicit SSLContext from the individual keyword arguments.
    """
    return SSLSocket(sock=sock, keyfile=keyfile, certfile=certfile,
                     server_side=server_side, cert_reqs=cert_reqs,
                     ssl_version=ssl_version, ca_certs=ca_certs,
                     do_handshake_on_connect=do_handshake_on_connect,
                     suppress_ragged_eofs=suppress_ragged_eofs,
                     ciphers=ciphers)
# some utility functions
def cert_time_to_seconds(cert_time):
    """Takes a date-time string in standard ASN1_print form
    ("MON DAY 24HOUR:MINUTE:SEC YEAR GMT") and returns
    a time value in seconds past the Epoch.

    The timestamp is interpreted as UTC, as certificate validity times
    are expressed in GMT (see ASN1_TIME_print()).

    Raises ValueError if *cert_time* does not match the expected format.
    """
    from time import strptime
    from calendar import timegm

    # Month names are matched against this fixed C-locale list so the
    # result does not depend on the process locale (a locale-aware "%b"
    # in strptime would fail on non-English locales).
    months = (
        "Jan", "Feb", "Mar", "Apr", "May", "Jun",
        "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"
    )
    time_format = ' %d %H:%M:%S %Y GMT'  # NOTE: no month, fixed GMT
    try:
        month_number = months.index(cert_time[:3].title()) + 1
    except ValueError:
        raise ValueError('time data %r does not match '
                         'format "%%b%s"' % (cert_time, time_format))
    else:
        # BUGFIX: the original used time.mktime(), which interprets the
        # parsed struct_time as *local* time even though the certificate
        # timestamp is GMT, skewing the result by the UTC offset.
        # calendar.timegm() treats the tuple as UTC.
        tt = strptime(cert_time[3:], time_format)
        return timegm((tt[0], month_number) + tt[2:6])
PEM_HEADER = "-----BEGIN CERTIFICATE-----"
PEM_FOOTER = "-----END CERTIFICATE-----"

def DER_cert_to_PEM_cert(der_cert_bytes):
    """Takes a certificate in binary DER format and returns the
    PEM version of it as a string."""
    encoded = str(base64.standard_b64encode(der_cert_bytes), 'ASCII', 'strict')
    # Base64 payload is wrapped at 64 columns, framed by the PEM markers.
    pieces = [PEM_HEADER, textwrap.fill(encoded, 64), PEM_FOOTER]
    return '\n'.join(pieces) + '\n'

def PEM_cert_to_DER_cert(pem_cert_string):
    """Takes a certificate in ASCII PEM format and returns the
    DER-encoded version of it as a byte sequence"""
    if not pem_cert_string.startswith(PEM_HEADER):
        raise ValueError("Invalid PEM encoding; must start with %s"
                         % PEM_HEADER)
    stripped = pem_cert_string.strip()
    if not stripped.endswith(PEM_FOOTER):
        raise ValueError("Invalid PEM encoding; must end with %s"
                         % PEM_FOOTER)
    payload = stripped[len(PEM_HEADER):-len(PEM_FOOTER)]
    # decodebytes() ignores the embedded newlines for us.
    return base64.decodebytes(payload.encode('ASCII', 'strict'))
def get_server_certificate(addr, ssl_version=PROTOCOL_SSLv3, ca_certs=None):
    """Retrieve the certificate from the server at the specified address,
    and return it as a PEM-encoded string.
    If 'ca_certs' is specified, validate the server cert against it.
    If 'ssl_version' is specified, use it in the connection attempt.

    NOTE(review): the default protocol here is SSLv3, which is insecure
    (POODLE) and often disabled in OpenSSL builds -- callers should pass
    PROTOCOL_SSLv23 explicitly; confirm before changing the default.
    """
    host, port = addr
    # Only verify the peer when the caller supplied trust anchors.
    if ca_certs is not None:
        cert_reqs = CERT_REQUIRED
    else:
        cert_reqs = CERT_NONE
    context = _create_stdlib_context(ssl_version,
                                     cert_reqs=cert_reqs,
                                     cafile=ca_certs)
    with create_connection(addr) as sock:
        with context.wrap_socket(sock) as sslsock:
            # Fetch the peer certificate in DER form, then re-encode.
            dercert = sslsock.getpeercert(True)
    return DER_cert_to_PEM_cert(dercert)
def get_protocol_name(protocol_code):
    """Return a human-readable name for an SSL protocol constant."""
    try:
        return _PROTOCOL_NAMES[protocol_code]
    except KeyError:
        return '<unknown>'
| lgpl-3.0 |
romonzaman/newfies-dialer | newfies/dialer_settings/south_migrations/0002_add_max_number_contact.py | 4 | 2301 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: add ``DialerSetting.max_number_contact``."""
    def forwards(self, orm):
        # Adding field 'DialerSetting.max_number_contact'
        db.add_column('dialer_setting', 'max_number_contact',
                      self.gf('django.db.models.fields.IntegerField')(default=1000000),
                      keep_default=False)
    def backwards(self, orm):
        # Deleting field 'DialerSetting.max_number_contact'
        db.delete_column('dialer_setting', 'max_number_contact')
    # Frozen ORM snapshot generated by South; used to reconstruct the model
    # state at this point in history.  Do not edit by hand.
    models = {
        u'dialer_settings.dialersetting': {
            'Meta': {'object_name': 'DialerSetting', 'db_table': "'dialer_setting'"},
            'blacklist': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
            'callmaxduration': ('django.db.models.fields.IntegerField', [], {'default': "'1800'", 'null': 'True', 'blank': 'True'}),
            'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'max_calltimeout': ('django.db.models.fields.IntegerField', [], {'default': "'45'", 'null': 'True', 'blank': 'True'}),
            'max_frequency': ('django.db.models.fields.IntegerField', [], {'default': "'100'", 'null': 'True', 'blank': 'True'}),
            'max_number_campaign': ('django.db.models.fields.IntegerField', [], {'default': '100'}),
            'max_number_contact': ('django.db.models.fields.IntegerField', [], {'default': '1000000'}),
            'max_number_subscriber_campaign': ('django.db.models.fields.IntegerField', [], {'default': '100000'}),
            'maxretry': ('django.db.models.fields.IntegerField', [], {'default': "'3'", 'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
            'updated_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'whitelist': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'})
        }
    }
    complete_apps = ['dialer_settings']
| mpl-2.0 |
neillc/zookeepr | zkpylons/controllers/product_category.py | 3 | 5696 | import logging
from pylons import request, response, session, tmpl_context as c
from zkpylons.lib.helpers import redirect_to
from pylons.decorators import validate
from pylons.decorators.rest import dispatch_on
from formencode import validators, htmlfill, ForEach, Invalid
from formencode.variabledecode import NestedVariables
from zkpylons.lib.base import BaseController, render
from zkpylons.lib.ssl_requirement import enforce_ssl
from zkpylons.lib.validators import BaseSchema, ProductValidator
import zkpylons.lib.helpers as h
from authkit.authorize.pylons_adaptors import authorize
from authkit.permissions import ValidAuthKitUser
from zkpylons.lib.mail import email
from zkpylons.model import meta
from zkpylons.model.product import Product, ProductInclude
from zkpylons.model.product_category import ProductCategory
from zkpylons.config.lca_info import lca_info
log = logging.getLogger(__name__)
class NotExistingProductCategoryValidator(validators.FancyValidator):
    """Reject a submitted category name that is already used by a
    *different* ProductCategory (editing a category keeps its own name
    valid via the ``c.product_category`` comparison)."""
    def validate_python(self, values, state):
        product_category = ProductCategory.find_by_name(values['product_category']['name'])
        if product_category != None and product_category != c.product_category:
            message = "Duplicate product category name"
            error_dict = {'product_category.name': "Category name already in use"}
            raise Invalid(message, values, state, error_dict=error_dict)
class ProductCategorySchema(BaseSchema):
    """Field-level validation for a single product category form."""
    name = validators.String(not_empty=True)
    description = validators.String(not_empty=True)
    note = validators.String()
    display = validators.String(not_empty=True)
    display_mode = validators.String()
    display_order = validators.Int(min=0, max=2000000, not_empty=True)
    invoice_free_products = validators.Bool(if_missing=False)
    min_qty = validators.Int(min=0, max=2000000)
    max_qty = validators.Int(min=0, max=2000000)
    # TODO: check that min_qty <= max_qty
class NewProductCategorySchema(BaseSchema):
    """Form schema for creating a category; additionally rejects
    duplicate category names."""
    product_category = ProductCategorySchema()
    pre_validators = [NestedVariables]
    chained_validators = [NotExistingProductCategoryValidator()]
class EditProductCategorySchema(BaseSchema):
    """Form schema for editing a category (no duplicate-name check here;
    NOTE(review): likely relies on the edit form keeping the same name --
    confirm whether renames should also be checked)."""
    product_category = ProductCategorySchema()
    pre_validators = [NestedVariables]
class ProductCategoryController(BaseController):
    """CRUD controller for product categories.

    Every action runs over SSL and is restricted to organisers via the
    decorators on ``__before__``.
    """

    @enforce_ssl(required_all=True)
    @authorize(h.auth.has_organiser_role)
    def __before__(self, **kwargs):
        # Executed before every action; exists only to carry the decorators.
        pass

    @dispatch_on(POST="_new")
    def new(self):
        """Render the empty creation form; POST submissions go to _new()."""
        return render('/product_category/new.mako')

    @validate(schema=NewProductCategorySchema(), form='new', post_only=True, on_get=True, variable_decode=True)
    def _new(self):
        """Validate the submitted form and create the new category."""
        results = self.form_result['product_category']
        c.product_category = ProductCategory(**results)
        meta.Session.add(c.product_category)
        meta.Session.commit()
        h.flash("Category created")
        redirect_to(action='view', id=c.product_category.id)

    def view(self, id):
        """Show a single product category."""
        c.product_category = ProductCategory.find_by_id(id)
        return render('/product_category/view.mako')

    def stats(self, id):
        """Show statistics for one category, with links to all the others."""
        c.can_edit = True
        c.product_category = ProductCategory.find_by_id(id)
        c.product_categories = ProductCategory.find_all()
        return render('/product_category/stats.mako')

    def index(self):
        """List all product categories."""
        c.can_edit = True
        c.product_category_collection = ProductCategory.find_all()
        return render('/product_category/list.mako')

    @dispatch_on(POST="_edit")
    def edit(self, id):
        """Render the edit form pre-filled with the category's current values."""
        c.product_category = ProductCategory.find_by_id(id)
        defaults = h.object_to_defaults(c.product_category, 'product_category')
        form = render('/product_category/edit.mako')
        return htmlfill.render(form, defaults)

    @validate(schema=EditProductCategorySchema(), form='edit', post_only=True, on_get=True, variable_decode=True)
    def _edit(self, id):
        """Validate the submitted form and update the category in place."""
        product_category = ProductCategory.find_by_id(id)
        for key in self.form_result['product_category']:
            setattr(product_category, key, self.form_result['product_category'][key])
        # update the objects with the validated form data
        meta.Session.commit()
        h.flash("The product_category has been updated successfully.")
        redirect_to(action='view', id=id)

    @dispatch_on(POST="_delete")
    def delete(self, id):
        """Delete the product_category

        GET will return a form asking for approval.
        POST requests will delete the item.
        """
        c.product_category = ProductCategory.find_by_id(id)
        return render('/product_category/confirm_delete.mako')

    @validate(schema=None, form='delete', post_only=True, on_get=True, variable_decode=True)
    def _delete(self, id):
        """Delete the category along with its products and product-includes."""
        c.product_category = ProductCategory.find_by_id(id)
        # For some reason cascading isn't working for me. Likely I just don't understand SA so I'll do it this way:
        # first delete all of the products
        for product in c.product_category.products:
            # We also delete all of the productincludes for the products
            for include in ProductInclude.find_by_product(product.id):
                meta.Session.delete(include)
                meta.Session.commit()
            meta.Session.delete(product)
            meta.Session.commit()
        # Also delete any includes of the category
        for include in ProductInclude.find_by_category(id):
            meta.Session.delete(include)
            meta.Session.commit()
        meta.Session.delete(c.product_category)
        meta.Session.commit()
        h.flash("Category has been deleted.")
        redirect_to('index')
| gpl-2.0 |
opsdroid/opsdroid | opsdroid/connector/telegram/events.py | 3 | 2644 | """Events for the Telegram Connector."""
from opsdroid import events
class Poll(events.Event):
    """Telegram event emitted when a poll message arrives."""

    def __init__(self, poll, question, options, total_votes, *args, **kwargs):
        """Store the interesting pieces of a Telegram poll payload.

        Attributes made available on the event:

        - ``poll`` - the raw poll details extracted from the payload
        - ``question`` - the question the poll asks
        - ``options`` - the list of answer options offered by the poll
        - ``total_votes`` - how many votes the poll received in total

        Telegram polls and quizzes carry additional flags (whether the poll
        is closed, whether multiple answers are allowed, and so on); those
        remain reachable through the ``poll`` attribute.
        """
        super().__init__(*args, **kwargs)
        self.total_votes = total_votes
        self.options = options
        self.question = question
        self.poll = poll
class Contact(events.Event):
    """Event class that triggers when a contact is sent."""

    def __init__(self, contact, phone_number, first_name, *args, **kwargs):
        """Contain some attributes that you can access.

        - ``contact`` - The extracted contact details from the payload
        - ``phone_number`` - Extracted phone number from contact
        - ``first_name`` - Extracted first name from contact

        Your contact event might contain other information such as the
        contact's last name or a ``vcard`` field; you can use the ``contact``
        attribute to access more information if available.
        """
        super().__init__(*args, **kwargs)
        self.contact = contact
        self.phone_number = phone_number
        self.first_name = first_name
class Location(events.Event):
    """Event class that triggers when a location message is sent."""

    def __init__(self, location, latitude, longitude, *args, **kwargs):
        """Contain some attributes that you can access.

        - ``location`` - The extracted location details from the payload
        - ``latitude`` - Extracted latitude from the payload
        - ``longitude`` - Extracted longitude from the payload

        Since Telegram doesn't add any information to the location other than
        the latitude and longitude, you can probably just access these
        attributes; we decided to include the ``location`` attribute in case
        Telegram adds more useful things to this message type.
        """
        super().__init__(*args, **kwargs)
        self.location = location
        self.latitude = latitude
        self.longitude = longitude
| apache-2.0 |
linjoahow/w17test_1 | man.py | 15 | 21779 |
import cherrypy
# 這是 MAN 類別的定義
'''
# 在 application 中導入子模組
import programs.cdag30.man as cdag30_man
# 加入 cdag30 模組下的 man.py 且以子模組 man 對應其 MAN() 類別
root.cdag30.man = cdag30_man.MAN()
# 完成設定後, 可以利用
/cdag30/man/assembly
# 呼叫 man.py 中 MAN 類別的 assembly 方法
'''
class MAN(object):
    """CherryPy controller that assembles a Lego man in Creo via Pro/Web.Link."""

    # Each team uses index to guide the execution of the pages that follow.
    @cherrypy.expose
    def index(self, *args, **kwargs):
        """Landing page: links to the assembly action and the required part files."""
        outstring = '''
這是 2015CDA 協同專案下的cdag7 模組下的 MAN 類別.<br /><br />
<!-- 這裡採用相對連結, 而非網址的絕對連結 (這一段為 html 註解) -->
<a href="assembly">執行 MAN 類別中的 assembly 方法</a><br /><br />
請確定下列零件於 V:/home/lego/man 目錄中, 且開啟空白 Creo 組立檔案.<br />
<a href="/static/lego_man.7z">lego_man.7z</a>(滑鼠右鍵存成 .7z 檔案)<br />
'''
        return outstring
    @cherrypy.expose
    def assembly(self, *args, **kwargs):
        """Return an HTML page whose embedded Pro/Web.Link JavaScript drives
        Creo to assemble the Lego-man parts from v:/home/lego/man.

        The JavaScript must run inside Creo's embedded browser; its own
        comments (in Chinese) document each constraint step.  The page body
        is a single template string and is returned verbatim.
        """
        outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<script type="text/javascript" src="/static/weblink/pfcUtils.js"></script>
<script type="text/javascript" src="/static/weblink/wl_header.js"></script>
</head>
<body>
</script><script language="JavaScript">
/*設計一個零件組立函式*/
// featID 為 第組立件第一個組立零件的編號
// inc 則為 part1 的組立順序編號,一個入組立檔編號為 featID+0
// part2 為外加的零件名稱
function axis_plane_assembly(session, assembly, transf, featID, inc, part2, axis1, plane1, axis2, plane2){
var descr = pfcCreate("pfcModelDescriptor").CreateFromFileName ("v:/home/lego/man/"+part2);
var componentModel = session.GetModelFromDescr(descr);
var componentModel = session.RetrieveModel(descr);
if (componentModel != void null)
{
var asmcomp = assembly.AssembleComponent (componentModel, transf);
}
var ids = pfcCreate("intseq");
ids.Append(featID+inc);
var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids);
subassembly = subPath.Leaf;
var asmDatums = new Array(axis1, plane1);
var compDatums = new Array(axis2, plane2);
var relation = new Array (pfcCreate("pfcComponentConstraintType").ASM_CONSTRAINT_ALIGN, pfcCreate("pfcComponentConstraintType").ASM_CONSTRAINT_MATE);
var relationItem = new Array(pfcCreate("pfcModelItemType").ITEM_AXIS, pfcCreate("pfcModelItemType").ITEM_SURFACE);
var constrs = pfcCreate("pfcComponentConstraints");
for (var i = 0; i < 2; i++)
{
var asmItem = subassembly.GetItemByName (relationItem[i], asmDatums [i]);
if (asmItem == void null)
{
interactFlag = true;
continue;
}
var compItem = componentModel.GetItemByName (relationItem[i], compDatums [i]);
if (compItem == void null)
{
interactFlag = true;
continue;
}
var MpfcSelect = pfcCreate ("MpfcSelect");
var asmSel = MpfcSelect.CreateModelItemSelection (asmItem, subPath);
var compSel = MpfcSelect.CreateModelItemSelection (compItem, void null);
var constr = pfcCreate("pfcComponentConstraint").Create (relation[i]);
constr.AssemblyReference = asmSel;
constr.ComponentReference = compSel;
constr.Attributes = pfcCreate("pfcConstraintAttributes").Create (true, false);
constrs.Append(constr);
}
asmcomp.SetConstraints(constrs, void null);
}
// 以上為 axis_plane_assembly() 函式
//
function three_plane_assembly(session, assembly, transf, featID, inc, part2, plane1, plane2, plane3, plane4, plane5, plane6){
var descr = pfcCreate("pfcModelDescriptor").CreateFromFileName ("v:/home/lego/man/"+part2);
var componentModel = session.GetModelFromDescr(descr);
var componentModel = session.RetrieveModel(descr);
if (componentModel != void null)
{
var asmcomp = assembly.AssembleComponent (componentModel, transf);
}
var ids = pfcCreate("intseq");
ids.Append(featID+inc);
var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids);
subassembly = subPath.Leaf;
var constrs = pfcCreate("pfcComponentConstraints");
var asmDatums = new Array(plane1, plane2, plane3);
var compDatums = new Array(plane4, plane5, plane6);
var MpfcSelect = pfcCreate("MpfcSelect");
for (var i = 0; i < 3; i++)
{
var asmItem = subassembly.GetItemByName(pfcCreate("pfcModelItemType").ITEM_SURFACE, asmDatums[i]);
if (asmItem == void null)
{
interactFlag = true;
continue;
}
var compItem = componentModel.GetItemByName(pfcCreate("pfcModelItemType").ITEM_SURFACE, compDatums[i]);
if (compItem == void null)
{
interactFlag = true;
continue;
}
var asmSel = MpfcSelect.CreateModelItemSelection(asmItem, subPath);
var compSel = MpfcSelect.CreateModelItemSelection(compItem, void null);
var constr = pfcCreate("pfcComponentConstraint").Create(pfcCreate("pfcComponentConstraintType").ASM_CONSTRAINT_MATE);
constr.AssemblyReference = asmSel;
constr.ComponentReference = compSel;
constr.Attributes = pfcCreate("pfcConstraintAttributes").Create (false, false);
constrs.Append(constr);
}
asmcomp.SetConstraints(constrs, void null);
}
// 以上為 three_plane_assembly() 函式
//
// 假如 Creo 所在的操作系統不是 Windows 環境
if (!pfcIsWindows())
// 則啟動對應的 UniversalXPConnect 執行權限 (等同 Windows 下的 ActiveX)
netscape.security.PrivilegeManager.enablePrivilege("UniversalXPConnect");
// pfcGetProESession() 是位於 pfcUtils.js 中的函式, 確定此 JavaScript 是在嵌入式瀏覽器中執行
var session = pfcGetProESession();
// 設定 config option, 不要使用元件組立流程中內建的假設約束條件
session.SetConfigOption("comp_placement_assumptions","no");
// 建立擺放零件的位置矩陣, Pro/Web.Link 中的變數無法直接建立, 必須透過 pfcCreate() 建立
var identityMatrix = pfcCreate("pfcMatrix3D");
// 建立 identity 位置矩陣
for (var x = 0; x < 4; x++)
for (var y = 0; y < 4; y++)
{
if (x == y)
identityMatrix.Set(x, y, 1.0);
else
identityMatrix.Set(x, y, 0.0);
}
// 利用 identityMatrix 建立 transf 座標轉換矩陣
var transf = pfcCreate("pfcTransform3D").Create(identityMatrix);
// 取得目前的工作目錄
var currentDir = session.getCurrentDirectory();
// 以目前已開檔的空白組立檔案, 作為 model
var model = session.CurrentModel;
// 查驗有無 model, 或 model 類別是否為組立件, 若不符合條件則丟出錯誤訊息
if (model == void null || model.Type != pfcCreate("pfcModelType").MDL_ASSEMBLY)
throw new Error (0, "Current model is not an assembly.");
// 將此模型設為組立物件
var assembly = model;
/**---------------------- LEGO_BODY--------------------**/
// 設定零件的 descriptor 物件變數
var descr = pfcCreate("pfcModelDescriptor").CreateFromFileName("v:/home/lego/man/LEGO_BODY.prt");
// 若零件在 session 則直接取用
var componentModel = session.GetModelFromDescr(descr);
// 若零件不在 session 則從工作目錄中載入 session
var componentModel = session.RetrieveModel(descr);
// 若零件已經在 session 中則放入組立檔中
if (componentModel != void null)
{
// 注意這個 asmcomp 即為設定約束條件的本體
// asmcomp 為特徵物件, 直接將零件, 以 transf 座標轉換矩陣方位放入組立檔案中
var asmcomp = assembly.AssembleComponent(componentModel, transf);
}
// 建立約束條件變數
var constrs = pfcCreate("pfcComponentConstraints");
// 設定組立檔中的三個定位面, 注意內定名稱與 Pro/E WF 中的 ASM_D_FRONT 不同, 而是 ASM_FRONT, 可在組立件->info->model 中查詢定位面名稱
// 組立檔案中的 Datum 名稱也可以利用 View->plane tag display 查詢名稱
// 建立組立參考面所組成的陣列
var asmDatums = new Array("ASM_FRONT", "ASM_TOP", "ASM_RIGHT");
// 設定零件檔中的三個定位面, 名稱與 Pro/E WF 中相同
var compDatums = new Array("FRONT", "TOP", "RIGHT");
// 建立 ids 變數, intseq 為 sequence of integers 為資料類別, 使用者可以經由整數索引擷取此資料類別的元件, 第一個索引為 0
// intseq 等同 Python 的數列資料?
var ids = pfcCreate("intseq");
// 利用 assembly 物件模型, 建立路徑變數
var path = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids);
// 採用互動式設定相關的變數, MpfcSelect 為 Module level class 中的一種
var MpfcSelect = pfcCreate("MpfcSelect");
// 利用迴圈分別約束組立與零件檔中的三個定位平面
for (var i = 0; i < 3; i++)
{
// 設定組立參考面, 也就是 "ASM_FRONT", "ASM_TOP", "ASM_RIGHT" 等三個 datum planes
var asmItem = assembly.GetItemByName (pfcCreate("pfcModelItemType").ITEM_SURFACE, asmDatums[i]);
// 若無對應的組立參考面, 則啟用互動式平面選擇表單 flag
if (asmItem == void null)
{
interactFlag = true;
continue;
}
// 設定零件參考面, 也就是 "FRONT", "TOP", "RIGHT" 等三個 datum planes
var compItem = componentModel.GetItemByName (pfcCreate ("pfcModelItemType").ITEM_SURFACE, compDatums[i]);
// 若無對應的零件參考面, 則啟用互動式平面選擇表單 flag
if (compItem == void null)
{
interactFlag = true;
continue;
}
// 因為 asmItem 為組立件中的定位特徵, 必須透過 path 才能取得
var asmSel = MpfcSelect.CreateModelItemSelection(asmItem, path);
// 而 compItem 則為零件, 沒有 path 路徑, 因此第二變數為 null
var compSel = MpfcSelect.CreateModelItemSelection(compItem, void null);
// 利用 ASM_CONSTRAINT_ALIGN 對齊組立約束建立約束變數
var constr = pfcCreate("pfcComponentConstraint").Create (pfcCreate ("pfcComponentConstraintType").ASM_CONSTRAINT_ALIGN);
// 設定約束條件的組立參考與元件參考選擇
constr.AssemblyReference = asmSel;
constr.ComponentReference = compSel;
// 第一個變數為強制變數, 第二個為忽略變數
// 強制變數為 false, 表示不強制約束, 只有透過點與線對齊時需設為 true
// 忽略變數為 false, 約束條件在更新模型時是否忽略, 設為 false 表示不忽略
// 通常在組立 closed chain 機構時, 忽略變數必須設為 true, 才能完成約束
// 因為三個面絕對約束, 因此輸入變數為 false, false
constr.Attributes = pfcCreate("pfcConstraintAttributes").Create (false, false);
// 將互動選擇相關資料, 附加在程式約束變數之後
constrs.Append(constr);
}
// 設定組立約束條件
asmcomp.SetConstraints (constrs, void null);
/**---------------------- LEGO_ARM_RT 右手上臂--------------------**/
var descr = pfcCreate ("pfcModelDescriptor").CreateFromFileName ("v:/home/lego/man/LEGO_ARM_RT.prt");
var componentModel = session.GetModelFromDescr(descr);
var componentModel = session.RetrieveModel(descr);
if (componentModel != void null)
{
// 注意這個 asmcomp 即為設定約束條件的本體
// asmcomp 為特徵物件,直接將零件, 以 transf 座標轉換放入組立檔案中
var asmcomp = assembly.AssembleComponent (componentModel, transf);
}
// 取得 assembly 項下的元件 id, 因為只有一個零件, 採用 index 0 取出其 featID
var components = assembly.ListFeaturesByType(true, pfcCreate ("pfcFeatureType").FEATTYPE_COMPONENT);
// 此一 featID 為組立件中的第一個零件編號, 也就是樂高人偶的 body
var featID = components.Item(0).Id;
ids.Append(featID);
// 在 assembly 模型中建立子零件所對應的路徑
var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids);
subassembly = subPath.Leaf;
// 以下針對 body 的 A_13 軸與 DTM1 基準面及右臂的 A_4 軸線與 DTM1 進行對齊與面接約束
var asmDatums = new Array("A_13", "DTM1");
var compDatums = new Array("A_4", "DTM1");
// 組立的關係變數為對齊與面接
var relation = new Array (pfcCreate ("pfcComponentConstraintType").ASM_CONSTRAINT_ALIGN, pfcCreate ("pfcComponentConstraintType").ASM_CONSTRAINT_MATE);
// 組立元件則為軸與平面
var relationItem = new Array(pfcCreate("pfcModelItemType").ITEM_AXIS, pfcCreate("pfcModelItemType").ITEM_SURFACE);
// 建立約束條件變數, 軸採對齊而基準面則以面接進行約束
var constrs = pfcCreate ("pfcComponentConstraints");
for (var i = 0; i < 2; i++)
{
// 設定組立參考面, asmItem 為 model item
var asmItem = subassembly.GetItemByName (relationItem[i], asmDatums [i]);
// 若無對應的組立參考面, 則啟用互動式平面選擇表單 flag
if (asmItem == void null)
{
interactFlag = true;
continue;
}
// 設定零件參考面, compItem 為 model item
var compItem = componentModel.GetItemByName (relationItem[i], compDatums[i]);
if (compItem == void null)
{
interactFlag = true;
continue;
}
// 採用互動式設定相關的變數
var MpfcSelect = pfcCreate ("MpfcSelect");
var asmSel = MpfcSelect.CreateModelItemSelection (asmItem, subPath);
var compSel = MpfcSelect.CreateModelItemSelection (compItem, void null);
var constr = pfcCreate("pfcComponentConstraint").Create (relation[i]);
constr.AssemblyReference = asmSel;
constr.ComponentReference = compSel;
// 因為透過軸線對齊, 第一 force 變數需設為 true
constr.Attributes = pfcCreate("pfcConstraintAttributes").Create (true, false);
// 將互動選擇相關資料, 附加在程式約束變數之後
constrs.Append(constr);
}
// 設定組立約束條件, 以 asmcomp 特徵進行約束條件設定
// 請注意, 第二個變數必須為 void null 表示零件對零件進行約束, 若為 subPath, 則零件會與原始零件的平面進行約束
asmcomp.SetConstraints (constrs, void null);
/**---------------------- LEGO_ARM_LT 左手上臂--------------------**/
var descr = pfcCreate ("pfcModelDescriptor").CreateFromFileName ("v:/home/lego/man/LEGO_ARM_LT.prt");
var componentModel = session.GetModelFromDescr(descr);
var componentModel = session.RetrieveModel(descr);
if (componentModel != void null)
{
// 注意這個 asmcomp 即為設定約束條件的本體
// asmcomp 為特徵物件,直接將零件, 以 transf 座標轉換放入組立檔案中
var asmcomp = assembly.AssembleComponent(componentModel, transf);
}
// 取得 assembly 項下的元件 id, 因為只有一個零件, 採用 index 0 取出其 featID
var components = assembly.ListFeaturesByType(true, pfcCreate ("pfcFeatureType").FEATTYPE_COMPONENT);
var ids = pfcCreate ("intseq");
// 因為左臂也是與 body 進行約束條件組立, 因此取 body 的 featID
// 至此右臂 id 應該是 featID+1, 而左臂則是 featID+2
ids.Append(featID);
// 在 assembly 模型中建立子零件所對應的路徑
var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids);
subassembly = subPath.Leaf;
var asmDatums = new Array("A_9", "DTM2");
var compDatums = new Array("A_4", "DTM1");
var relation = new Array (pfcCreate ("pfcComponentConstraintType").ASM_CONSTRAINT_ALIGN, pfcCreate ("pfcComponentConstraintType").ASM_CONSTRAINT_MATE);
var relationItem = new Array(pfcCreate("pfcModelItemType").ITEM_AXIS, pfcCreate("pfcModelItemType").ITEM_SURFACE);
// 建立約束條件變數
var constrs = pfcCreate ("pfcComponentConstraints");
for (var i = 0; i < 2; i++)
{
// 設定組立參考面, asmItem 為 model item
var asmItem = subassembly.GetItemByName (relationItem[i], asmDatums [i]);
// 若無對應的組立參考面, 則啟用互動式平面選擇表單 flag
if (asmItem == void null)
{
interactFlag = true;
continue;
}
// 設定零件參考面, compItem 為 model item
var compItem = componentModel.GetItemByName (relationItem[i], compDatums [i]);
if (compItem == void null)
{
interactFlag = true;
continue;
}
// 採用互動式設定相關的變數
var MpfcSelect = pfcCreate ("MpfcSelect");
var asmSel = MpfcSelect.CreateModelItemSelection (asmItem, subPath);
var compSel = MpfcSelect.CreateModelItemSelection (compItem, void null);
var constr = pfcCreate("pfcComponentConstraint").Create (relation[i]);
constr.AssemblyReference = asmSel;
constr.ComponentReference = compSel;
constr.Attributes = pfcCreate("pfcConstraintAttributes").Create (true, false);
// 將互動選擇相關資料, 附加在程式約束變數之後
constrs.Append(constr);
}
// 設定組立約束條件, 以 asmcomp 特徵進行約束條件設定
// 請注意, 第二個變數必須為 void null 表示零件對零件進行約束, 若為 subPath, 則零件會與原始零件的平面進行約束
asmcomp.SetConstraints (constrs, void null);
/**---------------------- LEGO_HAND 右手手腕--------------------**/
// 右手臂 LEGO_ARM_RT.prt 基準 A_2, DTM2
// 右手腕 LEGO_HAND.prt 基準 A_1, DTM3
var descr = pfcCreate ("pfcModelDescriptor").CreateFromFileName ("v:/home/lego/man/LEGO_HAND.prt");
var componentModel = session.GetModelFromDescr(descr);
var componentModel = session.RetrieveModel(descr);
if (componentModel != void null)
{
// 注意這個 asmcomp 即為設定約束條件的本體
// asmcomp 為特徵物件,直接將零件, 以 transf 座標轉換放入組立檔案中
var asmcomp = assembly.AssembleComponent (componentModel, transf);
}
// 取得 assembly 項下的元件 id, 因為只有一個零件, 採用 index 0 取出其 featID
var components = assembly.ListFeaturesByType(true, pfcCreate ("pfcFeatureType").FEATTYPE_COMPONENT);
var ids = pfcCreate ("intseq");
// 組立件中 LEGO_BODY.prt 編號為 featID
// LEGO_ARM_RT.prt 則是組立件第二個置入的零件, 編號為 featID+1
ids.Append(featID+1);
// 在 assembly 模型中, 根據子零件的編號, 建立子零件所對應的路徑
var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids);
subassembly = subPath.Leaf;
// 以下針對 LEGO_ARM_RT 的 A_2 軸與 DTM2 基準面及 HAND 的 A_1 軸線與 DTM3 進行對齊與面接約束
var asmDatums = new Array("A_2", "DTM2");
var compDatums = new Array("A_1", "DTM3");
// 組立的關係變數為對齊與面接
var relation = new Array (pfcCreate ("pfcComponentConstraintType").ASM_CONSTRAINT_ALIGN, pfcCreate ("pfcComponentConstraintType").ASM_CONSTRAINT_MATE);
// 組立元件則為軸與平面
var relationItem = new Array(pfcCreate("pfcModelItemType").ITEM_AXIS, pfcCreate("pfcModelItemType").ITEM_SURFACE);
// 建立約束條件變數, 軸採對齊而基準面則以面接進行約束
var constrs = pfcCreate ("pfcComponentConstraints");
for (var i = 0; i < 2; i++)
{
// 設定組立參考面, asmItem 為 model item
var asmItem = subassembly.GetItemByName (relationItem[i], asmDatums [i]);
// 若無對應的組立參考面, 則啟用互動式平面選擇表單 flag
if (asmItem == void null)
{
interactFlag = true;
continue;
}
// 設定零件參考面, compItem 為 model item
var compItem = componentModel.GetItemByName (relationItem[i], compDatums [i]);
if (compItem == void null)
{
interactFlag = true;
continue;
}
// 採用互動式設定相關的變數
var MpfcSelect = pfcCreate("MpfcSelect");
var asmSel = MpfcSelect.CreateModelItemSelection(asmItem, subPath);
var compSel = MpfcSelect.CreateModelItemSelection (compItem, void null);
var constr = pfcCreate("pfcComponentConstraint").Create (relation[i]);
constr.AssemblyReference = asmSel;
constr.ComponentReference = compSel;
// 因為透過軸線對齊, 第一 force 變數需設為 true
constr.Attributes = pfcCreate("pfcConstraintAttributes").Create (true, false);
// 將互動選擇相關資料, 附加在程式約束變數之後
constrs.Append(constr);
}
// 設定組立約束條件, 以 asmcomp 特徵進行約束條件設定
// 請注意, 第二個變數必須為 void null 表示零件對零件進行約束, 若為 subPath, 則零件會與原始零件的平面進行約束
asmcomp.SetConstraints (constrs, void null);
// 利用函式呼叫組立左手 HAND
axis_plane_assembly(session, assembly, transf, featID, 2,
"LEGO_HAND.prt", "A_2", "DTM2", "A_1", "DTM3");
// 利用函式呼叫組立人偶頭部 HEAD
// BODY id 為 featID+0, 以 A_2 及 DTM3 約束
// HEAD 則直接呼叫檔案名稱, 以 A_2, DTM2 約束
axis_plane_assembly(session, assembly, transf, featID, 0,
"LEGO_HEAD.prt", "A_2", "DTM3", "A_2", "DTM2");
// Body 與 WAIST 採三個平面約束組立
// Body 組立面為 DTM4, DTM5, DTM6
// WAIST 組立面為 DTM1, DTM2, DTM3
three_plane_assembly(session, assembly, transf, featID, 0, "LEGO_WAIST.prt", "DTM4", "DTM5", "DTM6", "DTM1", "DTM2", "DTM3");
// 右腳
axis_plane_assembly(session, assembly, transf, featID, 6,
"LEGO_LEG_RT.prt", "A_8", "DTM4", "A_10", "DTM1");
// 左腳
axis_plane_assembly(session, assembly, transf, featID, 6,
"LEGO_LEG_LT.prt", "A_8", "DTM5", "A_10", "DTM1");
// 紅帽
axis_plane_assembly(session, assembly, transf, featID, 5,
"LEGO_HAT.prt", "A_2", "TOP", "A_2", "FRONT");
// regenerate 並且 repaint 組立檔案
assembly.Regenerate (void null);
session.GetModelWindow (assembly).Repaint();
</script>
</body>
</html>
'''
        return outstring
jzoldak/edx-platform | common/lib/calc/calc/functions.py | 279 | 1521 | """
Provide the mathematical functions that numpy doesn't.
Specifically, the secant/cosecant/cotangents and their inverses and
hyperbolic counterparts
"""
import numpy
# Normal Trig
def sec(arg):
    """Return the secant of ``arg``: sec(x) = 1 / cos(x)."""
    cos_val = numpy.cos(arg)
    return 1.0 / cos_val
def csc(arg):
    """Return the cosecant of ``arg``: csc(x) = 1 / sin(x)."""
    sin_val = numpy.sin(arg)
    return 1.0 / sin_val
def cot(arg):
    """Return the cotangent of ``arg``: cot(x) = 1 / tan(x)."""
    tan_val = numpy.tan(arg)
    return 1.0 / tan_val
# Inverse Trig
# http://en.wikipedia.org/wiki/Inverse_trigonometric_functions#Relationships_among_the_inverse_trigonometric_functions
def arcsec(val):
    """Return the inverse secant of ``val``, computed as arccos(1/val)."""
    recip = 1.0 / val
    return numpy.arccos(recip)
def arccsc(val):
    """Return the inverse cosecant of ``val``, computed as arcsin(1/val)."""
    recip = 1.0 / val
    return numpy.arcsin(recip)
def arccot(val):
    """Return the inverse cotangent of ``val``.

    Uses the convention with range (-pi/2, pi/2]: the offset is chosen by
    the sign of the real part of ``val``, then arctan(val) is subtracted.
    """
    offset = -numpy.pi / 2 if numpy.real(val) < 0 else numpy.pi / 2
    return offset - numpy.arctan(val)
# Hyperbolic Trig
def sech(arg):
    """Return the hyperbolic secant of ``arg``: sech(x) = 1 / cosh(x)."""
    cosh_val = numpy.cosh(arg)
    return 1.0 / cosh_val
def csch(arg):
    """Return the hyperbolic cosecant of ``arg``: csch(x) = 1 / sinh(x)."""
    sinh_val = numpy.sinh(arg)
    return 1.0 / sinh_val
def coth(arg):
    """Return the hyperbolic cotangent of ``arg``: coth(x) = 1 / tanh(x)."""
    tanh_val = numpy.tanh(arg)
    return 1.0 / tanh_val
# And their inverses
def arcsech(val):
    """Return the inverse hyperbolic secant of ``val``: arccosh(1/val)."""
    recip = 1.0 / val
    return numpy.arccosh(recip)
def arccsch(val):
    """Return the inverse hyperbolic cosecant of ``val``: arcsinh(1/val)."""
    recip = 1.0 / val
    return numpy.arcsinh(recip)
def arccoth(val):
    """Return the inverse hyperbolic cotangent of ``val``: arctanh(1/val)."""
    recip = 1.0 / val
    return numpy.arctanh(recip)
| agpl-3.0 |
techdragon/django | tests/files/tests.py | 16 | 12423 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import gzip
import os
import struct
import tempfile
import unittest
from io import BytesIO, StringIO, TextIOWrapper
from django.core.files import File
from django.core.files.base import ContentFile
from django.core.files.move import file_move_safe
from django.core.files.temp import NamedTemporaryFile
from django.core.files.uploadedfile import SimpleUploadedFile, UploadedFile
from django.test import mock
from django.utils import six
from django.utils._os import upath
try:
from PIL import Image
except ImportError:
Image = None
else:
from django.core.files import images
class FileTests(unittest.TestCase):
    """Behaviour of django.core.files.File and UploadedFile wrappers."""

    def test_unicode_uploadedfile_name(self):
        # repr() must return the native str type even for non-ASCII names.
        uf = UploadedFile(name='¿Cómo?', content_type='text')
        self.assertIs(type(repr(uf)), str)

    def test_unicode_file_name(self):
        # Same native-str guarantee for plain File wrappers.
        f = File(None, 'djángö')
        self.assertIs(type(repr(f)), str)

    def test_context_manager(self):
        # File works as a context manager and closes the underlying file
        # object when the ``with`` block is left.
        orig_file = tempfile.TemporaryFile()
        base_file = File(orig_file)
        with base_file as f:
            self.assertIs(base_file, f)
            self.assertFalse(f.closed)
        self.assertTrue(f.closed)
        self.assertTrue(orig_file.closed)

    def test_namedtemporaryfile_closes(self):
        """
        The symbol django.core.files.NamedTemporaryFile is assigned as
        a different class on different operating systems. In
        any case, the result should minimally mock some of the API of
        tempfile.NamedTemporaryFile from the Python standard library.
        """
        tempfile = NamedTemporaryFile()
        self.assertTrue(hasattr(tempfile, "closed"))
        self.assertFalse(tempfile.closed)
        tempfile.close()
        self.assertTrue(tempfile.closed)

    def test_file_mode(self):
        # Should not set mode to None if it is not present.
        # See #14681, stdlib gzip module crashes if mode is set to None
        file = SimpleUploadedFile("mode_test.txt", b"content")
        self.assertFalse(hasattr(file, 'mode'))
        gzip.GzipFile(fileobj=file)

    def test_file_iteration(self):
        """
        File objects should yield lines when iterated over.
        Refs #22107.
        """
        file = File(BytesIO(b'one\ntwo\nthree'))
        self.assertEqual(list(file), [b'one\n', b'two\n', b'three'])

    def test_file_iteration_windows_newlines(self):
        """
        #8149 - File objects with \r\n line endings should yield lines
        when iterated over.
        """
        f = File(BytesIO(b'one\r\ntwo\r\nthree'))
        self.assertEqual(list(f), [b'one\r\n', b'two\r\n', b'three'])

    def test_file_iteration_mac_newlines(self):
        """
        #8149 - File objects with \r line endings should yield lines
        when iterated over.
        """
        f = File(BytesIO(b'one\rtwo\rthree'))
        self.assertEqual(list(f), [b'one\r', b'two\r', b'three'])

    def test_file_iteration_mixed_newlines(self):
        # All three newline conventions mixed in a single stream.
        f = File(BytesIO(b'one\rtwo\nthree\r\nfour'))
        self.assertEqual(list(f), [b'one\r', b'two\n', b'three\r\n', b'four'])

    def test_file_iteration_with_unix_newline_at_chunk_boundary(self):
        f = File(BytesIO(b'one\ntwo\nthree'))
        # Set chunk size to create a boundary after \n:
        # b'one\n...
        #        ^
        f.DEFAULT_CHUNK_SIZE = 4
        self.assertEqual(list(f), [b'one\n', b'two\n', b'three'])

    def test_file_iteration_with_windows_newline_at_chunk_boundary(self):
        f = File(BytesIO(b'one\r\ntwo\r\nthree'))
        # Set chunk size to create a boundary between \r and \n:
        # b'one\r\n...
        #        ^
        f.DEFAULT_CHUNK_SIZE = 4
        self.assertEqual(list(f), [b'one\r\n', b'two\r\n', b'three'])

    def test_file_iteration_with_mac_newline_at_chunk_boundary(self):
        f = File(BytesIO(b'one\rtwo\rthree'))
        # Set chunk size to create a boundary after \r:
        # b'one\r...
        #        ^
        f.DEFAULT_CHUNK_SIZE = 4
        self.assertEqual(list(f), [b'one\r', b'two\r', b'three'])

    def test_file_iteration_with_text(self):
        # Iteration also works for text-mode (str) streams.
        f = File(StringIO('one\ntwo\nthree'))
        self.assertEqual(list(f), ['one\n', 'two\n', 'three'])

    def test_readable(self):
        # readable() reflects the state of the underlying file object,
        # becoming False once the file is closed.
        with tempfile.TemporaryFile() as temp, File(temp, name='something.txt') as test_file:
            self.assertTrue(test_file.readable())
        self.assertFalse(test_file.readable())

    def test_writable(self):
        with tempfile.TemporaryFile() as temp, File(temp, name='something.txt') as test_file:
            self.assertTrue(test_file.writable())
        self.assertFalse(test_file.writable())
        # A file opened read-only is never writable.
        with tempfile.TemporaryFile('rb') as temp, File(temp, name='something.txt') as test_file:
            self.assertFalse(test_file.writable())

    def test_seekable(self):
        with tempfile.TemporaryFile() as temp, File(temp, name='something.txt') as test_file:
            self.assertTrue(test_file.seekable())
        self.assertFalse(test_file.seekable())

    def test_io_wrapper(self):
        # File objects can be wrapped in io.TextIOWrapper and detached again.
        content = "vive l'été\n"
        with tempfile.TemporaryFile() as temp, File(temp, name='something.txt') as test_file:
            test_file.write(content.encode('utf-8'))
            test_file.seek(0)
            wrapper = TextIOWrapper(test_file, 'utf-8', newline='\n')
            self.assertEqual(wrapper.read(), content)
            # The following seek() call is required on Windows Python 2 when
            # switching from reading to writing.
            wrapper.seek(0, 2)
            wrapper.write(content)
            wrapper.seek(0)
            self.assertEqual(wrapper.read(), content * 2)
            test_file = wrapper.detach()
            test_file.seek(0)
            self.assertEqual(test_file.read(), (content * 2).encode('utf-8'))
class NoNameFileTestCase(unittest.TestCase):
    """
    Other examples of unnamed files may be tempfile.SpooledTemporaryFile or
    urllib.urlopen()
    """

    def test_noname_file_default_name(self):
        # A wrapped stream with no name yields name == None.
        self.assertIsNone(File(BytesIO(b'A file with no name')).name)

    def test_noname_file_get_size(self):
        # size is still computable from the underlying stream.
        self.assertEqual(File(BytesIO(b'A file with no name')).size, 19)
class ContentFileTestCase(unittest.TestCase):
    """Tests for django.core.files.base.ContentFile."""

    def test_content_file_default_name(self):
        self.assertIsNone(ContentFile(b"content").name)

    def test_content_file_custom_name(self):
        """
        Test that the constructor of ContentFile accepts 'name' (#16590).
        """
        name = "I can have a name too!"
        self.assertEqual(ContentFile(b"content", name=name).name, name)

    def test_content_file_input_type(self):
        """
        Test that ContentFile can accept both bytes and unicode and that the
        retrieved content is of the same type.
        """
        self.assertIsInstance(ContentFile(b"content").read(), bytes)
        if six.PY3:
            self.assertIsInstance(ContentFile("español").read(), six.text_type)
        else:
            self.assertIsInstance(ContentFile("español").read(), bytes)
class DimensionClosingBug(unittest.TestCase):
    """
    Test that get_image_dimensions() properly closes files (#8817)
    """

    @unittest.skipUnless(Image, "Pillow not installed")
    def test_not_closing_of_files(self):
        """
        Open files passed into get_image_dimensions() should stay opened.
        """
        empty_io = BytesIO()
        try:
            images.get_image_dimensions(empty_io)
        finally:
            self.assertTrue(not empty_io.closed)

    @unittest.skipUnless(Image, "Pillow not installed")
    def test_closing_of_filenames(self):
        """
        get_image_dimensions() called with a filename should close the file.
        """
        # We need to inject a modified open() builtin into the images module
        # that checks if the file was closed properly if the function is
        # called with a filename instead of an file object.
        # get_image_dimensions will call our catching_open instead of the
        # regular builtin one.

        class FileWrapper(object):
            # Records every close() call so the test can assert one happened.
            _closed = []

            def __init__(self, f):
                self.f = f

            def __getattr__(self, name):
                return getattr(self.f, name)

            def close(self):
                self._closed.append(True)
                self.f.close()

        def catching_open(*args):
            return FileWrapper(open(*args))

        images.open = catching_open
        try:
            images.get_image_dimensions(os.path.join(os.path.dirname(upath(__file__)), "test1.png"))
        finally:
            del images.open
        self.assertTrue(FileWrapper._closed)
class InconsistentGetImageDimensionsBug(unittest.TestCase):
    """
    Test that get_image_dimensions() works properly after various calls
    using a file handler (#11158)
    """

    @unittest.skipUnless(Image, "Pillow not installed")
    def test_multiple_calls(self):
        """
        Multiple calls of get_image_dimensions() should return the same size.
        """
        img_path = os.path.join(os.path.dirname(upath(__file__)), "test.png")
        with open(img_path, 'rb') as fh:
            image = images.ImageFile(fh)
            image_pil = Image.open(fh)
            size_1 = images.get_image_dimensions(image)
            size_2 = images.get_image_dimensions(image)
        self.assertEqual(image_pil.size, size_1)
        self.assertEqual(size_1, size_2)

    @unittest.skipUnless(Image, "Pillow not installed")
    def test_bug_19457(self):
        """
        Regression test for #19457
        get_image_dimensions fails on some pngs, while Image.size is working good on them
        """
        img_path = os.path.join(os.path.dirname(upath(__file__)), "magic.png")
        size = images.get_image_dimensions(img_path)
        with open(img_path, 'rb') as fh:
            self.assertEqual(size, Image.open(fh).size)
@unittest.skipUnless(Image, "Pillow not installed")
class GetImageDimensionsTests(unittest.TestCase):
    """Edge cases of images.get_image_dimensions()."""

    def test_invalid_image(self):
        """
        get_image_dimensions() should return (None, None) for the dimensions of
        invalid images (#24441).

        brokenimg.png is not a valid image and it has been generated by:
        $ echo "123" > brokenimg.png
        """
        img_path = os.path.join(os.path.dirname(upath(__file__)), "brokenimg.png")
        with open(img_path, 'rb') as fh:
            size = images.get_image_dimensions(fh)
            self.assertEqual(size, (None, None))

    def test_valid_image(self):
        """
        get_image_dimensions() should catch struct.error while feeding the PIL
        Image parser (#24544).

        Emulates the Parser feed error. Since the error is raised on every feed
        attempt, the resulting image size should be invalid: (None, None).
        """
        img_path = os.path.join(os.path.dirname(upath(__file__)), "test.png")
        with mock.patch('PIL.ImageFile.Parser.feed', side_effect=struct.error):
            with open(img_path, 'rb') as fh:
                size = images.get_image_dimensions(fh)
                self.assertEqual(size, (None, None))
class FileMoveSafeTests(unittest.TestCase):
    def test_file_move_overwrite(self):
        """
        file_move_safe() raises IOError when the destination exists and
        allow_overwrite is False, and succeeds (returning None) when
        allow_overwrite is True.
        """
        handle_a, self.file_a = tempfile.mkstemp()
        handle_b, self.file_b = tempfile.mkstemp()
        # Close the descriptors via addCleanup so they are released even if
        # an assertion below fails; the original trailing os.close() calls
        # leaked both handles on any test failure.
        self.addCleanup(os.close, handle_a)
        self.addCleanup(os.close, handle_b)
        # file_move_safe should raise an IOError exception if destination
        # file exists and allow_overwrite is False.
        with self.assertRaises(IOError):
            file_move_safe(self.file_a, self.file_b, allow_overwrite=False)
        # should allow it and continue on if allow_overwrite is True
        self.assertIsNone(file_move_safe(self.file_a, self.file_b, allow_overwrite=True))
class SpooledTempTests(unittest.TestCase):
    """File.size should work for SpooledTemporaryFile in both spool states."""

    def test_in_memory_spooled_temp(self):
        # Spooled file small enough to stay in memory (no rollover).
        with tempfile.SpooledTemporaryFile() as temp:
            temp.write(b"foo bar baz quux\n")
            django_file = File(temp, name="something.txt")
            self.assertEqual(django_file.size, 17)

    def test_written_spooled_temp(self):
        # max_size=4 forces the 17-byte write to roll over to disk.
        with tempfile.SpooledTemporaryFile(max_size=4) as temp:
            temp.write(b"foo bar baz quux\n")
            django_file = File(temp, name="something.txt")
            self.assertEqual(django_file.size, 17)
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.