import os
import signal
import subprocess
import sys
import time
import unittest
class SIGUSR1Exception(Exception):
pass
class InterProcessSignalTests(unittest.TestCase):
def setUp(self):
self.got_signals = {'SIGHUP': 0, 'SIGUSR1': 0, 'SIGALRM': 0}
def sighup_handler(self, signum, frame):
self.got_signals['SIGHUP'] += 1
def sigusr1_handler(self, signum, frame):
self.got_signals['SIGUSR1'] += 1
raise SIGUSR1Exception
def wait_signal(self, child, signame, exc_class=None):
try:
if child is not None:
# This wait should be interrupted by exc_class
# (if set)
child.wait()
timeout = 10.0
deadline = time.monotonic() + timeout
while time.monotonic() < deadline:
if self.got_signals[signame]:
return
signal.pause()
except BaseException as exc:
if exc_class is not None and isinstance(exc, exc_class):
# got the expected exception
return
raise
self.fail('signal %s not received after %s seconds'
% (signame, timeout))
def subprocess_send_signal(self, pid, signame):
code = 'import os, signal; os.kill(%s, signal.%s)' % (pid, signame)
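        # e.g. pid=1234, signame="SIGHUP" (hypothetical values) yields the
        # child command 'import os, signal; os.kill(1234, signal.SIGHUP)'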
args = [sys.executable, '-I', '-c', code]
return subprocess.Popen(args)
def test_interprocess_signal(self):
# Install handlers. This function runs in a sub-process, so we
# don't worry about re-setting the default handlers.
signal.signal(signal.SIGHUP, self.sighup_handler)
signal.signal(signal.SIGUSR1, self.sigusr1_handler)
signal.signal(signal.SIGUSR2, signal.SIG_IGN)
signal.signal(signal.SIGALRM, signal.default_int_handler)
# Let the sub-processes know who to send signals to.
pid = str(os.getpid())
with self.subprocess_send_signal(pid, "SIGHUP") as child:
self.wait_signal(child, 'SIGHUP')
self.assertEqual(self.got_signals, {'SIGHUP': 1, 'SIGUSR1': 0,
'SIGALRM': 0})
with self.subprocess_send_signal(pid, "SIGUSR1") as child:
self.wait_signal(child, 'SIGUSR1', SIGUSR1Exception)
self.assertEqual(self.got_signals, {'SIGHUP': 1, 'SIGUSR1': 1,
'SIGALRM': 0})
with self.subprocess_send_signal(pid, "SIGUSR2") as child:
# Nothing should happen: SIGUSR2 is ignored
child.wait()
signal.alarm(1)
self.wait_signal(None, 'SIGALRM', KeyboardInterrupt)
self.assertEqual(self.got_signals, {'SIGHUP': 1, 'SIGUSR1': 1,
'SIGALRM': 0})
if __name__ == "__main__":
unittest.main()
|
{
"content_hash": "d196ef48334647c6dbe950d3c127912a",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 75,
"avg_line_length": 34.095238095238095,
"alnum_prop": 0.5715782122905028,
"repo_name": "MalloyPower/parsing-python",
"id": "d3ae170983bc36a2f54bd8c9b1bc32e7caee7ffb",
"size": "2864",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "front-end/testsuite-python-lib/Python-3.6.0/Lib/test/signalinterproctester.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1963"
},
{
"name": "Lex",
"bytes": "238458"
},
{
"name": "Makefile",
"bytes": "4513"
},
{
"name": "OCaml",
"bytes": "412695"
},
{
"name": "Python",
"bytes": "17319"
},
{
"name": "Rascal",
"bytes": "523063"
},
{
"name": "Yacc",
"bytes": "429659"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import pytest
# Note: "chromosomes" worked prior to the bug fix
@pytest.mark.models('en')
@pytest.mark.parametrize('word,lemmas', [("chromosomes", ["chromosome"]), ("endosomes", ["endosome"]), ("colocalizes", ["colocaliz", "colocalize"])])
def test_issue781(EN, word, lemmas):
lemmatizer = EN.Defaults.create_lemmatizer()
assert sorted(lemmatizer(word, 'noun', morphology={'number': 'plur'})) == sorted(lemmas)
|
{
"content_hash": "74c18348dcebb6ea3b627665db31b06a",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 149,
"avg_line_length": 42,
"alnum_prop": 0.6991341991341992,
"repo_name": "aikramer2/spaCy",
"id": "805e6822831708166c769ff88f6435b198c05888",
"size": "478",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "spacy/tests/regression/test_issue781.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "103274"
},
{
"name": "C++",
"bytes": "161734"
},
{
"name": "CSS",
"bytes": "42943"
},
{
"name": "HTML",
"bytes": "902655"
},
{
"name": "JavaScript",
"bytes": "17993"
},
{
"name": "Python",
"bytes": "191529488"
},
{
"name": "Shell",
"bytes": "1068"
}
],
"symlink_target": ""
}
|
#!python3
# -*- coding:utf-8 -*-
import os
import sys
import time
import ctypes
import shutil
import subprocess
IsPy3 = sys.version_info[0] >= 3
if IsPy3:
import winreg
else:
import codecs
import _winreg as winreg
BuildType = 'Release'
IsRebuild = True
Build = 'Rebuild'
Update = False
Copy = False
CleanAll = False
BuildTimeout = 30*60
Bit = 'Win32'
Dlllib = 'dll'
MSBuild = None
IncrediBuild = None
UseMSBuild = True # Build with MSBuild by default; if False, build with IncrediBuild instead
# For a different project, only the following 5 variables need to change
SlnFile = '../LogSender.sln' # path relative to this script
UpdateDir = [] # paths relative to this script; leave empty to skip updating
ExecBatList = [] # paths relative to this script; batch scripts run before building (may be empty); each .bat is run after cd'ing into its directory
MSBuildFirstProjects = [r'LogSender'] # for MSBuild this is the project file's path inside the solution (.sln)
# projects that MSBuild builds first; leave empty to not enforce an order
IncrediBuildFirstProjects = ['LogSender'] # IncrediBuild only needs the project name
# projects that IncrediBuild builds first; leave empty to not enforce an order
class ConsoleColor():
'''This class defines the values of color for printing on console window'''
Black = 0
DarkBlue = 1
DarkGreen = 2
DarkCyan = 3
DarkRed = 4
DarkMagenta = 5
DarkYellow = 6
Gray = 7
DarkGray = 8
Blue = 9
Green = 10
Cyan = 11
Red = 12
Magenta = 13
Yellow = 14
White = 15
class Coord(ctypes.Structure):
_fields_ = [('X', ctypes.c_short), ('Y', ctypes.c_short)]
class SmallRect(ctypes.Structure):
_fields_ = [('Left', ctypes.c_short),
('Top', ctypes.c_short),
('Right', ctypes.c_short),
('Bottom', ctypes.c_short),
]
class ConsoleScreenBufferInfo(ctypes.Structure):
_fields_ = [('dwSize', Coord),
('dwCursorPosition', Coord),
('wAttributes', ctypes.c_uint),
('srWindow', SmallRect),
('dwMaximumWindowSize', Coord),
]
class Win32API():
'''Some native methods for python calling'''
StdOutputHandle = -11
ConsoleOutputHandle = None
DefaultColor = None
@staticmethod
def SetConsoleColor(color):
'''Change the text color on console window'''
if not Win32API.DefaultColor:
if not Win32API.ConsoleOutputHandle:
Win32API.ConsoleOutputHandle = ctypes.windll.kernel32.GetStdHandle(Win32API.StdOutputHandle)
bufferInfo = ConsoleScreenBufferInfo()
ctypes.windll.kernel32.GetConsoleScreenBufferInfo(Win32API.ConsoleOutputHandle, ctypes.byref(bufferInfo))
Win32API.DefaultColor = int(bufferInfo.wAttributes & 0xFF)
if IsPy3:
sys.stdout.flush() # need flush stdout in python 3
ctypes.windll.kernel32.SetConsoleTextAttribute(Win32API.ConsoleOutputHandle, color)
@staticmethod
def ResetConsoleColor():
'''Reset the default text color on console window'''
if IsPy3:
sys.stdout.flush() # need flush stdout in python 3
ctypes.windll.kernel32.SetConsoleTextAttribute(Win32API.ConsoleOutputHandle, Win32API.DefaultColor)
class Logger():
LogFile = '@AutomationLog.txt'
LineSep = '\n'
@staticmethod
def Write(log, consoleColor = -1, writeToFile = True, printToStdout = True):
'''
consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
if consoleColor == -1, use default color
'''
if printToStdout:
isValidColor = (consoleColor >= ConsoleColor.Black and consoleColor <= ConsoleColor.White)
if isValidColor:
Win32API.SetConsoleColor(consoleColor)
try:
sys.stdout.write(log)
except UnicodeError as e:
Win32API.SetConsoleColor(ConsoleColor.Red)
isValidColor = True
sys.stdout.write(str(type(e)) + ' can\'t print the log!\n')
if isValidColor:
Win32API.ResetConsoleColor()
if not writeToFile:
return
if IsPy3:
logFile = open(Logger.LogFile, 'a+', encoding = 'utf-8')
else:
logFile = codecs.open(Logger.LogFile, 'a+', 'utf-8')
try:
logFile.write(log)
# logFile.flush() # need flush in python 3, otherwise log won't be saved
except Exception as ex:
logFile.close()
sys.stdout.write('can not write log with exception: {0} {1}'.format(type(ex), ex))
@staticmethod
def WriteLine(log, consoleColor = -1, writeToFile = True, printToStdout = True):
'''
consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
if consoleColor == -1, use default color
'''
Logger.Write(log + Logger.LineSep, consoleColor, writeToFile, printToStdout)
@staticmethod
def Log(log, consoleColor = -1, writeToFile = True, printToStdout = True):
'''
consoleColor: value in class ConsoleColor, such as ConsoleColor.DarkGreen
if consoleColor == -1, use default color
'''
t = time.localtime()
log = '{0}-{1:02}-{2:02} {3:02}:{4:02}:{5:02} - {6}{7}'.format(t.tm_year, t.tm_mon, t.tm_mday,
t.tm_hour, t.tm_min, t.tm_sec, log, Logger.LineSep)
Logger.Write(log, consoleColor, writeToFile, printToStdout)
@staticmethod
def DeleteLog():
if os.path.exists(Logger.LogFile):
os.remove(Logger.LogFile)
def GetMSBuildPath():
if Bit == 'Win32':
cmd = 'call "%VS120COMNTOOLS%..\\..\\VC\\vcvarsall.bat" x86\nwhere msbuild'
elif Bit == 'x64':
cmd = 'call "%VS120COMNTOOLS%..\\..\\VC\\vcvarsall.bat" amd64\nwhere msbuild'
ftemp = open('GetMSBuildPath.bat', 'wt')
ftemp.write(cmd)
ftemp.close()
p = subprocess.Popen('GetMSBuildPath.bat', stdout = subprocess.PIPE)
p.wait()
lines = p.stdout.read().decode().splitlines()
os.remove('GetMSBuildPath.bat')
for line in lines:
if 'MSBuild.exe' in line:
return line
def GetIncrediBuildPath():
try:
key=winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, r'SOFTWARE\Classes\IncrediBuild.MonitorFile\shell\open\command')
value, typeId = winreg.QueryValueEx(key, '')
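        # The registry value is assumed to look like '"C:\\...\\BuildMonitor.exe" "%1"';
        # the code below extracts the first double-quoted path from it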
if value:
start = value.find('"')
end = value.find('"', start + 1)
path = value[start+1:end]
buildConsole = os.path.join(os.path.dirname(path), 'BuildConsole.exe')
return buildConsole
except FileNotFoundError as e:
Logger.WriteLine('can not find IncrediBuild', ConsoleColor.Red)
def UpdateCode():
# put git to path first
if not shutil.which('git.exe'):
        Logger.Log('Cannot find git.exe. Make sure the git\\bin directory was added to the PATH environment variable when git was installed!!!\nSkipping code update!!!', ConsoleColor.Yellow)
        return False
oldDir = os.getcwd()
for dir in UpdateDir:
os.chdir(dir)
ret = os.system('git pull')
os.chdir(oldDir)
if ret != 0:
Logger.Log('update {0} failed'.format(dir), ConsoleColor.Yellow)
            return False
return True
def BuildProject(cmd):
for i in range(6):
Logger.WriteLine(cmd, ConsoleColor.Cyan)
buildFailed = True
startTime = time.time()
        p = subprocess.Popen(cmd) # IncrediBuild must not use stdout=subprocess.PIPE, otherwise p.wait() never returns; probably an IncrediBuild bug
if IsPy3:
try:
buildFailed = p.wait(BuildTimeout)
except subprocess.TimeoutExpired as e:
Logger.Log('{0}'.format(e), ConsoleColor.Yellow)
p.kill()
else:
buildFailed = p.wait()
if not UseMSBuild:
            #IncrediBuild's exit code does not indicate whether the build succeeded; the output must be parsed instead
fin = open('IncrediBuild.log')
for line in fin:
if line.startswith('=========='):
Logger.Write(line, ConsoleColor.Cyan, writeToFile = True if IsPy3 else False)
if IsPy3:
                        start = line.find('失败') + 3 # e.g. '========== 生成: 成功 1 个,失败 0 个,最新 0 个,跳过 0 个 ==========' ("Build: 1 succeeded, 0 failed, 0 up-to-date, 0 skipped"); '失败' means "failed"
                    else: # ugly special-case to stay compatible with py2
start = 0
n2 = 0
while 1:
if line[start].isdigit():
n2 += 1
if n2 == 2:
break
start = line.find(' ', start)
start += 1
end = line.find(' ', start)
failCount = int(line[start:end])
buildFailed = failCount > 0
else:
Logger.Write(line, ConsoleColor.Red, writeToFile = True if IsPy3 else False, printToStdout = True if ' error ' in line else False)
fin.close()
costTime = time.time() - startTime
Logger.WriteLine('build cost time: {0:.1f}s\n'.format(costTime), ConsoleColor.Green)
if not buildFailed:
return True
return False
def BuildAllProjects():
buildSuccess = False
cmds = []
if UseMSBuild:
if IsRebuild:
if CleanAll:
cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, 'Debug'))
cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, 'Release'))
else:
cmds.append('{0} {1} /t:Clean /p:Configuration={2} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, BuildType))
for project in MSBuildFirstProjects:
cmds.append('{0} {1} /t:{2} /p:Configuration={3};platform={4} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, project, BuildType, Bit))
cmds.append('{0} {1} /p:Configuration={2};platform={3} /nologo /maxcpucount /filelogger /consoleloggerparameters:ErrorsOnly'.format(MSBuild, SlnFile, BuildType, Bit))
else: #IncrediBuild
if IsRebuild:
if CleanAll:
cmds.append('"{0}" {1} /clean /cfg="{2}|{3}" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, 'Debug', Bit))
cmds.append('"{0}" {1} /clean /cfg="{2}|{3}" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, 'Release', Bit))
else:
cmds.append('"{0}" {1} /clean /cfg="{2}|{3}" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, BuildType, Bit))
for project in IncrediBuildFirstProjects:
cmds.append('"{0}" {1} /build /prj={2} /cfg="{3}|{4}" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, project, BuildType, Bit))
cmds.append('"{0}" {1} /build /cfg="{2}|{3}" /nologo /out=IncrediBuild.log'.format(IncrediBuild, SlnFile, BuildType, Bit))
for cmd in cmds:
buildSuccess = BuildProject(cmd)
if not buildSuccess:
break
return buildSuccess
def main():
if UseMSBuild:
if not os.path.exists(MSBuild):
Logger.Log('can not find msbuild.exe', ConsoleColor.Red)
return 1
else:
if not os.path.exists(IncrediBuild):
            Logger.Log('can not find BuildConsole.exe', ConsoleColor.Red)
return 1
dir = os.path.dirname(__file__)
if dir:
oldDir = os.getcwd()
os.chdir(dir)
if Update:
if not UpdateCode():
return 1
Logger.Log('git update succeed', ConsoleColor.Green)
if Copy:
for bat in ExecBatList:
oldBatDir = os.getcwd()
batDir = os.path.dirname(bat)
batName = os.path.basename(bat)
if batDir:
os.chdir(batDir)
start = time.clock()
os.system(batName)
Logger.Log('run "{}" cost {:.1f} seconds'.format(batName, time.clock() - start), ConsoleColor.Green)
if batDir:
os.chdir(oldBatDir)
buildSuccess = BuildAllProjects()
if buildSuccess:
Logger.Log('build succeed', ConsoleColor.Green)
else:
Logger.Log('build failed', ConsoleColor.Red)
if dir:
os.chdir(oldDir)
return 0 if buildSuccess else 1
if __name__ == '__main__':
Logger.Log('run with argv ' + str(sys.argv), ConsoleColor.Green)
sys.argv = [x.lower() for x in sys.argv]
start_time = time.time()
if 'debug' in sys.argv:
BuildType = 'Debug'
if 'lib' in sys.argv:
Dlllib = 'lib'
SlnFile = '../LogSender_lib.sln'
MSBuildFirstProjects = [r'LogSender_lib']
IncrediBuildFirstProjects = ['LogSender_lib']
if '64' in sys.argv:
Bit = 'x64'
if 'build' in sys.argv:
IsRebuild = False
Build = 'Build'
if 'update' in sys.argv:
Update = True
if 'copy' in sys.argv:
Copy = True
if 'clean' in sys.argv:
CleanAll = True
if 'incredibuild' in sys.argv:
UseMSBuild = False
if UseMSBuild:
MSBuild = GetMSBuildPath()
if not MSBuild:
Logger.Log('can not find MSBuild.exe', ConsoleColor.Red)
exit(1)
else:
IncrediBuild = GetIncrediBuildPath()
if not IncrediBuild:
Logger.Log('can not find BuildConsole.exe', ConsoleColor.Red)
exit(1)
cwd = os.getcwd()
Logger.WriteLine('current dir is: {0}, {1}: {2}'.format(cwd, Build, BuildType))
ret = main()
end_time = time.time()
cost_time = end_time-start_time
Logger.WriteLine('all build cost time: {0:.2f} seconds'.format(cost_time), ConsoleColor.Green)
exit(ret)
|
{
"content_hash": "fc2efed7abbb701b4c05bdc757f5028d",
"timestamp": "",
"source": "github",
"line_count": 355,
"max_line_length": 195,
"avg_line_length": 38.23380281690141,
"alnum_prop": 0.5922051130921683,
"repo_name": "xylsxyls/xueyelingshuang",
"id": "83df2336909bd1a9e4c835462e4c194a72bc3f79",
"size": "14079",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/LogSender/scripts/rebuild_LogSender.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "70916"
},
{
"name": "C",
"bytes": "15759114"
},
{
"name": "C++",
"bytes": "10113598"
},
{
"name": "CMake",
"bytes": "226509"
},
{
"name": "COBOL",
"bytes": "20676"
},
{
"name": "HTML",
"bytes": "417"
},
{
"name": "Makefile",
"bytes": "303"
},
{
"name": "Python",
"bytes": "1481199"
},
{
"name": "QML",
"bytes": "266"
},
{
"name": "Shell",
"bytes": "93441"
}
],
"symlink_target": ""
}
|
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
brandName = "Demisto REST API"
instanceName = demisto.args().get('instanceName')
allInstances = demisto.getModules()
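# Collect the names of all active integration instances whose brand matches
# "Demisto REST API"; the demisto.get() call guards against instances that
# report no state at all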
brandInstances = [instanceName for instanceName in allInstances if allInstances[instanceName]['brand'].lower(
) == brandName.lower() and demisto.get(allInstances[instanceName], 'state') and allInstances[instanceName]['state'] == 'active']
if brandInstances and instanceName in brandInstances:
instance = allInstances.get(instanceName)
instance['name'] = instanceName
demisto.setContext('DemsistoAPIInstances', instance)
demisto.results('yes')
else:
demisto.results('no')
|
{
"content_hash": "c8474c8c0f3d0c17e7e000650c4466a9",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 128,
"avg_line_length": 49.92857142857143,
"alnum_prop": 0.7625178826895566,
"repo_name": "VirusTotal/content",
"id": "8056f628dd8d8d3d2ea875eb6a06cfa03e744b1b",
"size": "699",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Packs/XSOAR-SimpleDevToProd/Scripts/IsDemistoRestAPIInstanceAvailable/IsDemistoRestAPIInstanceAvailable.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "2146"
},
{
"name": "HTML",
"bytes": "205901"
},
{
"name": "JavaScript",
"bytes": "1584075"
},
{
"name": "PowerShell",
"bytes": "442288"
},
{
"name": "Python",
"bytes": "47594464"
},
{
"name": "Rich Text Format",
"bytes": "480911"
},
{
"name": "Shell",
"bytes": "108066"
},
{
"name": "YARA",
"bytes": "1185"
}
],
"symlink_target": ""
}
|
from aorm import *
orm.connect_sqlite3( 'test.db' )
#
# Models
#
class Users(Model):
@property
def posts(self):
return Posts.all({
'author':self.id,
})
class Posts(Model):
pass
class Raw(Model):
pass
#
# Helpers
#
def mkuser(email, password):
u=Users.create()
u.email=email
u.password=password
u.save()
return u
def mkpost(slug, title, content, user):
p=Posts.create()
p.slug=slug
p.title=title
p.content=content
p.author=user.id
p.save()
return p
#
# Real test
#
u1 = mkuser('i@some.where', 'my password')
u2 = mkuser('i@some.where.else', 'my other password')
p1 = mkpost('post1', 'post1', '''this is a post''', u1)
p2 = mkpost('post2', 'post2', '''this is another post''', u2)
p3 = mkpost('post3', 'post3', '''this is the third post''', u1)
#
# Test #1
#
print("# Expected result: post1 post3")
test1 = Users.one({ 'email':'i@some.where' })
for post in test1.posts:
print( post.slug )
#
# Test #2
#
print("# Expected result: none")
test2 = Users.one({ 'id':2 })
for post in test2.posts:
post.delete()
for post in Posts.all({ 'author':2 }):
print(post.slug)
#
# Test #3
#
print("# Expected results: 2 2")
print("Users.count after tests: %d" % Users.count())
print("Posts.count after tests: %d" % Posts.count())
|
{
"content_hash": "9ef2070ff19cb2f80fb1cb9971f49848",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 63,
"avg_line_length": 16.227848101265824,
"alnum_prop": 0.6326053042121685,
"repo_name": "maxdoom-com/aorm",
"id": "bccddc432a5313d7a49fc06d604d9350cd76be72",
"size": "1308",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/py27_sqlite/test.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "Makefile",
"bytes": "230"
},
{
"name": "Python",
"bytes": "22450"
}
],
"symlink_target": ""
}
|
"""
KubeVirt API
This is KubeVirt API an add-on for Kubernetes.
OpenAPI spec version: 1.0.0
Contact: kubevirt-dev@googlegroups.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1FeatureState(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'enabled': 'bool'
}
attribute_map = {
'enabled': 'enabled'
}
def __init__(self, enabled=None):
"""
V1FeatureState - a model defined in Swagger
"""
self._enabled = None
if enabled is not None:
self.enabled = enabled
@property
def enabled(self):
"""
Gets the enabled of this V1FeatureState.
Enabled determines if the feature should be enabled or disabled on the guest. Defaults to true.
:return: The enabled of this V1FeatureState.
:rtype: bool
"""
return self._enabled
@enabled.setter
def enabled(self, enabled):
"""
Sets the enabled of this V1FeatureState.
Enabled determines if the feature should be enabled or disabled on the guest. Defaults to true.
:param enabled: The enabled of this V1FeatureState.
:type: bool
"""
self._enabled = enabled
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1FeatureState):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
{
"content_hash": "c95195ccf0b6a9763903d84da5154cef",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 103,
"avg_line_length": 25.69918699186992,
"alnum_prop": 0.5336918696614995,
"repo_name": "kubevirt/client-python",
"id": "a12f43c6257d249e2821b45fdfb0329dba9bec41",
"size": "3178",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kubevirt/models/v1_feature_state.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4224980"
},
{
"name": "Shell",
"bytes": "2209"
}
],
"symlink_target": ""
}
|
"""
EventAdmin shell commands
Provides commands to the Pelix shell to work with the EventAdmin service
:author: Thomas Calmant
:copyright: Copyright 2016, Thomas Calmant
:license: Apache License 2.0
:version: 0.6.4
..
Copyright 2016 Thomas Calmant
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Shell constants
from pelix.shell import SERVICE_SHELL_COMMAND
# iPOPO Decorators
from pelix.ipopo.decorators import ComponentFactory, Requires, Provides, \
Instantiate
import pelix.services
# ------------------------------------------------------------------------------
# Module version
__version_info__ = (0, 6, 4)
__version__ = ".".join(str(x) for x in __version_info__)
# Documentation strings format
__docformat__ = "restructuredtext en"
# -----------------------------------------------------------------------------
@ComponentFactory("eventadmin-shell-commands-factory")
@Requires("_events", pelix.services.SERVICE_EVENT_ADMIN)
@Provides(SERVICE_SHELL_COMMAND)
@Instantiate("eventadmin-shell-commands")
class EventAdminCommands(object):
"""
EventAdmin shell commands
"""
def __init__(self):
"""
Sets up members
"""
# Injected services
self._events = None
@staticmethod
def get_namespace():
"""
Retrieves the name space of this command handler
"""
return "event"
def get_methods(self):
"""
Retrieves the list of tuples (command, method) for this command handler
"""
return [("send", self.send),
("post", self.post)]
def send(self, io_handler, topic, **kwargs):
"""
Sends an event (blocking)
"""
self._events.send(topic, kwargs)
def post(self, io_handler, topic, **kwargs):
"""
Posts an event (asynchronous)
"""
self._events.post(topic, kwargs)
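# A usage sketch (assumed Pelix shell syntax): typing "event.send my/topic key=value"
# in the shell would invoke send() with topic="my/topic" and kwargs={"key": "value"};
# "event.post" behaves the same but returns without waiting for delivery.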
|
{
"content_hash": "3e131dcbef3a9a91443143309302cf29",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 80,
"avg_line_length": 27.689655172413794,
"alnum_prop": 0.6147779161477792,
"repo_name": "isandlaTech/cohorte-devtools",
"id": "5e2053e1df56fe40cd2c87385073c1aa4e209436",
"size": "2463",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "qualifier/deploy/cohorte-home/repo/pelix/shell/eventadmin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "151318"
},
{
"name": "HTML",
"bytes": "113064"
},
{
"name": "Java",
"bytes": "172793"
},
{
"name": "JavaScript",
"bytes": "2165497"
},
{
"name": "Python",
"bytes": "13926564"
},
{
"name": "Shell",
"bytes": "1490"
}
],
"symlink_target": ""
}
|
from mds.api.v1 import v2compatlib
from mds.mrs.models import SavedProcedure
from mds.core.models import Subject
__all__ = ['run']
def run():
subject = Subject.objects.get(uuid="26da580a-fc75-4f1a-92b2-be54e2865ceb")
sp = SavedProcedure.objects.get(guid="5bcf6e9d-73a3-48db-9359-e1f110d3019f")
encounter, observations = v2compatlib.sp_to_encounter(sp,subject)
print encounter
for obs in observations:
print obs
|
{
"content_hash": "6229e5e049f75f3ed3ea9d8c7f53fe6f",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 80,
"avg_line_length": 34,
"alnum_prop": 0.7352941176470589,
"repo_name": "dekatzenel/team-k",
"id": "c87fa6b6b71a054e0c9a1299fd0298433a91fe10",
"size": "442",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mds/api/v1/tests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
from __future__ import absolute_import
from traits.trait_value import *
|
{
"content_hash": "8512fbbf434e3268ea90dd8213c2908f",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 38,
"avg_line_length": 36,
"alnum_prop": 0.7777777777777778,
"repo_name": "enthought/etsproxy",
"id": "9f8e491c6c763e080b82cd5102efb7f933256517",
"size": "87",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "enthought/traits/trait_value.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "363714"
}
],
"symlink_target": ""
}
|
from __future__ import division
from future.builtins import zip
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import collections
from matplotlib import transforms
from matplotlib import ticker
__all__ = ['hinton']
# TODO: Add yutils.mpl._coll to mpltools and use that for square collection.
class SquareCollection(collections.RegularPolyCollection):
"""Return a collection of squares."""
def __init__(self, **kwargs):
super(SquareCollection, self).__init__(4, rotation=np.pi/4., **kwargs)
def get_transform(self):
"""Return transform scaling circle areas to data space."""
ax = self.axes
pts2pixels = 72.0 / ax.figure.dpi
scale_x = pts2pixels * ax.bbox.width / ax.viewLim.width
scale_y = pts2pixels * ax.bbox.height / ax.viewLim.height
return transforms.Affine2D().scale(scale_x, scale_y)
def hinton(inarray, max_value=None, use_default_ticks=True):
"""Plot Hinton diagram for visualizing the values of a 2D array.
Plot representation of an array with positive and negative values
represented by white and black squares, respectively. The size of each
square represents the magnitude of each value.
Unlike the hinton demo in the matplotlib gallery [1]_, this implementation
uses a RegularPolyCollection to draw squares, which is much more efficient
than drawing individual Rectangles.
.. note::
This function inverts the y-axis to match the origin for arrays.
.. [1] http://matplotlib.sourceforge.net/examples/api/hinton_demo.html
Parameters
----------
inarray : array
Array to plot.
max_value : float
Any *absolute* value larger than `max_value` will be represented by a
unit square.
    use_default_ticks : boolean
        If False, disable tick generation so ticks can be set outside this
        function.
"""
ax = plt.gca()
ax.set_axis_bgcolor('gray')
# make sure we're working with a numpy array, not a numpy matrix
inarray = np.asarray(inarray)
height, width = inarray.shape
if max_value is None:
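        # Default: round the largest absolute value up to the next power of
        # two, so square sizes are normalized against a stable maximum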
max_value = 2**np.ceil(np.log(np.max(np.abs(inarray)))/np.log(2))
values = np.clip(inarray/max_value, -1, 1)
rows, cols = np.mgrid[:height, :width]
pos = np.where(values > 0)
neg = np.where(values < 0)
for idx, color in zip([pos, neg], ['white', 'black']):
if len(idx[0]) > 0:
xy = list(zip(cols[idx], rows[idx]))
circle_areas = np.pi / 2 * np.abs(values[idx])
squares = SquareCollection(sizes=circle_areas,
offsets=xy, transOffset=ax.transData,
facecolor=color, edgecolor=color)
ax.add_collection(squares, autolim=True)
ax.axis('scaled')
# set data limits instead of using xlim, ylim.
ax.set_xlim(-0.5, width-0.5)
ax.set_ylim(height-0.5, -0.5)
if use_default_ticks:
ax.xaxis.set_major_locator(IndexLocator())
ax.yaxis.set_major_locator(IndexLocator())
class IndexLocator(ticker.Locator):
def __init__(self, max_ticks=10):
self.max_ticks = max_ticks
def __call__(self):
"""Return the locations of the ticks."""
dmin, dmax = self.axis.get_data_interval()
if dmax < self.max_ticks:
step = 1
else:
step = np.ceil(dmax / self.max_ticks)
return self.raise_if_exceeds(np.arange(0, dmax, step))
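# A minimal usage sketch with hypothetical data, assuming this module is
# importable as mpltools.special (per its location in the package):
#
#     import numpy as np
#     import matplotlib.pyplot as plt
#     from mpltools.special import hinton
#
#     hinton(np.random.randn(20, 20))
#     plt.show()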
|
{
"content_hash": "dbdbd90afb0bfde6b6f287da24011672",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 78,
"avg_line_length": 35.40816326530612,
"alnum_prop": 0.6389048991354467,
"repo_name": "matteoicardi/mpltools",
"id": "08c3a9565536ab5452e7ec5f65e8e1fb5b4112ab",
"size": "3470",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mpltools/special/hinton.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "144"
},
{
"name": "Python",
"bytes": "73729"
}
],
"symlink_target": ""
}
|
import os
from django.utils import unittest
from django.test import RequestFactory, TestCase
from django.conf import settings
import django.template.context
from django.template import Template, Context, RequestContext
from django.template.response import (TemplateResponse, SimpleTemplateResponse,
ContentNotRenderedError)
def test_processor(request):
return {'processors': 'yes'}
test_processor_name = 'regressiontests.templates.response.test_processor'
# A test middleware that installs a temporary URLConf
class CustomURLConfMiddleware(object):
def process_request(self, request):
request.urlconf = 'regressiontests.templates.alternate_urls'
class BaseTemplateResponseTest(unittest.TestCase):
# tests rely on fact that global context
# processors should only work when RequestContext is used.
def setUp(self):
self.factory = RequestFactory()
self._old_processors = settings.TEMPLATE_CONTEXT_PROCESSORS
self._old_TEMPLATE_DIRS = settings.TEMPLATE_DIRS
settings.TEMPLATE_CONTEXT_PROCESSORS = [test_processor_name]
settings.TEMPLATE_DIRS = (
os.path.join(
os.path.dirname(__file__),
'templates'
),
)
        # Force re-evaluation of the context processor list
django.template.context._standard_context_processors = None
def tearDown(self):
settings.TEMPLATE_DIRS = self._old_TEMPLATE_DIRS
settings.TEMPLATE_CONTEXT_PROCESSORS = self._old_processors
        # Force re-evaluation of the context processor list
django.template.context._standard_context_processors = None
class SimpleTemplateResponseTest(BaseTemplateResponseTest):
def _response(self, template='foo', *args, **kwargs):
return SimpleTemplateResponse(Template(template), *args, **kwargs)
def test_template_resolving(self):
response = SimpleTemplateResponse('first/test.html')
response.render()
self.assertEqual('First template\n', response.content)
templates = ['foo.html', 'second/test.html', 'first/test.html']
response = SimpleTemplateResponse(templates)
response.render()
self.assertEqual('Second template\n', response.content)
response = self._response()
response.render()
self.assertEqual(response.content, 'foo')
def test_explicit_baking(self):
# explicit baking
response = self._response()
self.assertFalse(response.is_rendered)
response.render()
self.assertTrue(response.is_rendered)
def test_render(self):
# response is not re-rendered without the render call
response = self._response().render()
self.assertEqual(response.content, 'foo')
# rebaking doesn't change the rendered content
response.template_name = Template('bar{{ baz }}')
response.render()
self.assertEqual(response.content, 'foo')
# but rendered content can be overridden by manually
# setting content
response.content = 'bar'
self.assertEqual(response.content, 'bar')
def test_iteration_unrendered(self):
# unrendered response raises an exception on iteration
response = self._response()
self.assertFalse(response.is_rendered)
def iteration():
for x in response:
pass
self.assertRaises(ContentNotRenderedError, iteration)
self.assertFalse(response.is_rendered)
def test_iteration_rendered(self):
# iteration works for rendered responses
response = self._response().render()
res = [x for x in response]
self.assertEqual(res, ['foo'])
def test_content_access_unrendered(self):
# unrendered response raises an exception when content is accessed
response = self._response()
self.assertFalse(response.is_rendered)
self.assertRaises(ContentNotRenderedError, lambda: response.content)
self.assertFalse(response.is_rendered)
def test_content_access_rendered(self):
# rendered response content can be accessed
response = self._response().render()
self.assertEqual(response.content, 'foo')
def test_set_content(self):
        # content can be overridden
response = self._response()
self.assertFalse(response.is_rendered)
response.content = 'spam'
self.assertTrue(response.is_rendered)
self.assertEqual(response.content, 'spam')
response.content = 'baz'
self.assertEqual(response.content, 'baz')
def test_dict_context(self):
response = self._response('{{ foo }}{{ processors }}',
{'foo': 'bar'})
self.assertEqual(response.context_data, {'foo': 'bar'})
response.render()
self.assertEqual(response.content, 'bar')
def test_context_instance(self):
response = self._response('{{ foo }}{{ processors }}',
Context({'foo': 'bar'}))
self.assertEqual(response.context_data.__class__, Context)
response.render()
self.assertEqual(response.content, 'bar')
def test_kwargs(self):
response = self._response(content_type = 'application/json', status=504)
self.assertEqual(response['content-type'], 'application/json')
self.assertEqual(response.status_code, 504)
def test_args(self):
response = SimpleTemplateResponse('', {}, 'application/json', 504)
self.assertEqual(response['content-type'], 'application/json')
self.assertEqual(response.status_code, 504)
class TemplateResponseTest(BaseTemplateResponseTest):
def _response(self, template='foo', *args, **kwargs):
return TemplateResponse(self.factory.get('/'), Template(template),
*args, **kwargs)
def test_render(self):
response = self._response('{{ foo }}{{ processors }}').render()
self.assertEqual(response.content, 'yes')
def test_render_with_requestcontext(self):
response = self._response('{{ foo }}{{ processors }}',
{'foo': 'bar'}).render()
self.assertEqual(response.content, 'baryes')
def test_render_with_context(self):
response = self._response('{{ foo }}{{ processors }}',
Context({'foo': 'bar'})).render()
self.assertEqual(response.content, 'bar')
def test_kwargs(self):
response = self._response(content_type = 'application/json',
status=504)
self.assertEqual(response['content-type'], 'application/json')
self.assertEqual(response.status_code, 504)
def test_args(self):
response = TemplateResponse(self.factory.get('/'), '', {},
'application/json', 504)
self.assertEqual(response['content-type'], 'application/json')
self.assertEqual(response.status_code, 504)
def test_custom_app(self):
response = self._response('{{ foo }}', current_app="foobar")
rc = response.resolve_context(response.context_data)
self.assertEqual(rc.current_app, 'foobar')
class CustomURLConfTest(TestCase):
urls = 'regressiontests.templates.urls'
def setUp(self):
self.old_MIDDLEWARE_CLASSES = settings.MIDDLEWARE_CLASSES
settings.MIDDLEWARE_CLASSES = list(settings.MIDDLEWARE_CLASSES) + [
'regressiontests.templates.response.CustomURLConfMiddleware'
]
def tearDown(self):
settings.MIDDLEWARE_CLASSES = self.old_MIDDLEWARE_CLASSES
def test_custom_urlconf(self):
response = self.client.get('/template_response_view/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, 'This is where you can find the snark: /snark/')
|
{
"content_hash": "adca60501856ca01d7cf35d16e6fc5b9",
"timestamp": "",
"source": "github",
"line_count": 208,
"max_line_length": 91,
"avg_line_length": 38.13461538461539,
"alnum_prop": 0.6433434190620272,
"repo_name": "heracek/django-nonrel",
"id": "f658b35ac38b309a5c7b61f62f565fa6242d3bc5",
"size": "7932",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/regressiontests/templates/response.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "99448"
},
{
"name": "Python",
"bytes": "6777170"
},
{
"name": "Shell",
"bytes": "3519"
}
],
"symlink_target": ""
}
|
import json
from json import JSONEncoder
import logging
import re
import importlib
import inspect
from ._token import Token
from ._token_kind import TokenKind
from ._version import VERSION
from ._diagnostic import Diagnostic
JSON_FIELDS = ["Name", "Version", "VersionString", "Navigation", "Tokens", "Diagnostics", "PackageName"]
HEADER_TEXT = "# Package is parsed using api-stub-generator(version:{})".format(VERSION)
TYPE_NAME_REGEX = re.compile("(~?[a-zA-Z\d._]+)")
TYPE_OR_SEPERATOR = " or "
# Lint warnings
SOURCE_LINK_NOT_AVAILABLE = "Source definition link is not available for [{0}]. Please check and ensure the type is a fully qualified name in the docstring"
RETURN_TYPE_MISMATCH = "Return type in the type hint does not match the return type in the docstring"
class ApiView:
"""Entity class that holds API view for all namespaces within a package
:param NodeIndex: nodeindex
:param str: pkg_name
:param str: pkg_version
:param str: ver_string
"""
def __init__(self, nodeindex, pkg_name="", pkg_version="", namespace = ""):
self.Name = pkg_name
self.Version = 0
self.VersionString = ""
self.Language = "Python"
self.Tokens = []
self.Navigation = []
self.Diagnostics = []
self.indent = 0
self.namespace = namespace
self.nodeindex = nodeindex
self.PackageName = pkg_name
self.add_literal(HEADER_TEXT)
self.add_new_line(2)
def add_token(self, token):
self.Tokens.append(token)
def begin_group(self, group_name=""):
"""Begin a new group in API view by shifting to right
"""
self.indent += 1
def end_group(self):
"""End current group by moving indent to left
"""
if not self.indent:
            raise ValueError("Invalid indentation")
self.indent -= 1
def add_whitespace(self):
if self.indent:
self.add_token(Token(" " * (self.indent * 4)))
def add_space(self):
self.add_token(Token(" ", TokenKind.Whitespace))
def add_new_line(self, additional_line_count=0):
self.add_token(Token("", TokenKind.Newline))
for n in range(additional_line_count):
self.add_space()
self.add_token(Token("", TokenKind.Newline))
def add_punctuation(self, value, prefix_space=False, postfix_space=False):
if prefix_space:
self.add_space()
self.add_token(Token(value, TokenKind.Punctuation))
if postfix_space:
self.add_space()
def add_line_marker(self, text):
token = Token("", TokenKind.LineIdMarker)
token.set_definition_id(text)
self.add_token(token)
def add_text(self, id, text):
token = Token(text, TokenKind.Text)
token.DefinitionId = id
self.add_token(token)
def add_keyword(self, keyword, prefix_space=False, postfix_space=False):
if prefix_space:
self.add_space()
self.add_token(Token(keyword, TokenKind.Keyword))
if postfix_space:
self.add_space()
def add_type(self, type_name, line_id=None):
        # This method replaces fully qualified internal types with short names and generates tokens
if not type_name:
return
type_name = type_name.replace(":class:", "")
logging.debug("Processing type {}".format(type_name))
        # Check if multiple types are listed with the 'or' separator
# Encode multiple types with or separator into Union
if TYPE_OR_SEPERATOR in type_name:
types = [t.strip() for t in type_name.split(TYPE_OR_SEPERATOR) if t != TYPE_OR_SEPERATOR]
# Make a Union of types if multiple types are present
type_name = "Union[{}]".format(", ".join(types))
self._add_type_token(type_name, line_id)
def _add_token_for_type_name(self, type_name, line_id = None):
logging.debug("Generating tokens for type name {}".format(type_name))
token = Token(type_name, TokenKind.TypeName)
type_full_name = type_name[1:] if type_name.startswith("~") else type_name
token.set_value(type_full_name.split(".")[-1])
navigate_to_id = self.nodeindex.get_id(type_full_name)
if navigate_to_id:
token.NavigateToId = navigate_to_id
elif type_name.startswith("~") and line_id:
            # Check if the type name is importable. If the type name is incorrect in the docstring then it won't be importable.
            # If the type name is importable then it's a valid type name. The source link won't be available if the type is
            # from a different package.
if not is_valid_type_name(type_full_name):
# Navigation ID is missing for internal type, add diagnostic error
self.add_diagnostic(SOURCE_LINK_NOT_AVAILABLE.format(token.Value), line_id)
self.add_token(token)
def _add_type_token(self, type_name, line_id = None):
# parse to get individual type name
logging.debug("Generating tokens for type {}".format(type_name))
types = re.search(TYPE_NAME_REGEX, type_name)
if types:
# Generate token for the prefix before internal type
# process internal type
# process post fix of internal type recursively to find replace more internal types
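            # e.g. a hypothetical "List[~mypkg.MyClass]" is emitted as the type
            # token "List", punctuation "[", the type token "~mypkg.MyClass",
            # and punctuation "]" through the recursive calls below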
parsed_type = types.groups()[0]
index = type_name.find(parsed_type)
prefix = type_name[:index]
if prefix:
self.add_punctuation(prefix)
# process parsed type name. internal or built in
self._add_token_for_type_name(parsed_type)
postfix = type_name[index + len(parsed_type):]
# process remaining string in type recursively
self._add_type_token(postfix, line_id)
else:
# This is required group ending punctuations
self.add_punctuation(type_name)
def add_diagnostic(self, text, line_id):
self.Diagnostics.append(Diagnostic(line_id, text))
def add_member(self, name, id):
token = Token(name, TokenKind.MemberName)
token.DefinitionId = id
self.add_token(token)
def add_stringliteral(self, value):
self.add_token(Token("\u0022{}\u0022".format(value), TokenKind.StringLiteral))
def add_literal(self, value):
self.add_token(Token(value, TokenKind.Literal))
def add_navigation(self, navigation):
self.Navigation.append(navigation)
class APIViewEncoder(JSONEncoder):
"""Encoder to generate json for APIview object
"""
def default(self, obj):
obj_dict = {}
if (
isinstance(obj, ApiView)
or isinstance(obj, Token)
or isinstance(obj, Navigation)
or isinstance(obj, NavigationTag)
or isinstance(obj, Diagnostic)
):
# Remove fields in APIview that are not required in json
if isinstance(obj, ApiView):
for key in JSON_FIELDS:
if key in obj.__dict__:
obj_dict[key] = obj.__dict__[key]
elif isinstance(obj, Token):
obj_dict = obj.__dict__
# Remove properties from serialization to reduce size if property is not set
if not obj.DefinitionId:
del obj_dict["DefinitionId"]
if not obj.NavigateToId:
del obj_dict["NavigateToId"]
elif isinstance(obj, Diagnostic):
obj_dict = obj.__dict__
if not obj.HelpLinkUri:
del obj_dict["HelpLinkUri"]
else:
obj_dict = obj.__dict__
return obj_dict
elif isinstance(obj, TokenKind) or isinstance(obj, Kind):
return obj.value # {"__enum__": obj.value}
else:
try:
                return JSONEncoder.default(self, obj)
except:
logging.error("Failed to serialize using default serialization for {}. Serializing using object dict.".format(obj))
return obj_dict
class NavigationTag:
def __init__(self, kind):
self.TypeKind = kind
class Kind:
type_class = "class"
type_enum = "enum"
type_method = "method"
type_module = "namespace"
type_package = "assembly"
class Navigation:
"""Navigation model to be added into tokens files. List of Navigation object represents the tree panel in tool"""
def __init__(self, text, nav_id):
self.Text = text
self.NavigationId = nav_id
self.ChildItems = []
self.Tags = None
def set_tag(self, tag):
self.Tags = tag
def add_child(self, child):
self.ChildItems.append(child)
def is_valid_type_name(type_name):
try:
module_end_index = type_name.rfind(".")
if module_end_index > 0:
module_name = type_name[:module_end_index]
class_name = type_name[module_end_index+1:]
mod = importlib.import_module(module_name)
return class_name in [x[0] for x in inspect.getmembers(mod)]
except:
logging.error("Failed to import {}".format(type_name))
return False
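# A minimal sketch of the importability check above (hypothetical inputs):
#
#     is_valid_type_name("collections.OrderedDict")  # True: member of an importable module
#     is_valid_type_name("collections.NoSuchClass")  # False: module imports, member missing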
|
{
"content_hash": "d851df06d08421a261dbea0ee4843c33",
"timestamp": "",
"source": "github",
"line_count": 262,
"max_line_length": 146,
"avg_line_length": 35.37786259541985,
"alnum_prop": 0.6013593699428201,
"repo_name": "tg-msft/azure-sdk-tools",
"id": "cd2edf39c44edb6d8e8cd36457764d6ae18ff3e2",
"size": "9269",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python-packages/api-stub-generator/apistub/_apiview.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "816"
},
{
"name": "C#",
"bytes": "1358017"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "868"
},
{
"name": "Go",
"bytes": "35301"
},
{
"name": "HTML",
"bytes": "59599"
},
{
"name": "Java",
"bytes": "187077"
},
{
"name": "JavaScript",
"bytes": "9361"
},
{
"name": "PowerShell",
"bytes": "312580"
},
{
"name": "Python",
"bytes": "187175"
},
{
"name": "SCSS",
"bytes": "9152"
},
{
"name": "Shell",
"bytes": "8139"
},
{
"name": "TypeScript",
"bytes": "21823"
}
],
"symlink_target": ""
}
|
"""
Django settings for UCLGoProject project.
Generated by 'django-admin startproject' using Django 1.11.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
from decouple import config
from dj_database_url import parse as db_url
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
LOGIN_URL = '/admin/login/'
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret! Generate one with:
# import random
# ''.join(random.SystemRandom().choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50))
SECRET_KEY = config('SECRET_KEY')
# Default password for creating a participant:
SENHA_PADRAO_USER = config('SENHA_PADRAO_USER', default='0948753045#5@9586$#%*')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config('DEBUG', default=False, cast=bool)
# AWS S3 backend used to serve static files:
AWS_S3 = config('AWS_S3', default=False, cast=bool)
ALLOWED_HOSTS = ['uclgo.sa-east-1.elasticbeanstalk.com',
'uclgo-test.sa-east-1.elasticbeanstalk.com',
'127.0.0.1', 'localhost',
'uclgo-test.agora.vix.br',
'uclgo.agora.vix.br']
# Application definition
INSTALLED_APPS = [
'admin_tools.menu',
'dal',
'dal_select2',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'storages',
'rest_framework',
'rest_framework_tracking',
'UCLGo.apps.UclgoConfig',
# 'debug_toolbar',
]
# INTERNAL_IPS = ['127.0.0.1',]
MIDDLEWARE = [
# 'debug_toolbar.middleware.DebugToolbarMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'UCLGoProject.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
# 'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
'loaders': [
# 'django.template.loaders.filesystem.load_template_source',
# 'django.template.loaders.app_directories.load_template_source',
'admin_tools.template_loaders.Loader',
'django.template.loaders.app_directories.Loader',
'django.template.loaders.filesystem.Loader',
],
},
},
]
WSGI_APPLICATION = 'UCLGoProject.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
# }
# }
DATABASES = {
'default': config('DATABASE_URL',
default='sqlite:///' + os.path.join(BASE_DIR, 'db.sqlite3'),
cast=db_url
),
}
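# e.g. DATABASE_URL=postgres://user:password@host:5432/dbname (hypothetical URL)
# is parsed by dj_database_url into the Django DATABASES dict above.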
if DATABASES['default']['ENGINE'] == 'django.db.backends.mysql':
DATABASES['default']['OPTIONS'] = {'init_command': "SET sql_mode='STRICT_TRANS_TABLES'",}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': (config('API_AUTH', default='rest_framework.permissions.AllowAny', cast=str),),
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework_jwt.authentication.JSONWebTokenAuthentication',
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.BasicAuthentication',
),
}
JWT_AUTH = {
'JWT_VERIFY_EXPIRATION': False,
}
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'pt-br'
# TIME_ZONE = 'UTC'
TIME_ZONE = 'America/Sao_Paulo'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# https://stackoverflow.com/questions/6418072/accessing-media-files-in-django
# https://timmyomahony.com/blog/static-vs-media-and-root-vs-path-in-django/
# MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# MEDIA_URL = '/media/'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
# STATIC_URL = '/static/'
# STATIC_ROOT = 'static'
if AWS_S3:
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
STATICFILES_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage'
AWS_STORAGE_BUCKET_NAME = config('AWS_STORAGE_BUCKET_NAME')
AWS_ACCESS_KEY_ID = config('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = config('AWS_SECRET_ACCESS_KEY')
AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME
STATICFILES_LOCATION = 'static'
STATIC_URL = "https://%s/%s/" % (AWS_S3_CUSTOM_DOMAIN, STATICFILES_LOCATION)
MEDIAFILES_LOCATION = 'media'
MEDIA_URL = "https://%s/%s/" % (AWS_S3_CUSTOM_DOMAIN, MEDIAFILES_LOCATION)
STATICFILES_STORAGE = 'UCLGoProject.custom_storages.StaticStorage'
DEFAULT_FILE_STORAGE = 'UCLGoProject.custom_storages.MediaStorage'
else:
default_staticurl = '/static/'
default_mediaurl = '/media/'
STATIC_URL = config('STATIC_URL', default=default_staticurl)
STATIC_ROOT = os.path.join(BASE_DIR, "..", "static")
MEDIA_URL = config('MEDIA_URL', default=default_mediaurl)
MEDIA_ROOT = os.path.join(BASE_DIR, ".." ,"media")
ADMIN_TOOLS_MENU = 'UCLGoProject.menu.CustomMenu'
|
{
"content_hash": "195030243582023a51257171bfdd0942",
"timestamp": "",
"source": "github",
"line_count": 218,
"max_line_length": 113,
"avg_line_length": 30.844036697247706,
"alnum_prop": 0.6702855443188578,
"repo_name": "ribeiro-ucl/UCLGoProject",
"id": "e01ffc741cda0c71c12f786a5bd2b03b05c0dc4d",
"size": "6727",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "UCLGoProject/settings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "83351"
},
{
"name": "HTML",
"bytes": "22973"
},
{
"name": "JavaScript",
"bytes": "113614"
},
{
"name": "Python",
"bytes": "103832"
}
],
"symlink_target": ""
}
|
import autoinstall_lib as atl
from waflib import Logs
import os.path as osp
from waflib import Context
from waflib import Errors
import sys
import waflib
version = "lapack-3.3.1"
tool = "lapack-3.3.1"
lapack_funcs = "dtrsv dpotrf dpotrs dpotri dtrtri dtrmm dtrmv dgeqrf dormqr dsyev dgesvd dsymv dgemv dgemm dsyrk dsyr2k daxpy dtrsm dsymm dsyr ddot"
def options(ctx):
atl.add_lib_option("lapack",ctx,install=True)
grp = ctx.parser.get_option_group("--lapack_install")
grp.add_option("--lapack_mkl",action="store",default="",help="if lapack is mkl, location of the mkl install")
grp.add_option("--lapack_mkl_version",action="store",default="",help="version of the mkl lib (should be 10.3, 10.2, 10.1 or 10.0)")
grp.add_option("--lapack_apple",action="store_true",default=False,help="use apple version of blas/lapack")
def do_include(ctx,ptrn="%s_"):
f=open(osp.join(ctx.env.PREFIX,"include/lapack_clik.h"),"w")
for fnc in lapack_funcs.split():
print >>f,("#define %s "+ptrn)%(fnc,fnc)
print >>f,extra_inc
f.close()
def configure(ctx):
#always assume that I need a dedicated include file.
if ctx.options.lapack_apple:
ctx.start_msg("Check apple lapack")
if sys.platform.lower()!="darwin":
ctx.end_msg("not on darwin ! Got '%s'"%sys.platform,color="YELLOW")
raise Errors.WafError("cannot find apple lapack")
ctx.end_msg("ok")
lapack_extradefs = ["HAS_LAPACK"]
lapack_libs = ["BLAS","LAPACK"]
lapack_includes = ["lapack_clik.h"]
lapack_extradefs += ["LAPACK_CLIK"]
ctx.options.lapack_include = osp.join(ctx.env.PREFIX,"include")
ctx.options.lapack_lib = "/System/Library/Frameworks/Accelerate.framework/Versions/Current/Frameworks/vecLib.framework/Versions/Current"
do_include(ctx,"%s_")
elif ctx.options.lapack_mkl:
# parse version
ctx.start_msg("Check mkl version")
if ctx.options.lapack_mkl_version.strip()[:4] not in ("10.0","10.1","10.2","10.3"):
ctx.end_msg(ctx.options.lapack_mkl_version.strip(),color="YELLOW")
raise Errors.WafError("Cannot understand mkl version '%s'"%ctx.options.lapack_mkl_version.strip())
version = int(ctx.options.lapack_mkl_version.strip()[:4].split(".")[1])
ctx.end_msg("10.%d"%version)
lapack_extradefs = ["HAS_LAPACK"]
lapack_extradefs += ["HAS_MKL"]
lapack_includes = ["mkl_lapack.h","mkl_blas.h"]
lapack_libs = []
tag = sys.platform.lower()
if tag=="darwin":
pass
elif "linux" in tag:
tag="linux"
else:
raise Errors.WafError("unknown platform '%s'"%tag)
tag+="_10.%d"%version
mopt = ctx.env.mopt
if "64" in mopt:
tag+="_64"
else:
tag +="_32"
if sys.platform.lower()!='darwin':
#I need to create my own lapack !
cmdline = """gcc -shared -Bdynamic %(func_list)s -Wl,--start-group %(ars)s -Wl,--end-group %(Lomp)s %(omp)s -o "%(res)s" """
cmdlist = {}
cmdlist["func_list"] = " ".join(["-u %s_"%v for v in lapack_funcs.split()])
cmdlist["ars"] = " ".join([osp.join(mkl_options[tag][0]%(ctx.options.lapack_mkl),"lib%s.a"%v.strip()) for v in mkl_options[tag][1].split("-l") if v.strip() and v.strip()[:3]=="mkl"])
cmdlist["Lomp"] = " ".join("-L%s"%v.strip() for v in ctx.env.LIBPATH_fc_runtime if v.strip())
cmdlist["omp"] = " ".join([v.strip() for v in mkl_options[tag][1].split() if v.strip() and "mkl" not in v])
cmdlist["res"] = osp.join(ctx.env.LIBDIR,ctx.env.cshlib_PATTERN%"clik_mkl")
cmdline = cmdline%cmdlist
#print cmdline
ctx.start_msg("create specific mkl lib")
llgo,llge = ctx.cmd_and_log(cmdline, output=waflib.Context.BOTH)
#print llgo
#print llge
ctx.end_msg(cmdlist["res"])
ctx.options.lapack_link = "-lclik_mkl "+cmdlist["omp"]
ctx.options.lapack_lib = ctx.env.LIBDIR+":".join([""]+ctx.env.LIBPATH_fc_runtime)
ctx.options.lapack_include = ctx.options.lapack_mkl+"/include"
else:
ctx.options.lapack_link = mkl_options[tag][1]
ctx.options.lapack_lib = mkl_options[tag][0]%(ctx.options.lapack_mkl)+":".join([""]+ctx.env.LIBPATH_fc_runtime)
if "framework" in ctx.options.lapack_mkl.lower():
ctx.options.lapack_include = ctx.options.lapack_mkl+"/Headers"
else:
ctx.options.lapack_include = ctx.options.lapack_mkl+"/include"
#try:
# atl.conf_lib(ctx,"lapack",lapack_libs,lapack_funcs.split(),lapack_includes,defines=lapack_extradefs,install=installlapack)
#except Exception,e:
# pass
#lapack_extradefs = ["HAS_LAPACK"]
#lapack_libs = ["BLAS","LAPACK"]
#lapack_includes = ["lapack.h","blas.h"]
#if "mkl" in ctx.options.lapack_lib.lower() or "mkl" in ctx.options.lapack_include.lower() or "mkl" in ctx.options.lapack_link or ctx.options.lapack_mkl:
# ctx.env.mkl = True
# lapack_extradefs += ["HAS_MKL"]
# lapack_includes = ["mkl_lapack.h","mkl_blas.h"]
# if ctx.options.lapack_mkl:
# if ctx.env.has_ifort==False:
# raise Exception("cannot use MKL without ifort")
# if "framework" in ctx.options.lapack_mkl.lower():
# # guess we are on macosx
# # get the path of the framework
# if ctx.options.lapack_mkl[-1] == "/":
# fpath,fname = osp.split(ctx.options.lapack_mkl[:-1])
# else:
# fpath,fname = osp.split(ctx.options.lapack_mkl)
# fname = fname.split(".")[0]
# ctx.options.lapack_include = ctx.options.lapack_mkl+"/Headers"
# ctx.options.lapack_lib = ctx.options.lapack_mkl+"/Libraries/universal"
# if ctx.options.lapack_link=="":
# ctx.options.lapack_link = "-lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core"
# else:
# # assume it's 10 on linux
# # check whether it's 10.3
# if ctx.options.m32:
# libsuffix="/lib/32"
# libdep = "-lmkl_intel"
# else:
# libsuffix="/lib/em64t"
# libdep = "-lmkl_intel_lp64"
# if ctx.options.lapack_link=="":
# ctx.options.lapack_link = "-lmkl_lapack -lmkl_intel_thread -lmkl_core -liomp5 -lm -lpthread -lmkl_def" + libdep
# if not ctx.options.m32 and osp.exists(ctx.options.lapack_mkl+"/lib/intel64"):
# libsuffix="/lib/intel64"
# ctx.options.lapack_link = "-lmkl_intel_thread -lmkl_core -liomp5 -lm -lpthread -lmkl_def" + libdep
# ctx.options.lapack_include=ctx.options.lapack_mkl+"/include"
# ctx.options.lapack_lib=ctx.options.lapack_mkl+libsuffix+":".join([""]+ctx.env.LIBPATH_fc_runtime)
elif atl.upgrade(ctx,"lapack") or ctx.options.lapack_islocal or ctx.options.lapack_forceinstall or atl.shouldIinstall_all(ctx,"lapack"):
ctx.env.append_value("LIBPATH_lapack",ctx.env.LIBPATH_fc_runtime)
ctx.env.append_value("RPATH_lapack",ctx.env.RPATH_fc_runtime)
ctx.env.append_value("LIB_lapack",ctx.env.LIB_fc_runtime)
lapack_libs = ["lapack_clik","blas_clik"]
lapack_includes = ["lapack_clik.h"]
lapack_extradefs = ["HAS_LAPACK"]
lapack_extradefs += ["LAPACK_CLIK"]
else:
lapack_libs = []
lapack_includes = ["lapack_clik.h"]
lapack_extradefs = ["HAS_LAPACK"]
lapack_extradefs += ["LAPACK_CLIK"]
do_include(ctx)
atl.conf_lib(ctx,"lapack",lapack_libs,lapack_funcs.split(),lapack_includes,defines=lapack_extradefs,install=installlapack)
def installlapack(ctx):
filen = version+".tgz"
atl.installsmthg_pre(ctx,"http://www.netlib.org/lapack/"+filen,filen)
from waflib import Utils,Errors
dii = {"FCC":ctx.env.FC,"FCFLAGS":" ".join(ctx.env.FCFLAGS+ctx.env.FCFLAGS_fcshlib),"FLINKFLAGS":" ".join(ctx.env.FCFLAGS+ctx.env.LINKFLAGS_fcshlib),"SO":ctx.env.shsuffix,"MFLAG":" ".join(ctx.env.FCFLAGS) }
Logs.pprint("PINK","build blas")
f=open("build/%s/make.inc"%version,"w")
print >>f,make_inc_blas%dii
f.close()
cmdline = "cd build/%s; make blaslib"%version
if ctx.exec_command(cmdline)!=0:
raise Errors.WafError("Cannot build %s"%version)
Logs.pprint("PINK","build lapack")
f=open("build/%s/make.inc"%version,"w")
print >>f,make_inc_lapack%dii
f.close()
cmdline = "cd build/%s; make lapacklib"%version
if ctx.exec_command(cmdline)!=0:
raise Errors.WafError("Cannot build %s"%version)
import shutil
shutil.copyfile("build/%s/liblapack_clik.%s"%(version,ctx.env.shsuffix), osp.join(ctx.env.LIBDIR,"liblapack_clik.%s"%ctx.env.shsuffix))
shutil.copyfile("build/%s/libblas_clik.%s"%(version,ctx.env.shsuffix), osp.join(ctx.env.LIBDIR,"libblas_clik.%s"%ctx.env.shsuffix))
do_include(ctx)
make_inc_lapack="""
SHELL = /bin/sh
FORTRAN = %(FCC)s %(FCFLAGS)s
OPTS =
DRVOPTS = $(OPTS)
NOOPT = -g -O0
TIMER = INT_CPU_TIME
LOADER = %(FCC)s
LOADOPTS = %(MFLAG)s
BLASLIB = ../../libblas_clik.%(SO)s
ARCH = %(FCC)s
ARCHFLAGS = %(FLINKFLAGS)s -L../ -lblas_clik -o
RANLIB = echo
LAPACKLIB = liblapack_clik.%(SO)s
"""
make_inc_blas="""
SHELL = /bin/sh
FORTRAN = %(FCC)s %(FCFLAGS)s
OPTS =
DRVOPTS = $(OPTS)
NOOPT = -g -O0
TIMER = INT_CPU_TIME
BLASLIB = ../../libblas_clik.%(SO)s
ARCH = %(FCC)s
ARCHFLAGS = %(FLINKFLAGS)s -o
RANLIB = echo
LAPACKLIB = liblapack_clik.%(SO)s
"""
extra_inc = """
void dtrsv(const char *uplo, const char *trans, const char *diag, const int *n,
const double *a, const int *lda, double *x, const int *incx);
void dpotrf( char* uplo, int * n, double* a, int * lda, int * info );
void dpotri( char* uplo, int * n, double* a, int * lda, int * info );
void dgemv(const char *trans, const int *m, const int *n, const double *alpha,
const double *a, const int *lda, const double *x, const int *incx,
const double *beta, double *y, const int *incy);
void dsyrk(const char *uplo, const char *trans, const int *n, const int *k,
const double *alpha, const double *a, const int *lda, const double *beta,
double *c, const int *ldc);
void dsyr2k(const char *uplo, const char *trans, const int *n, const int *k,
const double *alpha, const double *a, const int *lda, const double *b, const int *ldb,
const double *beta, double *c, const int *ldc);
void dgesvd( char* jobu, char* jobvt, int * m, int * n, double* a, int * lda, double* s, double* u, int * ldu, double* vt, int * ldvt, double* work, int * lwork, int * info );
void dgemm(const char *transa, const char *transb, const int *m, const int *n, const int *k,
const double *alpha, const double *a, const int *lda, const double *b, const int *ldb,
const double *beta, double *c, const int *ldc);
void dtrtri( char* uplo, char* diag, int * n, double* a, int * lda, int * info );
void dtrmm(const char *side, const char *uplo, const char *transa, const char *diag,
const int *m, const int *n, const double *alpha, const double *a, const int *lda,
double *b, const int *ldb);
void dtrmv(const char *uplo, const char *transa, const char *diag, const int *n,
const double *a, const int *lda, double *b, const int *incx);
void dgeqrf( int * m, int * n, double* a, int * lda, double* tau, double* work, int * lwork, int * info );
void dormqr( char* side, char* trans, int * m, int * n, int * k, double* a, int * lda, double* tau, double* c, int * ldc, double* work, int * lwork, int * info );
void dsyev( char* jobz, char* uplo, int * n, double* a, int * lda, double* w, double* work, int * lwork, int * info );
void dsymv(const char *uplo, const int *n, const double *alpha, const double *a, const int *lda,
const double *x, const int *incx, const double *beta, double *y, const int *incy);
void daxpy(const int *n, const double *alpha, const double *x, const int *incx, double *y, const int *incy);
void dtrsm(const char *side, const char *uplo, const char *transa, const char *diag,
const int *m, const int *n, const double *alpha, const double *a, const int *lda,
double *b, const int *ldb);
void dsyr(const char *uplo, const int *n, const double *alpha, const double *x, const int *incx,
double *a, const int *lda);
void dsymm(const char *side, const char *uplo, const int *m, const int *n,
const double *alpha, const double *a, const int *lda, const double *b, const int *ldb,
const double *beta, double *c, const int *ldc);
double ddot(int* N,double *DX, int* INCX,double *DY,int* INCY);
void dpotrs(char* UPLO,int * N,int * NRHS,double* A,int* LDA,double* B,int* LDB,double* INFO );
"""
mkl_options = {
"darwin_10.3_64":("%s/lib","-lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lm"),
"darwin_10.2_64":("%s/lib/em64t","-lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lm"),
"darwin_10.1_64":("%s/lib/em64t","-lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lm"),
"darwin_10.0_64":("%s/lib/em64t","-lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lm"),
"linux_10.0_64" :("%s/lib/em64t","-lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lm"),
"linux_10.1_64" :("%s/lib/em64t","-lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lm"),
"linux_10.0_32" :("%s/lib/32","-lmkl_intel -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lm"),
"linux_10.1_32" :("%s/lib/32","-lmkl_intel -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lm"),
"linux_10.2_32" :("%s/lib/32","-lmkl_intel -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lm"),
"linux_10.2_64" :("%s/lib/em64t"," -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lm"),
"linux_10.3_64" :("%s/lib/intel64"," -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lm"),
"linux_10.3_32" :("%s/lib/ia32"," -lmkl_intel -lmkl_intel_thread -lmkl_core -liomp5 -lpthread -lm"),
}
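# Minimal illustrative helper (never called by the build; the sample MKL root
# below is an assumption): shows how a platform/version tag resolves to a
# library directory and a link line via mkl_options.
def _mkl_options_demo(mkl_root="/opt/intel/mkl"):
    libdir_template, link_line = mkl_options["linux_10.3_64"]
    # libdir_template % mkl_root -> "/opt/intel/mkl/lib/intel64"
    return libdir_template % mkl_root, link_line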
|
{
"content_hash": "e817534221d837ffa48bd66dfc36d43c",
"timestamp": "",
"source": "github",
"line_count": 275,
"max_line_length": 208,
"avg_line_length": 49.916363636363634,
"alnum_prop": 0.6431849639396809,
"repo_name": "baudren/montepython_public",
"id": "d087317d5d519ca34170ea39d5cff747b3d9cb00",
"size": "13767",
"binary": false,
"copies": "2",
"ref": "refs/heads/2.2",
"path": "wrapper_wmap/waf_tools/any_lapack.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "5106"
},
{
"name": "C",
"bytes": "83026"
},
{
"name": "Fortran",
"bytes": "2316"
},
{
"name": "Gnuplot",
"bytes": "3263"
},
{
"name": "Makefile",
"bytes": "6379"
},
{
"name": "Python",
"bytes": "639114"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, print_function, unicode_literals
import logging
import os
import sys
import six
from . import config, django, install
def execute():
# Log info and above to console
logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)
config_data = config.parse(sys.argv[1:])
try:
if config_data.plugins:
config.show_plugins()
elif config_data.dump_reqs:
config.show_requirements(config_data)
else:
sys.stdout.write('Creating the project\n'
'Please wait while I install dependencies\n')
if not config_data.no_deps:
if config_data.requirements_file:
install.requirements(
config_data.requirements_file, config_data.pip_options, True
)
else:
install.requirements(
config_data.requirements, config_data.pip_options
)
sys.stdout.write('Dependencies installed\nCreating the project')
install.check_install(config_data)
django.create_project(config_data)
django.patch_settings(config_data)
django.copy_files(config_data)
if not config_data.no_sync:
django.setup_database(config_data)
if config_data.starting_page:
django.load_starting_page(config_data)
if not config_data.requirements_file:
install.write_requirements(config_data)
if config_data.aldryn: # pragma: no cover
sys.stdout.write('Project created!\n')
sys.stdout.write('aldryn boilerplate requires action before '
'you can actually run the project.\n'
'See documentation at '
'http://aldryn-boilerplate.readthedocs.org/'
'for more information.\n')
else:
sys.stdout.write('All done!\n')
sys.stdout.write('Get into "%s" directory and type '
'"python manage.py runserver" to start your '
'project\n' % os.path.abspath(config_data.project_directory))
except Exception as e:
# Clean up your own mess
install.cleanup_directory(config_data)
if six.PY3:
tb = sys.exc_info()[2]
raise EnvironmentError('%s\nDocumentation available at '
'http://djangocms-installer.rtfd.org\n' % e).with_traceback(tb)
else:
raise
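# Illustrative only: the ``djangocms`` console script is the usual entry point
# for execute(), which reads its options from sys.argv[1:]. The arguments
# below are assumptions, not documented defaults:
#   djangocms -p /tmp my_project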
|
{
"content_hash": "2be116e2b96cfa7d917acbb78cebf2d9",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 98,
"avg_line_length": 41.90769230769231,
"alnum_prop": 0.5436857562408223,
"repo_name": "Glasgow2015/team-10",
"id": "b9fca0c3acb99e51f46dce41c54e9c3f930531ce",
"size": "2748",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "env/lib/python2.7/site-packages/djangocms_installer/main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "562501"
},
{
"name": "HTML",
"bytes": "458748"
},
{
"name": "JavaScript",
"bytes": "786940"
},
{
"name": "PHP",
"bytes": "5453"
},
{
"name": "Python",
"bytes": "12350526"
},
{
"name": "Ruby",
"bytes": "990"
},
{
"name": "Shell",
"bytes": "4232"
},
{
"name": "XSLT",
"bytes": "5122"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from setuptools import setup, find_packages
install_requires = [
"Jinja2",
"boto>=2.20.0",
"flask",
"httpretty>=0.6.1",
"requests",
"xmltodict",
"six",
"werkzeug",
]
extras_require = {
# No builtin OrderedDict before 2.7
':python_version=="2.6"': ['ordereddict'],
}
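# With the environment marker above, ``pip install moto`` on a Python 2.6
# interpreter pulls in the ordereddict backport automatically; on 2.7+ the
# extra is skipped.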
setup(
name='moto',
version='0.4.18',
description='A library that allows your python tests to easily'
' mock out the boto library',
author='Steve Pulec',
author_email='spulec@gmail.com',
url='https://github.com/spulec/moto',
entry_points={
'console_scripts': [
'moto_server = moto.server:main',
],
},
packages=find_packages(exclude=("tests", "tests.*")),
install_requires=install_requires,
extras_require=extras_require,
license="Apache",
test_suite="tests",
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing",
],
)
|
{
"content_hash": "6775d5bf112f4229a78a5d45cdcf8de3",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 67,
"avg_line_length": 27.361702127659573,
"alnum_prop": 0.588646967340591,
"repo_name": "2mf/moto",
"id": "a65f5e15ed9d1fb9846d30e2914d602c1ff2a19a",
"size": "1308",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "203"
},
{
"name": "Python",
"bytes": "1548032"
}
],
"symlink_target": ""
}
|
import option
class Quit(option.Option):
"""
grape q
Quits grape.
Usage: grape-q
"""
def __init__(self):
super(Quit, self).__init__()
self._key = "q"
self._section = "Other"
def description(self):
return "Quit."
def execute(self, args):
return True
def setDefaultConfig(self, config):
pass
|
{
"content_hash": "3e6993b9c8aae65f8c4516200171fdaf",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 39,
"avg_line_length": 15.958333333333334,
"alnum_prop": 0.5248041775456919,
"repo_name": "robinson96/GRAPE",
"id": "11621c6a9d75d97dddfd733067e774a70c622e99",
"size": "383",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "vine/quit.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "5564"
},
{
"name": "Python",
"bytes": "1374602"
},
{
"name": "Shell",
"bytes": "24573"
}
],
"symlink_target": ""
}
|
import os
import cv2
import numpy
def getImageFromData():
dataDir = os.path.join(os.path.dirname(__file__), '..', 'data')
files = os.listdir(dataDir)
for f in files:
if f.endswith('.jpg') or f.endswith('.png'):
return os.path.join(dataDir, f)
return None
def showImage(image, wait=0):
cv2.imshow('EbookChecker', image)
cv2.waitKey(wait)
class Range:
def __init__(self, start=-1, end=-1):
self.start = start
self.end = end
def findSameValueHorizontal(src):
assert (src.dtype == numpy.int32)
ranges = []
(rows, cols) = src.shape
srcLine = src[rows - 1]
r = Range()
for i in range(1, cols):
sameValue = srcLine[i] == srcLine[i - 1]
if sameValue and r.start < 0:
r.start = i - 1
elif not sameValue and r.start >= 0:
r.end = i - 1
ranges.append(r)
r = Range()
if r.start >= 0 and r.end < 0:
r.end = cols - 1
ranges.append(r)
return ranges
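# Minimal illustrative helper (not called anywhere; the tiny 3x4 binary image
# is an assumption): blank columns show up as runs of equal values in the
# bottom row of the integral image.
def _demoHorizontalRanges():
    img = numpy.array([[0, 1, 0, 0],
                       [0, 1, 0, 0],
                       [0, 0, 0, 0]], dtype=numpy.uint8)
    integ = cv2.integral(img)  # int32, shape (4, 5)
    # Returns [(0, 1), (2, 4)]: columns 0 and 2-3 carry no ink.
    return [(r.start, r.end) for r in findSameValueHorizontal(integ)]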
def findSameValueVertical(src):
assert (src.dtype == numpy.int32)
ranges = []
r = Range()
(rows, cols) = src.shape
endPos = cols - 1
src0 = src[0, endPos]
for i in range(1, rows):
src1 = src[i, endPos]
sameValue = src0 == src1
if sameValue and r.start < 0:
r.start = i - 1
elif not sameValue and r.start >= 0:
r.end = i - 1
ranges.append(r)
r = Range()
src0 = src1
if r.start >= 0 and r.end < 0:
r.end = rows - 1
return ranges
if __name__ == '__main__':
fname = getImageFromData()
    # Load the image
image = cv2.imread(fname, cv2.IMREAD_GRAYSCALE)
assert image.dtype == numpy.uint8 and len(image.shape) == 2
showImage(image)
    # Binarize the image
binaryMax = 1
binaryThreshold = 128
(ret, binary) = cv2.threshold(image, binaryThreshold, binaryMax, cv2.THRESH_BINARY_INV)
assert len(binary.shape) == 2 and binary.dtype == numpy.uint8
showImage(binary)
cv2.imwrite('binary.jpg', binary)
    # Build the integral image
integral = cv2.integral(binary)
assert len(integral.shape) == 2 and integral.dtype == numpy.int32
showImage(integral)
cv2.imwrite('integral.jpg', integral)
    # Rescale the integral image so it can be displayed
(min, max, minLoc, maxLoc) = cv2.minMaxLoc(integral)
assert min == 0, 'min must be 0'
integralVisible = (integral * 255 / max).astype(numpy.uint8)
showImage(integralVisible)
cv2.imwrite('integralVisible.jpg', integralVisible)
    # Horizontal direction
horizontalRanges = findSameValueHorizontal(integral)
horizontalRangeDst = cv2.merge([image, image, image])
for r in horizontalRanges:
horizontalRangeDst[:, r.start:r.end, :] = (240, 176, 0)
showImage(horizontalRangeDst)
cv2.imwrite('horizontalDst.jpg', horizontalRangeDst)
    # Vertical direction
verticalRanges = findSameValueVertical(integral)
verticalRangeDst = cv2.merge([image, image, image])
for r in verticalRanges:
verticalRangeDst[r.start:r.end, :, :] = (0, 0, 255)
showImage(verticalRangeDst)
cv2.imwrite('verticalDst.jpg', verticalRangeDst)
    # Merge the horizontal and vertical results
for r in verticalRanges:
horizontalRangeDst[r.start:r.end, :, :] = (0, 0, 255)
showImage(horizontalRangeDst)
cv2.imwrite('horizontalVerticalDst.jpg', horizontalRangeDst)
|
{
"content_hash": "e236208ad93a9f361309c583d1dbfcbb",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 91,
"avg_line_length": 23.4375,
"alnum_prop": 0.6038518518518519,
"repo_name": "kyokushin/EBookChecker",
"id": "177cebcaab8601d60bfa3e78bca8efcdf38d27bb",
"size": "3508",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/main.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "176"
},
{
"name": "C++",
"bytes": "23581"
},
{
"name": "CMake",
"bytes": "2438"
},
{
"name": "Python",
"bytes": "3508"
}
],
"symlink_target": ""
}
|
from oslo.config import cfg
import taskflow.engines
from taskflow.patterns import linear_flow
from taskflow.utils import misc
from cinder import exception
from cinder import flow_utils
from cinder.openstack.common import log as logging
from cinder.openstack.common import timeutils
from cinder import policy
from cinder import quota
from cinder import units
from cinder import utils
from cinder.volume.flows import common
from cinder.volume import volume_types
LOG = logging.getLogger(__name__)
ACTION = 'volume:create'
CONF = cfg.CONF
GB = units.GiB
QUOTAS = quota.QUOTAS
# Only when a source is in one of these statuses can we attempt to create a
# volume from it (a source volume or a source snapshot); other statuses,
# 'error' being the common example, cannot be created from.
SNAPSHOT_PROCEED_STATUS = ('available',)
SRC_VOL_PROCEED_STATUS = ('available', 'in-use',)
class ExtractVolumeRequestTask(flow_utils.CinderTask):
"""Processes an api request values into a validated set of values.
This tasks responsibility is to take in a set of inputs that will form
a potential volume request and validates those values against a set of
conditions and/or translates those values into a valid set and then returns
the validated/translated values for use by other tasks.
Reversion strategy: N/A
"""
# This task will produce the following outputs (said outputs can be
# saved to durable storage in the future so that the flow can be
# reconstructed elsewhere and continued).
default_provides = set(['availability_zone', 'size', 'snapshot_id',
'source_volid', 'volume_type', 'volume_type_id',
'encryption_key_id'])
def __init__(self, image_service, az_check_functor=None, **kwargs):
super(ExtractVolumeRequestTask, self).__init__(addons=[ACTION],
**kwargs)
self.image_service = image_service
self.az_check_functor = az_check_functor
if not self.az_check_functor:
self.az_check_functor = lambda az: True
@staticmethod
def _extract_snapshot(snapshot):
"""Extracts the snapshot id from the provided snapshot (if provided).
This function validates the input snapshot dict and checks that the
status of that snapshot is valid for creating a volume from.
"""
snapshot_id = None
if snapshot is not None:
if snapshot['status'] not in SNAPSHOT_PROCEED_STATUS:
msg = _("Originating snapshot status must be one"
" of %s values")
msg = msg % (", ".join(SNAPSHOT_PROCEED_STATUS))
# TODO(harlowja): what happens if the status changes after this
# initial snapshot status check occurs??? Seems like someone
# could delete the snapshot after this check passes but before
# the volume is officially created?
raise exception.InvalidSnapshot(reason=msg)
snapshot_id = snapshot['id']
return snapshot_id
@staticmethod
def _extract_source_volume(source_volume):
"""Extracts the volume id from the provided volume (if provided).
This function validates the input source_volume dict and checks that
the status of that source_volume is valid for creating a volume from.
"""
source_volid = None
if source_volume is not None:
if source_volume['status'] not in SRC_VOL_PROCEED_STATUS:
msg = _("Unable to create a volume from an originating source"
" volume when its status is not one of %s"
" values")
msg = msg % (", ".join(SRC_VOL_PROCEED_STATUS))
# TODO(harlowja): what happens if the status changes after this
# initial volume status check occurs??? Seems like someone
# could delete the volume after this check passes but before
# the volume is officially created?
raise exception.InvalidVolume(reason=msg)
source_volid = source_volume['id']
return source_volid
@staticmethod
def _extract_size(size, source_volume, snapshot):
"""Extracts and validates the volume size.
        This function fills in the size (when not provided) from the
        source_volume or snapshot, validates the resulting value, and returns
        the validated size.
"""
def validate_snap_size(size):
if snapshot and size < snapshot['volume_size']:
msg = _("Volume size %(size)sGB cannot be smaller than"
" the snapshot size %(snap_size)sGB. "
"They must be >= original snapshot size.")
msg = msg % {'size': size,
'snap_size': snapshot['volume_size']}
raise exception.InvalidInput(reason=msg)
def validate_source_size(size):
if source_volume and size < source_volume['size']:
msg = _("Volume size %(size)sGB cannot be smaller than "
"original volume size %(source_size)sGB. "
"They must be >= original volume size.")
msg = msg % {'size': size,
'source_size': source_volume['size']}
raise exception.InvalidInput(reason=msg)
def validate_int(size):
if not isinstance(size, int) or size <= 0:
msg = _("Volume size %(size)s must be an integer and"
" greater than 0") % {'size': size}
raise exception.InvalidInput(reason=msg)
# Figure out which validation functions we should be applying
# on the size value that we extract.
validator_functors = [validate_int]
if source_volume:
validator_functors.append(validate_source_size)
elif snapshot:
validator_functors.append(validate_snap_size)
# If the size is not provided then try to provide it.
if not size and source_volume:
size = source_volume['size']
elif not size and snapshot:
size = snapshot['volume_size']
size = utils.as_int(size)
LOG.debug("Validating volume %(size)s using %(functors)s" %
{'size': size,
'functors': ", ".join([common.make_pretty_name(func)
for func in validator_functors])})
for func in validator_functors:
func(size)
return size
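    # Size-resolution sketch (numbers are assumptions): with size=None and a
    # 10 GB snapshot, the snapshot size wins (size -> 10); an explicit size=5
    # against the same snapshot fails validate_snap_size with InvalidInput.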
def _check_image_metadata(self, context, image_id, size):
"""Checks image existence and validates that the image metadata."""
# Check image existence
if image_id is None:
return
# NOTE(harlowja): this should raise an error if the image does not
# exist, this is expected as it signals that the image_id is missing.
image_meta = self.image_service.show(context, image_id)
# Check image size is not larger than volume size.
image_size = utils.as_int(image_meta['size'], quiet=False)
        image_size_in_gb = (image_size + GB - 1) / GB  # round up to whole GB
if image_size_in_gb > size:
msg = _('Size of specified image %(image_size)sGB'
' is larger than volume size %(volume_size)sGB.')
msg = msg % {'image_size': image_size_in_gb, 'volume_size': size}
raise exception.InvalidInput(reason=msg)
# Check image min_disk requirement is met for the particular volume
min_disk = image_meta.get('min_disk', 0)
if size < min_disk:
msg = _('Volume size %(volume_size)sGB cannot be smaller'
' than the image minDisk size %(min_disk)sGB.')
msg = msg % {'volume_size': size, 'min_disk': min_disk}
raise exception.InvalidInput(reason=msg)
@staticmethod
def _check_metadata_properties(metadata=None):
"""Checks that the volume metadata properties are valid."""
if not metadata:
metadata = {}
for (k, v) in metadata.iteritems():
if len(k) == 0:
msg = _("Metadata property key blank")
LOG.warn(msg)
raise exception.InvalidVolumeMetadata(reason=msg)
if len(k) > 255:
msg = _("Metadata property key %s greater than 255 "
"characters") % k
LOG.warn(msg)
raise exception.InvalidVolumeMetadataSize(reason=msg)
if len(v) > 255:
msg = _("Metadata property key %s value greater than"
" 255 characters") % k
LOG.warn(msg)
raise exception.InvalidVolumeMetadataSize(reason=msg)
def _extract_availability_zone(self, availability_zone, snapshot,
source_volume):
"""Extracts and returns a validated availability zone.
        This function extracts the availability zone (if not provided) from
        the snapshot or source_volume, performs a set of validation checks on
        the provided or extracted availability zone, and then returns the
        validated availability zone.
"""
# Try to extract the availability zone from the corresponding snapshot
# or source volume if either is valid so that we can be in the same
# availability zone as the source.
if availability_zone is None:
if snapshot:
try:
availability_zone = snapshot['volume']['availability_zone']
except (TypeError, KeyError):
pass
if source_volume and availability_zone is None:
try:
availability_zone = source_volume['availability_zone']
except (TypeError, KeyError):
pass
if availability_zone is None:
if CONF.default_availability_zone:
availability_zone = CONF.default_availability_zone
else:
# For backwards compatibility use the storage_availability_zone
availability_zone = CONF.storage_availability_zone
if not self.az_check_functor(availability_zone):
msg = _("Availability zone '%s' is invalid") % (availability_zone)
LOG.warn(msg)
raise exception.InvalidInput(reason=msg)
# If the configuration only allows cloning to the same availability
# zone then we need to enforce that.
if CONF.cloned_volume_same_az:
snap_az = None
try:
snap_az = snapshot['volume']['availability_zone']
except (TypeError, KeyError):
pass
if snap_az and snap_az != availability_zone:
msg = _("Volume must be in the same "
"availability zone as the snapshot")
raise exception.InvalidInput(reason=msg)
source_vol_az = None
try:
source_vol_az = source_volume['availability_zone']
except (TypeError, KeyError):
pass
if source_vol_az and source_vol_az != availability_zone:
msg = _("Volume must be in the same "
"availability zone as the source volume")
raise exception.InvalidInput(reason=msg)
return availability_zone
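    # Resolution-order sketch: explicit argument > snapshot's volume AZ >
    # source volume AZ > CONF.default_availability_zone >
    # CONF.storage_availability_zone, with az_check_functor as the final gate
    # (plus the cloned_volume_same_az consistency checks).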
def _get_encryption_key_id(self, key_manager, context, volume_type_id,
snapshot, source_volume, backup_source_volume):
encryption_key_id = None
if volume_types.is_encrypted(context, volume_type_id):
if snapshot is not None: # creating from snapshot
encryption_key_id = snapshot['encryption_key_id']
elif source_volume is not None: # cloning volume
encryption_key_id = source_volume['encryption_key_id']
elif backup_source_volume is not None: # creating from backup
encryption_key_id = backup_source_volume['encryption_key_id']
# NOTE(joel-coffman): References to the encryption key should *not*
# be copied because the key is deleted when the volume is deleted.
# Clone the existing key and associate a separate -- but
# identical -- key with each volume.
if encryption_key_id is not None:
encryption_key_id = key_manager.copy_key(context,
encryption_key_id)
else:
encryption_key_id = key_manager.create_key(context)
return encryption_key_id
def _get_volume_type_id(self, volume_type, source_volume, snapshot,
backup_source_volume):
volume_type_id = None
if not volume_type and source_volume:
volume_type_id = source_volume['volume_type_id']
elif snapshot is not None:
if volume_type:
current_volume_type_id = volume_type.get('id')
if (current_volume_type_id !=
snapshot['volume_type_id']):
msg = _("Volume type will be changed to "
"be the same as the source volume.")
LOG.warn(msg)
volume_type_id = snapshot['volume_type_id']
elif backup_source_volume is not None:
volume_type_id = backup_source_volume['volume_type_id']
else:
volume_type_id = volume_type.get('id')
return volume_type_id
def execute(self, context, size, snapshot, image_id, source_volume,
availability_zone, volume_type, metadata,
key_manager, backup_source_volume):
utils.check_exclusive_options(snapshot=snapshot,
imageRef=image_id,
source_volume=source_volume)
policy.enforce_action(context, ACTION)
# TODO(harlowja): what guarantee is there that the snapshot or source
# volume will remain available after we do this initial verification??
snapshot_id = self._extract_snapshot(snapshot)
source_volid = self._extract_source_volume(source_volume)
size = self._extract_size(size, source_volume, snapshot)
self._check_image_metadata(context, image_id, size)
availability_zone = self._extract_availability_zone(availability_zone,
snapshot,
source_volume)
# TODO(joel-coffman): This special handling of snapshots to ensure that
# their volume type matches the source volume is too convoluted. We
# should copy encryption metadata from the encrypted volume type to the
# volume upon creation and propagate that information to each snapshot.
        # This strategy avoids any dependency upon the encrypted volume type.
if not volume_type and not source_volume and not snapshot:
volume_type = volume_types.get_default_volume_type()
volume_type_id = self._get_volume_type_id(volume_type,
source_volume, snapshot,
backup_source_volume)
encryption_key_id = self._get_encryption_key_id(key_manager,
context,
volume_type_id,
snapshot,
source_volume,
backup_source_volume)
specs = {}
if volume_type_id:
qos_specs = volume_types.get_volume_type_qos_specs(volume_type_id)
specs = qos_specs['qos_specs']
if not specs:
# to make sure we don't pass empty dict
specs = None
self._check_metadata_properties(metadata)
return {
'size': size,
'snapshot_id': snapshot_id,
'source_volid': source_volid,
'availability_zone': availability_zone,
'volume_type': volume_type,
'volume_type_id': volume_type_id,
'encryption_key_id': encryption_key_id,
'qos_specs': specs,
}
class EntryCreateTask(flow_utils.CinderTask):
"""Creates an entry for the given volume creation in the database.
Reversion strategy: remove the volume_id created from the database.
"""
default_provides = set(['volume_properties', 'volume_id', 'volume'])
def __init__(self, db):
requires = ['availability_zone', 'description', 'metadata',
'name', 'reservations', 'size', 'snapshot_id',
'source_volid', 'volume_type_id', 'encryption_key_id']
super(EntryCreateTask, self).__init__(addons=[ACTION],
requires=requires)
self.db = db
self.provides.update()
def execute(self, context, **kwargs):
"""Creates a database entry for the given inputs and returns details.
        Accesses the database and creates a new entry for the to-be-created
        volume using the given volume properties, which are extracted from the
        input kwargs (and the associated requirements this task needs). These
        requirements should be previously satisfied and validated by a
        precursor task.
"""
volume_properties = {
'size': kwargs.pop('size'),
'user_id': context.user_id,
'project_id': context.project_id,
'status': 'creating',
'attach_status': 'detached',
'encryption_key_id': kwargs.pop('encryption_key_id'),
# Rename these to the internal name.
'display_description': kwargs.pop('description'),
'display_name': kwargs.pop('name'),
}
# Merge in the other required arguments which should provide the rest
# of the volume property fields (if applicable).
volume_properties.update(kwargs)
volume = self.db.volume_create(context, volume_properties)
return {
'volume_id': volume['id'],
'volume_properties': volume_properties,
# NOTE(harlowja): it appears like further usage of this volume
# result actually depend on it being a sqlalchemy object and not
# just a plain dictionary so that's why we are storing this here.
#
# In the future where this task results can be serialized and
# restored automatically for continued running we will need to
# resolve the serialization & recreation of this object since raw
# sqlalchemy objects can't be serialized.
'volume': volume,
}
def revert(self, context, result, **kwargs):
# We never produced a result and therefore can't destroy anything.
if isinstance(result, misc.Failure):
return
if context.quota_committed:
# Committed quota doesn't rollback as the volume has already been
# created at this point, and the quota has already been absorbed.
return
vol_id = result['volume_id']
try:
self.db.volume_destroy(context.elevated(), vol_id)
except exception.CinderException:
# We are already reverting, therefore we should silence this
# exception since a second exception being active will be bad.
#
# NOTE(harlowja): Being unable to destroy a volume is pretty
# bad though!!
LOG.exception(_("Failed destroying volume entry %s"), vol_id)
class QuotaReserveTask(flow_utils.CinderTask):
"""Reserves a single volume with the given size & the given volume type.
Reversion strategy: rollback the quota reservation.
    Warning: if the process that is running this reserve-and-commit sequence
    fails (or is killed) before the quota is rolled back or committed, the
    quota will never be rolled back. This makes software upgrades hard
    (in-flight operations will need to be stopped or allowed to complete
    before the upgrade can occur). *In the future*, when taskflow has
    persistence built in, this should be easier to correct via an automated
    or manual process.
"""
default_provides = set(['reservations'])
def __init__(self):
super(QuotaReserveTask, self).__init__(addons=[ACTION])
def execute(self, context, size, volume_type_id):
try:
reserve_opts = {'volumes': 1, 'gigabytes': size}
QUOTAS.add_volume_type_opts(context, reserve_opts, volume_type_id)
reservations = QUOTAS.reserve(context, **reserve_opts)
return {
'reservations': reservations,
}
except exception.OverQuota as e:
overs = e.kwargs['overs']
quotas = e.kwargs['quotas']
usages = e.kwargs['usages']
def _consumed(name):
return (usages[name]['reserved'] + usages[name]['in_use'])
def _is_over(name):
for over in overs:
if name in over:
return True
return False
if _is_over('gigabytes'):
msg = _("Quota exceeded for %(s_pid)s, tried to create "
"%(s_size)sG volume (%(d_consumed)dG "
"of %(d_quota)dG already consumed)")
LOG.warn(msg % {'s_pid': context.project_id,
's_size': size,
'd_consumed': _consumed('gigabytes'),
'd_quota': quotas['gigabytes']})
raise exception.VolumeSizeExceedsAvailableQuota(
requested=size,
consumed=_consumed('gigabytes'),
quota=quotas['gigabytes'])
elif _is_over('volumes'):
msg = _("Quota exceeded for %(s_pid)s, tried to create "
"volume (%(d_consumed)d volumes "
"already consumed)")
LOG.warn(msg % {'s_pid': context.project_id,
'd_consumed': _consumed('volumes')})
raise exception.VolumeLimitExceeded(allowed=quotas['volumes'])
else:
# If nothing was reraised, ensure we reraise the initial error
raise
def revert(self, context, result, **kwargs):
# We never produced a result and therefore can't destroy anything.
if isinstance(result, misc.Failure):
return
if context.quota_committed:
# The reservations have already been committed and can not be
# rolled back at this point.
return
# We actually produced an output that we can revert so lets attempt
# to use said output to rollback the reservation.
reservations = result['reservations']
try:
QUOTAS.rollback(context, reservations)
except exception.CinderException:
# We are already reverting, therefore we should silence this
# exception since a second exception being active will be bad.
LOG.exception(_("Failed rolling back quota for"
" %s reservations"), reservations)
class QuotaCommitTask(flow_utils.CinderTask):
"""Commits the reservation.
    Reversion strategy: N/A (the rollback will be handled by the task that
    did the initial reservation; see QuotaReserveTask).
    Warning: if the process that is running this reserve-and-commit sequence
    fails (or is killed) before the quota is rolled back or committed, the
    quota will never be rolled back. This makes software upgrades hard
    (in-flight operations will need to be stopped or allowed to complete
    before the upgrade can occur). *In the future*, when taskflow has
    persistence built in, this should be easier to correct via an automated
    or manual process.
"""
def __init__(self):
super(QuotaCommitTask, self).__init__(addons=[ACTION])
def execute(self, context, reservations, volume_properties):
QUOTAS.commit(context, reservations)
context.quota_committed = True
return {'volume_properties': volume_properties}
def revert(self, context, result, **kwargs):
# We never produced a result and therefore can't destroy anything.
if isinstance(result, misc.Failure):
return
volume = result['volume_properties']
try:
reserve_opts = {'volumes': -1, 'gigabytes': -volume['size']}
QUOTAS.add_volume_type_opts(context,
reserve_opts,
volume['volume_type_id'])
reservations = QUOTAS.reserve(context,
project_id=context.project_id,
**reserve_opts)
if reservations:
QUOTAS.commit(context, reservations,
project_id=context.project_id)
except Exception:
LOG.exception(_("Failed to update quota for deleting volume: %s"),
volume['id'])
class VolumeCastTask(flow_utils.CinderTask):
"""Performs a volume create cast to the scheduler or to the volume manager.
    This will signal a transition of the API workflow to another child and/or
    related workflow on another component.
Reversion strategy: N/A
"""
def __init__(self, scheduler_rpcapi, volume_rpcapi, db):
requires = ['image_id', 'scheduler_hints', 'snapshot_id',
'source_volid', 'volume_id', 'volume_type',
'volume_properties']
super(VolumeCastTask, self).__init__(addons=[ACTION],
requires=requires)
self.volume_rpcapi = volume_rpcapi
self.scheduler_rpcapi = scheduler_rpcapi
self.db = db
def _cast_create_volume(self, context, request_spec, filter_properties):
source_volid = request_spec['source_volid']
volume_id = request_spec['volume_id']
snapshot_id = request_spec['snapshot_id']
image_id = request_spec['image_id']
host = None
if snapshot_id and CONF.snapshot_same_host:
# NOTE(Rongze Zhu): A simple solution for bug 1008866.
#
            # If snapshot_id is set, make the create-volume call directly to
            # the volume host where the snapshot resides instead of passing it
            # through the scheduler, so the snapshot can be copied to the new
            # volume.
snapshot_ref = self.db.snapshot_get(context, snapshot_id)
source_volume_ref = self.db.volume_get(context,
snapshot_ref['volume_id'])
host = source_volume_ref['host']
elif source_volid:
source_volume_ref = self.db.volume_get(context, source_volid)
host = source_volume_ref['host']
if not host:
# Cast to the scheduler and let it handle whatever is needed
# to select the target host for this volume.
self.scheduler_rpcapi.create_volume(
context,
CONF.volume_topic,
volume_id,
snapshot_id=snapshot_id,
image_id=image_id,
request_spec=request_spec,
filter_properties=filter_properties)
else:
# Bypass the scheduler and send the request directly to the volume
# manager.
now = timeutils.utcnow()
values = {'host': host, 'scheduled_at': now}
volume_ref = self.db.volume_update(context, volume_id, values)
self.volume_rpcapi.create_volume(
context,
volume_ref,
volume_ref['host'],
request_spec,
filter_properties,
allow_reschedule=False,
snapshot_id=snapshot_id,
image_id=image_id,
source_volid=source_volid)
def execute(self, context, **kwargs):
scheduler_hints = kwargs.pop('scheduler_hints', None)
request_spec = kwargs.copy()
filter_properties = {}
if scheduler_hints:
filter_properties['scheduler_hints'] = scheduler_hints
self._cast_create_volume(context, request_spec, filter_properties)
def revert(self, context, result, flow_failures, **kwargs):
if isinstance(result, misc.Failure):
return
# Restore the source volume status and set the volume to error status.
volume_id = kwargs['volume_id']
common.restore_source_status(context, self.db, kwargs)
common.error_out_volume(context, self.db, volume_id)
LOG.error(_("Volume %s: create failed"), volume_id)
exc_info = False
if all(flow_failures[-1].exc_info):
exc_info = flow_failures[-1].exc_info
LOG.error(_('Unexpected build error:'), exc_info=exc_info)
def get_flow(scheduler_rpcapi, volume_rpcapi, db,
image_service,
az_check_functor,
create_what):
"""Constructs and returns the api entrypoint flow.
This flow will do the following:
1. Inject keys & values for dependent tasks.
2. Extracts and validates the input keys & values.
3. Reserves the quota (reverts quota on any failures).
4. Creates the database entry.
5. Commits the quota.
6. Casts to volume manager or scheduler for further processing.
"""
flow_name = ACTION.replace(":", "_") + "_api"
api_flow = linear_flow.Flow(flow_name)
api_flow.add(ExtractVolumeRequestTask(
image_service,
az_check_functor,
rebind={'size': 'raw_size',
'availability_zone': 'raw_availability_zone',
'volume_type': 'raw_volume_type'}))
api_flow.add(QuotaReserveTask(),
EntryCreateTask(db),
QuotaCommitTask())
# This will cast it out to either the scheduler or volume manager via
# the rpc apis provided.
api_flow.add(VolumeCastTask(scheduler_rpcapi, volume_rpcapi, db))
# Now load (but do not run) the flow using the provided initial data.
return taskflow.engines.load(api_flow, store=create_what)
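# Illustrative only (the rpcapi/db/image_service objects and the keys stored
# in ``create_what`` are assumptions matching the task requirements above):
#   engine = get_flow(scheduler_rpcapi, volume_rpcapi, db, image_service,
#                     lambda az: True, create_what)
#   engine.run()  # executes the tasks in order; revert() runs on failure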
|
{
"content_hash": "c21fb8aec1269d5280e8efeae96981d6",
"timestamp": "",
"source": "github",
"line_count": 706,
"max_line_length": 79,
"avg_line_length": 43.89660056657224,
"alnum_prop": 0.5813945984318027,
"repo_name": "Thingee/cinder",
"id": "8ef6bb5067d571fba39f8647bd7cb1f8db394f6d",
"size": "31565",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cinder/volume/flows/api/create_volume.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "6121923"
},
{
"name": "SQL",
"bytes": "9824"
},
{
"name": "Shell",
"bytes": "8998"
}
],
"symlink_target": ""
}
|
"""
The client module is used to create a client connection to the publisher
The data structure needs to be:
{'enc': 'clear',
'load': {'fun': '<mod.callable>',
          'arg': ('arg1', 'arg2', ...),
'tgt': '<glob or id>',
'key': '<read in the key file>'}
"""
import logging
# The components here are simple, and they need to be and stay simple; we
# want a client to have 3 external concerns, and maybe a fourth configurable
# option.
# The concerns are:
# 1. Who executes the command?
# 2. What is the function being run?
# 3. What arguments need to be passed to the function?
# 4. How long do we wait for all of the replies?
import os
import random
import sys
import time
from datetime import datetime
import salt.cache
import salt.channel.client
import salt.config
import salt.defaults.exitcodes
import salt.ext.tornado.gen
import salt.loader
import salt.payload
import salt.syspaths as syspaths
import salt.utils.args
import salt.utils.event
import salt.utils.files
import salt.utils.jid
import salt.utils.minions
import salt.utils.network
import salt.utils.platform
import salt.utils.stringutils
import salt.utils.user
import salt.utils.verify
from salt.exceptions import (
AuthenticationError,
AuthorizationError,
EauthAuthenticationError,
PublishError,
SaltClientError,
SaltInvocationError,
SaltReqTimeoutError,
)
HAS_RANGE = False
try:
import seco.range
HAS_RANGE = True
except ImportError:
pass
log = logging.getLogger(__name__)
def get_local_client(
c_path=os.path.join(syspaths.CONFIG_DIR, "master"),
mopts=None,
skip_perm_errors=False,
io_loop=None,
auto_reconnect=False,
listen=False,
):
"""
.. versionadded:: 2014.7.0
Read in the config and return the correct LocalClient object based on
the configured transport
    :param str c_path: Path of the config file to use for opts.
                       Defaults to the master config file path.
    :param dict mopts: When provided the local client will use this dictionary
                       of options instead of loading a config file from the
                       value of c_path.
                       The default value is None.
    :param bool skip_perm_errors: Ignore permissions errors while loading keys.
                                  The default value is False.
    :param IOLoop io_loop: io_loop used for events.
                           Pass in an io_loop if you want asynchronous
                           operation for obtaining events. Eg use of
                           set_event_handler() API. Otherwise, operation
                           will be synchronous.
    :param bool auto_reconnect: When True the event subscriber will reconnect
                                automatically if a disconnect error is raised.
    .. versionadded:: 3004
    :param bool listen: Listen for events indefinitely. When this option is
                        set the LocalClient object will listen for events
                        until its destroy method is called.
                        The default value is False.
"""
if mopts:
opts = mopts
else:
# Late import to prevent circular import
import salt.config
opts = salt.config.client_config(c_path)
# TODO: AIO core is separate from transport
return LocalClient(
mopts=opts,
skip_perm_errors=skip_perm_errors,
io_loop=io_loop,
auto_reconnect=auto_reconnect,
listen=listen,
)
class LocalClient:
"""
The interface used by the :command:`salt` CLI tool on the Salt Master
``LocalClient`` is used to send a command to Salt minions to execute
:ref:`execution modules <all-salt.modules>` and return the results to the
Salt Master.
Importing and using ``LocalClient`` must be done on the same machine as the
Salt Master and it must be done using the same user that the Salt Master is
running as. (Unless :conf_master:`external_auth` is configured and
authentication credentials are included in the execution).
.. note::
The LocalClient uses a Tornado IOLoop, this can create issues when
using the LocalClient inside an existing IOLoop. If creating the
LocalClient in partnership with another IOLoop either create the
IOLoop before creating the LocalClient, or when creating the IOLoop
use ioloop.current() which will return the ioloop created by
LocalClient.
.. code-block:: python
import salt.client
local = salt.client.LocalClient()
local.cmd('*', 'test.fib', [10])
"""
def __init__(
self,
c_path=os.path.join(syspaths.CONFIG_DIR, "master"),
mopts=None,
skip_perm_errors=False,
io_loop=None,
keep_loop=False,
auto_reconnect=False,
listen=False,
):
"""
        :param str c_path: Path of the config file to use for opts.
                           Defaults to the master config file path.
        :param dict mopts: When provided the local client will use this
                           dictionary of options instead of loading a config
                           file from the value of c_path.
                           The default value is None.
        :param bool skip_perm_errors: Ignore permissions errors while loading
                                      keys.
                                      The default value is False.
        :param IOLoop io_loop: io_loop used for events.
                               Pass in an io_loop if you want asynchronous
                               operation for obtaining events. Eg use of
                               set_event_handler() API. Otherwise, operation
                               will be synchronous.
        :param bool keep_loop: Do not destroy the event loop when closing the
                               event subscriber.
        :param bool auto_reconnect: When True the event subscriber will
                                    reconnect automatically if a disconnect
                                    error is raised.
        .. versionadded:: 3004
        :param bool listen: Listen for events indefinitely. When this option
                            is set the LocalClient object will listen for
                            events until its destroy method is called.
                            The default value is False.
"""
if mopts:
self.opts = mopts
else:
if os.path.isdir(c_path):
log.warning(
"%s expects a file path not a directory path(%s) to "
"its 'c_path' keyword argument",
self.__class__.__name__,
c_path,
)
self.opts = salt.config.client_config(c_path)
self.salt_user = salt.utils.user.get_specific_user()
self.skip_perm_errors = skip_perm_errors
self.key = self.__read_master_key()
self.auto_reconnect = auto_reconnect
self.listen = listen
self.event = salt.utils.event.get_event(
"master",
self.opts["sock_dir"],
opts=self.opts,
listen=self.listen,
io_loop=io_loop,
keep_loop=keep_loop,
)
self.utils = salt.loader.utils(self.opts)
self.functions = salt.loader.minion_mods(self.opts, utils=self.utils)
self.returners = salt.loader.returners(self.opts, self.functions)
def __read_master_key(self):
"""
Read in the rotating master authentication key
"""
key_user = self.salt_user
if key_user == "root":
if self.opts.get("user", "root") != "root":
key_user = self.opts.get("user", "root")
if key_user.startswith("sudo_"):
key_user = self.opts.get("user", "root")
if salt.utils.platform.is_windows():
# The username may contain '\' if it is in Windows
# 'DOMAIN\username' format. Fix this for the keyfile path.
key_user = key_user.replace("\\", "_")
keyfile = os.path.join(self.opts["cachedir"], ".{}_key".format(key_user))
try:
# Make sure all key parent directories are accessible
salt.utils.verify.check_path_traversal(
self.opts["cachedir"], key_user, self.skip_perm_errors
)
with salt.utils.files.fopen(keyfile, "r") as key:
return salt.utils.stringutils.to_unicode(key.read())
except (OSError, SaltClientError):
# Fall back to eauth
return ""
def _convert_range_to_list(self, tgt):
"""
convert a seco.range range into a list target
"""
range_ = seco.range.Range(self.opts["range_server"])
try:
return range_.expand(tgt)
except seco.range.RangeException as err:
print("Range server exception: {}".format(err))
return []
def _get_timeout(self, timeout):
"""
Return the timeout to use
"""
if timeout is None:
return self.opts["timeout"]
if isinstance(timeout, int):
return timeout
if isinstance(timeout, str):
try:
return int(timeout)
except ValueError:
return self.opts["timeout"]
# Looks like the timeout is invalid, use config
return self.opts["timeout"]
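    # Coercion sketch (values assumed): _get_timeout(None) and
    # _get_timeout("abc") both fall back to opts["timeout"], while
    # _get_timeout("20") returns the integer 20.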
def gather_job_info(self, jid, tgt, tgt_type, listen=True, **kwargs):
"""
Return the information about a given job
"""
log.debug("Checking whether jid %s is still running", jid)
timeout = int(kwargs.get("gather_job_timeout", self.opts["gather_job_timeout"]))
pub_data = self.run_job(
tgt,
"saltutil.find_job",
arg=[jid],
tgt_type=tgt_type,
timeout=timeout,
listen=listen,
**kwargs
)
if "jid" in pub_data:
self.event.subscribe(pub_data["jid"])
return pub_data
def _check_pub_data(self, pub_data, listen=True):
"""
Common checks on the pub_data data structure returned from running pub
"""
if pub_data == "":
# Failed to authenticate, this could be a bunch of things
raise EauthAuthenticationError(
"Failed to authenticate! This is most likely because this "
"user is not permitted to execute commands, but there is a "
"small possibility that a disk error occurred (check "
"disk/inode usage)."
)
# Failed to connect to the master and send the pub
if "error" in pub_data:
print(pub_data["error"])
log.debug("_check_pub_data() error: %s", pub_data["error"])
return {}
elif "jid" not in pub_data:
return {}
if pub_data["jid"] == "0":
print("Failed to connect to the Master, is the Salt Master running?")
return {}
# If we order masters (via a syndic), don't short circuit if no minions
# are found
if not self.opts.get("order_masters"):
# Check for no minions
if not pub_data["minions"]:
print(
"No minions matched the target. "
"No command was sent, no jid was assigned."
)
return {}
        # Don't install event subscription listeners when the request is
        # asynchronous and doesn't care about returns; this is important as
        # it would create event leaks otherwise.
if not listen:
return pub_data
if self.opts.get("order_masters"):
self.event.subscribe("syndic/.*/{}".format(pub_data["jid"]), "regex")
self.event.subscribe("salt/job/{}".format(pub_data["jid"]))
return pub_data
def run_job(
self,
tgt,
fun,
arg=(),
tgt_type="glob",
ret="",
timeout=None,
jid="",
kwarg=None,
listen=False,
**kwargs
):
"""
Asynchronously send a command to connected minions
Prep the job directory and publish a command to any targeted minions.
:return: A dictionary of (validated) ``pub_data`` or an empty
dictionary on failure. The ``pub_data`` contains the job ID and a
list of all minions that are expected to return data.
.. code-block:: python
>>> local.run_job('*', 'test.sleep', [300])
{'jid': '20131219215650131543', 'minions': ['jerry']}
"""
arg = salt.utils.args.condition_input(arg, kwarg)
try:
pub_data = self.pub(
tgt,
fun,
arg,
tgt_type,
ret,
jid=jid,
timeout=self._get_timeout(timeout),
listen=listen,
**kwargs
)
except SaltClientError:
# Re-raise error with specific message
raise SaltClientError(
"The salt master could not be contacted. Is master running?"
)
except AuthenticationError as err:
raise
except AuthorizationError as err:
raise
except Exception as general_exception: # pylint: disable=broad-except
# Convert to generic client error and pass along message
raise SaltClientError(general_exception)
return self._check_pub_data(pub_data, listen=listen)
def gather_minions(self, tgt, expr_form):
_res = salt.utils.minions.CkMinions(self.opts).check_minions(
tgt, tgt_type=expr_form
)
return _res["minions"]
@salt.ext.tornado.gen.coroutine
def run_job_async(
self,
tgt,
fun,
arg=(),
tgt_type="glob",
ret="",
timeout=None,
jid="",
kwarg=None,
listen=True,
io_loop=None,
**kwargs
):
"""
Asynchronously send a command to connected minions
Prep the job directory and publish a command to any targeted minions.
:return: A dictionary of (validated) ``pub_data`` or an empty
dictionary on failure. The ``pub_data`` contains the job ID and a
list of all minions that are expected to return data.
.. code-block:: python
>>> local.run_job_async('*', 'test.sleep', [300])
{'jid': '20131219215650131543', 'minions': ['jerry']}
"""
arg = salt.utils.args.condition_input(arg, kwarg)
try:
pub_data = yield self.pub_async(
tgt,
fun,
arg,
tgt_type,
ret,
jid=jid,
timeout=self._get_timeout(timeout),
io_loop=io_loop,
listen=listen,
**kwargs
)
except SaltClientError:
# Re-raise error with specific message
raise SaltClientError(
"The salt master could not be contacted. Is master running?"
)
except AuthenticationError as err:
raise AuthenticationError(err)
except AuthorizationError as err:
raise AuthorizationError(err)
except Exception as general_exception: # pylint: disable=broad-except
# Convert to generic client error and pass along message
raise SaltClientError(general_exception)
raise salt.ext.tornado.gen.Return(self._check_pub_data(pub_data, listen=listen))
def cmd_async(
self, tgt, fun, arg=(), tgt_type="glob", ret="", jid="", kwarg=None, **kwargs
):
"""
Asynchronously send a command to connected minions
The function signature is the same as :py:meth:`cmd` with the
following exceptions.
:returns: A job ID or 0 on failure.
.. code-block:: python
>>> local.cmd_async('*', 'test.sleep', [300])
'20131219215921857715'
"""
pub_data = self.run_job(
tgt, fun, arg, tgt_type, ret, jid=jid, kwarg=kwarg, listen=False, **kwargs
)
try:
return pub_data["jid"]
except KeyError:
return 0
def cmd_subset(
self,
tgt,
fun,
arg=(),
tgt_type="glob",
ret="",
kwarg=None,
subset=3,
cli=False,
progress=False,
full_return=False,
**kwargs
):
"""
Execute a command on a random subset of the targeted systems
The function signature is the same as :py:meth:`cmd` with the
following exceptions.
:param subset: The number of systems to execute on
:param cli: When this is set to True, a generator is returned,
otherwise a dictionary of the minion returns is returned
.. code-block:: python
>>> SLC.cmd_subset('*', 'test.ping', subset=1)
{'jerry': True}
"""
# Support legacy parameter name:
subset = kwargs.pop("sub", subset)
minion_ret = self.cmd(tgt, "sys.list_functions", tgt_type=tgt_type, **kwargs)
minions = list(minion_ret)
random.shuffle(minions)
f_tgt = []
for minion in minions:
if fun in minion_ret[minion]:
f_tgt.append(minion)
if len(f_tgt) >= subset:
break
func = self.cmd
if cli:
func = self.cmd_cli
return func(
f_tgt,
fun,
arg,
tgt_type="list",
ret=ret,
kwarg=kwarg,
progress=progress,
full_return=full_return,
**kwargs
)
def cmd_batch(
self,
tgt,
fun,
arg=(),
tgt_type="glob",
ret="",
kwarg=None,
batch="10%",
**kwargs
):
"""
Iteratively execute a command on subsets of minions at a time
The function signature is the same as :py:meth:`cmd` with the
following exceptions.
:param batch: The batch identifier of systems to execute on
:returns: A generator of minion returns
.. code-block:: python
>>> returns = local.cmd_batch('*', 'state.highstate', batch='10%')
>>> for ret in returns:
... print(ret)
{'jerry': {...}}
{'dave': {...}}
{'stewart': {...}}
"""
        # We need to re-import salt.utils.args here even though it has
        # already been imported: when cmd_batch is called via the NetAPI the
        # module is unavailable. salt.cli.batch is a late import - it is not
        # used anywhere else in this file.
import salt.cli.batch
import salt.utils.args
arg = salt.utils.args.condition_input(arg, kwarg)
opts = {
"tgt": tgt,
"fun": fun,
"arg": arg,
"tgt_type": tgt_type,
"ret": ret,
"batch": batch,
"failhard": kwargs.get("failhard", self.opts.get("failhard", False)),
"raw": kwargs.get("raw", False),
}
if "timeout" in kwargs:
opts["timeout"] = kwargs["timeout"]
if "gather_job_timeout" in kwargs:
opts["gather_job_timeout"] = kwargs["gather_job_timeout"]
if "batch_wait" in kwargs:
opts["batch_wait"] = int(kwargs["batch_wait"])
eauth = {}
if "eauth" in kwargs:
eauth["eauth"] = kwargs.pop("eauth")
if "username" in kwargs:
eauth["username"] = kwargs.pop("username")
if "password" in kwargs:
eauth["password"] = kwargs.pop("password")
if "token" in kwargs:
eauth["token"] = kwargs.pop("token")
for key, val in self.opts.items():
if key not in opts:
opts[key] = val
batch = salt.cli.batch.Batch(opts, eauth=eauth, quiet=True)
for ret, _ in batch.run():
yield ret
def cmd(
self,
tgt,
fun,
arg=(),
timeout=None,
tgt_type="glob",
ret="",
jid="",
full_return=False,
kwarg=None,
**kwargs
):
"""
Synchronously execute a command on targeted minions
The cmd method will execute and wait for the timeout period for all
minions to reply, then it will return all minion data at once.
.. code-block:: python
>>> import salt.client
>>> local = salt.client.LocalClient()
>>> local.cmd('*', 'cmd.run', ['whoami'])
{'jerry': 'root'}
With extra keyword arguments for the command function to be run:
.. code-block:: python
local.cmd('*', 'test.arg', ['arg1', 'arg2'], kwarg={'foo': 'bar'})
Compound commands can be used for multiple executions in a single
publish. Function names and function arguments are provided in separate
lists but the index values must correlate and an empty list must be
used if no arguments are required.
.. code-block:: python
>>> local.cmd('*', [
'grains.items',
'sys.doc',
'cmd.run',
],
[
[],
[],
['uptime'],
])
:param tgt: Which minions to target for the execution. Default is shell
glob. Modified by the ``tgt_type`` option.
:type tgt: string or list
:param fun: The module and function to call on the specified minions of
the form ``module.function``. For example ``test.ping`` or
``grains.items``.
Compound commands
Multiple functions may be called in a single publish by
passing a list of commands. This can dramatically lower
overhead and speed up the application communicating with Salt.
This requires that the ``arg`` param is a list of lists. The
``fun`` list and the ``arg`` list must correlate by index
meaning a function that does not take arguments must still have
a corresponding empty list at the expected index.
:type fun: string or list of strings
:param arg: A list of arguments to pass to the remote function. If the
function takes no arguments ``arg`` may be omitted except when
executing a compound command.
:type arg: list or list-of-lists
:param timeout: Seconds to wait after the last minion returns but
before all minions return.
:param tgt_type: The type of ``tgt``. Allowed values:
* ``glob`` - Bash glob completion - Default
* ``pcre`` - Perl style regular expression
* ``list`` - Python list of hosts
* ``grain`` - Match based on a grain comparison
* ``grain_pcre`` - Grain comparison with a regex
* ``pillar`` - Pillar data comparison
* ``pillar_pcre`` - Pillar data comparison with a regex
* ``nodegroup`` - Match on nodegroup
* ``range`` - Use a Range server for matching
* ``compound`` - Pass a compound match string
* ``ipcidr`` - Match based on Subnet (CIDR notation) or IPv4 address.
.. versionchanged:: 2017.7.0
Renamed from ``expr_form`` to ``tgt_type``
:param ret: The returner to use. The value passed can be single
returner, or a comma delimited list of returners to call in order
on the minions
:param kwarg: A dictionary with keyword arguments for the function.
:param full_return: Output the job return only (default) or the full
return including exit code and other job metadata.
:param kwargs: Optional keyword arguments.
Authentication credentials may be passed when using
:conf_master:`external_auth`.
For example: ``local.cmd('*', 'test.ping', username='saltdev',
password='saltdev', eauth='pam')``.
Or: ``local.cmd('*', 'test.ping',
token='5871821ea51754fdcea8153c1c745433')``
:returns: A dictionary with the result of the execution, keyed by
minion ID. A compound command will return a sub-dictionary keyed by
function name.
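        For the compound command above, a sketch of the return shape
        (minion IDs and values are illustrative):
        .. code-block:: python
            {'jerry': {
                'grains.items': {...},
                'sys.doc': {...},
                'cmd.run': '13:55:22 up 1 day, ...',
            }}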
"""
was_listening = self.event.cpub
try:
pub_data = self.run_job(
tgt,
fun,
arg,
tgt_type,
ret,
timeout,
jid,
kwarg=kwarg,
listen=True,
**kwargs
)
if not pub_data:
return pub_data
ret = {}
for fn_ret in self.get_cli_event_returns(
pub_data["jid"],
pub_data["minions"],
self._get_timeout(timeout),
tgt,
tgt_type,
**kwargs
):
if fn_ret:
for mid, data in fn_ret.items():
ret[mid] = data if full_return else data.get("ret", {})
for failed in list(set(pub_data["minions"]) - set(ret)):
ret[failed] = False
return ret
finally:
if not was_listening:
self.event.close_pub()
def cmd_cli(
self,
tgt,
fun,
arg=(),
timeout=None,
tgt_type="glob",
ret="",
verbose=False,
kwarg=None,
progress=False,
**kwargs
):
"""
Used by the :command:`salt` CLI. This method returns minion returns as
they come back and attempts to block until all minions return.
The function signature is the same as :py:meth:`cmd` with the
following exceptions.
:param verbose: Print extra information about the running command
:returns: A generator
"""
was_listening = self.event.cpub
try:
self.pub_data = self.run_job(
tgt,
fun,
arg,
tgt_type,
ret,
timeout,
kwarg=kwarg,
listen=True,
**kwargs
)
if not self.pub_data:
yield self.pub_data
else:
try:
for fn_ret in self.get_cli_event_returns(
self.pub_data["jid"],
self.pub_data["minions"],
self._get_timeout(timeout),
tgt,
tgt_type,
verbose,
progress,
**kwargs
):
if not fn_ret:
continue
yield fn_ret
except KeyboardInterrupt:
raise SystemExit(
"\n"
"This job's jid is: {0}\n"
"Exiting gracefully on Ctrl-c\n"
"The minions may not have all finished running and any "
"remaining minions will return upon completion. To look "
"up the return data for this job later, run the following "
"command:\n\n"
"salt-run jobs.lookup_jid {0}".format(self.pub_data["jid"])
)
finally:
if not was_listening:
self.event.close_pub()
def cmd_iter(
self,
tgt,
fun,
arg=(),
timeout=None,
tgt_type="glob",
ret="",
kwarg=None,
**kwargs
):
"""
Yields the individual minion returns as they come in
The function signature is the same as :py:meth:`cmd` with the
following exceptions.
        Normally :py:meth:`cmd_iter` does not yield results for minions that
        are not connected. If you want it to return results for disconnected
        minions, set ``expect_minions=True`` in ``kwargs``.
:return: A generator yielding the individual minion returns
.. code-block:: python
>>> ret = local.cmd_iter('*', 'test.ping')
>>> for i in ret:
... print(i)
{'jerry': {'ret': True}}
{'dave': {'ret': True}}
{'stewart': {'ret': True}}
"""
was_listening = self.event.cpub
try:
pub_data = self.run_job(
tgt,
fun,
arg,
tgt_type,
ret,
timeout,
kwarg=kwarg,
listen=True,
**kwargs
)
if not pub_data:
yield pub_data
else:
if kwargs.get("yield_pub_data"):
yield pub_data
for fn_ret in self.get_iter_returns(
pub_data["jid"],
pub_data["minions"],
timeout=self._get_timeout(timeout),
tgt=tgt,
tgt_type=tgt_type,
**kwargs
):
if not fn_ret:
continue
yield fn_ret
self._clean_up_subscriptions(pub_data["jid"])
finally:
if not was_listening:
self.event.close_pub()
def cmd_iter_no_block(
self,
tgt,
fun,
arg=(),
timeout=None,
tgt_type="glob",
ret="",
kwarg=None,
show_jid=False,
verbose=False,
**kwargs
):
"""
Yields the individual minion returns as they come in, or None
when no returns are available.
The function signature is the same as :py:meth:`cmd` with the
following exceptions.
:returns: A generator yielding the individual minion returns, or None
when no returns are available. This allows for actions to be
injected in between minion returns.
.. code-block:: python
>>> ret = local.cmd_iter_no_block('*', 'test.ping')
>>> for i in ret:
... print(i)
None
{'jerry': {'ret': True}}
{'dave': {'ret': True}}
None
{'stewart': {'ret': True}}
"""
was_listening = self.event.cpub
try:
pub_data = self.run_job(
tgt,
fun,
arg,
tgt_type,
ret,
timeout,
kwarg=kwarg,
listen=True,
**kwargs
)
if not pub_data:
yield pub_data
else:
for fn_ret in self.get_iter_returns(
pub_data["jid"],
pub_data["minions"],
timeout=timeout,
tgt=tgt,
tgt_type=tgt_type,
block=False,
**kwargs
):
if fn_ret and any([show_jid, verbose]):
for minion in fn_ret:
fn_ret[minion]["jid"] = pub_data["jid"]
yield fn_ret
self._clean_up_subscriptions(pub_data["jid"])
finally:
if not was_listening:
self.event.close_pub()
def cmd_full_return(
self,
tgt,
fun,
arg=(),
timeout=None,
tgt_type="glob",
ret="",
verbose=False,
kwarg=None,
**kwargs
):
"""
        Execute a salt command and return the full event data for each
        minion (``ret``, ``success`` and, when present, ``out``), keyed
        by minion ID
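        A minimal sketch (the minion ID and values are illustrative):
        .. code-block:: python
            >>> local.cmd_full_return('*', 'test.ping')
            {'jerry': {'ret': True, 'success': True}}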
"""
was_listening = self.event.cpub
try:
pub_data = self.run_job(
tgt,
fun,
arg,
tgt_type,
ret,
timeout,
kwarg=kwarg,
listen=True,
**kwargs
)
if not pub_data:
return pub_data
return self.get_cli_static_event_returns(
pub_data["jid"], pub_data["minions"], timeout, tgt, tgt_type, verbose
)
finally:
if not was_listening:
self.event.close_pub()
def get_cli_returns(
self,
jid,
minions,
timeout=None,
tgt="*",
tgt_type="glob",
verbose=False,
show_jid=False,
**kwargs
):
"""
Starts a watcher looking at the return data for a specified JID
:returns: all of the information for the JID
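        A sketch of consuming the watcher (``jid`` and ``minions`` come
        from an earlier publish, e.g. via :py:meth:`run_job`):
        .. code-block:: python
            for chunk in local.get_cli_returns(jid, minions, timeout=5):
                print(chunk)  # e.g. {'jerry': {'ret': True}}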
"""
if verbose:
msg = "Executing job with jid {}".format(jid)
print(msg)
print("-" * len(msg) + "\n")
elif show_jid:
print("jid: {}".format(jid))
if timeout is None:
timeout = self.opts["timeout"]
fret = {}
        # make sure minions is a set (since we do set operations on it)
minions = set(minions)
found = set()
        # start this before the cache lookup, in case new stuff comes in
event_iter = self.get_event_iter_returns(jid, minions, timeout=timeout)
# get the info from the cache
ret = self.get_cache_returns(jid)
if ret != {}:
found.update(set(ret))
yield ret
# if you have all the returns, stop
if len(found.intersection(minions)) >= len(minions):
return
# otherwise, get them from the event system
for event in event_iter:
if event != {}:
found.update(set(event))
yield event
if len(found.intersection(minions)) >= len(minions):
self._clean_up_subscriptions(jid)
return
# TODO: tests!!
def get_returns_no_block(self, tag, match_type=None):
"""
        Raw function that just yields the events for a jid, with no timeout
        logic. Yields either the raw event data or None when no event is
        pending.
        The optional ``match_type`` argument is passed through to the event
        system and controls how the tag is matched (for example ``"regex"``
        when gathering syndic returns).
"""
while True:
raw = self.event.get_event(
wait=0.01,
tag=tag,
match_type=match_type,
full=True,
no_block=True,
auto_reconnect=self.auto_reconnect,
)
yield raw
def returns_for_job(self, jid):
return self.returners["{}.get_load".format(self.opts["master_job_cache"])](jid)
def get_iter_returns(
self,
jid,
minions,
timeout=None,
tgt="*",
tgt_type="glob",
expect_minions=False,
block=True,
**kwargs
):
"""
Watch the event system and return job data as it comes in
:returns: all of the information for the JID
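        A sketch of typical usage (``jid`` and ``minions`` come from a
        prior :py:meth:`run_job` call; empty yields are skipped):
        .. code-block:: python
            for chunk in local.get_iter_returns(jid, minions, timeout=5):
                if chunk:
                    print(chunk)  # e.g. {'jerry': {'ret': True}}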
"""
if not isinstance(minions, set):
if isinstance(minions, str):
minions = {minions}
elif isinstance(minions, (list, tuple)):
minions = set(list(minions))
if timeout is None:
timeout = self.opts["timeout"]
gather_job_timeout = int(
kwargs.get("gather_job_timeout", self.opts["gather_job_timeout"])
)
start = int(time.time())
# timeouts per minion, id_ -> timeout time
minion_timeouts = {}
found = set()
missing = set()
        # Check to see if the jid is real; if not, return the empty dict
try:
if not self.returns_for_job(jid):
log.warning("jid does not exist")
yield {}
# stop the iteration, since the jid is invalid
return
except Exception as exc: # pylint: disable=broad-except
log.warning(
"Returner unavailable: %s", exc, exc_info_on_loglevel=logging.DEBUG
)
# Wait for the hosts to check in
last_time = False
# iterator for this job's return
if self.opts["order_masters"]:
            # If we are a MoM, we need to gather expected minions from downstream masters.
ret_iter = self.get_returns_no_block(
"(salt/job|syndic/.*)/{}".format(jid), "regex"
)
else:
ret_iter = self.get_returns_no_block("salt/job/{}".format(jid))
# iterator for the info of this job
jinfo_iter = []
# open event jids that need to be un-subscribed from later
open_jids = set()
timeout_at = time.time() + timeout
gather_syndic_wait = time.time() + self.opts["syndic_wait"]
# are there still minions running the job out there
# start as True so that we ping at least once
minions_running = True
log.debug(
"get_iter_returns for jid %s sent to %s will timeout at %s",
jid,
minions,
datetime.fromtimestamp(timeout_at).time(),
)
while True:
# Process events until timeout is reached or all minions have returned
for raw in ret_iter:
# if we got None, then there were no events
if raw is None:
break
if "minions" in raw.get("data", {}):
minions.update(raw["data"]["minions"])
if "missing" in raw.get("data", {}):
missing.update(raw["data"]["missing"])
continue
if "return" not in raw["data"]:
continue
if kwargs.get("raw", False):
found.add(raw["data"]["id"])
yield raw
else:
found.add(raw["data"]["id"])
ret = {raw["data"]["id"]: {"ret": raw["data"]["return"]}}
if "out" in raw["data"]:
ret[raw["data"]["id"]]["out"] = raw["data"]["out"]
if "retcode" in raw["data"]:
ret[raw["data"]["id"]]["retcode"] = raw["data"]["retcode"]
if "jid" in raw["data"]:
ret[raw["data"]["id"]]["jid"] = raw["data"]["jid"]
if kwargs.get("_cmd_meta", False):
ret[raw["data"]["id"]].update(raw["data"])
log.debug("jid %s return from %s", jid, raw["data"]["id"])
yield ret
# if we have all of the returns (and we aren't a syndic), no need for anything fancy
if (
len(found.intersection(minions)) >= len(minions)
and not self.opts["order_masters"]
):
# All minions have returned, break out of the loop
log.debug("jid %s found all minions %s", jid, found)
break
elif (
len(found.intersection(minions)) >= len(minions)
and self.opts["order_masters"]
):
if (
len(found) >= len(minions)
and len(minions) > 0
and time.time() > gather_syndic_wait
):
# There were some minions to find and we found them
# However, this does not imply that *all* masters have yet responded with expected minion lists.
# Therefore, continue to wait up to the syndic_wait period (calculated in gather_syndic_wait) to see
# if additional lower-level masters deliver their lists of expected
# minions.
break
# If we get here we may not have gathered the minion list yet. Keep waiting
# for all lower-level masters to respond with their minion lists
            # let's start the timeouts for all remaining minions
for id_ in minions - found:
# if we have a new minion in the list, make sure it has a timeout
if id_ not in minion_timeouts:
minion_timeouts[id_] = time.time() + timeout
# if the jinfo has timed out and some minions are still running the job
# re-do the ping
if time.time() > timeout_at and minions_running:
# since this is a new ping, no one has responded yet
jinfo = self.gather_job_info(
jid, list(minions - found), "list", **kwargs
)
minions_running = False
# if we weren't assigned any jid that means the master thinks
# we have nothing to send
if "jid" not in jinfo:
jinfo_iter = []
else:
jinfo_iter = self.get_returns_no_block(
"salt/job/{}".format(jinfo["jid"])
)
timeout_at = time.time() + gather_job_timeout
# if you are a syndic, wait a little longer
if self.opts["order_masters"]:
timeout_at += self.opts.get("syndic_wait", 1)
# check for minions that are running the job still
for raw in jinfo_iter:
                # if there are no more events, let's stop waiting for the jinfo
if raw is None:
break
try:
if raw["data"]["retcode"] > 0:
log.error(
"saltutil returning errors on minion %s", raw["data"]["id"]
)
minions.remove(raw["data"]["id"])
break
except KeyError as exc:
# This is a safe pass. We're just using the try/except to
# avoid having to deep-check for keys.
missing_key = exc.__str__().strip("'\"")
if missing_key == "retcode":
log.debug("retcode missing from client return")
else:
log.debug(
"Passing on saltutil error. Key '%s' missing "
"from client return. This may be an error in "
"the client.",
missing_key,
)
# Keep track of the jid events to unsubscribe from later
open_jids.add(jinfo["jid"])
# TODO: move to a library??
if "minions" in raw.get("data", {}):
minions.update(raw["data"]["minions"])
continue
if "syndic" in raw.get("data", {}):
minions.update(raw["syndic"])
continue
if "return" not in raw.get("data", {}):
continue
# if the job isn't running there anymore... don't count
if raw["data"]["return"] == {}:
continue
# if the minion throws an exception containing the word "return"
# the master will try to handle the string as a dict in the next
# step. Check if we have a string, log the issue and continue.
if isinstance(raw["data"]["return"], str):
log.error("unexpected return from minion: %s", raw)
continue
if (
"return" in raw["data"]["return"]
and raw["data"]["return"]["return"] == {}
):
continue
                # if we didn't originally target the minion, let's add it to the list
if raw["data"]["id"] not in minions:
minions.add(raw["data"]["id"])
# update this minion's timeout, as long as the job is still running
minion_timeouts[raw["data"]["id"]] = time.time() + timeout
                # a minion returned, so we know it's running somewhere
minions_running = True
# if we have hit gather_job_timeout (after firing the job) AND
            # if we have hit all minion timeouts, let's call it
now = time.time()
# if we have finished waiting, and no minions are running the job
            # then we need to see if each minion has timed out
done = (now > timeout_at) and not minions_running
if done:
                # if all minions have timed out
for id_ in minions - found:
if now < minion_timeouts[id_]:
done = False
break
if done:
break
# don't spin
if block:
time.sleep(0.01)
else:
yield
# If there are any remaining open events, clean them up.
if open_jids:
for jid in open_jids:
self.event.unsubscribe(jid)
if expect_minions:
for minion in list(minions - found):
yield {minion: {"failed": True}}
# Filter out any minions marked as missing for which we received
# returns (prevents false events sent due to higher-level masters not
# knowing about lower-level minions).
missing -= found
# Report on missing minions
if missing:
for minion in missing:
yield {minion: {"failed": True}}
def get_returns(self, jid, minions, timeout=None):
"""
Get the returns for the command line interface via the event system
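        A minimal sketch (the return value is illustrative):
        .. code-block:: python
            >>> local.get_returns(jid, ['jerry'], timeout=5)
            {'jerry': True}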
"""
minions = set(minions)
if timeout is None:
timeout = self.opts["timeout"]
start = int(time.time())
timeout_at = start + timeout
log.debug(
"get_returns for jid %s sent to %s will timeout at %s",
jid,
minions,
datetime.fromtimestamp(timeout_at).time(),
)
found = set()
ret = {}
        # Check to see if the jid is real; if not, return the empty dict
try:
if (
self.returners["{}.get_load".format(self.opts["master_job_cache"])](jid)
== {}
):
log.warning("jid does not exist")
return ret
except Exception as exc: # pylint: disable=broad-except
raise SaltClientError(
"Master job cache returner [{}] failed to verify jid. "
"Exception details: {}".format(self.opts["master_job_cache"], exc)
)
# Wait for the hosts to check in
while True:
time_left = timeout_at - int(time.time())
wait = max(1, time_left)
raw = self.event.get_event(wait, jid, auto_reconnect=self.auto_reconnect)
if raw is not None and "return" in raw:
found.add(raw["id"])
ret[raw["id"]] = raw["return"]
if len(found.intersection(minions)) >= len(minions):
# All minions have returned, break out of the loop
log.debug("jid %s found all minions", jid)
break
continue
            # The event system timeout was reached and nothing was returned
if len(found.intersection(minions)) >= len(minions):
# All minions have returned, break out of the loop
log.debug("jid %s found all minions", jid)
break
if int(time.time()) > timeout_at:
log.info(
"jid %s minions %s did not return in time", jid, (minions - found)
)
break
time.sleep(0.01)
return ret
def get_full_returns(self, jid, minions, timeout=None):
"""
This method starts off a watcher looking at the return data for
a specified jid, it returns all of the information for the jid
"""
# TODO: change this from ret to return... or the other way.
        # It's inconsistent; we should pick one
ret = {}
        # create the iterator first, so we also catch returns that arrive in the meantime
event_iter = self.get_event_iter_returns(jid, minions, timeout=timeout)
try:
data = self.returners["{}.get_jid".format(self.opts["master_job_cache"])](
jid
)
except Exception as exc: # pylint: disable=broad-except
raise SaltClientError(
"Returner {} could not fetch jid data. Exception details: {}".format(
self.opts["master_job_cache"], exc
)
)
for minion in data:
m_data = {}
if "return" in data[minion]:
m_data["ret"] = data[minion].get("return")
else:
m_data["ret"] = data[minion].get("return")
if "out" in data[minion]:
m_data["out"] = data[minion]["out"]
if minion in ret:
ret[minion].update(m_data)
else:
ret[minion] = m_data
            # if we have all the minion returns, let's just return
if len(set(ret).intersection(minions)) >= len(minions):
return ret
        # otherwise let's use the listener we created above to get the rest
for event_ret in event_iter:
# if nothing in the event_ret, skip
if event_ret == {}:
time.sleep(0.02)
continue
for minion, m_data in event_ret.items():
if minion in ret:
ret[minion].update(m_data)
else:
ret[minion] = m_data
# are we done yet?
if len(set(ret).intersection(minions)) >= len(minions):
return ret
# otherwise we hit the timeout, return what we have
return ret
def get_cache_returns(self, jid):
"""
Execute a single pass to gather the contents of the job cache
"""
ret = {}
try:
data = self.returners["{}.get_jid".format(self.opts["master_job_cache"])](
jid
)
except Exception as exc: # pylint: disable=broad-except
raise SaltClientError(
"Could not examine master job cache. "
"Error occurred in {} returner. "
"Exception details: {}".format(self.opts["master_job_cache"], exc)
)
for minion in data:
m_data = {}
if "return" in data[minion]:
m_data["ret"] = data[minion].get("return")
else:
m_data["ret"] = data[minion].get("return")
if "out" in data[minion]:
m_data["out"] = data[minion]["out"]
if minion in ret:
ret[minion].update(m_data)
else:
ret[minion] = m_data
return ret
def get_cli_static_event_returns(
self,
jid,
minions,
timeout=None,
tgt="*",
tgt_type="glob",
verbose=False,
show_timeout=False,
show_jid=False,
):
"""
Get the returns for the command line interface via the event system
"""
log.trace("entered - function get_cli_static_event_returns()")
minions = set(minions)
if verbose:
msg = "Executing job with jid {}".format(jid)
print(msg)
print("-" * len(msg) + "\n")
elif show_jid:
print("jid: {}".format(jid))
if timeout is None:
timeout = self.opts["timeout"]
start = int(time.time())
timeout_at = start + timeout
found = set()
ret = {}
        # Check to see if the jid is real; if not, return the empty dict
try:
if (
self.returners["{}.get_load".format(self.opts["master_job_cache"])](jid)
== {}
):
log.warning("jid does not exist")
return ret
except Exception as exc: # pylint: disable=broad-except
raise SaltClientError(
"Load could not be retrieved from "
"returner {}. Exception details: {}".format(
self.opts["master_job_cache"], exc
)
)
# Wait for the hosts to check in
while True:
# Process events until timeout is reached or all minions have returned
time_left = timeout_at - int(time.time())
# Wait 0 == forever, use a minimum of 1s
wait = max(1, time_left)
jid_tag = "salt/job/{}".format(jid)
raw = self.event.get_event(
wait, jid_tag, auto_reconnect=self.auto_reconnect
)
if raw is not None and "return" in raw:
if "minions" in raw.get("data", {}):
minions.update(raw["data"]["minions"])
continue
found.add(raw["id"])
ret[raw["id"]] = {"ret": raw["return"]}
ret[raw["id"]]["success"] = raw.get("success", False)
if "out" in raw:
ret[raw["id"]]["out"] = raw["out"]
if len(found.intersection(minions)) >= len(minions):
# All minions have returned, break out of the loop
break
continue
            # The event system timeout was reached and nothing was returned
if len(found.intersection(minions)) >= len(minions):
# All minions have returned, break out of the loop
break
if int(time.time()) > timeout_at:
if verbose or show_timeout:
if self.opts.get("minion_data_cache", False) or tgt_type in (
"glob",
"pcre",
"list",
):
if len(found) < len(minions):
fail = sorted(list(minions.difference(found)))
for minion in fail:
ret[minion] = {
"out": "no_return",
"ret": "Minion did not return",
}
break
time.sleep(0.01)
self._clean_up_subscriptions(jid)
return ret
def get_cli_event_returns(
self,
jid,
minions,
timeout=None,
tgt="*",
tgt_type="glob",
verbose=False,
progress=False,
show_timeout=False,
show_jid=False,
**kwargs
):
"""
Get the returns for the command line interface via the event system
"""
log.trace("func get_cli_event_returns()")
if verbose:
msg = "Executing job with jid {}".format(jid)
print(msg)
print("-" * len(msg) + "\n")
elif show_jid:
print("jid: {}".format(jid))
# lazy load the connected minions
connected_minions = None
return_count = 0
for ret in self.get_iter_returns(
jid,
minions,
timeout=timeout,
tgt=tgt,
tgt_type=tgt_type,
# (gtmanfred) expect_minions is popped here in case it is passed from a client
# call. If this is not popped, then it would be passed twice to
# get_iter_returns.
expect_minions=(
kwargs.pop("expect_minions", False) or verbose or show_timeout
),
**kwargs
):
log.debug("return event: %s", ret)
return_count = return_count + 1
if progress:
for id_, min_ret in ret.items():
if not min_ret.get("failed") is True:
yield {
"minion_count": len(minions),
"return_count": return_count,
}
# replace the return structure for missing minions
for id_, min_ret in ret.items():
if min_ret.get("failed") is True:
if connected_minions is None:
connected_minions = salt.utils.minions.CkMinions(
self.opts
).connected_ids()
if (
self.opts["minion_data_cache"]
and salt.cache.factory(self.opts).contains(
"minions/{}".format(id_), "data"
)
and connected_minions
and id_ not in connected_minions
):
yield {
id_: {
"out": "no_return",
"ret": "Minion did not return. [Not connected]",
"retcode": salt.defaults.exitcodes.EX_GENERIC,
}
}
else:
# don't report syndics as unresponsive minions
if not os.path.exists(
os.path.join(self.opts["syndic_dir"], id_)
):
yield {
id_: {
"out": "no_return",
"ret": (
"Minion did not return. [No response]\nThe"
" minions may not have all finished running and"
" any remaining minions will return upon"
" completion. To look up the return data for"
" this job later, run the following"
" command:\n\nsalt-run jobs.lookup_jid {}".format(
jid
)
),
"retcode": salt.defaults.exitcodes.EX_GENERIC,
}
}
else:
yield {id_: min_ret}
self._clean_up_subscriptions(jid)
def get_event_iter_returns(self, jid, minions, timeout=None):
"""
Gather the return data from the event system, break hard when timeout
is reached.
"""
log.trace("entered - function get_event_iter_returns()")
if timeout is None:
timeout = self.opts["timeout"]
timeout_at = time.time() + timeout
found = set()
        # Check to see if the jid is real; if not, return the empty dict
if (
self.returners["{}.get_load".format(self.opts["master_job_cache"])](jid)
== {}
):
log.warning("jid does not exist")
yield {}
# stop the iteration, since the jid is invalid
return
# Wait for the hosts to check in
while True:
raw = self.event.get_event(timeout, auto_reconnect=self.auto_reconnect)
if raw is None or time.time() > timeout_at:
# Timeout reached
break
if "minions" in raw.get("data", {}):
continue
try:
# There might be two jobs for the same minion, so we have to check for the jid
if jid == raw["jid"]:
found.add(raw["id"])
ret = {raw["id"]: {"ret": raw["return"]}}
else:
continue
except KeyError:
# Ignore other erroneous messages
continue
if "out" in raw:
ret[raw["id"]]["out"] = raw["out"]
yield ret
time.sleep(0.02)
def _resolve_nodegroup(self, ng):
"""
Resolve a nodegroup into its configured components
"""
if ng not in self.opts["nodegroups"]:
conf_file = self.opts.get("conf_file", "the master config file")
raise SaltInvocationError(
"Node group {} unavailable in {}".format(ng, conf_file)
)
return salt.utils.minions.nodegroup_comp(ng, self.opts["nodegroups"])
def _prep_pub(self, tgt, fun, arg, tgt_type, ret, jid, timeout, **kwargs):
"""
Set up the payload_kwargs to be sent down to the master
"""
if tgt_type == "nodegroup":
tgt = self._resolve_nodegroup(tgt)
tgt_type = "compound"
if tgt_type == "compound":
# Resolve all nodegroups, so that the minions don't have to.
new_tgt = list()
log.debug("compound resolution: original tgt: %s", tgt)
if isinstance(tgt, str):
tgt = tgt.split()
for word in tgt:
if word.startswith("N@") and len(word) > 2:
resolved = self._resolve_nodegroup(word[2:])
new_tgt.extend(resolved)
else:
new_tgt.append(word)
log.debug("compound resolution: new_tgt: %s", new_tgt)
tgt = " ".join(new_tgt)
# Convert a range expression to a list of nodes and change expression
# form to list
if tgt_type == "range" and HAS_RANGE:
tgt = self._convert_range_to_list(tgt)
tgt_type = "list"
# If an external job cache is specified add it to the ret list
if self.opts.get("ext_job_cache"):
if ret:
ret += ",{}".format(self.opts["ext_job_cache"])
else:
ret = self.opts["ext_job_cache"]
# format the payload - make a function that does this in the payload
# module
# Generate the standard keyword args to feed to format_payload
payload_kwargs = {
"cmd": "publish",
"tgt": tgt,
"fun": fun,
"arg": arg,
"key": self.key,
"tgt_type": tgt_type,
"ret": ret,
"jid": jid,
}
# if kwargs are passed, pack them.
if kwargs:
payload_kwargs["kwargs"] = kwargs
# If we have a salt user, add it to the payload
if self.opts["syndic_master"] and "user" in kwargs:
payload_kwargs["user"] = kwargs["user"]
elif self.salt_user:
payload_kwargs["user"] = self.salt_user
# If we're a syndication master, pass the timeout
if self.opts["order_masters"]:
payload_kwargs["to"] = timeout
return payload_kwargs
def pub(
self,
tgt,
fun,
arg=(),
tgt_type="glob",
ret="",
jid="",
timeout=5,
listen=False,
**kwargs
):
"""
Take the required arguments and publish the given command.
Arguments:
tgt:
The tgt is a regex or a glob used to match up the ids on
the minions. Salt works by always publishing every command
to all of the minions and then the minions determine if
the command is for them based on the tgt value.
fun:
The function name to be called on the remote host(s), this
must be a string in the format "<modulename>.<function name>"
            arg:
                The arg option needs to be a tuple of arguments to pass
                to the calling function; it may be left blank if the
                function takes no arguments
Returns:
jid:
                A string, as returned by the publisher, which is the job
                id; this tells the client where to get the job results
minions:
                A set of the targets that the passed tgt should match.
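        A minimal sketch of a direct publish (``local`` as above; the
        jid and minion list are illustrative):
        .. code-block:: python
            >>> local.pub('*', 'test.ping', listen=True)
            {'jid': '20131219215650131543', 'minions': ['jerry']}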
"""
# Make sure the publisher is running by checking the unix socket
if self.opts.get("ipc_mode", "") != "tcp" and not os.path.exists(
os.path.join(self.opts["sock_dir"], "publish_pull.ipc")
):
log.error(
"Unable to connect to the salt master publisher at %s",
self.opts["sock_dir"],
)
raise SaltClientError
payload_kwargs = self._prep_pub(
tgt, fun, arg, tgt_type, ret, jid, timeout, **kwargs
)
master_uri = "tcp://{}:{}".format(
salt.utils.network.ip_bracket(self.opts["interface"]),
str(self.opts["ret_port"]),
)
with salt.channel.client.ReqChannel.factory(
self.opts, crypt="clear", master_uri=master_uri
) as channel:
try:
# Ensure that the event subscriber is connected.
# If not, we won't get a response, so error out
if listen and not self.event.connect_pub(timeout=timeout):
raise SaltReqTimeoutError()
payload = channel.send(payload_kwargs, timeout=timeout)
except SaltReqTimeoutError as err:
log.error(err)
raise SaltReqTimeoutError(
"Salt request timed out. The master is not responding. You "
"may need to run your command with `--async` in order to "
"bypass the congested event bus. With `--async`, the CLI tool "
"will print the job id (jid) and exit immediately without "
"listening for responses. You can then use "
"`salt-run jobs.lookup_jid` to look up the results of the job "
"in the job cache later."
)
if not payload:
# The master key could have changed out from under us! Regen
# and try again if the key has changed
key = self.__read_master_key()
if key == self.key:
return payload
self.key = key
payload_kwargs["key"] = self.key
payload = channel.send(payload_kwargs)
error = payload.pop("error", None)
if error is not None:
if isinstance(error, dict):
err_name = error.get("name", "")
err_msg = error.get("message", "")
if err_name == "AuthenticationError":
raise AuthenticationError(err_msg)
elif err_name == "AuthorizationError":
raise AuthorizationError(err_msg)
raise PublishError(error)
if not payload:
return payload
return {"jid": payload["load"]["jid"], "minions": payload["load"]["minions"]}
@salt.ext.tornado.gen.coroutine
def pub_async(
self,
tgt,
fun,
arg=(),
tgt_type="glob",
ret="",
jid="",
timeout=5,
io_loop=None,
listen=True,
**kwargs
):
"""
Take the required arguments and publish the given command.
Arguments:
tgt:
The tgt is a regex or a glob used to match up the ids on
the minions. Salt works by always publishing every command
to all of the minions and then the minions determine if
the command is for them based on the tgt value.
fun:
The function name to be called on the remote host(s), this
must be a string in the format "<modulename>.<function name>"
            arg:
                The arg option needs to be a tuple of arguments to pass
                to the calling function; it may be left blank if the
                function takes no arguments
Returns:
jid:
                A string, as returned by the publisher, which is the job
                id; this tells the client where to get the job results
minions:
                A set of the targets that the passed tgt should match.
"""
# Make sure the publisher is running by checking the unix socket
if self.opts.get("ipc_mode", "") != "tcp" and not os.path.exists(
os.path.join(self.opts["sock_dir"], "publish_pull.ipc")
):
log.error(
"Unable to connect to the salt master publisher at %s",
self.opts["sock_dir"],
)
raise SaltClientError
payload_kwargs = self._prep_pub(
tgt, fun, arg, tgt_type, ret, jid, timeout, **kwargs
)
master_uri = (
"tcp://"
+ salt.utils.network.ip_bracket(self.opts["interface"])
+ ":"
+ str(self.opts["ret_port"])
)
with salt.channel.client.AsyncReqChannel.factory(
self.opts, io_loop=io_loop, crypt="clear", master_uri=master_uri
) as channel:
try:
# Ensure that the event subscriber is connected.
# If not, we won't get a response, so error out
if listen and not self.event.connect_pub(timeout=timeout):
raise SaltReqTimeoutError()
payload = yield channel.send(payload_kwargs, timeout=timeout)
except SaltReqTimeoutError:
raise SaltReqTimeoutError(
"Salt request timed out. The master is not responding. You "
"may need to run your command with `--async` in order to "
"bypass the congested event bus. With `--async`, the CLI tool "
"will print the job id (jid) and exit immediately without "
"listening for responses. You can then use "
"`salt-run jobs.lookup_jid` to look up the results of the job "
"in the job cache later."
)
if not payload:
# The master key could have changed out from under us! Regen
# and try again if the key has changed
key = self.__read_master_key()
if key == self.key:
raise salt.ext.tornado.gen.Return(payload)
self.key = key
payload_kwargs["key"] = self.key
payload = yield channel.send(payload_kwargs)
error = payload.pop("error", None)
if error is not None:
if isinstance(error, dict):
err_name = error.get("name", "")
err_msg = error.get("message", "")
if err_name == "AuthenticationError":
raise AuthenticationError(err_msg)
elif err_name == "AuthorizationError":
raise AuthorizationError(err_msg)
raise PublishError(error)
if not payload:
raise salt.ext.tornado.gen.Return(payload)
raise salt.ext.tornado.gen.Return(
{"jid": payload["load"]["jid"], "minions": payload["load"]["minions"]}
)
# pylint: disable=W1701
def __del__(self):
        # This IS really necessary!
        # When running tests, if self.event is not destroyed, we leak 2
        # threads per test case that uses self.client
self.destroy()
# pylint: enable=W1701
def _clean_up_subscriptions(self, job_id):
if self.opts.get("order_masters"):
self.event.unsubscribe("syndic/.*/{}".format(job_id), "regex")
self.event.unsubscribe("salt/job/{}".format(job_id))
def destroy(self):
if self.event is not None:
self.event.destroy()
self.event = None
def __enter__(self):
return self
def __exit__(self, *args):
self.destroy()
class FunctionWrapper(dict):
"""
Create a function wrapper that looks like the functions dict on the minion
but invoked commands on the minion via a LocalClient.
This allows SLS files to be loaded with an object that calls down to the
minion when the salt functions dict is referenced.
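    A minimal sketch (``__opts__`` and the minion ID are illustrative):
    .. code-block:: python
        funcs = FunctionWrapper(__opts__, 'jerry')
        funcs['test.ping']()  # runs test.ping on 'jerry' via LocalClient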
"""
def __init__(self, opts, minion):
super().__init__()
self.opts = opts
self.minion = minion
self.local = LocalClient(self.opts["conf_file"])
self.functions = self.__load_functions()
def __missing__(self, key):
"""
Since the function key is missing, wrap this call to a command to the
minion of said key if it is available in the self.functions set
"""
if key not in self.functions:
raise KeyError
return self.run_key(key)
def __load_functions(self):
"""
Find out what functions are available on the minion
"""
return set(
self.local.cmd(self.minion, "sys.list_functions").get(self.minion, [])
)
def run_key(self, key):
"""
Return a function that executes the arguments passed via the local
client
"""
def func(*args, **kwargs):
"""
Run a remote call
"""
args = list(args)
for _key, _val in kwargs.items():
args.append("{}={}".format(_key, _val))
return self.local.cmd(self.minion, key, args)
return func
class Caller:
"""
``Caller`` is the same interface used by the :command:`salt-call`
command-line tool on the Salt Minion.
.. versionchanged:: 2015.8.0
Added the ``cmd`` method for consistency with the other Salt clients.
The existing ``function`` and ``sminion.functions`` interfaces still
exist but have been removed from the docs.
Importing and using ``Caller`` must be done on the same machine as a
Salt Minion and it must be done using the same user that the Salt Minion is
running as.
Usage:
.. code-block:: python
import salt.client
caller = salt.client.Caller()
caller.cmd('test.ping')
Note, a running master or minion daemon is not required to use this class.
Running ``salt-call --local`` simply sets :conf_minion:`file_client` to
``'local'``. The same can be achieved at the Python level by including that
setting in a minion config file.
.. versionadded:: 2014.7.0
Pass the minion config as the ``mopts`` dictionary.
.. code-block:: python
import salt.client
import salt.config
__opts__ = salt.config.minion_config('/etc/salt/minion')
__opts__['file_client'] = 'local'
caller = salt.client.Caller(mopts=__opts__)
"""
def __init__(self, c_path=os.path.join(syspaths.CONFIG_DIR, "minion"), mopts=None):
# Late-import of the minion module to keep the CLI as light as possible
import salt.minion
if mopts:
self.opts = mopts
else:
self.opts = salt.config.minion_config(c_path)
self.sminion = salt.minion.SMinion(self.opts)
def cmd(self, fun, *args, **kwargs):
"""
Call an execution module with the given arguments and keyword arguments
.. versionchanged:: 2015.8.0
Added the ``cmd`` method for consistency with the other Salt clients.
The existing ``function`` and ``sminion.functions`` interfaces still
exist but have been removed from the docs.
.. code-block:: python
caller.cmd('test.arg', 'Foo', 'Bar', baz='Baz')
caller.cmd('event.send', 'myco/myevent/something',
data={'foo': 'Foo'}, with_env=['GIT_COMMIT'], with_grains=True)
"""
return self.sminion.functions[fun](*args, **kwargs)
def function(self, fun, *args, **kwargs):
"""
Call a single salt function
"""
func = self.sminion.functions[fun]
args, kwargs = salt.minion.load_args_and_kwargs(
func, salt.utils.args.parse_input(args), kwargs
)
return func(*args, **kwargs)
class ProxyCaller:
"""
``ProxyCaller`` is the same interface used by the :command:`salt-call`
with the args ``--proxyid <proxyid>`` command-line tool on the Salt Proxy
Minion.
Importing and using ``ProxyCaller`` must be done on the same machine as a
Salt Minion and it must be done using the same user that the Salt Minion is
running as.
Usage:
.. code-block:: python
import salt.client
caller = salt.client.ProxyCaller()
caller.cmd('test.ping')
Note, a running master or minion daemon is not required to use this class.
Running ``salt-call --local`` simply sets :conf_minion:`file_client` to
``'local'``. The same can be achieved at the Python level by including that
setting in a minion config file.
.. code-block:: python
import salt.client
import salt.config
__opts__ = salt.config.proxy_config('/etc/salt/proxy', minion_id='quirky_edison')
__opts__['file_client'] = 'local'
caller = salt.client.ProxyCaller(mopts=__opts__)
.. note::
        To use this for calling proxies, the :py:func:`is_proxy function
        <salt.utils.platform.is_proxy>` requires that ``--proxyid`` be an
        argument on the commandline for the script this is used in, or that the
        string ``proxy`` is in the name of the script.
"""
def __init__(self, c_path=os.path.join(syspaths.CONFIG_DIR, "proxy"), mopts=None):
# Late-import of the minion module to keep the CLI as light as possible
import salt.minion
self.opts = mopts or salt.config.proxy_config(c_path)
self.sminion = salt.minion.SProxyMinion(self.opts)
def cmd(self, fun, *args, **kwargs):
"""
Call an execution module with the given arguments and keyword arguments
.. code-block:: python
caller.cmd('test.arg', 'Foo', 'Bar', baz='Baz')
caller.cmd('event.send', 'myco/myevent/something',
data={'foo': 'Foo'}, with_env=['GIT_COMMIT'], with_grains=True)
"""
func = self.sminion.functions[fun]
data = {"arg": args, "fun": fun}
data.update(kwargs)
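        # Resolve the executor chain: executors attached to the proxy
        # minion take precedence, then the minion opts, falling back to
        # the plain direct_call executor.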
executors = getattr(self.sminion, "module_executors", []) or self.opts.get(
"module_executors", ["direct_call"]
)
if isinstance(executors, str):
executors = [executors]
for name in executors:
fname = "{}.execute".format(name)
if fname not in self.sminion.executors:
raise SaltInvocationError("Executor '{}' is not available".format(name))
return_data = self.sminion.executors[fname](
self.opts, data, func, args, kwargs
)
if return_data is not None:
break
return return_data
|
{
"content_hash": "a7280e02b478a1012028e0f7e39cfd51",
"timestamp": "",
"source": "github",
"line_count": 2274,
"max_line_length": 120,
"avg_line_length": 35.191732629727355,
"alnum_prop": 0.5077849698847875,
"repo_name": "saltstack/salt",
"id": "deb82bd74869db7c99b13d1b1416812bfc719f02",
"size": "80026",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "salt/client/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "14911"
},
{
"name": "C",
"bytes": "1571"
},
{
"name": "Cython",
"bytes": "1458"
},
{
"name": "Dockerfile",
"bytes": "184"
},
{
"name": "Groovy",
"bytes": "12318"
},
{
"name": "HCL",
"bytes": "257"
},
{
"name": "HTML",
"bytes": "8031"
},
{
"name": "Jinja",
"bytes": "45598"
},
{
"name": "Makefile",
"bytes": "713"
},
{
"name": "NSIS",
"bytes": "76572"
},
{
"name": "PowerShell",
"bytes": "75891"
},
{
"name": "Python",
"bytes": "41444811"
},
{
"name": "Rich Text Format",
"bytes": "6242"
},
{
"name": "Roff",
"bytes": "191"
},
{
"name": "Ruby",
"bytes": "961"
},
{
"name": "SaltStack",
"bytes": "35856"
},
{
"name": "Scheme",
"bytes": "895"
},
{
"name": "Scilab",
"bytes": "1147"
},
{
"name": "Shell",
"bytes": "524917"
}
],
"symlink_target": ""
}
|
from lino_book.projects.polly.settings import *
from lino.utils import i2d
class Site(Site):
title = Site.verbose_name + " demo"
the_demo_date = i2d(20141023)
SITE = Site(globals())
DEBUG = True
# the following line should always be commented out in a checked-in version
# DATABASES['default']['NAME'] = ':memory:'
|
{
"content_hash": "69952ffb4e74465df98c6c7d6d63631d",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 75,
"avg_line_length": 25.153846153846153,
"alnum_prop": 0.7064220183486238,
"repo_name": "khchine5/book",
"id": "5e9e54b0748894ba09de3963b4489854e75437af",
"size": "327",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "lino_book/projects/polly/settings/demo.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "HTML",
"bytes": "3668"
},
{
"name": "Python",
"bytes": "486198"
},
{
"name": "Shell",
"bytes": "702"
}
],
"symlink_target": ""
}
|
import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model(['BoxCox'], ['Lag1Trend'], ['NoCycle'], ['SVR'])
|
{
"content_hash": "700852ec7d42db162a33bbb2929e7ba6",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 74,
"avg_line_length": 36.75,
"alnum_prop": 0.6938775510204082,
"repo_name": "antoinecarme/pyaf",
"id": "1b54a6e20d5fb57e00034ede0f239783551d5711",
"size": "147",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/model_control/detailed/transf_BoxCox/model_control_one_enabled_BoxCox_Lag1Trend_NoCycle_SVR.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Torrent',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=255)),
('size', models.DecimalField(max_digits=20, decimal_places=0)),
('hash', models.CharField(max_length=40)),
('num_files', models.DecimalField(max_digits=10, decimal_places=0)),
('category', models.CharField(max_length=20)),
],
options={
},
bases=(models.Model,),
),
]
|
{
"content_hash": "0a1b44f14d8fa786ca39905d3b9cd468",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 114,
"avg_line_length": 31.46153846153846,
"alnum_prop": 0.5464547677261614,
"repo_name": "xspager/openbaybrowser",
"id": "67e6db4d673d07f8374ebed26dc9daef529fc5c9",
"size": "842",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "torrent/migrations/0001_initial.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6812"
}
],
"symlink_target": ""
}
|
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
import warnings
from google.api_core import gapic_v1, grpc_helpers_async
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.dataflow_v1beta3.types import jobs, snapshots
from .base import DEFAULT_CLIENT_INFO, JobsV1Beta3Transport
from .grpc import JobsV1Beta3GrpcTransport
class JobsV1Beta3GrpcAsyncIOTransport(JobsV1Beta3Transport):
"""gRPC AsyncIO backend transport for JobsV1Beta3.
Provides a method to create and modify Google Cloud Dataflow
jobs. A Job is a multi-stage computation graph run by the Cloud
Dataflow service.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
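    A minimal construction sketch (credential resolution is elided; the
    transport falls back to application default credentials)::
        transport = JobsV1Beta3GrpcAsyncIOTransport()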
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(
cls,
host: str = "dataflow.googleapis.com",
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
def __init__(
self,
*,
host: str = "dataflow.googleapis.com",
credentials: Optional[ga_credentials.Credentials] = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: Optional[aio.Channel] = None,
api_mtls_endpoint: Optional[str] = None,
client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
api_audience: Optional[str] = None,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
api_audience=api_audience,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
# use the credentials which are saved
credentials=self._credentials,
# Set ``credentials_file`` to ``None`` here as
# the credentials that we saved earlier should be used.
credentials_file=None,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def create_job(self) -> Callable[[jobs.CreateJobRequest], Awaitable[jobs.Job]]:
r"""Return a callable for the create job method over gRPC.
Creates a Cloud Dataflow job.
To create a job, we recommend using
``projects.locations.jobs.create`` with a [regional endpoint]
(https://cloud.google.com/dataflow/docs/concepts/regional-endpoints).
Using ``projects.jobs.create`` is not recommended, as your job
will always start in ``us-central1``.
Returns:
Callable[[~.CreateJobRequest],
Awaitable[~.Job]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_job" not in self._stubs:
self._stubs["create_job"] = self.grpc_channel.unary_unary(
"/google.dataflow.v1beta3.JobsV1Beta3/CreateJob",
request_serializer=jobs.CreateJobRequest.serialize,
response_deserializer=jobs.Job.deserialize,
)
return self._stubs["create_job"]
@property
def get_job(self) -> Callable[[jobs.GetJobRequest], Awaitable[jobs.Job]]:
r"""Return a callable for the get job method over gRPC.
Gets the state of the specified Cloud Dataflow job.
To get the state of a job, we recommend using
``projects.locations.jobs.get`` with a [regional endpoint]
(https://cloud.google.com/dataflow/docs/concepts/regional-endpoints).
Using ``projects.jobs.get`` is not recommended, as you can only
get the state of jobs that are running in ``us-central1``.
Returns:
Callable[[~.GetJobRequest],
Awaitable[~.Job]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_job" not in self._stubs:
self._stubs["get_job"] = self.grpc_channel.unary_unary(
"/google.dataflow.v1beta3.JobsV1Beta3/GetJob",
request_serializer=jobs.GetJobRequest.serialize,
response_deserializer=jobs.Job.deserialize,
)
return self._stubs["get_job"]
@property
def update_job(self) -> Callable[[jobs.UpdateJobRequest], Awaitable[jobs.Job]]:
r"""Return a callable for the update job method over gRPC.
Updates the state of an existing Cloud Dataflow job.
To update the state of an existing job, we recommend using
``projects.locations.jobs.update`` with a [regional endpoint]
(https://cloud.google.com/dataflow/docs/concepts/regional-endpoints).
Using ``projects.jobs.update`` is not recommended, as you can
only update the state of jobs that are running in
``us-central1``.
Returns:
Callable[[~.UpdateJobRequest],
Awaitable[~.Job]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_job" not in self._stubs:
self._stubs["update_job"] = self.grpc_channel.unary_unary(
"/google.dataflow.v1beta3.JobsV1Beta3/UpdateJob",
request_serializer=jobs.UpdateJobRequest.serialize,
response_deserializer=jobs.Job.deserialize,
)
return self._stubs["update_job"]
@property
def list_jobs(
self,
) -> Callable[[jobs.ListJobsRequest], Awaitable[jobs.ListJobsResponse]]:
r"""Return a callable for the list jobs method over gRPC.
List the jobs of a project.
To list the jobs of a project in a region, we recommend using
``projects.locations.jobs.list`` with a [regional endpoint]
(https://cloud.google.com/dataflow/docs/concepts/regional-endpoints).
        To list all jobs across all regions, use
``projects.jobs.aggregated``. Using ``projects.jobs.list`` is
not recommended, as you can only get the list of jobs that are
running in ``us-central1``.
Returns:
Callable[[~.ListJobsRequest],
Awaitable[~.ListJobsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_jobs" not in self._stubs:
self._stubs["list_jobs"] = self.grpc_channel.unary_unary(
"/google.dataflow.v1beta3.JobsV1Beta3/ListJobs",
request_serializer=jobs.ListJobsRequest.serialize,
response_deserializer=jobs.ListJobsResponse.deserialize,
)
return self._stubs["list_jobs"]
@property
def aggregated_list_jobs(
self,
) -> Callable[[jobs.ListJobsRequest], Awaitable[jobs.ListJobsResponse]]:
r"""Return a callable for the aggregated list jobs method over gRPC.
List the jobs of a project across all regions.
Returns:
Callable[[~.ListJobsRequest],
Awaitable[~.ListJobsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "aggregated_list_jobs" not in self._stubs:
self._stubs["aggregated_list_jobs"] = self.grpc_channel.unary_unary(
"/google.dataflow.v1beta3.JobsV1Beta3/AggregatedListJobs",
request_serializer=jobs.ListJobsRequest.serialize,
response_deserializer=jobs.ListJobsResponse.deserialize,
)
return self._stubs["aggregated_list_jobs"]
@property
def check_active_jobs(
self,
) -> Callable[
[jobs.CheckActiveJobsRequest], Awaitable[jobs.CheckActiveJobsResponse]
]:
r"""Return a callable for the check active jobs method over gRPC.
Check for existence of active jobs in the given
project across all regions.
Returns:
Callable[[~.CheckActiveJobsRequest],
Awaitable[~.CheckActiveJobsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "check_active_jobs" not in self._stubs:
self._stubs["check_active_jobs"] = self.grpc_channel.unary_unary(
"/google.dataflow.v1beta3.JobsV1Beta3/CheckActiveJobs",
request_serializer=jobs.CheckActiveJobsRequest.serialize,
response_deserializer=jobs.CheckActiveJobsResponse.deserialize,
)
return self._stubs["check_active_jobs"]
@property
def snapshot_job(
self,
) -> Callable[[jobs.SnapshotJobRequest], Awaitable[snapshots.Snapshot]]:
r"""Return a callable for the snapshot job method over gRPC.
Snapshot the state of a streaming job.
Returns:
Callable[[~.SnapshotJobRequest],
Awaitable[~.Snapshot]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "snapshot_job" not in self._stubs:
self._stubs["snapshot_job"] = self.grpc_channel.unary_unary(
"/google.dataflow.v1beta3.JobsV1Beta3/SnapshotJob",
request_serializer=jobs.SnapshotJobRequest.serialize,
response_deserializer=snapshots.Snapshot.deserialize,
)
return self._stubs["snapshot_job"]
def close(self):
return self.grpc_channel.close()
__all__ = ("JobsV1Beta3GrpcAsyncIOTransport",)
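# Minimal usage sketch (an assumption-laden illustration, not part of the
# generated module): the transport is normally constructed by the async client
# wrapper, and the host and request fields shown here are hypothetical.
#
#   transport = JobsV1Beta3GrpcAsyncIOTransport(host="dataflow.googleapis.com")
#   request = jobs.GetJobRequest(project_id="my-project", job_id="job-1")
#   job = await transport.get_job(request)  # property returns the cached stub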
|
{
"content_hash": "35ca50a5174e512488b06210c75c3308",
"timestamp": "",
"source": "github",
"line_count": 431,
"max_line_length": 88,
"avg_line_length": 44.593967517401396,
"alnum_prop": 0.6127471383975026,
"repo_name": "googleapis/python-dataflow-client",
"id": "094b22b62004edf222eb6bcef16174aced8c48ab",
"size": "19820",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "google/cloud/dataflow_v1beta3/services/jobs_v1_beta3/transports/grpc_asyncio.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "1491941"
},
{
"name": "Shell",
"bytes": "30687"
}
],
"symlink_target": ""
}
|
import os
from simulux.utils import load_json
from simulux.constants import DIST_DEFAULTS_PATH
DEFAULT_LAYOUT = os.path.join(DIST_DEFAULTS_PATH, 'memory_layout.json')
def load_layout(layout_file=None):
'''
Load the layout from the config file (structured and hierarchical)
'''
if not layout_file:
layout_file = DEFAULT_LAYOUT
return load_json(layout_file)
'''
Memory class:
Memory (ram) is defined by:
- used
- free
- buffers
- cached
- shared
All values are stored in bytes
'''
class Memory(object):
"""Define a Memory object"""
def __init__(self, conf=None):
super(Memory, self).__init__()
self.data = {
"free": 0,
"used": 0,
"buffers": 0,
"shared": 0,
"cached": 0,
"total": 0
}
# Set default layout
self.set_layout()
# Add scenario specific memory data
if conf:
self.data.update({'used': conf['memory'].get('used', 0)})
self.data.update({'buffers': conf['memory'].get('buffers', 0)})
self.data.update({'shared': conf['memory'].get('shared', 0)})
self.data.update({'cached': conf['memory'].get('cached', 0)})
self.data.update({'total': conf['memory'].get('total', 0)})
self.data.update({'free': conf['memory'].get('free', int(self.data['total'])-int(self.data['used']))})
assert self.data['used'] <= self.data['total']
assert self.data['free'] <= self.data['total']
assert self.data['used'] + self.data['free'] == self.data['total']
def set_layout(self, layout_file=None):
'''
Set the Memory configuration based on the default layout (or get it overriden)
'''
layout = load_layout(layout_file)
self.data.update({'free': layout.get('free')})
self.data.update({'used': layout.get('used')})
self.data.update({'buffers': layout.get('buffers')})
self.data.update({'shared': layout.get('shared')})
self.data.update({'cached': layout.get('cached')})
self.data.update({'total': layout.get('total')})
def set(self, mem_type, size):
'''
Set memory mem_type to size
'''
# TODO:
# - ensure size is within limits
# - update shared/cached/buffer dynamically and s + c + b < used
size = int(size)
prev = self.data.get(mem_type, 0)
diff = size - prev
if mem_type in ['shared', 'cached', 'buffers']:
# Takes memory from free and apply to used
self.data[mem_type] = size
self.data['free'] -= diff
self.data['used'] += diff
return True
if mem_type == 'used':
self.data['used'] = size
self.data['free'] -= diff
return True
if mem_type == 'free':
self.data['free'] = size
self.data['used'] -= diff
return True
if mem_type == 'total':
# Need to ensure total mem change maintains all memory
# can only reduce if diff < free
if diff < 0 and abs(diff) > self.data['free']:
                print('Not enough memory to allow shrink - would OOM...')
return False
self.data['total'] = size
self.data['free'] += diff
return True
def update(self, mem_type, size):
'''
Update memory mem_type by size (+/-)
'''
size = int(size)
if mem_type in ['shared', 'cached', 'buffers']:
# Takes memory from free and apply to used
self.data[mem_type] += size
self.data['free'] -= size
self.data['used'] += size
return True
if mem_type == 'used':
self.data['used'] += size
self.data['free'] -= size
return True
if mem_type == 'free':
self.data['free'] += size
self.data['used'] -= size
return True
return False
def dump(self):
'''
Return the full memory details
'''
memory = {
"total": self.data.get('total'),
"used": self.data.get('used'),
"free": self.data.get('free'),
"cached": self.data.get('cached'),
"buffers": self.data.get('buffers'),
"shared": self.data.get('shared')
}
return memory
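if __name__ == '__main__':
    # Illustrative demo, not part of the original module; it assumes the
    # default memory_layout.json shipped with simulux defines integer values
    # for the keys used above.
    mem = Memory()
    mem.set('used', 512 * 1024 * 1024)      # mark 512 MiB as used
    mem.update('cached', 64 * 1024 * 1024)  # grow the page cache by 64 MiB
    print(mem.dump())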
|
{
"content_hash": "6a22510c5d3ae9de8bf75aed5358bb84",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 114,
"avg_line_length": 32.535714285714285,
"alnum_prop": 0.5091108671789243,
"repo_name": "zbal/simulux",
"id": "f90aab8cc7862dc0add608a06b6ffbcca316fe6c",
"size": "4555",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/simulux/memory.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "46722"
}
],
"symlink_target": ""
}
|
'''
SlapComp GUI - HUD Panel Widget
~~~~~~~~~~~~
'''
from PySide import QtCore, QtGui
HUD_DEFAULT_SIZE = (0, 0, 300, 75)
HUD_OPACITY = .5
HUD_CORNER_RADIUS = (3, 3)
HUD_BG_COLOR = QtGui.QColor(80, 80, 80, 255)
HUD_TEXT_COLOR = QtGui.QColor(240, 240, 240, 255)
HUD_BORDER_COLOR = QtCore.Qt.black
HUD_BORDER_SIZE = 2
HUD_DEFAULT_TEXT = "Test Message"
class HUDItem(QtGui.QGraphicsWidget):
def __init__(self, parent=None, label=None):
super(HUDItem, self).__init__(parent)
self.opacity = HUD_OPACITY
self.vertLayout = QtGui.QGraphicsLinearLayout(QtCore.Qt.Horizontal)
self.setLayout(self.vertLayout)
def addLabel(self, text):
proxy = QtGui.QGraphicsProxyWidget()
textWidget = QtGui.QLabel(text)
proxy.setWidget(textWidget)
self.vertLayout.addItem(proxy)
# def boundingRect(self):
# return QtCore.QRectF(QtCore.QRectF(self.size[0],
# self.size[1], self.size[2], self.size[3]))
def paint(self, painter, qstyleoptiongraphicsitem, qwidget):
painter.setOpacity(self.opacity)
painter.setPen(QtGui.QPen(HUD_BORDER_COLOR,
HUD_BORDER_SIZE))
painter.setBrush(HUD_BG_COLOR)
painter.drawRoundedRect(self.boundingRect(),
HUD_CORNER_RADIUS[0], HUD_CORNER_RADIUS[1])
#textRect = self.boundingRect().adjusted(10, 10, -10, -10)
#flags = QtCore.Qt.TextWordWrap
#font = QtGui.QFont()
#font.setPixelSize(12)
#painter.setPen(HUD_TEXT_COLOR)
#painter.setFont(font)
#painter.drawText(textRect, flags, self.text)
if __name__ == '__main__':
import sys
app = QtGui.QApplication(sys.argv)
testPanel = QtGui.QGraphicsView()
testScene = QtGui.QGraphicsScene(testPanel)
testPanel.setScene(testScene)
testPanel.setRenderHints(QtGui.QPainter.Antialiasing |
QtGui.QPainter.SmoothPixmapTransform |
QtGui.QPainter.TextAntialiasing)
testPanel.setBackgroundBrush(QtGui.QColor(50, 50, 50, 255))
testHUDitem = HUDItem()
testHUDitem.addLabel("testing")
testHUDitem.addLabel("testing")
testScene.addItem(testHUDitem)
testHUDitem.grabKeyboard()
testPanel.setFocus()
testPanel.show()
sys.exit(app.exec_())
|
{
"content_hash": "62840b8d279a3394871cfdab7e0557fb",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 75,
"avg_line_length": 27.890243902439025,
"alnum_prop": 0.6519457804984696,
"repo_name": "fcnmx/dccui",
"id": "69b72a4cb20a34cc97565c79f31d69705f15744b",
"size": "2287",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "hud_item.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "273387"
}
],
"symlink_target": ""
}
|
import logging
import data_pipeline.audit.connection_factory as audit_conn_factory
import data_pipeline.constants.const as const
from .exceptions import InvalidArgumentsError
from .extractor import Extractor
from abc import ABCMeta, abstractmethod
logger = logging.getLogger(__name__)
def get_prev_run_cdcs(audit_conn_details, argv):
engine = audit_conn_factory.build_engine(audit_conn_details)
with engine.connect() as conn:
sqlstm = """
SELECT id
,process_code
,min_lsn
,max_lsn
,status
FROM {schema}.process_control
WHERE id = (
SELECT MAX(id) FROM {schema}.process_control
WHERE 1 = 1
AND profile_name = '{profilename}'
AND profile_version = {profileversion}
AND process_code IN ('{process1}','{process2}')
AND status IN ('{status1}', '{status2}')
AND min_lsn IS NOT NULL
)""".format(schema=argv.auditschema,
profilename=argv.profilename,
profileversion=argv.profileversion,
process1=const.INITSYNC,
process2=const.CDCEXTRACT,
status1=const.SUCCESS,
status2=const.WARNING)
logger.info("Executing: {}".format(sqlstm))
result = conn.execute(sqlstm)
for row in result:
return (row[2], row[3])
class CdcExtractor(Extractor):
__metaclass__ = ABCMeta
def __init__(self, db, argv, audit_factory):
super(CdcExtractor, self).__init__(
const.CDCEXTRACT, db, argv, audit_factory)
def _extract_source_data(self):
self.build_keycolumnlist(self._active_schemas, self._active_tables)
(self._start_lsn, self._end_lsn) = self._get_cdc_query_range()
if self._end_lsn is not None:
self.poll_cdcs(self._start_lsn, self._end_lsn)
self.flush()
else:
self._report_no_cdcs_found()
return self._end_lsn
def _get_cdc_query_range(self):
(prev_min_cdc_point, prev_max_cdc_point) = get_prev_run_cdcs(
self._audit_conn_details, self._argv)
self._pc.min_lsn = prev_min_cdc_point
self._pc.max_lsn = prev_max_cdc_point
if self._argv.startscn:
self._logger.info(
"Overriding computed minscn ({minscn}) with "
"supplied startscn ({startscn})"
.format(minscn=prev_max_cdc_point,
startscn=self._argv.startscn))
prev_max_cdc_point = self._argv.startscn
(min_cdc_point, max_cdc_point) = self.get_source_minmax_cdc_point(
prev_max_cdc_point)
if self._argv.endscn:
self._logger.info(
"Overriding computed maxscn ({maxscn}) with "
"supplied endscn ({endscn})"
.format(maxscn=max_cdc_point, endscn=self._argv.endscn))
max_cdc_point = self._argv.endscn
return (min_cdc_point, max_cdc_point)
def _report_no_cdcs_found(self):
message = "Completed CDCExtract - No new CDC Records detected ..."
self._pc.comment = message
self._pc.status = const.SUCCESS
self._pc.update()
self._logger.debug(message)
@abstractmethod
def get_source_minmax_cdc_point(self, min_lsn):
pass
@abstractmethod
    def build_keycolumnlist(self, schemas, tables):
pass
def _decorate_poll_cdcs(func):
def func_wrapper(self, beg_cdc_point, end_cdc_point):
self._pc.min_lsn = beg_cdc_point
self._pc.max_lsn = end_cdc_point
self._pc.comment = "Extracting CDC Records ..."
self._pc.update()
func(self, beg_cdc_point, end_cdc_point)
self._pc.comment = "Extracted CDC Records."
self._pc.update()
return func_wrapper
@_decorate_poll_cdcs
def poll_cdcs(self, beg_cdc_point, end_cdc_point):
pass
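# Illustrative subclass sketch (hypothetical names, not part of this module):
# a concrete extractor only supplies the source-specific pieces; the base
# class drives the auditing and extract flow.
#
#   class MyCdcExtractor(CdcExtractor):
#       def get_source_minmax_cdc_point(self, min_lsn):
#           return (min_lsn, min_lsn + 1000)
#       def build_keycolumnlist(self, schemas, tables):
#           self._keycolumnlist = {}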
|
{
"content_hash": "e2fe698d0d9c24ca7b6ce034dff52fc3",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 75,
"avg_line_length": 33.225806451612904,
"alnum_prop": 0.5616504854368932,
"repo_name": "iagcl/data_pipeline",
"id": "cdb0ad167d14cde09a951c244bef33bc64159653",
"size": "5160",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data_pipeline/extractor/cdc_extractor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "71156"
},
{
"name": "Makefile",
"bytes": "3101"
},
{
"name": "Python",
"bytes": "807620"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
import re
import urlparse
AUTHOR = u'Bee'
SITENAME = u'Bee - a NoSQL database running in a Lua application server'
SITEURL = 'http://bee.org'
PATH = 'content'
THEME = "theme"
TIMEZONE = 'Europe/Moscow'
DEFAULT_LANG = u'en'
PLUGINS = ['plugins.beautifulsite']
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
DEFAULT_PAGINATION = False
BSITE_PATH = ['newsite']
ARTICLE_EXCLUDES = ['doc', 'newsite']
JINJA_FILTERS = {
're_replace': (lambda s, i, o: re.sub(i, o, s)),
    'url_split': (lambda s: re.sub(r'www\.', '', urlparse.urlsplit(s).netloc))
}
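# For example:
#   re_replace('foo-bar', '-', '_')           -> 'foo_bar'
#   url_split('http://www.example.com/page')  -> 'example.com'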
INDEX_SAVE_AS = ''
ARCHIVES_SAVE_AS = ''
AUTHORS_SAVE_AS = ''
CATEGORIES_SAVE_AS = ''
TAGS_SAVE_AS = ''
TAG_SAVE_AS = ''
STATIC_PATHS = [
'robots.txt',
'ycsb',
'js/highcharts.js',
'js/ie8.js',
'js/index_tabs.js',
'js/bench_tabs.js',
'js/main.js',
'js/old_tabs.js',
'js/select.js',
'js/filesize.min.js'
]
EXTRA_PATH_METADATA = {}
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
|
{
"content_hash": "f1b4e716f6a6d26b435ec318fa36ce47",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 82,
"avg_line_length": 21.089285714285715,
"alnum_prop": 0.6486028789161727,
"repo_name": "venkatarajasekhar/bee",
"id": "a0910d15012eeec65ee3bb27e08dcc7a42f7d89c",
"size": "1229",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doc/www/pelicanconf.py",
"mode": "33261",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "1356794"
},
{
"name": "C++",
"bytes": "927979"
},
{
"name": "CMake",
"bytes": "264992"
},
{
"name": "GAP",
"bytes": "3804"
},
{
"name": "Groff",
"bytes": "6454"
},
{
"name": "Lua",
"bytes": "531198"
},
{
"name": "Makefile",
"bytes": "369690"
},
{
"name": "Python",
"bytes": "187321"
},
{
"name": "Ragel in Ruby Host",
"bytes": "6352"
},
{
"name": "Ruby",
"bytes": "2693"
},
{
"name": "Shell",
"bytes": "3066"
}
],
"symlink_target": ""
}
|
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1_exec_action import V1ExecAction
class TestV1ExecAction(unittest.TestCase):
""" V1ExecAction unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1ExecAction(self):
"""
Test V1ExecAction
"""
# FIXME: construct object with mandatory attributes with example values
#model = kubernetes.client.models.v1_exec_action.V1ExecAction()
pass
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "f4dc45593523ac01d41abd447d5a355d",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 105,
"avg_line_length": 21.714285714285715,
"alnum_prop": 0.6820175438596491,
"repo_name": "mbohlool/client-python",
"id": "2728f1dc288701f66f877aee642e9422c9f25683",
"size": "929",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kubernetes/test/test_v1_exec_action.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "8417639"
},
{
"name": "Shell",
"bytes": "16830"
}
],
"symlink_target": ""
}
|
import os
import sys
import traceback
from .text_opts import *
try:
basestring
except NameError:
basestring = str
# basic error for API
class ASTFError(Exception):
def __init__(self, msg):
self.msg = str(msg)
self.stack = traceback.extract_stack()
def __str__(self):
self.tb = traceback.extract_tb(sys.exc_info()[2])
if not self.tb:
return self.msg
s = format_text("\n******\n", 'bold')
s += format_text("\nException stack (most recent call last):\n\n", 'underline')
for i, line in enumerate(self.tb):
fname, lineno, src = os.path.split(line[0])[1], line[1], line[3]
s += "#{:<2} {:<50} - '{}'\n".format(len(self.tb) - i - 1, format_text(fname, 'bold') + ':' +
format_text(lineno, 'bold'), format_text(src.strip(), 'bold'))
s += format_text('\nSummary error message:\n\n', 'underline')
s += format_text(self.msg + '\n', 'bold')
return s
def brief(self):
return self.msg
class ASTFErrorBadParamCombination(ASTFError):
def __init__(self, func, name1, name2):
msg = "When creating \"{0}\", must not specify both \"{1}\" and \"{2}\"".format(func, name1, name2)
ASTFError.__init__(self, msg)
class ASTFErrorMissingParam(ASTFError):
def __init__(self, func, name1, name2=None):
if name2 is not None:
msg = "When creating \"{0}\", must specify one of \"{1}\" and \"{2}\"".format(func, name1, name2)
else:
msg = "When creating \"{0}\", must specify \"{1}\"".format(func, name1)
ASTFError.__init__(self, msg)
class ASTFErrorWrongType(ASTFError):
def __init__(self, func, param, t, allow_list):
msg = "Parameter \"{0}\" to function \"{1}\" must be of type \"{2}\"".format(param, func, t[0])
if len(t) > 1:
for i in range(1, len(t)):
msg += " or {0}".format(t[i])
if allow_list:
msg += " or list of the allowed types"
ASTFError.__init__(self, msg)
class ASTFErrorBadIp(ASTFError):
def __init__(self, func, param, addr):
msg = "Bad IP \"{0}\" for parameter {1} to function {2}".format(addr, param, func)
ASTFError.__init__(self, msg)
class ASTFErrorBadIpRange(ASTFError):
def __init__(self, func, param, addr, err):
msg = "Bad IP range \"{0}\" for parameter {1} to function {2} - {3}".format(addr, param, func, err)
ASTFError.__init__(self, msg)
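# Illustrative usage sketch ('profile' and 'ip_gen' are hypothetical names,
# not part of this module):
#
#   try:
#       raise ASTFErrorMissingParam('profile', 'ip_gen')
#   except ASTFError as e:
#       print(e.brief())  # When creating "profile", must specify "ip_gen"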
|
{
"content_hash": "7830d5f485c0102a2276f6bd3eccf66d",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 114,
"avg_line_length": 33.43421052631579,
"alnum_prop": 0.5505706414797323,
"repo_name": "kisel/trex-core",
"id": "b910e83f3fdf82efefdbbd32b3c445423ba00c87",
"size": "2541",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "scripts/automation/trex_control_plane/astf/trex_astf_lib/trex_astf_exceptions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "16355010"
},
{
"name": "C++",
"bytes": "4832431"
},
{
"name": "CMake",
"bytes": "8882"
},
{
"name": "CSS",
"bytes": "333"
},
{
"name": "HTML",
"bytes": "5012"
},
{
"name": "JavaScript",
"bytes": "1234"
},
{
"name": "Makefile",
"bytes": "163741"
},
{
"name": "Python",
"bytes": "12389428"
},
{
"name": "Shell",
"bytes": "22573"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from geocoder.base import Base
from geocoder.location import Location
class Ipinfo(Base):
"""
API Reference
-------------
https://ipinfo.io
"""
provider = 'ipinfo'
method = 'geocode'
def __init__(self, location='', **kwargs):
self.location = location
if location.lower() == 'me':
self.location = ''
self.url = 'http://ipinfo.io/{0}/json'.format(self.location)
self._initialize(**kwargs)
def _catch_errors(self):
content = self.content
if content and self.status_code == 400:
self.error = content
@property
def lat(self):
loc = self.parse.get('loc')
if loc:
return Location(loc).lat
@property
def lng(self):
loc = self.parse.get('loc')
if loc:
return Location(loc).lng
@property
def address(self):
if self.city:
return u'{0}, {1}, {2}'.format(self.city, self.state, self.country)
elif self.state:
return u'{0}, {1}'.format(self.state, self.country)
elif self.country:
return u'{0}'.format(self.country)
else:
return u''
@property
def postal(self):
return self.parse.get('postal')
@property
def city(self):
return self.parse.get('city')
@property
def state(self):
return self.parse.get('region')
@property
def country(self):
return self.parse.get('country')
@property
def hostname(self):
return self.parse.get('hostname')
@property
def ip(self):
return self.parse.get('ip')
@property
def org(self):
return self.parse.get('org')
if __name__ == '__main__':
g = Ipinfo('8.8.8.8')
g.debug()
|
{
"content_hash": "a4cecafcbd2cc37acbdbbc19b84ee3a0",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 79,
"avg_line_length": 22.8375,
"alnum_prop": 0.5473453749315819,
"repo_name": "akittas/geocoder",
"id": "b6b0724e277e94565f27db1f83606bb066886e81",
"size": "1861",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "geocoder/ipinfo.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "614"
},
{
"name": "Python",
"bytes": "177426"
}
],
"symlink_target": ""
}
|
import sys
if sys.version_info[0:2] < (2, 7) or \
sys.version_info[0:2] in ( (3, 0), (3, 1) ):
try:
import unittest2 as unittest
except ImportError:
import unittest
else:
import unittest
from pysmi.parser.smi import parserFactory
from pysmi.codegen.pysnmp import PySnmpCodeGen
from pysmi.codegen.symtable import SymtableCodeGen
from pysnmp.smi.builder import MibBuilder
class TypeDeclarationTestCase(unittest.TestCase):
"""
TEST-MIB DEFINITIONS ::= BEGIN
IMPORTS
IpAddress,
Counter32,
Gauge32,
TimeTicks,
Opaque,
Integer32,
Unsigned32,
Counter64
FROM SNMPv2-SMI
TEXTUAL-CONVENTION
FROM SNMPv2-TC;
-- simple types
TestTypeInteger ::= INTEGER
TestTypeOctetString ::= OCTET STRING
TestTypeObjectIdentifier ::= OBJECT IDENTIFIER
-- application types
TestTypeIpAddress ::= IpAddress
TestTypeInteger32 ::= Integer32
TestTypeCounter32 ::= Counter32
TestTypeGauge32 ::= Gauge32
TestTypeTimeTicks ::= TimeTicks
TestTypeOpaque ::= Opaque
TestTypeCounter64 ::= Counter64
TestTypeUnsigned32 ::= Unsigned32
-- constrained subtypes
TestTypeEnum ::= INTEGER {
noResponse(-1),
noError(0),
tooBig(1)
}
TestTypeSizeRangeConstraint ::= OCTET STRING (SIZE (0..255))
TestTypeSizeConstraint ::= OCTET STRING (SIZE (8 | 11))
TestTypeRangeConstraint ::= INTEGER (0..2)
TestTypeSingleValueConstraint ::= INTEGER (0|2|4)
TestTypeBits ::= BITS {
sunday(0),
monday(1),
tuesday(2),
wednesday(3),
thursday(4),
friday(5),
saturday(6)
}
TestTextualConvention ::= TEXTUAL-CONVENTION
DISPLAY-HINT "1x:"
STATUS current
DESCRIPTION
"Test TC"
REFERENCE
"Test reference"
SYNTAX OCTET STRING
END
"""
def setUp(self):
ast = parserFactory()().parse(self.__class__.__doc__)[0]
mibInfo, symtable = SymtableCodeGen().genCode(ast, {}, genTexts=True)
self.mibInfo, pycode = PySnmpCodeGen().genCode(ast, { mibInfo.name: symtable }, genTexts=True)
codeobj = compile(pycode, 'test', 'exec')
mibBuilder = MibBuilder()
mibBuilder.loadTexts = True
self.ctx = { 'mibBuilder': mibBuilder }
exec(codeobj, self.ctx, self.ctx)
def protoTestSymbol(self, symbol, klass):
self.assertTrue(
symbol in self.ctx, 'symbol %s not present' % symbol
)
def protoTestClass(self, symbol, klass):
self.assertEqual(
self.ctx[symbol].__bases__[0].__name__, klass,
'expected class %s, got %s at %s' % (klass, self.ctx[symbol].__bases__[0].__name__, symbol)
)
    def testTextualConventionSymbol(self):
self.assertTrue(
'TestTextualConvention' in self.ctx,
'symbol not present'
)
    def testTextualConventionDisplayHint(self):
self.assertEqual(
self.ctx['TestTextualConvention'].getDisplayHint(),
'1x:',
'bad DISPLAY-HINT'
)
    def testTextualConventionStatus(self):
self.assertEqual(
self.ctx['TestTextualConvention'].getStatus(),
'current',
'bad STATUS'
)
    def testTextualConventionDescription(self):
self.assertEqual(
self.ctx['TestTextualConvention'].getDescription(),
'Test TC',
'bad DESCRIPTION'
)
    def testTextualConventionReference(self):
self.assertEqual(
self.ctx['TestTextualConvention'].getReference(),
'Test reference',
'bad REFERENCE'
)
    def testTextualConventionClass(self):
self.assertEqual(
self.ctx['TestTextualConvention'].__class__.__name__,
'TextualConvention',
'bad SYNTAX class'
)
# populate test case class with per-type methods
typesMap = (
('TestTypeInteger', 'Integer'),
('TestTypeOctetString', 'OctetString'),
('TestTypeObjectIdentifier', 'ObjectIdentifier'),
('TestTypeIpAddress', 'IpAddress'),
('TestTypeInteger32', 'Integer32'),
('TestTypeCounter32', 'Counter32'),
('TestTypeGauge32', 'Gauge32'),
('TestTypeTimeTicks', 'TimeTicks'),
('TestTypeOpaque', 'Opaque'),
('TestTypeCounter64', 'Counter64'),
('TestTypeUnsigned32', 'Unsigned32'),
    ('TestTypeEnum', 'Integer'),
('TestTypeSizeRangeConstraint', 'OctetString'),
('TestTypeSizeConstraint', 'OctetString'),
('TestTypeRangeConstraint', 'Integer'),
('TestTypeSingleValueConstraint', 'Integer')
)
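# decor() exists to freeze 'symbol' and 'klass' per iteration of the loop
# below: a nested function referring to the loop variables directly would
# late-bind them, leaving every generated test with the final (s, k) pair.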
def decor(func, symbol, klass):
def inner(self):
func(self, symbol, klass)
return inner
for s, k in typesMap:
    setattr(TypeDeclarationTestCase, 'testTypeDeclaration'+s+'SymbolTestCase',
            decor(TypeDeclarationTestCase.protoTestSymbol, s, k))
    setattr(TypeDeclarationTestCase, 'testTypeDeclaration'+s+'ClassTestCase',
            decor(TypeDeclarationTestCase.protoTestClass, s, k))
# XXX constraints flavor not checked
if __name__ == '__main__': unittest.main()
|
{
"content_hash": "d6b1548216c8cdc57cd449efd99a95ee",
"timestamp": "",
"source": "github",
"line_count": 182,
"max_line_length": 103,
"avg_line_length": 28.60989010989011,
"alnum_prop": 0.6191665066256962,
"repo_name": "bnavarma/netsnmp",
"id": "d2f8b7e44437e0eb9809853351175c27e0255825",
"size": "5207",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_typedeclaration_smiv2_pysnmp.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "667215"
}
],
"symlink_target": ""
}
|
import zstackwoodpecker.test_state as ts_header
import os
TestAction = ts_header.TestAction
def path():
return dict(initial_formation="template5", checking_point=1, faild_point=100000, path_list=[
[TestAction.create_mini_vm, 'vm1', 'cluster=cluster1'],
[TestAction.change_vm_ha, 'vm1'],
[TestAction.create_mini_vm, 'vm2', 'cluster=cluster2'],
[TestAction.create_vm_backup, 'vm2', 'vm2-backup1'],
[TestAction.create_mini_vm, 'vm3', 'cluster=cluster2', 'flag=thick'],
[TestAction.create_volume, 'volume1', 'cluster=cluster2', 'flag=scsi'],
[TestAction.resize_data_volume, 'volume1', 5*1024*1024],
[TestAction.poweroff_only, 'cluster=cluster1'],
[TestAction.attach_volume, 'vm3', 'volume1'],
[TestAction.detach_volume, 'volume1'],
[TestAction.delete_volume, 'volume1'],
[TestAction.recover_volume, 'volume1'],
[TestAction.add_image, 'image1', 'root', 'http://172.20.1.28/mirror/diskimages/centos_vdbench.qcow2'],
[TestAction.delete_vm_backup, 'vm2-backup1'],
[TestAction.delete_image, 'image1'],
[TestAction.recover_image, 'image1'],
[TestAction.delete_image, 'image1'],
[TestAction.expunge_image, 'image1'],
[TestAction.attach_volume, 'vm2', 'volume1'],
[TestAction.create_volume_backup, 'volume1', 'volume1-backup2'],
[TestAction.change_vm_ha, 'vm2'],
[TestAction.poweroff_only, 'cluster=cluster1'],
[TestAction.create_image_from_volume, 'vm1', 'vm1-image2'],
[TestAction.detach_volume, 'volume1'],
[TestAction.create_volume, 'volume2', 'cluster=cluster2', 'flag=thin,scsi'],
[TestAction.create_vm_backup, 'vm1', 'vm1-backup3'],
[TestAction.delete_vm_backup, 'vm1-backup3'],
[TestAction.change_vm_ha, 'vm2'],
[TestAction.stop_vm, 'vm2'],
[TestAction.delete_volume, 'volume2'],
[TestAction.expunge_volume, 'volume2'],
[TestAction.create_mini_vm, 'vm4', 'cluster=cluster1'],
[TestAction.create_volume, 'volume3', 'cluster=cluster1', 'flag=scsi'],
[TestAction.attach_volume, 'vm1', 'volume3'],
[TestAction.create_volume_backup, 'volume3', 'volume3-backup4'],
[TestAction.resize_data_volume, 'volume1', 5*1024*1024],
[TestAction.poweroff_only, 'cluster=cluster2'],
[TestAction.use_volume_backup, 'volume1-backup2'],
])
'''
The final status:
Running:['vm1', 'vm4']
Stopped:['vm2', 'vm3']
Enabled:['volume1-backup2', 'volume3-backup4', 'vm1-image2']
attached:['volume3']
Detached:['volume1']
Deleted:['vm2-backup1', 'vm1-backup3']
Expunged:['volume2', 'image1']
Ha:['vm1']
Group:
'''
|
{
"content_hash": "122e26dab4dbce115e03d8e97b9b0536",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 104,
"avg_line_length": 40.36065573770492,
"alnum_prop": 0.6986190089358245,
"repo_name": "zstackio/zstack-woodpecker",
"id": "c9c6ddd0f2fab79891588432d3ad2fd2f8cc6c77",
"size": "2462",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "integrationtest/vm/mini/multiclusters/paths/multi_path237.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2356"
},
{
"name": "Go",
"bytes": "49822"
},
{
"name": "Makefile",
"bytes": "687"
},
{
"name": "Puppet",
"bytes": "875"
},
{
"name": "Python",
"bytes": "13070596"
},
{
"name": "Shell",
"bytes": "177861"
}
],
"symlink_target": ""
}
|
"""Snippet representation after parsing."""
import re
import vim
import textwrap
from UltiSnips import _vim
from UltiSnips.compatibility import as_unicode
from UltiSnips.indent_util import IndentUtil
from UltiSnips.text import escape
from UltiSnips.text_objects import SnippetInstance
from UltiSnips.text_objects._python_code import SnippetUtilForAction
__WHITESPACE_SPLIT = re.compile(r"\s")
class _SnippetUtilCursor(object):
def __init__(self, cursor):
self._cursor = [cursor[0] - 1, cursor[1]]
self._set = False
def preserve(self):
self._set = True
self._cursor = [
_vim.buf.cursor[0],
_vim.buf.cursor[1],
]
def is_set(self):
return self._set
def set(self, line, column):
self.__setitem__(0, line)
self.__setitem__(1, column)
def to_vim_cursor(self):
return (self._cursor[0] + 1, self._cursor[1])
def __getitem__(self, index):
return self._cursor[index]
def __setitem__(self, index, value):
self._set = True
self._cursor[index] = value
def __len__(self):
return 2
def __str__(self):
return str((self._cursor[0], self._cursor[1]))
def split_at_whitespace(string):
"""Like string.split(), but keeps empty words as empty words."""
return re.split(__WHITESPACE_SPLIT, string)
def _words_for_line(trigger, before, num_words=None):
"""Gets the final 'num_words' words from 'before'.
If num_words is None, then use the number of words in 'trigger'.
"""
if num_words is None:
num_words = len(split_at_whitespace(trigger))
word_list = split_at_whitespace(before)
if len(word_list) <= num_words:
return before.strip()
else:
before_words = before
for i in range(-1, -(num_words + 1), -1):
left = before_words.rfind(word_list[i])
before_words = before_words[:left]
return before[len(before_words):].strip()
class SnippetDefinition(object):
"""Represents a snippet as parsed from a file."""
_INDENT = re.compile(r"^[ \t]*")
_TABS = re.compile(r"^\t*")
def __init__(self, priority, trigger, value, description,
options, globals, location, context, actions):
self._priority = int(priority)
self._trigger = as_unicode(trigger)
self._value = as_unicode(value)
self._description = as_unicode(description)
self._opts = options
self._matched = ''
self._last_re = None
self._globals = globals
self._location = location
self._context_code = context
self._context = None
self._actions = actions
# Make sure that we actually match our trigger in case we are
# immediately expanded.
self.matches(self._trigger)
def __repr__(self):
return '_SnippetDefinition(%r,%s,%s,%s)' % (
self._priority, self._trigger, self._description, self._opts)
def _re_match(self, trigger):
"""Test if a the current regex trigger matches `trigger`.
If so, set _last_re and _matched.
"""
for match in re.finditer(self._trigger, trigger):
if match.end() != len(trigger):
continue
else:
self._matched = trigger[match.start():match.end()]
self._last_re = match
return match
return False
def _context_match(self, visual_content):
# skip on empty buffer
if len(vim.current.buffer) == 1 and vim.current.buffer[0] == "":
return
locals = {
'context': None,
'visual_mode': '',
'visual_text': '',
'last_placeholder': None
}
if visual_content:
locals['visual_mode'] = visual_content.mode
locals['visual_text'] = visual_content.text
locals['last_placeholder'] = visual_content.placeholder
return self._eval_code('snip.context = ' + self._context_code,
locals).context
def _eval_code(self, code, additional_locals={}):
code = "\n".join([
'import re, os, vim, string, random',
'\n'.join(self._globals.get('!p', [])).replace('\r\n', '\n'),
code
])
current = vim.current
locals = {
'window': current.window,
'buffer': current.buffer,
'line': current.window.cursor[0]-1,
'column': current.window.cursor[1]-1,
'cursor': _SnippetUtilCursor(current.window.cursor),
}
locals.update(additional_locals)
snip = SnippetUtilForAction(locals)
try:
exec(code, {'snip': snip})
except Exception as e:
self._make_debug_exception(e, code)
raise
return snip
def _execute_action(
self,
action,
context,
additional_locals={}
):
mark_to_use = '`'
with _vim.save_mark(mark_to_use):
_vim.set_mark_from_pos(mark_to_use, _vim.get_cursor_pos())
cursor_line_before = _vim.buf.line_till_cursor
locals = {
'context': context,
}
locals.update(additional_locals)
snip = self._eval_code(action, locals)
if snip.cursor.is_set():
vim.current.window.cursor = snip.cursor.to_vim_cursor()
else:
new_mark_pos = _vim.get_mark_pos(mark_to_use)
cursor_invalid = False
if _vim._is_pos_zero(new_mark_pos):
cursor_invalid = True
else:
_vim.set_cursor_from_pos(new_mark_pos)
if cursor_line_before != _vim.buf.line_till_cursor:
cursor_invalid = True
if cursor_invalid:
raise RuntimeError(
'line under the cursor was modified, but ' +
'"snip.cursor" variable is not set; either set set ' +
'"snip.cursor" to new cursor position, or do not ' +
'modify cursor line'
)
return snip
def _make_debug_exception(self, e, code=''):
e.snippet_info = textwrap.dedent("""
Defined in: {}
Trigger: {}
Description: {}
Context: {}
Pre-expand: {}
Post-expand: {}
""").format(
self._location,
self._trigger,
self._description,
self._context_code if self._context_code else '<none>',
self._actions['pre_expand'] if 'pre_expand' in self._actions
else '<none>',
self._actions['post_expand'] if 'post_expand' in self._actions
else '<none>',
code,
)
e.snippet_code = code
def has_option(self, opt):
"""Check if the named option is set."""
return opt in self._opts
@property
def description(self):
"""Descriptive text for this snippet."""
return ('(%s) %s' % (self._trigger, self._description)).strip()
@property
def priority(self):
"""The snippets priority, which defines which snippet will be preferred
over others with the same trigger."""
return self._priority
@property
def trigger(self):
"""The trigger text for the snippet."""
return self._trigger
@property
def matched(self):
"""The last text that matched this snippet in match() or
could_match()."""
return self._matched
@property
def location(self):
"""Where this snippet was defined."""
return self._location
@property
def context(self):
"""The matched context."""
return self._context
def matches(self, before, visual_content=None):
"""Returns True if this snippet matches 'before'."""
# If user supplies both "w" and "i", it should perhaps be an
# error, but if permitted it seems that "w" should take precedence
# (since matching at word boundary and within a word == matching at word
# boundary).
self._matched = ''
words = _words_for_line(self._trigger, before)
if 'r' in self._opts:
try:
match = self._re_match(before)
except Exception as e:
self._make_debug_exception(e)
raise
elif 'w' in self._opts:
words_len = len(self._trigger)
words_prefix = words[:-words_len]
words_suffix = words[-words_len:]
match = (words_suffix == self._trigger)
if match and words_prefix:
# Require a word boundary between prefix and suffix.
boundary_chars = escape(words_prefix[-1:] +
words_suffix[:1], r'\"')
match = _vim.eval(
'"%s" =~# "\\\\v.<."' %
boundary_chars) != '0'
elif 'i' in self._opts:
match = words.endswith(self._trigger)
else:
match = (words == self._trigger)
# By default, we match the whole trigger
if match and not self._matched:
self._matched = self._trigger
        # Ensure the match was on a word boundary if needed
if 'b' in self._opts and match:
text_before = before.rstrip()[:-len(self._matched)]
if text_before.strip(' \t') != '':
self._matched = ''
return False
self._context = None
if match and self._context_code:
self._context = self._context_match(visual_content)
if not self.context:
match = False
return match
def could_match(self, before):
"""Return True if this snippet could match the (partial) 'before'."""
self._matched = ''
        # On trailing whitespace, clear 'before' so every snippet could match.
if before and before[-1] in (' ', '\t'):
before = ''
if before and before.rstrip() is not before:
return False
words = _words_for_line(self._trigger, before)
if 'r' in self._opts:
# Test for full match only
match = self._re_match(before)
elif 'w' in self._opts:
# Trim non-empty prefix up to word boundary, if present.
qwords = escape(words, r'\"')
words_suffix = _vim.eval(
'substitute("%s", "\\\\v^.+<(.+)", "\\\\1", "")' % qwords)
match = self._trigger.startswith(words_suffix)
self._matched = words_suffix
# TODO: list_snippets() function cannot handle partial-trigger
# matches yet, so for now fail if we trimmed the prefix.
if words_suffix != words:
match = False
elif 'i' in self._opts:
            # TODO: It is hard to define when an in-word snippet could match,
            # therefore we check only for a full-word trigger.
match = self._trigger.startswith(words)
else:
match = self._trigger.startswith(words)
# By default, we match the words from the trigger
if match and not self._matched:
self._matched = words
        # Ensure the match was on a word boundary if needed
if 'b' in self._opts and match:
text_before = before.rstrip()[:-len(self._matched)]
if text_before.strip(' \t') != '':
self._matched = ''
return False
return match
def instantiate(self, snippet_instance, initial_text, indent):
"""Parses the content of this snippet and brings the corresponding text
objects alive inside of Vim."""
raise NotImplementedError()
def do_pre_expand(self, visual_content, snippets_stack):
if 'pre_expand' in self._actions:
locals = {'buffer': _vim.buf, 'visual_content': visual_content}
snip = self._execute_action(
self._actions['pre_expand'], self._context, locals
)
self._context = snip.context
return snip.cursor.is_set()
else:
return False
def do_post_expand(self, start, end, snippets_stack):
if 'post_expand' in self._actions:
locals = {
'snippet_start': start,
'snippet_end': end,
'buffer': _vim.buf
}
snip = self._execute_action(
self._actions['post_expand'], snippets_stack[-1].context, locals
)
snippets_stack[-1].context = snip.context
return snip.cursor.is_set()
else:
return False
def do_post_jump(
self, tabstop_number, jump_direction, snippets_stack, current_snippet
):
if 'post_jump' in self._actions:
start = current_snippet.start
end = current_snippet.end
locals = {
'tabstop': tabstop_number,
'jump_direction': jump_direction,
'tabstops': current_snippet.get_tabstops(),
'snippet_start': start,
'snippet_end': end,
'buffer': _vim.buf
}
snip = self._execute_action(
self._actions['post_jump'], current_snippet.context, locals
)
current_snippet.context = snip.context
return snip.cursor.is_set()
else:
return False
def launch(self, text_before, visual_content, parent, start, end):
"""Launch this snippet, overwriting the text 'start' to 'end' and
keeping the 'text_before' on the launch line.
'Parent' is the parent snippet instance if any.
"""
indent = self._INDENT.match(text_before).group(0)
lines = (self._value + '\n').splitlines()
ind_util = IndentUtil()
# Replace leading tabs in the snippet definition via proper indenting
initial_text = []
for line_num, line in enumerate(lines):
if 't' in self._opts:
tabs = 0
else:
tabs = len(self._TABS.match(line).group(0))
line_ind = ind_util.ntabs_to_proper_indent(tabs)
if line_num != 0:
line_ind = indent + line_ind
result_line = line_ind + line[tabs:]
if 'm' in self._opts:
result_line = result_line.rstrip()
initial_text.append(result_line)
initial_text = '\n'.join(initial_text)
snippet_instance = SnippetInstance(
self, parent, initial_text, start, end, visual_content,
last_re=self._last_re, globals=self._globals,
context=self._context)
self.instantiate(snippet_instance, initial_text, indent)
snippet_instance.replace_initial_text(_vim.buf)
snippet_instance.update_textobjects(_vim.buf)
return snippet_instance
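# Minimal driver sketch (hypothetical concrete subclass; only runnable inside
# Vim, since this module imports 'vim'):
#
#   snip = MySnippetDefinition(0, 'trig', 'expanded $1', 'demo', '', {},
#                              'test.snippets', None, {})
#   snip.matches('some text trig')  # -> True; snip.matched == 'trig'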
|
{
"content_hash": "6891a16c11bfcde57b247ed60eaf4eb8",
"timestamp": "",
"source": "github",
"line_count": 475,
"max_line_length": 80,
"avg_line_length": 31.894736842105264,
"alnum_prop": 0.5371617161716171,
"repo_name": "yslin/tools-zodlin",
"id": "4106ac36fc999cf7af03e119227a00a523e3dfe8",
"size": "15191",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ubuntu/vim/.vim/lang/all/ultisnips/pythonx/UltiSnips/snippet/definition/_base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "11080"
},
{
"name": "C",
"bytes": "1754"
},
{
"name": "Dockerfile",
"bytes": "393"
},
{
"name": "Java",
"bytes": "968"
},
{
"name": "Makefile",
"bytes": "2570"
},
{
"name": "Python",
"bytes": "360732"
},
{
"name": "Ruby",
"bytes": "5870"
},
{
"name": "Shell",
"bytes": "37611"
},
{
"name": "Vim script",
"bytes": "1847170"
}
],
"symlink_target": ""
}
|
import tables as tb
class MyClass:
foo = 'bar'
# An object of my custom class.
myObject = MyClass()
with tb.open_file('test.h5', 'w') as h5f:
h5f.root._v_attrs.obj = myObject # store the object
print(h5f.root._v_attrs.obj.foo) # retrieve it
# Delete class of stored object and reopen the file.
del MyClass, myObject
with tb.open_file('test.h5', 'r') as h5f:
print(h5f.root._v_attrs.obj.foo)
# Let us inspect the object to see what is happening.
print(repr(h5f.root._v_attrs.obj))
# Maybe unpickling the string will yield more information:
import pickle
pickle.loads(h5f.root._v_attrs.obj)
# So the problem was not in the stored object,
# but in the *environment* where it was restored.
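# Expected outcome, stated as an assumption from the comments above: PyTables
# falls back to returning the raw pickled string once MyClass is gone, so the
# .foo lookup fails, and pickle.loads() raises because the class can no
# longer be imported.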
|
{
"content_hash": "624b7a60f2a6bd3dcd6b03796c18f3d1",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 62,
"avg_line_length": 27.40740740740741,
"alnum_prop": 0.6810810810810811,
"repo_name": "FrancescAlted/PyTables",
"id": "710a705418b34d2354443b608df719c410c42829",
"size": "740",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "doc/scripts/pickletrouble.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "165578"
},
{
"name": "CMake",
"bytes": "2417"
},
{
"name": "Cython",
"bytes": "283042"
},
{
"name": "Gnuplot",
"bytes": "2104"
},
{
"name": "Makefile",
"bytes": "2290"
},
{
"name": "Python",
"bytes": "3119794"
},
{
"name": "Shell",
"bytes": "21591"
}
],
"symlink_target": ""
}
|
def shared_item_save_doc_template_values(url_root):
"""
Show documentation about sharedItemSave
"""
required_query_parameter_list = [
{
'name': 'voter_device_id',
'value': 'string', # boolean, integer, long, string
'description': 'An 88 character unique identifier linked to a voter record on the server',
},
{
'name': 'api_key',
'value': 'string (from post, cookie, or get (in that order))', # boolean, integer, long, string
'description': 'The unique key provided to any organization using the WeVoteServer APIs',
},
{
'name': 'destination_full_url',
'value': 'string', # boolean, integer, long, string
'description': 'The full URL with the final destination.',
},
{
'name': 'other_voter_email_address_text',
'value': 'string', # boolean, integer, long, string
'description': 'The email address of the person you are sharing with. '
'Required if is_remind_contact_share.',
},
{
'name': 'shared_by_voter_we_vote_id',
'value': 'string', # boolean, integer, long, string
'description': 'The voter_we_vote_id of the person who is sharing.',
},
]
optional_query_parameter_list = [
{
'name': 'ballot_item_we_vote_id',
'value': 'string', # boolean, integer, long, string
'description': 'The we_vote_id for the ballot item being shared.',
},
{
'name': 'google_civic_election_id',
'value': 'integer', # boolean, integer, long, string
'description': 'The unique identifier for a particular election.',
},
{
'name': 'is_ballot_share',
'value': 'boolean', # boolean, integer, long, string
'description': 'The kind of destination shared: Ballot page',
},
{
'name': 'is_candidate_share',
'value': 'boolean', # boolean, integer, long, string
'description': 'The kind of destination shared: Candidate page',
},
{
'name': 'is_measure_share',
'value': 'boolean', # boolean, integer, long, string
'description': 'The kind of destination shared: Measure page',
},
{
'name': 'is_office_share',
'value': 'boolean', # boolean, integer, long, string
'description': 'The kind of destination shared: Office page',
},
{
'name': 'is_organization_share',
'value': 'boolean', # boolean, integer, long, string
'description': 'The kind of destination shared: Voter Guide page',
},
{
'name': 'is_ready_share',
'value': 'boolean', # boolean, integer, long, string
'description': 'The kind of destination shared: Ready page',
},
{
'name': 'is_remind_contact_share',
'value': 'boolean', # boolean, integer, long, string
'description': 'Send remind a contact to vote email',
},
{
'name': 'organization_we_vote_id',
'value': 'string', # boolean, integer, long, string
'description': 'The organization the voter wants to share.',
},
{
'name': 'other_voter_display_name',
'value': 'string', # boolean, integer, long, string
'description': 'The full display name of the friend the current voter is sharing with.',
},
{
'name': 'other_voter_first_name',
'value': 'string', # boolean, integer, long, string
'description': 'The first name (given name) of the friend the current voter is sharing with.',
},
{
'name': 'other_voter_last_name',
'value': 'string', # boolean, integer, long, string
'description': 'The last name (family name) of the friend the current voter is sharing with.',
},
{
'name': 'other_voter_we_vote_id',
'value': 'string', # boolean, integer, long, string
'description': 'The voter_we_vote_id of the friend the current voter is sharing with (if we have it).',
},
{
'name': 'shared_message',
'value': 'string', # boolean, integer, long, string
'description': 'The message to send to the friend the current voter is sharing with.',
},
]
potential_status_codes_list = [
{
'code': 'VALID_VOTER_DEVICE_ID_MISSING',
'description': 'Cannot proceed. A valid voter_device_id parameter was not included.',
},
{
'code': 'VALID_VOTER_ID_MISSING',
'description': 'Cannot proceed. A valid voter_id was not found.',
},
]
try_now_link_variables_dict = {
# 'organization_we_vote_id': 'wv85org1',
}
api_response = '{\n' \
' "status": string,\n' \
' "success": boolean,\n' \
' "candidate_we_vote_id": string,\n' \
' "date_first_shared": datetime,\n' \
' "destination_full_url": string,\n' \
' "google_civic_election_id": integer,\n' \
' "is_ballot_share": boolean,\n' \
' "is_candidate_share": boolean,\n' \
' "is_measure_share": boolean,\n' \
' "is_office_share": boolean,\n' \
' "is_organization_share": boolean,\n' \
' "is_ready_share": boolean,\n' \
' "is_remind_contact_share": boolean,\n' \
' "measure_we_vote_id": string,\n' \
' "office_we_vote_id": string,\n' \
' "other_voter_display_name": string,\n' \
' "other_voter_email_address_text": string,\n' \
' "other_voter_first_name": string,\n' \
' "other_voter_last_name": string,\n' \
' "other_voter_we_vote_id": string,\n' \
' "shared_by_display_name": string,\n' \
' "shared_by_email_address_text": string,\n' \
' "shared_by_organization_type": string,\n' \
' "shared_by_organization_we_vote_id": string,\n' \
' "shared_by_voter_we_vote_id": string,\n' \
' "shared_by_we_vote_hosted_profile_image_url_large": string,\n' \
' "shared_by_we_vote_hosted_profile_image_url_medium": string,\n' \
' "shared_by_we_vote_hosted_profile_image_url_tiny": string,\n' \
' "shared_item_code_no_opinions": string,\n' \
' "shared_item_code_all_opinions": string,\n' \
' "site_owner_organization_we_vote_id": string,\n' \
' "url_with_shared_item_code_no_opinions": string,\n' \
' "url_with_shared_item_code_all_opinions": string,\n' \
'}'
template_values = {
'api_name': 'sharedItemSave',
'api_slug': 'sharedItemSave',
'api_introduction':
"",
'try_now_link': 'apis_v1:sharedItemSaveView',
'try_now_link_variables_dict': try_now_link_variables_dict,
'url_root': url_root,
'get_or_post': 'GET',
'required_query_parameter_list': required_query_parameter_list,
'optional_query_parameter_list': optional_query_parameter_list,
'api_response': api_response,
'api_response_notes':
"",
'potential_status_codes_list': potential_status_codes_list,
}
return template_values
|
{
"content_hash": "709f619572b4ac565ff982473d34dbc1",
"timestamp": "",
"source": "github",
"line_count": 183,
"max_line_length": 116,
"avg_line_length": 46.49180327868852,
"alnum_prop": 0.4883638928067701,
"repo_name": "wevote/WeVoteServer",
"id": "bb8bcfc329fadefc4d0090e3a9d51ef71cc948cf",
"size": "8627",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "apis_v1/documentation_source/shared_item_save_doc.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3612"
},
{
"name": "HTML",
"bytes": "1559624"
},
{
"name": "JavaScript",
"bytes": "26822"
},
{
"name": "Procfile",
"bytes": "51"
},
{
"name": "Python",
"bytes": "11943600"
},
{
"name": "Shell",
"bytes": "587"
}
],
"symlink_target": ""
}
|
try:
import unittest2 as unittest
except ImportError:
import unittest
import datetime
import app_config
app_config.DATABASE_NAME = 'carebot_test.db'
from util.config import Config
from util.analytics import GoogleAnalytics
class TestAnalytics(unittest.TestCase):
def test_median(self):
values = [1, 1, 2, 2, 2, 3, 3]
median = GoogleAnalytics.median(values)
self.assertEqual(median, 2)
|
{
"content_hash": "d0fa2dcf1eef9e04bec0b82182bbc27c",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 47,
"avg_line_length": 22.42105263157895,
"alnum_prop": 0.715962441314554,
"repo_name": "thecarebot/carebot",
"id": "c8f51a578319e0b433526f9f1314535ea1b7e8ab",
"size": "448",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_analytics.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "203"
},
{
"name": "HTML",
"bytes": "2660"
},
{
"name": "Python",
"bytes": "126000"
},
{
"name": "Shell",
"bytes": "128"
}
],
"symlink_target": ""
}
|
import time
from selenium import webdriver
driver = webdriver.Chrome(executable_path='/home/ezequiel/webdriver/chromedriver')  # Optional argument; if omitted, the system PATH is searched.
driver.get('http://www.google.com/')
time.sleep(5) # Let the user actually see something!
search_box = driver.find_element_by_name('q')
search_box.send_keys('ChromeDriver')
search_box.submit()
time.sleep(5) # Let the user actually see something!
driver.quit()
|
{
"content_hash": "895c1b1212c35dbc3a089136e96487ef",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 139,
"avg_line_length": 22.65,
"alnum_prop": 0.7527593818984547,
"repo_name": "zecruel/sand_box",
"id": "7414ff515c6b161cad4d2b5c5c38aa9857c18fc6",
"size": "453",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "webdriver/test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "AutoIt",
"bytes": "27504"
},
{
"name": "C",
"bytes": "12070518"
},
{
"name": "Lua",
"bytes": "316374"
},
{
"name": "Makefile",
"bytes": "1630"
},
{
"name": "Max",
"bytes": "226190"
},
{
"name": "Python",
"bytes": "278868"
},
{
"name": "VBA",
"bytes": "31050"
}
],
"symlink_target": ""
}
|
"""Auto-generated file, do not edit by hand. KM metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_KM = PhoneMetadata(id='KM', country_code=None, international_prefix=None,
general_desc=PhoneNumberDesc(national_number_pattern='1\\d', possible_number_pattern='\\d{2}', possible_length=(2,)),
toll_free=PhoneNumberDesc(),
premium_rate=PhoneNumberDesc(),
emergency=PhoneNumberDesc(national_number_pattern='1[78]', possible_number_pattern='\\d{2}', example_number='17', possible_length=(2,)),
short_code=PhoneNumberDesc(national_number_pattern='1[78]', possible_number_pattern='\\d{2}', example_number='17', possible_length=(2,)),
standard_rate=PhoneNumberDesc(),
carrier_specific=PhoneNumberDesc(),
short_data=True)
|
{
"content_hash": "cc48a5c18e549c54268a9116d6ff08ad",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 141,
"avg_line_length": 66.25,
"alnum_prop": 0.7333333333333333,
"repo_name": "vicky2135/lucious",
"id": "6404f678854d7c9c7fa0cc2221ef7321e7f42c9a",
"size": "795",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "oscar/lib/python2.7/site-packages/phonenumbers/shortdata/region_KM.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "896683"
},
{
"name": "C++",
"bytes": "52230"
},
{
"name": "CSS",
"bytes": "1169533"
},
{
"name": "HTML",
"bytes": "1104983"
},
{
"name": "JavaScript",
"bytes": "1055140"
},
{
"name": "Makefile",
"bytes": "145238"
},
{
"name": "Python",
"bytes": "55993261"
},
{
"name": "Shell",
"bytes": "40487"
}
],
"symlink_target": ""
}
|
class SessionHelper:
def __init__(self, app):
self.app = app
def login(self, username, password):
wd = self.app.wd
self.app.open_home_page()
wd.find_element_by_name("user").click()
wd.find_element_by_name("user").clear()
wd.find_element_by_name("user").send_keys(username)
wd.find_element_by_name("pass").click()
wd.find_element_by_name("pass").clear()
wd.find_element_by_name("pass").send_keys(password)
wd.find_element_by_xpath("//form[@id='LoginForm']/input[3]").click()
def logout(self):
wd = self.app.wd
wd.find_element_by_link_text("Logout").click()
def ensure_logout(self):
if self.is_logged_in():
self.logout()
def is_logged_in(self):
wd = self.app.wd
return len(wd.find_elements_by_link_text("Logout")) > 0
def is_logged_in_as(self, username):
return self.get_logged_user() == username
def get_logged_user(self):
wd = self.app.wd
return wd.find_element_by_xpath("html/body/div[1]/div[1]/form/b").text[1:-1]
def ensure_login(self, username, password):
if self.is_logged_in():
if self.is_logged_in_as(username):
return
else:
self.logout()
self.login(username, password)
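# Illustrative usage (assumes a fixture object 'app' exposing .wd and
# .open_home_page(); not part of this module):
#
#   session = SessionHelper(app)
#   session.ensure_login('admin', 'secret')
#   session.ensure_logout()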
|
{
"content_hash": "c1ded57ce6d5679329fe551b555b0e4d",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 84,
"avg_line_length": 30.25531914893617,
"alnum_prop": 0.5646976090014064,
"repo_name": "iakibardin/python_training",
"id": "4181cc9ab749a63375599d7b2c89c91bc9ac00d7",
"size": "1425",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fixture/session.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "59397"
}
],
"symlink_target": ""
}
|
import sys
from builtins import object
from desktop.lib.exceptions_renderable import PopupException
from desktop.lib.i18n import smart_unicode
if sys.version_info[0] > 2:
from django.utils.translation import gettext as _
else:
from django.utils.translation import ugettext as _
def get_api(request, interface):
if interface == 'navigator':
from metadata.catalog.navigator_client import NavigatorApi
return NavigatorApi(user=request.user)
elif interface == 'atlas':
from metadata.catalog.atlas_client import AtlasApi
return AtlasApi(user=request.user)
elif interface == 'dummy':
from metadata.catalog.dummy_client import DummyApi
return DummyApi(user=request.user)
else:
raise PopupException(_('Catalog connector interface not recognized: %s') % interface)
class CatalogApiException(Exception):
def __init__(self, message=None):
self.message = message or _('No error message, please check the logs.')
def __str__(self):
return str(self.message)
def __unicode__(self):
return smart_unicode(self.message)
class CatalogEntityDoesNotExistException(Exception):
def __init__(self, message=None):
self.message = message or _('No error message, please check the logs.')
def __str__(self):
return str(self.message)
def __unicode__(self):
return smart_unicode(self.message)
class CatalogAuthException(Exception):
def __init__(self, message=None):
self.message = message or _('No error message, please check the logs.')
def __str__(self):
return str(self.message)
def __unicode__(self):
return smart_unicode(self.message)
# Base API
class Api(object):
def __init__(self, user=None):
self.user = user
# To implement
def search_entities_interactive(self, query_s=None, limit=100, **filters):
"""For the top search"""
return {}
def find_entity(self, source_type, type, name, **filters):
"""e.g. From a database and table name, retrieve the enity id"""
return {}
def get_entity(self, entity_id):
return {}
def update_entity(self, entity, **metadata):
return {}
def add_tags(self, entity_id, tags):
return {}
def delete_tags(self, entity_id, tags):
return {}
def update_properties(self, entity_id, properties, modified_custom_metadata=None, deleted_custom_metadata_keys=None):
"""For updating entity comments or other attributes"""
return {}
# Common APIs
def get_database(self, name):
return self.find_entity(source_type='HIVE', type='DATABASE', name=name)
def get_table(self, database_name, table_name, is_view=False):
    parent_path = r'\/%s' % database_name  # raw string: the backend expects a literal '\/'
return self.find_entity(source_type='HIVE', type='VIEW' if is_view else 'TABLE', name=table_name, parentPath=parent_path)
def get_field(self, database_name, table_name, field_name):
    parent_path = r'\/%s\/%s' % (database_name, table_name)  # raw string: literal '\/' separators
return self.find_entity(source_type='HIVE', type='FIELD', name=field_name, parentPath=parent_path)
def get_partition(self, database_name, table_name, partition_spec):
raise NotImplementedError
def get_directory(self, path):
dir_name, dir_path = self._clean_path(path)
return self.find_entity(source_type='HDFS', type='DIRECTORY', name=dir_name, fileSystemPath=dir_path)
def get_file(self, path):
file_name, file_path = self._clean_path(path)
return self.find_entity(source_type='HDFS', type='FILE', name=file_name, fileSystemPath=file_path)
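# --- Hedged usage sketch ------------------------------------------------
# A minimal illustration of a connector built on the base Api above. The
# class and its canned return values are hypothetical; a real connector
# would query its backend inside find_entity()/get_entity().
class _SketchApi(Api):
  def find_entity(self, source_type, type, name, **filters):
    # Echo the lookup so the common helpers above have something to return.
    return {'sourceType': source_type, 'type': type, 'originalName': name}
  def get_entity(self, entity_id):
    return {'identity': entity_id}
# e.g. _SketchApi(user=None).get_table('default', 'web_logs') funnels into
# find_entity(source_type='HIVE', type='TABLE', name='web_logs', parentPath=...).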
|
{
"content_hash": "f06b2ac9afaadb8433f7aaf02d9744ea",
"timestamp": "",
"source": "github",
"line_count": 127,
"max_line_length": 125,
"avg_line_length": 27.37007874015748,
"alnum_prop": 0.6962025316455697,
"repo_name": "kawamon/hue",
"id": "89d75b210c0905615e0c38c237fe4a0e68cc924c",
"size": "4268",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "desktop/libs/metadata/src/metadata/catalog/base.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "962"
},
{
"name": "ActionScript",
"bytes": "1133"
},
{
"name": "Ada",
"bytes": "99"
},
{
"name": "Assembly",
"bytes": "5786"
},
{
"name": "AutoHotkey",
"bytes": "720"
},
{
"name": "Batchfile",
"bytes": "118907"
},
{
"name": "C",
"bytes": "3196521"
},
{
"name": "C#",
"bytes": "83"
},
{
"name": "C++",
"bytes": "308860"
},
{
"name": "COBOL",
"bytes": "4"
},
{
"name": "CSS",
"bytes": "1050129"
},
{
"name": "Cirru",
"bytes": "520"
},
{
"name": "Clojure",
"bytes": "794"
},
{
"name": "CoffeeScript",
"bytes": "403"
},
{
"name": "ColdFusion",
"bytes": "86"
},
{
"name": "Common Lisp",
"bytes": "632"
},
{
"name": "D",
"bytes": "324"
},
{
"name": "Dart",
"bytes": "489"
},
{
"name": "Dockerfile",
"bytes": "10981"
},
{
"name": "Eiffel",
"bytes": "375"
},
{
"name": "Elixir",
"bytes": "692"
},
{
"name": "Elm",
"bytes": "487"
},
{
"name": "Emacs Lisp",
"bytes": "411907"
},
{
"name": "Erlang",
"bytes": "487"
},
{
"name": "Forth",
"bytes": "979"
},
{
"name": "FreeMarker",
"bytes": "1017"
},
{
"name": "G-code",
"bytes": "521"
},
{
"name": "GLSL",
"bytes": "512"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Gherkin",
"bytes": "699"
},
{
"name": "Go",
"bytes": "7312"
},
{
"name": "Groovy",
"bytes": "1080"
},
{
"name": "HTML",
"bytes": "24999718"
},
{
"name": "Haskell",
"bytes": "512"
},
{
"name": "Haxe",
"bytes": "447"
},
{
"name": "HiveQL",
"bytes": "43"
},
{
"name": "Io",
"bytes": "140"
},
{
"name": "JSONiq",
"bytes": "4"
},
{
"name": "Java",
"bytes": "471854"
},
{
"name": "JavaScript",
"bytes": "28075556"
},
{
"name": "Julia",
"bytes": "210"
},
{
"name": "Jupyter Notebook",
"bytes": "73168"
},
{
"name": "LSL",
"bytes": "2080"
},
{
"name": "Lean",
"bytes": "213"
},
{
"name": "Lex",
"bytes": "264449"
},
{
"name": "Liquid",
"bytes": "1883"
},
{
"name": "LiveScript",
"bytes": "5747"
},
{
"name": "Lua",
"bytes": "78382"
},
{
"name": "M4",
"bytes": "1377"
},
{
"name": "MATLAB",
"bytes": "203"
},
{
"name": "Makefile",
"bytes": "269655"
},
{
"name": "Mako",
"bytes": "3614942"
},
{
"name": "Mask",
"bytes": "597"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "Nix",
"bytes": "2212"
},
{
"name": "OCaml",
"bytes": "539"
},
{
"name": "Objective-C",
"bytes": "2672"
},
{
"name": "OpenSCAD",
"bytes": "333"
},
{
"name": "PHP",
"bytes": "662"
},
{
"name": "PLSQL",
"bytes": "31565"
},
{
"name": "PLpgSQL",
"bytes": "6006"
},
{
"name": "Pascal",
"bytes": "1412"
},
{
"name": "Perl",
"bytes": "4327"
},
{
"name": "PigLatin",
"bytes": "371"
},
{
"name": "PowerShell",
"bytes": "3204"
},
{
"name": "Python",
"bytes": "76440000"
},
{
"name": "R",
"bytes": "2445"
},
{
"name": "Roff",
"bytes": "95764"
},
{
"name": "Ruby",
"bytes": "1098"
},
{
"name": "Rust",
"bytes": "495"
},
{
"name": "Scala",
"bytes": "1541"
},
{
"name": "Scheme",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "190718"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "TSQL",
"bytes": "10013"
},
{
"name": "Tcl",
"bytes": "899"
},
{
"name": "TeX",
"bytes": "165743"
},
{
"name": "Thrift",
"bytes": "317058"
},
{
"name": "TypeScript",
"bytes": "1607"
},
{
"name": "VBA",
"bytes": "2884"
},
{
"name": "VBScript",
"bytes": "938"
},
{
"name": "VHDL",
"bytes": "830"
},
{
"name": "Vala",
"bytes": "485"
},
{
"name": "Verilog",
"bytes": "274"
},
{
"name": "Vim Snippet",
"bytes": "226931"
},
{
"name": "XQuery",
"bytes": "114"
},
{
"name": "XSLT",
"bytes": "521413"
},
{
"name": "Yacc",
"bytes": "2133855"
}
],
"symlink_target": ""
}
|
"""Home Assistant representation of an UPnP/IGD."""
import asyncio
from ipaddress import IPv4Address
from typing import Any, List, Mapping
from async_upnp_client import UpnpFactory
from async_upnp_client.aiohttp import AiohttpSessionRequester
from async_upnp_client.profiles.igd import IgdDevice
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import HomeAssistantType
import homeassistant.util.dt as dt_util
from .const import (
BYTES_RECEIVED,
BYTES_SENT,
CONF_LOCAL_IP,
DISCOVERY_LOCATION,
DISCOVERY_ST,
DISCOVERY_UDN,
DISCOVERY_USN,
DOMAIN,
LOGGER as _LOGGER,
PACKETS_RECEIVED,
PACKETS_SENT,
TIMESTAMP,
)
class Device:
"""Home Assistant representation of an UPnP/IGD."""
def __init__(self, igd_device):
"""Initialize UPnP/IGD device."""
self._igd_device: IgdDevice = igd_device
self._mapped_ports = []
@classmethod
async def async_discover(cls, hass: HomeAssistantType) -> List[Mapping]:
"""Discover UPnP/IGD devices."""
_LOGGER.debug("Discovering UPnP/IGD devices")
local_ip = None
if DOMAIN in hass.data and "config" in hass.data[DOMAIN]:
local_ip = hass.data[DOMAIN]["config"].get(CONF_LOCAL_IP)
if local_ip:
local_ip = IPv4Address(local_ip)
discovery_infos = await IgdDevice.async_search(source_ip=local_ip, timeout=10)
# add extra info and store devices
devices = []
for discovery_info in discovery_infos:
discovery_info[DISCOVERY_UDN] = discovery_info["_udn"]
discovery_info[DISCOVERY_ST] = discovery_info["st"]
discovery_info[DISCOVERY_LOCATION] = discovery_info["location"]
usn = f"{discovery_info[DISCOVERY_UDN]}::{discovery_info[DISCOVERY_ST]}"
discovery_info[DISCOVERY_USN] = usn
_LOGGER.debug("Discovered device: %s", discovery_info)
devices.append(discovery_info)
return devices
@classmethod
async def async_create_device(cls, hass: HomeAssistantType, ssdp_location: str):
"""Create UPnP/IGD device."""
# build async_upnp_client requester
session = async_get_clientsession(hass)
requester = AiohttpSessionRequester(session, True)
# create async_upnp_client device
factory = UpnpFactory(requester, disable_state_variable_validation=True)
upnp_device = await factory.async_create_device(ssdp_location)
igd_device = IgdDevice(upnp_device, None)
return cls(igd_device)
@property
def udn(self) -> str:
"""Get the UDN."""
return self._igd_device.udn
@property
def name(self) -> str:
"""Get the name."""
return self._igd_device.name
@property
def manufacturer(self) -> str:
"""Get the manufacturer."""
return self._igd_device.manufacturer
@property
def model_name(self) -> str:
"""Get the model name."""
return self._igd_device.model_name
@property
def device_type(self) -> str:
"""Get the device type."""
return self._igd_device.device_type
@property
def unique_id(self) -> str:
"""Get the unique id."""
return f"{self.udn}::{self.device_type}"
def __str__(self) -> str:
"""Get string representation."""
return f"IGD Device: {self.name}/{self.udn}"
    async def async_get_traffic_data(self) -> Mapping[str, Any]:
"""
Get all traffic data in one go.
Traffic data consists of:
- total bytes sent
- total bytes received
- total packets sent
        - total packets received
Data is timestamped.
"""
_LOGGER.debug("Getting traffic statistics from device: %s", self)
values = await asyncio.gather(
self._igd_device.async_get_total_bytes_received(),
self._igd_device.async_get_total_bytes_sent(),
self._igd_device.async_get_total_packets_received(),
self._igd_device.async_get_total_packets_sent(),
)
return {
TIMESTAMP: dt_util.utcnow(),
BYTES_RECEIVED: values[0],
BYTES_SENT: values[1],
PACKETS_RECEIVED: values[2],
PACKETS_SENT: values[3],
}
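# --- Hedged usage sketch ------------------------------------------------
# A minimal illustration of the discovery -> creation -> polling flow; the
# coroutine name is hypothetical and `hass` would be the running Home
# Assistant instance.
#
#   async def _poll_first_igd(hass: HomeAssistantType):
#       infos = await Device.async_discover(hass)
#       if not infos:
#           return
#       device = await Device.async_create_device(
#           hass, infos[0][DISCOVERY_LOCATION])
#       stats = await device.async_get_traffic_data()
#       _LOGGER.debug("Bytes received: %s", stats[BYTES_RECEIVED])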
|
{
"content_hash": "faa3d76c2d400e0e105822eb9055294d",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 86,
"avg_line_length": 31.17142857142857,
"alnum_prop": 0.6223648029330889,
"repo_name": "mKeRix/home-assistant",
"id": "05113b8f9f6a223ec571b1b6972722a568e845a9",
"size": "4364",
"binary": false,
"copies": "4",
"ref": "refs/heads/dev",
"path": "homeassistant/components/upnp/device.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1466026"
},
{
"name": "Python",
"bytes": "4770710"
},
{
"name": "Ruby",
"bytes": "379"
},
{
"name": "Shell",
"bytes": "12407"
}
],
"symlink_target": ""
}
|
"""Unittest for reflection.py, which also indirectly tests the output of the
pure-Python protocol compiler.
"""
import copy
import gc
import operator
import six
import struct
try:
import unittest2 as unittest
except ImportError:
import unittest
from google.protobuf import unittest_import_pb2
from google.protobuf import unittest_mset_pb2
from google.protobuf import unittest_pb2
from google.protobuf import descriptor_pb2
from google.protobuf import descriptor
from google.protobuf import message
from google.protobuf import reflection
from google.protobuf import text_format
from google.protobuf.internal import api_implementation
from google.protobuf.internal import more_extensions_pb2
from google.protobuf.internal import more_messages_pb2
from google.protobuf.internal import message_set_extensions_pb2
from google.protobuf.internal import wire_format
from google.protobuf.internal import test_util
from google.protobuf.internal import decoder
class _MiniDecoder(object):
"""Decodes a stream of values from a string.
Once upon a time we actually had a class called decoder.Decoder. Then we
got rid of it during a redesign that made decoding much, much faster overall.
But a couple tests in this file used it to check that the serialized form of
a message was correct. So, this class implements just the methods that were
used by said tests, so that we don't have to rewrite the tests.
"""
def __init__(self, bytes):
self._bytes = bytes
self._pos = 0
def ReadVarint(self):
result, self._pos = decoder._DecodeVarint(self._bytes, self._pos)
return result
ReadInt32 = ReadVarint
ReadInt64 = ReadVarint
ReadUInt32 = ReadVarint
ReadUInt64 = ReadVarint
def ReadSInt64(self):
return wire_format.ZigZagDecode(self.ReadVarint())
ReadSInt32 = ReadSInt64
def ReadFieldNumberAndWireType(self):
return wire_format.UnpackTag(self.ReadVarint())
def ReadFloat(self):
result = struct.unpack("<f", self._bytes[self._pos:self._pos+4])[0]
self._pos += 4
return result
def ReadDouble(self):
result = struct.unpack("<d", self._bytes[self._pos:self._pos+8])[0]
self._pos += 8
return result
def EndOfStream(self):
return self._pos == len(self._bytes)
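# --- Hedged usage sketch ------------------------------------------------
# A minimal illustration of how the tests below use _MiniDecoder: serialize
# a message, then walk the wire format tag by tag. optional_int32 is field
# number 1 in TestAllTypes, so a varint tag is expected first.
#
#   proto = unittest_pb2.TestAllTypes(optional_int32=150)
#   d = _MiniDecoder(proto.SerializeToString())
#   assert d.ReadFieldNumberAndWireType() == (1, wire_format.WIRETYPE_VARINT)
#   assert d.ReadInt32() == 150
#   assert d.EndOfStream()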
class ReflectionTest(unittest.TestCase):
def assertListsEqual(self, values, others):
self.assertEqual(len(values), len(others))
for i in range(len(values)):
self.assertEqual(values[i], others[i])
def testScalarConstructor(self):
# Constructor with only scalar types should succeed.
proto = unittest_pb2.TestAllTypes(
optional_int32=24,
optional_double=54.321,
optional_string='optional_string')
self.assertEqual(24, proto.optional_int32)
self.assertEqual(54.321, proto.optional_double)
self.assertEqual('optional_string', proto.optional_string)
def testRepeatedScalarConstructor(self):
# Constructor with only repeated scalar types should succeed.
proto = unittest_pb2.TestAllTypes(
repeated_int32=[1, 2, 3, 4],
repeated_double=[1.23, 54.321],
repeated_bool=[True, False, False],
repeated_string=["optional_string"])
self.assertEqual([1, 2, 3, 4], list(proto.repeated_int32))
self.assertEqual([1.23, 54.321], list(proto.repeated_double))
self.assertEqual([True, False, False], list(proto.repeated_bool))
self.assertEqual(["optional_string"], list(proto.repeated_string))
def testRepeatedCompositeConstructor(self):
# Constructor with only repeated composite types should succeed.
proto = unittest_pb2.TestAllTypes(
repeated_nested_message=[
unittest_pb2.TestAllTypes.NestedMessage(
bb=unittest_pb2.TestAllTypes.FOO),
unittest_pb2.TestAllTypes.NestedMessage(
bb=unittest_pb2.TestAllTypes.BAR)],
repeated_foreign_message=[
unittest_pb2.ForeignMessage(c=-43),
unittest_pb2.ForeignMessage(c=45324),
unittest_pb2.ForeignMessage(c=12)],
repeatedgroup=[
unittest_pb2.TestAllTypes.RepeatedGroup(),
unittest_pb2.TestAllTypes.RepeatedGroup(a=1),
unittest_pb2.TestAllTypes.RepeatedGroup(a=2)])
self.assertEqual(
[unittest_pb2.TestAllTypes.NestedMessage(
bb=unittest_pb2.TestAllTypes.FOO),
unittest_pb2.TestAllTypes.NestedMessage(
bb=unittest_pb2.TestAllTypes.BAR)],
list(proto.repeated_nested_message))
self.assertEqual(
[unittest_pb2.ForeignMessage(c=-43),
unittest_pb2.ForeignMessage(c=45324),
unittest_pb2.ForeignMessage(c=12)],
list(proto.repeated_foreign_message))
self.assertEqual(
[unittest_pb2.TestAllTypes.RepeatedGroup(),
unittest_pb2.TestAllTypes.RepeatedGroup(a=1),
unittest_pb2.TestAllTypes.RepeatedGroup(a=2)],
list(proto.repeatedgroup))
def testMixedConstructor(self):
# Constructor with only mixed types should succeed.
proto = unittest_pb2.TestAllTypes(
optional_int32=24,
optional_string='optional_string',
repeated_double=[1.23, 54.321],
repeated_bool=[True, False, False],
repeated_nested_message=[
unittest_pb2.TestAllTypes.NestedMessage(
bb=unittest_pb2.TestAllTypes.FOO),
unittest_pb2.TestAllTypes.NestedMessage(
bb=unittest_pb2.TestAllTypes.BAR)],
repeated_foreign_message=[
unittest_pb2.ForeignMessage(c=-43),
unittest_pb2.ForeignMessage(c=45324),
unittest_pb2.ForeignMessage(c=12)])
self.assertEqual(24, proto.optional_int32)
self.assertEqual('optional_string', proto.optional_string)
self.assertEqual([1.23, 54.321], list(proto.repeated_double))
self.assertEqual([True, False, False], list(proto.repeated_bool))
self.assertEqual(
[unittest_pb2.TestAllTypes.NestedMessage(
bb=unittest_pb2.TestAllTypes.FOO),
unittest_pb2.TestAllTypes.NestedMessage(
bb=unittest_pb2.TestAllTypes.BAR)],
list(proto.repeated_nested_message))
self.assertEqual(
[unittest_pb2.ForeignMessage(c=-43),
unittest_pb2.ForeignMessage(c=45324),
unittest_pb2.ForeignMessage(c=12)],
list(proto.repeated_foreign_message))
def testConstructorTypeError(self):
self.assertRaises(
TypeError, unittest_pb2.TestAllTypes, optional_int32="foo")
self.assertRaises(
TypeError, unittest_pb2.TestAllTypes, optional_string=1234)
self.assertRaises(
TypeError, unittest_pb2.TestAllTypes, optional_nested_message=1234)
self.assertRaises(
TypeError, unittest_pb2.TestAllTypes, repeated_int32=1234)
self.assertRaises(
TypeError, unittest_pb2.TestAllTypes, repeated_int32=["foo"])
self.assertRaises(
TypeError, unittest_pb2.TestAllTypes, repeated_string=1234)
self.assertRaises(
TypeError, unittest_pb2.TestAllTypes, repeated_string=[1234])
self.assertRaises(
TypeError, unittest_pb2.TestAllTypes, repeated_nested_message=1234)
self.assertRaises(
TypeError, unittest_pb2.TestAllTypes, repeated_nested_message=[1234])
def testConstructorInvalidatesCachedByteSize(self):
message = unittest_pb2.TestAllTypes(optional_int32 = 12)
self.assertEqual(2, message.ByteSize())
message = unittest_pb2.TestAllTypes(
optional_nested_message = unittest_pb2.TestAllTypes.NestedMessage())
self.assertEqual(3, message.ByteSize())
message = unittest_pb2.TestAllTypes(repeated_int32 = [12])
self.assertEqual(3, message.ByteSize())
message = unittest_pb2.TestAllTypes(
repeated_nested_message = [unittest_pb2.TestAllTypes.NestedMessage()])
self.assertEqual(3, message.ByteSize())
def testSimpleHasBits(self):
# Test a scalar.
proto = unittest_pb2.TestAllTypes()
self.assertTrue(not proto.HasField('optional_int32'))
self.assertEqual(0, proto.optional_int32)
# HasField() shouldn't be true if all we've done is
# read the default value.
self.assertTrue(not proto.HasField('optional_int32'))
proto.optional_int32 = 1
# Setting a value however *should* set the "has" bit.
self.assertTrue(proto.HasField('optional_int32'))
proto.ClearField('optional_int32')
# And clearing that value should unset the "has" bit.
self.assertTrue(not proto.HasField('optional_int32'))
def testHasBitsWithSinglyNestedScalar(self):
# Helper used to test foreign messages and groups.
#
# composite_field_name should be the name of a non-repeated
# composite (i.e., foreign or group) field in TestAllTypes,
# and scalar_field_name should be the name of an integer-valued
# scalar field within that composite.
#
# I never thought I'd miss C++ macros and templates so much. :(
# This helper is semantically just:
#
# assert proto.composite_field.scalar_field == 0
# assert not proto.composite_field.HasField('scalar_field')
# assert not proto.HasField('composite_field')
#
# proto.composite_field.scalar_field = 10
# old_composite_field = proto.composite_field
#
# assert proto.composite_field.scalar_field == 10
# assert proto.composite_field.HasField('scalar_field')
# assert proto.HasField('composite_field')
#
# proto.ClearField('composite_field')
#
# assert not proto.composite_field.HasField('scalar_field')
# assert not proto.HasField('composite_field')
# assert proto.composite_field.scalar_field == 0
#
# # Now ensure that ClearField('composite_field') disconnected
# # the old field object from the object tree...
# assert old_composite_field is not proto.composite_field
# old_composite_field.scalar_field = 20
# assert not proto.composite_field.HasField('scalar_field')
# assert not proto.HasField('composite_field')
def TestCompositeHasBits(composite_field_name, scalar_field_name):
proto = unittest_pb2.TestAllTypes()
# First, check that we can get the scalar value, and see that it's the
      # default (0), but that proto.HasField('composite') and
# proto.composite.HasField('scalar') will still return False.
composite_field = getattr(proto, composite_field_name)
original_scalar_value = getattr(composite_field, scalar_field_name)
self.assertEqual(0, original_scalar_value)
# Assert that the composite object does not "have" the scalar.
self.assertTrue(not composite_field.HasField(scalar_field_name))
# Assert that proto does not "have" the composite field.
self.assertTrue(not proto.HasField(composite_field_name))
# Now set the scalar within the composite field. Ensure that the setting
# is reflected, and that proto.HasField('composite') and
# proto.composite.HasField('scalar') now both return True.
new_val = 20
setattr(composite_field, scalar_field_name, new_val)
self.assertEqual(new_val, getattr(composite_field, scalar_field_name))
# Hold on to a reference to the current composite_field object.
old_composite_field = composite_field
# Assert that the has methods now return true.
self.assertTrue(composite_field.HasField(scalar_field_name))
self.assertTrue(proto.HasField(composite_field_name))
# Now call the clear method...
proto.ClearField(composite_field_name)
# ...and ensure that the "has" bits are all back to False...
composite_field = getattr(proto, composite_field_name)
self.assertTrue(not composite_field.HasField(scalar_field_name))
self.assertTrue(not proto.HasField(composite_field_name))
# ...and ensure that the scalar field has returned to its default.
self.assertEqual(0, getattr(composite_field, scalar_field_name))
self.assertTrue(old_composite_field is not composite_field)
setattr(old_composite_field, scalar_field_name, new_val)
self.assertTrue(not composite_field.HasField(scalar_field_name))
self.assertTrue(not proto.HasField(composite_field_name))
self.assertEqual(0, getattr(composite_field, scalar_field_name))
# Test simple, single-level nesting when we set a scalar.
TestCompositeHasBits('optionalgroup', 'a')
TestCompositeHasBits('optional_nested_message', 'bb')
TestCompositeHasBits('optional_foreign_message', 'c')
TestCompositeHasBits('optional_import_message', 'd')
def testReferencesToNestedMessage(self):
proto = unittest_pb2.TestAllTypes()
nested = proto.optional_nested_message
del proto
# A previous version had a bug where this would raise an exception when
# hitting a now-dead weak reference.
nested.bb = 23
def testDisconnectingNestedMessageBeforeSettingField(self):
proto = unittest_pb2.TestAllTypes()
nested = proto.optional_nested_message
proto.ClearField('optional_nested_message') # Should disconnect from parent
self.assertTrue(nested is not proto.optional_nested_message)
nested.bb = 23
self.assertTrue(not proto.HasField('optional_nested_message'))
self.assertEqual(0, proto.optional_nested_message.bb)
def testGetDefaultMessageAfterDisconnectingDefaultMessage(self):
proto = unittest_pb2.TestAllTypes()
nested = proto.optional_nested_message
proto.ClearField('optional_nested_message')
del proto
del nested
# Force a garbage collect so that the underlying CMessages are freed along
# with the Messages they point to. This is to make sure we're not deleting
# default message instances.
gc.collect()
proto = unittest_pb2.TestAllTypes()
nested = proto.optional_nested_message
def testDisconnectingNestedMessageAfterSettingField(self):
proto = unittest_pb2.TestAllTypes()
nested = proto.optional_nested_message
nested.bb = 5
self.assertTrue(proto.HasField('optional_nested_message'))
proto.ClearField('optional_nested_message') # Should disconnect from parent
self.assertEqual(5, nested.bb)
self.assertEqual(0, proto.optional_nested_message.bb)
self.assertTrue(nested is not proto.optional_nested_message)
nested.bb = 23
self.assertTrue(not proto.HasField('optional_nested_message'))
self.assertEqual(0, proto.optional_nested_message.bb)
def testDisconnectingNestedMessageBeforeGettingField(self):
proto = unittest_pb2.TestAllTypes()
self.assertTrue(not proto.HasField('optional_nested_message'))
proto.ClearField('optional_nested_message')
self.assertTrue(not proto.HasField('optional_nested_message'))
def testDisconnectingNestedMessageAfterMerge(self):
# This test exercises the code path that does not use ReleaseMessage().
# The underlying fear is that if we use ReleaseMessage() incorrectly,
# we will have memory leaks. It's hard to check that that doesn't happen,
# but at least we can exercise that code path to make sure it works.
proto1 = unittest_pb2.TestAllTypes()
proto2 = unittest_pb2.TestAllTypes()
proto2.optional_nested_message.bb = 5
proto1.MergeFrom(proto2)
self.assertTrue(proto1.HasField('optional_nested_message'))
proto1.ClearField('optional_nested_message')
self.assertTrue(not proto1.HasField('optional_nested_message'))
def testDisconnectingLazyNestedMessage(self):
# This test exercises releasing a nested message that is lazy. This test
# only exercises real code in the C++ implementation as Python does not
# support lazy parsing, but the current C++ implementation results in
# memory corruption and a crash.
if api_implementation.Type() != 'python':
return
proto = unittest_pb2.TestAllTypes()
proto.optional_lazy_message.bb = 5
proto.ClearField('optional_lazy_message')
del proto
gc.collect()
def testHasBitsWhenModifyingRepeatedFields(self):
# Test nesting when we add an element to a repeated field in a submessage.
proto = unittest_pb2.TestNestedMessageHasBits()
proto.optional_nested_message.nestedmessage_repeated_int32.append(5)
self.assertEqual(
[5], proto.optional_nested_message.nestedmessage_repeated_int32)
self.assertTrue(proto.HasField('optional_nested_message'))
# Do the same test, but with a repeated composite field within the
# submessage.
proto.ClearField('optional_nested_message')
self.assertTrue(not proto.HasField('optional_nested_message'))
proto.optional_nested_message.nestedmessage_repeated_foreignmessage.add()
self.assertTrue(proto.HasField('optional_nested_message'))
def testHasBitsForManyLevelsOfNesting(self):
# Test nesting many levels deep.
recursive_proto = unittest_pb2.TestMutualRecursionA()
self.assertTrue(not recursive_proto.HasField('bb'))
self.assertEqual(0, recursive_proto.bb.a.bb.a.bb.optional_int32)
self.assertTrue(not recursive_proto.HasField('bb'))
recursive_proto.bb.a.bb.a.bb.optional_int32 = 5
self.assertEqual(5, recursive_proto.bb.a.bb.a.bb.optional_int32)
self.assertTrue(recursive_proto.HasField('bb'))
self.assertTrue(recursive_proto.bb.HasField('a'))
self.assertTrue(recursive_proto.bb.a.HasField('bb'))
self.assertTrue(recursive_proto.bb.a.bb.HasField('a'))
self.assertTrue(recursive_proto.bb.a.bb.a.HasField('bb'))
self.assertTrue(not recursive_proto.bb.a.bb.a.bb.HasField('a'))
self.assertTrue(recursive_proto.bb.a.bb.a.bb.HasField('optional_int32'))
def testSingularListFields(self):
proto = unittest_pb2.TestAllTypes()
proto.optional_fixed32 = 1
proto.optional_int32 = 5
proto.optional_string = 'foo'
# Access sub-message but don't set it yet.
nested_message = proto.optional_nested_message
self.assertEqual(
[ (proto.DESCRIPTOR.fields_by_name['optional_int32' ], 5),
(proto.DESCRIPTOR.fields_by_name['optional_fixed32'], 1),
(proto.DESCRIPTOR.fields_by_name['optional_string' ], 'foo') ],
proto.ListFields())
proto.optional_nested_message.bb = 123
self.assertEqual(
[ (proto.DESCRIPTOR.fields_by_name['optional_int32' ], 5),
(proto.DESCRIPTOR.fields_by_name['optional_fixed32'], 1),
(proto.DESCRIPTOR.fields_by_name['optional_string' ], 'foo'),
(proto.DESCRIPTOR.fields_by_name['optional_nested_message' ],
nested_message) ],
proto.ListFields())
def testRepeatedListFields(self):
proto = unittest_pb2.TestAllTypes()
proto.repeated_fixed32.append(1)
proto.repeated_int32.append(5)
proto.repeated_int32.append(11)
proto.repeated_string.extend(['foo', 'bar'])
proto.repeated_string.extend([])
proto.repeated_string.append('baz')
proto.repeated_string.extend(str(x) for x in range(2))
proto.optional_int32 = 21
proto.repeated_bool # Access but don't set anything; should not be listed.
self.assertEqual(
[ (proto.DESCRIPTOR.fields_by_name['optional_int32' ], 21),
(proto.DESCRIPTOR.fields_by_name['repeated_int32' ], [5, 11]),
(proto.DESCRIPTOR.fields_by_name['repeated_fixed32'], [1]),
(proto.DESCRIPTOR.fields_by_name['repeated_string' ],
['foo', 'bar', 'baz', '0', '1']) ],
proto.ListFields())
def testSingularListExtensions(self):
proto = unittest_pb2.TestAllExtensions()
proto.Extensions[unittest_pb2.optional_fixed32_extension] = 1
proto.Extensions[unittest_pb2.optional_int32_extension ] = 5
proto.Extensions[unittest_pb2.optional_string_extension ] = 'foo'
self.assertEqual(
[ (unittest_pb2.optional_int32_extension , 5),
(unittest_pb2.optional_fixed32_extension, 1),
(unittest_pb2.optional_string_extension , 'foo') ],
proto.ListFields())
def testRepeatedListExtensions(self):
proto = unittest_pb2.TestAllExtensions()
proto.Extensions[unittest_pb2.repeated_fixed32_extension].append(1)
proto.Extensions[unittest_pb2.repeated_int32_extension ].append(5)
proto.Extensions[unittest_pb2.repeated_int32_extension ].append(11)
proto.Extensions[unittest_pb2.repeated_string_extension ].append('foo')
proto.Extensions[unittest_pb2.repeated_string_extension ].append('bar')
proto.Extensions[unittest_pb2.repeated_string_extension ].append('baz')
proto.Extensions[unittest_pb2.optional_int32_extension ] = 21
self.assertEqual(
[ (unittest_pb2.optional_int32_extension , 21),
(unittest_pb2.repeated_int32_extension , [5, 11]),
(unittest_pb2.repeated_fixed32_extension, [1]),
(unittest_pb2.repeated_string_extension , ['foo', 'bar', 'baz']) ],
proto.ListFields())
def testListFieldsAndExtensions(self):
proto = unittest_pb2.TestFieldOrderings()
test_util.SetAllFieldsAndExtensions(proto)
self.assertEqual(
[ (proto.DESCRIPTOR.fields_by_name['my_int' ], 1),
(unittest_pb2.my_extension_int , 23),
(proto.DESCRIPTOR.fields_by_name['my_string'], 'foo'),
(unittest_pb2.my_extension_string , 'bar'),
(proto.DESCRIPTOR.fields_by_name['my_float' ], 1.0) ],
proto.ListFields())
def testDefaultValues(self):
proto = unittest_pb2.TestAllTypes()
self.assertEqual(0, proto.optional_int32)
self.assertEqual(0, proto.optional_int64)
self.assertEqual(0, proto.optional_uint32)
self.assertEqual(0, proto.optional_uint64)
self.assertEqual(0, proto.optional_sint32)
self.assertEqual(0, proto.optional_sint64)
self.assertEqual(0, proto.optional_fixed32)
self.assertEqual(0, proto.optional_fixed64)
self.assertEqual(0, proto.optional_sfixed32)
self.assertEqual(0, proto.optional_sfixed64)
self.assertEqual(0.0, proto.optional_float)
self.assertEqual(0.0, proto.optional_double)
self.assertEqual(False, proto.optional_bool)
self.assertEqual('', proto.optional_string)
self.assertEqual(b'', proto.optional_bytes)
self.assertEqual(41, proto.default_int32)
self.assertEqual(42, proto.default_int64)
self.assertEqual(43, proto.default_uint32)
self.assertEqual(44, proto.default_uint64)
self.assertEqual(-45, proto.default_sint32)
self.assertEqual(46, proto.default_sint64)
self.assertEqual(47, proto.default_fixed32)
self.assertEqual(48, proto.default_fixed64)
self.assertEqual(49, proto.default_sfixed32)
self.assertEqual(-50, proto.default_sfixed64)
self.assertEqual(51.5, proto.default_float)
self.assertEqual(52e3, proto.default_double)
self.assertEqual(True, proto.default_bool)
self.assertEqual('hello', proto.default_string)
self.assertEqual(b'world', proto.default_bytes)
self.assertEqual(unittest_pb2.TestAllTypes.BAR, proto.default_nested_enum)
self.assertEqual(unittest_pb2.FOREIGN_BAR, proto.default_foreign_enum)
self.assertEqual(unittest_import_pb2.IMPORT_BAR,
proto.default_import_enum)
proto = unittest_pb2.TestExtremeDefaultValues()
self.assertEqual(u'\u1234', proto.utf8_string)
def testHasFieldWithUnknownFieldName(self):
proto = unittest_pb2.TestAllTypes()
self.assertRaises(ValueError, proto.HasField, 'nonexistent_field')
def testClearFieldWithUnknownFieldName(self):
proto = unittest_pb2.TestAllTypes()
self.assertRaises(ValueError, proto.ClearField, 'nonexistent_field')
def testClearRemovesChildren(self):
# Make sure there aren't any implementation bugs that are only partially
# clearing the message (which can happen in the more complex C++
# implementation which has parallel message lists).
proto = unittest_pb2.TestRequiredForeign()
for i in range(10):
proto.repeated_message.add()
proto2 = unittest_pb2.TestRequiredForeign()
proto.CopyFrom(proto2)
self.assertRaises(IndexError, lambda: proto.repeated_message[5])
def testDisallowedAssignments(self):
# It's illegal to assign values directly to repeated fields
# or to nonrepeated composite fields. Ensure that this fails.
proto = unittest_pb2.TestAllTypes()
# Repeated fields.
self.assertRaises(AttributeError, setattr, proto, 'repeated_int32', 10)
# Lists shouldn't work, either.
self.assertRaises(AttributeError, setattr, proto, 'repeated_int32', [10])
# Composite fields.
self.assertRaises(AttributeError, setattr, proto,
'optional_nested_message', 23)
# Assignment to a repeated nested message field without specifying
# the index in the array of nested messages.
self.assertRaises(AttributeError, setattr, proto.repeated_nested_message,
'bb', 34)
# Assignment to an attribute of a repeated field.
self.assertRaises(AttributeError, setattr, proto.repeated_float,
'some_attribute', 34)
# proto.nonexistent_field = 23 should fail as well.
self.assertRaises(AttributeError, setattr, proto, 'nonexistent_field', 23)
def testSingleScalarTypeSafety(self):
proto = unittest_pb2.TestAllTypes()
self.assertRaises(TypeError, setattr, proto, 'optional_int32', 1.1)
self.assertRaises(TypeError, setattr, proto, 'optional_int32', 'foo')
self.assertRaises(TypeError, setattr, proto, 'optional_string', 10)
self.assertRaises(TypeError, setattr, proto, 'optional_bytes', 10)
def testIntegerTypes(self):
def TestGetAndDeserialize(field_name, value, expected_type):
proto = unittest_pb2.TestAllTypes()
setattr(proto, field_name, value)
self.assertIsInstance(getattr(proto, field_name), expected_type)
proto2 = unittest_pb2.TestAllTypes()
proto2.ParseFromString(proto.SerializeToString())
self.assertIsInstance(getattr(proto2, field_name), expected_type)
TestGetAndDeserialize('optional_int32', 1, int)
TestGetAndDeserialize('optional_int32', 1 << 30, int)
TestGetAndDeserialize('optional_uint32', 1 << 30, int)
try:
integer_64 = long
except NameError: # Python3
integer_64 = int
    if struct.calcsize('L') == 4:
      # Python only has signed ints, so 32-bit Python can't fit a uint32
      # in an int.
      TestGetAndDeserialize('optional_uint32', 1 << 31, integer_64)
    else:
      # 64-bit Python can fit a uint32 inside an int.
      TestGetAndDeserialize('optional_uint32', 1 << 31, int)
TestGetAndDeserialize('optional_int64', 1 << 30, integer_64)
TestGetAndDeserialize('optional_int64', 1 << 60, integer_64)
TestGetAndDeserialize('optional_uint64', 1 << 30, integer_64)
TestGetAndDeserialize('optional_uint64', 1 << 60, integer_64)
def testSingleScalarBoundsChecking(self):
def TestMinAndMaxIntegers(field_name, expected_min, expected_max):
pb = unittest_pb2.TestAllTypes()
setattr(pb, field_name, expected_min)
self.assertEqual(expected_min, getattr(pb, field_name))
setattr(pb, field_name, expected_max)
self.assertEqual(expected_max, getattr(pb, field_name))
self.assertRaises(ValueError, setattr, pb, field_name, expected_min - 1)
self.assertRaises(ValueError, setattr, pb, field_name, expected_max + 1)
TestMinAndMaxIntegers('optional_int32', -(1 << 31), (1 << 31) - 1)
TestMinAndMaxIntegers('optional_uint32', 0, 0xffffffff)
TestMinAndMaxIntegers('optional_int64', -(1 << 63), (1 << 63) - 1)
TestMinAndMaxIntegers('optional_uint64', 0, 0xffffffffffffffff)
pb = unittest_pb2.TestAllTypes()
pb.optional_nested_enum = 1
self.assertEqual(1, pb.optional_nested_enum)
def testRepeatedScalarTypeSafety(self):
proto = unittest_pb2.TestAllTypes()
self.assertRaises(TypeError, proto.repeated_int32.append, 1.1)
self.assertRaises(TypeError, proto.repeated_int32.append, 'foo')
self.assertRaises(TypeError, proto.repeated_string, 10)
self.assertRaises(TypeError, proto.repeated_bytes, 10)
proto.repeated_int32.append(10)
proto.repeated_int32[0] = 23
self.assertRaises(IndexError, proto.repeated_int32.__setitem__, 500, 23)
self.assertRaises(TypeError, proto.repeated_int32.__setitem__, 0, 'abc')
# Repeated enums tests.
    # proto.repeated_nested_enum.append(0)
def testSingleScalarGettersAndSetters(self):
proto = unittest_pb2.TestAllTypes()
self.assertEqual(0, proto.optional_int32)
proto.optional_int32 = 1
self.assertEqual(1, proto.optional_int32)
proto.optional_uint64 = 0xffffffffffff
self.assertEqual(0xffffffffffff, proto.optional_uint64)
proto.optional_uint64 = 0xffffffffffffffff
self.assertEqual(0xffffffffffffffff, proto.optional_uint64)
# TODO(robinson): Test all other scalar field types.
def testSingleScalarClearField(self):
proto = unittest_pb2.TestAllTypes()
# Should be allowed to clear something that's not there (a no-op).
proto.ClearField('optional_int32')
proto.optional_int32 = 1
self.assertTrue(proto.HasField('optional_int32'))
proto.ClearField('optional_int32')
self.assertEqual(0, proto.optional_int32)
self.assertTrue(not proto.HasField('optional_int32'))
# TODO(robinson): Test all other scalar field types.
def testEnums(self):
proto = unittest_pb2.TestAllTypes()
self.assertEqual(1, proto.FOO)
self.assertEqual(1, unittest_pb2.TestAllTypes.FOO)
self.assertEqual(2, proto.BAR)
self.assertEqual(2, unittest_pb2.TestAllTypes.BAR)
self.assertEqual(3, proto.BAZ)
self.assertEqual(3, unittest_pb2.TestAllTypes.BAZ)
def testEnum_Name(self):
self.assertEqual('FOREIGN_FOO',
unittest_pb2.ForeignEnum.Name(unittest_pb2.FOREIGN_FOO))
self.assertEqual('FOREIGN_BAR',
unittest_pb2.ForeignEnum.Name(unittest_pb2.FOREIGN_BAR))
self.assertEqual('FOREIGN_BAZ',
unittest_pb2.ForeignEnum.Name(unittest_pb2.FOREIGN_BAZ))
self.assertRaises(ValueError,
unittest_pb2.ForeignEnum.Name, 11312)
proto = unittest_pb2.TestAllTypes()
self.assertEqual('FOO',
proto.NestedEnum.Name(proto.FOO))
self.assertEqual('FOO',
unittest_pb2.TestAllTypes.NestedEnum.Name(proto.FOO))
self.assertEqual('BAR',
proto.NestedEnum.Name(proto.BAR))
self.assertEqual('BAR',
unittest_pb2.TestAllTypes.NestedEnum.Name(proto.BAR))
self.assertEqual('BAZ',
proto.NestedEnum.Name(proto.BAZ))
self.assertEqual('BAZ',
unittest_pb2.TestAllTypes.NestedEnum.Name(proto.BAZ))
self.assertRaises(ValueError,
proto.NestedEnum.Name, 11312)
self.assertRaises(ValueError,
unittest_pb2.TestAllTypes.NestedEnum.Name, 11312)
def testEnum_Value(self):
self.assertEqual(unittest_pb2.FOREIGN_FOO,
unittest_pb2.ForeignEnum.Value('FOREIGN_FOO'))
self.assertEqual(unittest_pb2.FOREIGN_BAR,
unittest_pb2.ForeignEnum.Value('FOREIGN_BAR'))
self.assertEqual(unittest_pb2.FOREIGN_BAZ,
unittest_pb2.ForeignEnum.Value('FOREIGN_BAZ'))
self.assertRaises(ValueError,
unittest_pb2.ForeignEnum.Value, 'FO')
proto = unittest_pb2.TestAllTypes()
self.assertEqual(proto.FOO,
proto.NestedEnum.Value('FOO'))
self.assertEqual(proto.FOO,
unittest_pb2.TestAllTypes.NestedEnum.Value('FOO'))
self.assertEqual(proto.BAR,
proto.NestedEnum.Value('BAR'))
self.assertEqual(proto.BAR,
unittest_pb2.TestAllTypes.NestedEnum.Value('BAR'))
self.assertEqual(proto.BAZ,
proto.NestedEnum.Value('BAZ'))
self.assertEqual(proto.BAZ,
unittest_pb2.TestAllTypes.NestedEnum.Value('BAZ'))
self.assertRaises(ValueError,
proto.NestedEnum.Value, 'Foo')
self.assertRaises(ValueError,
unittest_pb2.TestAllTypes.NestedEnum.Value, 'Foo')
def testEnum_KeysAndValues(self):
self.assertEqual(['FOREIGN_FOO', 'FOREIGN_BAR', 'FOREIGN_BAZ'],
list(unittest_pb2.ForeignEnum.keys()))
self.assertEqual([4, 5, 6],
list(unittest_pb2.ForeignEnum.values()))
self.assertEqual([('FOREIGN_FOO', 4), ('FOREIGN_BAR', 5),
('FOREIGN_BAZ', 6)],
list(unittest_pb2.ForeignEnum.items()))
proto = unittest_pb2.TestAllTypes()
self.assertEqual(['FOO', 'BAR', 'BAZ', 'NEG'], list(proto.NestedEnum.keys()))
self.assertEqual([1, 2, 3, -1], list(proto.NestedEnum.values()))
self.assertEqual([('FOO', 1), ('BAR', 2), ('BAZ', 3), ('NEG', -1)],
list(proto.NestedEnum.items()))
def testRepeatedScalars(self):
proto = unittest_pb2.TestAllTypes()
self.assertTrue(not proto.repeated_int32)
self.assertEqual(0, len(proto.repeated_int32))
proto.repeated_int32.append(5)
proto.repeated_int32.append(10)
proto.repeated_int32.append(15)
self.assertTrue(proto.repeated_int32)
self.assertEqual(3, len(proto.repeated_int32))
self.assertEqual([5, 10, 15], proto.repeated_int32)
# Test single retrieval.
self.assertEqual(5, proto.repeated_int32[0])
self.assertEqual(15, proto.repeated_int32[-1])
# Test out-of-bounds indices.
self.assertRaises(IndexError, proto.repeated_int32.__getitem__, 1234)
self.assertRaises(IndexError, proto.repeated_int32.__getitem__, -1234)
# Test incorrect types passed to __getitem__.
self.assertRaises(TypeError, proto.repeated_int32.__getitem__, 'foo')
self.assertRaises(TypeError, proto.repeated_int32.__getitem__, None)
# Test single assignment.
proto.repeated_int32[1] = 20
self.assertEqual([5, 20, 15], proto.repeated_int32)
# Test insertion.
proto.repeated_int32.insert(1, 25)
self.assertEqual([5, 25, 20, 15], proto.repeated_int32)
# Test slice retrieval.
proto.repeated_int32.append(30)
self.assertEqual([25, 20, 15], proto.repeated_int32[1:4])
self.assertEqual([5, 25, 20, 15, 30], proto.repeated_int32[:])
# Test slice assignment with an iterator
proto.repeated_int32[1:4] = (i for i in range(3))
self.assertEqual([5, 0, 1, 2, 30], proto.repeated_int32)
# Test slice assignment.
proto.repeated_int32[1:4] = [35, 40, 45]
self.assertEqual([5, 35, 40, 45, 30], proto.repeated_int32)
# Test that we can use the field as an iterator.
result = []
for i in proto.repeated_int32:
result.append(i)
self.assertEqual([5, 35, 40, 45, 30], result)
# Test single deletion.
del proto.repeated_int32[2]
self.assertEqual([5, 35, 45, 30], proto.repeated_int32)
# Test slice deletion.
del proto.repeated_int32[2:]
self.assertEqual([5, 35], proto.repeated_int32)
# Test extending.
proto.repeated_int32.extend([3, 13])
self.assertEqual([5, 35, 3, 13], proto.repeated_int32)
# Test clearing.
proto.ClearField('repeated_int32')
self.assertTrue(not proto.repeated_int32)
self.assertEqual(0, len(proto.repeated_int32))
proto.repeated_int32.append(1)
self.assertEqual(1, proto.repeated_int32[-1])
# Test assignment to a negative index.
proto.repeated_int32[-1] = 2
self.assertEqual(2, proto.repeated_int32[-1])
# Test deletion at negative indices.
proto.repeated_int32[:] = [0, 1, 2, 3]
del proto.repeated_int32[-1]
self.assertEqual([0, 1, 2], proto.repeated_int32)
del proto.repeated_int32[-2]
self.assertEqual([0, 2], proto.repeated_int32)
self.assertRaises(IndexError, proto.repeated_int32.__delitem__, -3)
self.assertRaises(IndexError, proto.repeated_int32.__delitem__, 300)
del proto.repeated_int32[-2:-1]
self.assertEqual([2], proto.repeated_int32)
del proto.repeated_int32[100:10000]
self.assertEqual([2], proto.repeated_int32)
def testRepeatedScalarsRemove(self):
proto = unittest_pb2.TestAllTypes()
self.assertTrue(not proto.repeated_int32)
self.assertEqual(0, len(proto.repeated_int32))
proto.repeated_int32.append(5)
proto.repeated_int32.append(10)
proto.repeated_int32.append(5)
proto.repeated_int32.append(5)
self.assertEqual(4, len(proto.repeated_int32))
proto.repeated_int32.remove(5)
self.assertEqual(3, len(proto.repeated_int32))
self.assertEqual(10, proto.repeated_int32[0])
self.assertEqual(5, proto.repeated_int32[1])
self.assertEqual(5, proto.repeated_int32[2])
proto.repeated_int32.remove(5)
self.assertEqual(2, len(proto.repeated_int32))
self.assertEqual(10, proto.repeated_int32[0])
self.assertEqual(5, proto.repeated_int32[1])
proto.repeated_int32.remove(10)
self.assertEqual(1, len(proto.repeated_int32))
self.assertEqual(5, proto.repeated_int32[0])
# Remove a non-existent element.
self.assertRaises(ValueError, proto.repeated_int32.remove, 123)
def testRepeatedComposites(self):
proto = unittest_pb2.TestAllTypes()
self.assertTrue(not proto.repeated_nested_message)
self.assertEqual(0, len(proto.repeated_nested_message))
m0 = proto.repeated_nested_message.add()
m1 = proto.repeated_nested_message.add()
self.assertTrue(proto.repeated_nested_message)
self.assertEqual(2, len(proto.repeated_nested_message))
self.assertListsEqual([m0, m1], proto.repeated_nested_message)
self.assertIsInstance(m0, unittest_pb2.TestAllTypes.NestedMessage)
# Test out-of-bounds indices.
self.assertRaises(IndexError, proto.repeated_nested_message.__getitem__,
1234)
self.assertRaises(IndexError, proto.repeated_nested_message.__getitem__,
-1234)
# Test incorrect types passed to __getitem__.
self.assertRaises(TypeError, proto.repeated_nested_message.__getitem__,
'foo')
self.assertRaises(TypeError, proto.repeated_nested_message.__getitem__,
None)
# Test slice retrieval.
m2 = proto.repeated_nested_message.add()
m3 = proto.repeated_nested_message.add()
m4 = proto.repeated_nested_message.add()
self.assertListsEqual(
[m1, m2, m3], proto.repeated_nested_message[1:4])
self.assertListsEqual(
[m0, m1, m2, m3, m4], proto.repeated_nested_message[:])
self.assertListsEqual(
[m0, m1], proto.repeated_nested_message[:2])
self.assertListsEqual(
[m2, m3, m4], proto.repeated_nested_message[2:])
self.assertEqual(
m0, proto.repeated_nested_message[0])
self.assertListsEqual(
[m0], proto.repeated_nested_message[:1])
# Test that we can use the field as an iterator.
result = []
for i in proto.repeated_nested_message:
result.append(i)
self.assertListsEqual([m0, m1, m2, m3, m4], result)
# Test single deletion.
del proto.repeated_nested_message[2]
self.assertListsEqual([m0, m1, m3, m4], proto.repeated_nested_message)
# Test slice deletion.
del proto.repeated_nested_message[2:]
self.assertListsEqual([m0, m1], proto.repeated_nested_message)
# Test extending.
n1 = unittest_pb2.TestAllTypes.NestedMessage(bb=1)
n2 = unittest_pb2.TestAllTypes.NestedMessage(bb=2)
proto.repeated_nested_message.extend([n1,n2])
self.assertEqual(4, len(proto.repeated_nested_message))
self.assertEqual(n1, proto.repeated_nested_message[2])
self.assertEqual(n2, proto.repeated_nested_message[3])
# Test clearing.
proto.ClearField('repeated_nested_message')
self.assertTrue(not proto.repeated_nested_message)
self.assertEqual(0, len(proto.repeated_nested_message))
# Test constructing an element while adding it.
proto.repeated_nested_message.add(bb=23)
self.assertEqual(1, len(proto.repeated_nested_message))
self.assertEqual(23, proto.repeated_nested_message[0].bb)
def testRepeatedCompositeRemove(self):
proto = unittest_pb2.TestAllTypes()
self.assertEqual(0, len(proto.repeated_nested_message))
m0 = proto.repeated_nested_message.add()
# Need to set some differentiating variable so m0 != m1 != m2:
m0.bb = len(proto.repeated_nested_message)
m1 = proto.repeated_nested_message.add()
m1.bb = len(proto.repeated_nested_message)
self.assertTrue(m0 != m1)
m2 = proto.repeated_nested_message.add()
m2.bb = len(proto.repeated_nested_message)
self.assertListsEqual([m0, m1, m2], proto.repeated_nested_message)
self.assertEqual(3, len(proto.repeated_nested_message))
proto.repeated_nested_message.remove(m0)
self.assertEqual(2, len(proto.repeated_nested_message))
self.assertEqual(m1, proto.repeated_nested_message[0])
self.assertEqual(m2, proto.repeated_nested_message[1])
    # Removing m0 again or removing None should raise a ValueError.
self.assertRaises(ValueError, proto.repeated_nested_message.remove, m0)
self.assertRaises(ValueError, proto.repeated_nested_message.remove, None)
self.assertEqual(2, len(proto.repeated_nested_message))
proto.repeated_nested_message.remove(m2)
self.assertEqual(1, len(proto.repeated_nested_message))
self.assertEqual(m1, proto.repeated_nested_message[0])
def testHandWrittenReflection(self):
    # Hand-written descriptors and reflection are only supported by the
    # pure-Python implementation of the API.
if api_implementation.Type() != 'python':
return
FieldDescriptor = descriptor.FieldDescriptor
foo_field_descriptor = FieldDescriptor(
name='foo_field', full_name='MyProto.foo_field',
index=0, number=1, type=FieldDescriptor.TYPE_INT64,
cpp_type=FieldDescriptor.CPPTYPE_INT64,
label=FieldDescriptor.LABEL_OPTIONAL, default_value=0,
containing_type=None, message_type=None, enum_type=None,
is_extension=False, extension_scope=None,
options=descriptor_pb2.FieldOptions())
mydescriptor = descriptor.Descriptor(
name='MyProto', full_name='MyProto', filename='ignored',
containing_type=None, nested_types=[], enum_types=[],
fields=[foo_field_descriptor], extensions=[],
options=descriptor_pb2.MessageOptions())
class MyProtoClass(six.with_metaclass(reflection.GeneratedProtocolMessageType, message.Message)):
DESCRIPTOR = mydescriptor
myproto_instance = MyProtoClass()
self.assertEqual(0, myproto_instance.foo_field)
self.assertTrue(not myproto_instance.HasField('foo_field'))
myproto_instance.foo_field = 23
self.assertEqual(23, myproto_instance.foo_field)
self.assertTrue(myproto_instance.HasField('foo_field'))
def testDescriptorProtoSupport(self):
# Hand written descriptors/reflection are only supported by the pure-Python
# implementation of the API.
if api_implementation.Type() != 'python':
return
def AddDescriptorField(proto, field_name, field_type):
AddDescriptorField.field_index += 1
new_field = proto.field.add()
new_field.name = field_name
new_field.type = field_type
new_field.number = AddDescriptorField.field_index
new_field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
AddDescriptorField.field_index = 0
desc_proto = descriptor_pb2.DescriptorProto()
desc_proto.name = 'Car'
fdp = descriptor_pb2.FieldDescriptorProto
AddDescriptorField(desc_proto, 'name', fdp.TYPE_STRING)
AddDescriptorField(desc_proto, 'year', fdp.TYPE_INT64)
AddDescriptorField(desc_proto, 'automatic', fdp.TYPE_BOOL)
AddDescriptorField(desc_proto, 'price', fdp.TYPE_DOUBLE)
# Add a repeated field
AddDescriptorField.field_index += 1
new_field = desc_proto.field.add()
new_field.name = 'owners'
new_field.type = fdp.TYPE_STRING
new_field.number = AddDescriptorField.field_index
new_field.label = descriptor_pb2.FieldDescriptorProto.LABEL_REPEATED
desc = descriptor.MakeDescriptor(desc_proto)
self.assertTrue('name' in desc.fields_by_name)
self.assertTrue('year' in desc.fields_by_name)
self.assertTrue('automatic' in desc.fields_by_name)
self.assertTrue('price' in desc.fields_by_name)
self.assertTrue('owners' in desc.fields_by_name)
class CarMessage(six.with_metaclass(reflection.GeneratedProtocolMessageType, message.Message)):
DESCRIPTOR = desc
prius = CarMessage()
prius.name = 'prius'
prius.year = 2010
prius.automatic = True
prius.price = 25134.75
prius.owners.extend(['bob', 'susan'])
serialized_prius = prius.SerializeToString()
new_prius = reflection.ParseMessage(desc, serialized_prius)
self.assertTrue(new_prius is not prius)
self.assertEqual(prius, new_prius)
# these are unnecessary assuming message equality works as advertised but
# explicitly check to be safe since we're mucking about in metaclass foo
self.assertEqual(prius.name, new_prius.name)
self.assertEqual(prius.year, new_prius.year)
self.assertEqual(prius.automatic, new_prius.automatic)
self.assertEqual(prius.price, new_prius.price)
self.assertEqual(prius.owners, new_prius.owners)
def testTopLevelExtensionsForOptionalScalar(self):
extendee_proto = unittest_pb2.TestAllExtensions()
extension = unittest_pb2.optional_int32_extension
self.assertTrue(not extendee_proto.HasExtension(extension))
self.assertEqual(0, extendee_proto.Extensions[extension])
# As with normal scalar fields, just doing a read doesn't actually set the
# "has" bit.
self.assertTrue(not extendee_proto.HasExtension(extension))
# Actually set the thing.
extendee_proto.Extensions[extension] = 23
self.assertEqual(23, extendee_proto.Extensions[extension])
self.assertTrue(extendee_proto.HasExtension(extension))
# Ensure that clearing works as well.
extendee_proto.ClearExtension(extension)
self.assertEqual(0, extendee_proto.Extensions[extension])
self.assertTrue(not extendee_proto.HasExtension(extension))
def testTopLevelExtensionsForRepeatedScalar(self):
extendee_proto = unittest_pb2.TestAllExtensions()
extension = unittest_pb2.repeated_string_extension
self.assertEqual(0, len(extendee_proto.Extensions[extension]))
extendee_proto.Extensions[extension].append('foo')
self.assertEqual(['foo'], extendee_proto.Extensions[extension])
string_list = extendee_proto.Extensions[extension]
extendee_proto.ClearExtension(extension)
self.assertEqual(0, len(extendee_proto.Extensions[extension]))
self.assertTrue(string_list is not extendee_proto.Extensions[extension])
# Shouldn't be allowed to do Extensions[extension] = 'a'
self.assertRaises(TypeError, operator.setitem, extendee_proto.Extensions,
extension, 'a')
def testTopLevelExtensionsForOptionalMessage(self):
extendee_proto = unittest_pb2.TestAllExtensions()
extension = unittest_pb2.optional_foreign_message_extension
self.assertTrue(not extendee_proto.HasExtension(extension))
self.assertEqual(0, extendee_proto.Extensions[extension].c)
# As with normal (non-extension) fields, merely reading from the
# thing shouldn't set the "has" bit.
self.assertTrue(not extendee_proto.HasExtension(extension))
extendee_proto.Extensions[extension].c = 23
self.assertEqual(23, extendee_proto.Extensions[extension].c)
self.assertTrue(extendee_proto.HasExtension(extension))
# Save a reference here.
foreign_message = extendee_proto.Extensions[extension]
extendee_proto.ClearExtension(extension)
self.assertTrue(foreign_message is not extendee_proto.Extensions[extension])
# Setting a field on foreign_message now shouldn't set
# any "has" bits on extendee_proto.
foreign_message.c = 42
self.assertEqual(42, foreign_message.c)
self.assertTrue(foreign_message.HasField('c'))
self.assertTrue(not extendee_proto.HasExtension(extension))
# Shouldn't be allowed to do Extensions[extension] = 'a'
self.assertRaises(TypeError, operator.setitem, extendee_proto.Extensions,
extension, 'a')
def testTopLevelExtensionsForRepeatedMessage(self):
extendee_proto = unittest_pb2.TestAllExtensions()
extension = unittest_pb2.repeatedgroup_extension
self.assertEqual(0, len(extendee_proto.Extensions[extension]))
group = extendee_proto.Extensions[extension].add()
group.a = 23
self.assertEqual(23, extendee_proto.Extensions[extension][0].a)
group.a = 42
self.assertEqual(42, extendee_proto.Extensions[extension][0].a)
group_list = extendee_proto.Extensions[extension]
extendee_proto.ClearExtension(extension)
self.assertEqual(0, len(extendee_proto.Extensions[extension]))
self.assertTrue(group_list is not extendee_proto.Extensions[extension])
# Shouldn't be allowed to do Extensions[extension] = 'a'
self.assertRaises(TypeError, operator.setitem, extendee_proto.Extensions,
extension, 'a')
def testNestedExtensions(self):
extendee_proto = unittest_pb2.TestAllExtensions()
extension = unittest_pb2.TestRequired.single
# We just test the non-repeated case.
self.assertTrue(not extendee_proto.HasExtension(extension))
required = extendee_proto.Extensions[extension]
self.assertEqual(0, required.a)
self.assertTrue(not extendee_proto.HasExtension(extension))
required.a = 23
self.assertEqual(23, extendee_proto.Extensions[extension].a)
self.assertTrue(extendee_proto.HasExtension(extension))
extendee_proto.ClearExtension(extension)
self.assertTrue(required is not extendee_proto.Extensions[extension])
self.assertTrue(not extendee_proto.HasExtension(extension))
def testRegisteredExtensions(self):
self.assertTrue('protobuf_unittest.optional_int32_extension' in
unittest_pb2.TestAllExtensions._extensions_by_name)
self.assertTrue(1 in unittest_pb2.TestAllExtensions._extensions_by_number)
# Make sure extensions haven't been registered into types that shouldn't
# have any.
self.assertEqual(0, len(unittest_pb2.TestAllTypes._extensions_by_name))
# If message A directly contains message B, and
# a.HasField('b') is currently False, then mutating any
# extension in B should change a.HasField('b') to True
# (and so on up the object tree).
def testHasBitsForAncestorsOfExtendedMessage(self):
# Optional scalar extension.
toplevel = more_extensions_pb2.TopLevelMessage()
self.assertTrue(not toplevel.HasField('submessage'))
self.assertEqual(0, toplevel.submessage.Extensions[
more_extensions_pb2.optional_int_extension])
self.assertTrue(not toplevel.HasField('submessage'))
toplevel.submessage.Extensions[
more_extensions_pb2.optional_int_extension] = 23
self.assertEqual(23, toplevel.submessage.Extensions[
more_extensions_pb2.optional_int_extension])
self.assertTrue(toplevel.HasField('submessage'))
# Repeated scalar extension.
toplevel = more_extensions_pb2.TopLevelMessage()
self.assertTrue(not toplevel.HasField('submessage'))
self.assertEqual([], toplevel.submessage.Extensions[
more_extensions_pb2.repeated_int_extension])
self.assertTrue(not toplevel.HasField('submessage'))
toplevel.submessage.Extensions[
more_extensions_pb2.repeated_int_extension].append(23)
self.assertEqual([23], toplevel.submessage.Extensions[
more_extensions_pb2.repeated_int_extension])
self.assertTrue(toplevel.HasField('submessage'))
# Optional message extension.
toplevel = more_extensions_pb2.TopLevelMessage()
self.assertTrue(not toplevel.HasField('submessage'))
self.assertEqual(0, toplevel.submessage.Extensions[
more_extensions_pb2.optional_message_extension].foreign_message_int)
self.assertTrue(not toplevel.HasField('submessage'))
toplevel.submessage.Extensions[
more_extensions_pb2.optional_message_extension].foreign_message_int = 23
self.assertEqual(23, toplevel.submessage.Extensions[
more_extensions_pb2.optional_message_extension].foreign_message_int)
self.assertTrue(toplevel.HasField('submessage'))
# Repeated message extension.
toplevel = more_extensions_pb2.TopLevelMessage()
self.assertTrue(not toplevel.HasField('submessage'))
self.assertEqual(0, len(toplevel.submessage.Extensions[
more_extensions_pb2.repeated_message_extension]))
self.assertTrue(not toplevel.HasField('submessage'))
foreign = toplevel.submessage.Extensions[
more_extensions_pb2.repeated_message_extension].add()
self.assertEqual(foreign, toplevel.submessage.Extensions[
more_extensions_pb2.repeated_message_extension][0])
self.assertTrue(toplevel.HasField('submessage'))
def testDisconnectionAfterClearingEmptyMessage(self):
toplevel = more_extensions_pb2.TopLevelMessage()
extendee_proto = toplevel.submessage
extension = more_extensions_pb2.optional_message_extension
extension_proto = extendee_proto.Extensions[extension]
extendee_proto.ClearExtension(extension)
extension_proto.foreign_message_int = 23
self.assertTrue(extension_proto is not extendee_proto.Extensions[extension])
def testExtensionFailureModes(self):
extendee_proto = unittest_pb2.TestAllExtensions()
# Try non-extension-handle arguments to HasExtension,
# ClearExtension(), and Extensions[]...
self.assertRaises(KeyError, extendee_proto.HasExtension, 1234)
self.assertRaises(KeyError, extendee_proto.ClearExtension, 1234)
self.assertRaises(KeyError, extendee_proto.Extensions.__getitem__, 1234)
self.assertRaises(KeyError, extendee_proto.Extensions.__setitem__, 1234, 5)
# Try something that *is* an extension handle, just not for
# this message...
for unknown_handle in (more_extensions_pb2.optional_int_extension,
more_extensions_pb2.optional_message_extension,
more_extensions_pb2.repeated_int_extension,
more_extensions_pb2.repeated_message_extension):
self.assertRaises(KeyError, extendee_proto.HasExtension,
unknown_handle)
self.assertRaises(KeyError, extendee_proto.ClearExtension,
unknown_handle)
self.assertRaises(KeyError, extendee_proto.Extensions.__getitem__,
unknown_handle)
self.assertRaises(KeyError, extendee_proto.Extensions.__setitem__,
unknown_handle, 5)
    # Try calling HasExtension() with a valid handle, but for a
    # *repeated* field. (Just as with non-extension repeated
    # fields, Has*() isn't supported for extension repeated fields.)
self.assertRaises(KeyError, extendee_proto.HasExtension,
unittest_pb2.repeated_string_extension)
def testStaticParseFrom(self):
proto1 = unittest_pb2.TestAllTypes()
test_util.SetAllFields(proto1)
string1 = proto1.SerializeToString()
proto2 = unittest_pb2.TestAllTypes.FromString(string1)
# Messages should be equal.
self.assertEqual(proto2, proto1)
def testMergeFromSingularField(self):
# Test merge with just a singular field.
proto1 = unittest_pb2.TestAllTypes()
proto1.optional_int32 = 1
proto2 = unittest_pb2.TestAllTypes()
# This shouldn't get overwritten.
proto2.optional_string = 'value'
proto2.MergeFrom(proto1)
self.assertEqual(1, proto2.optional_int32)
self.assertEqual('value', proto2.optional_string)
def testMergeFromRepeatedField(self):
# Test merge with just a repeated field.
proto1 = unittest_pb2.TestAllTypes()
proto1.repeated_int32.append(1)
proto1.repeated_int32.append(2)
proto2 = unittest_pb2.TestAllTypes()
proto2.repeated_int32.append(0)
proto2.MergeFrom(proto1)
self.assertEqual(0, proto2.repeated_int32[0])
self.assertEqual(1, proto2.repeated_int32[1])
self.assertEqual(2, proto2.repeated_int32[2])
def testMergeFromOptionalGroup(self):
# Test merge with an optional group.
proto1 = unittest_pb2.TestAllTypes()
proto1.optionalgroup.a = 12
proto2 = unittest_pb2.TestAllTypes()
proto2.MergeFrom(proto1)
self.assertEqual(12, proto2.optionalgroup.a)
def testMergeFromRepeatedNestedMessage(self):
# Test merge with a repeated nested message.
proto1 = unittest_pb2.TestAllTypes()
m = proto1.repeated_nested_message.add()
m.bb = 123
m = proto1.repeated_nested_message.add()
m.bb = 321
proto2 = unittest_pb2.TestAllTypes()
m = proto2.repeated_nested_message.add()
m.bb = 999
proto2.MergeFrom(proto1)
self.assertEqual(999, proto2.repeated_nested_message[0].bb)
self.assertEqual(123, proto2.repeated_nested_message[1].bb)
self.assertEqual(321, proto2.repeated_nested_message[2].bb)
proto3 = unittest_pb2.TestAllTypes()
proto3.repeated_nested_message.MergeFrom(proto2.repeated_nested_message)
self.assertEqual(999, proto3.repeated_nested_message[0].bb)
self.assertEqual(123, proto3.repeated_nested_message[1].bb)
self.assertEqual(321, proto3.repeated_nested_message[2].bb)
def testMergeFromAllFields(self):
# With all fields set.
proto1 = unittest_pb2.TestAllTypes()
test_util.SetAllFields(proto1)
proto2 = unittest_pb2.TestAllTypes()
proto2.MergeFrom(proto1)
# Messages should be equal.
self.assertEqual(proto2, proto1)
# Serialized string should be equal too.
string1 = proto1.SerializeToString()
string2 = proto2.SerializeToString()
self.assertEqual(string1, string2)
def testMergeFromExtensionsSingular(self):
proto1 = unittest_pb2.TestAllExtensions()
proto1.Extensions[unittest_pb2.optional_int32_extension] = 1
proto2 = unittest_pb2.TestAllExtensions()
proto2.MergeFrom(proto1)
self.assertEqual(
1, proto2.Extensions[unittest_pb2.optional_int32_extension])
def testMergeFromExtensionsRepeated(self):
proto1 = unittest_pb2.TestAllExtensions()
proto1.Extensions[unittest_pb2.repeated_int32_extension].append(1)
proto1.Extensions[unittest_pb2.repeated_int32_extension].append(2)
proto2 = unittest_pb2.TestAllExtensions()
proto2.Extensions[unittest_pb2.repeated_int32_extension].append(0)
proto2.MergeFrom(proto1)
self.assertEqual(
3, len(proto2.Extensions[unittest_pb2.repeated_int32_extension]))
self.assertEqual(
0, proto2.Extensions[unittest_pb2.repeated_int32_extension][0])
self.assertEqual(
1, proto2.Extensions[unittest_pb2.repeated_int32_extension][1])
self.assertEqual(
2, proto2.Extensions[unittest_pb2.repeated_int32_extension][2])
def testMergeFromExtensionsNestedMessage(self):
proto1 = unittest_pb2.TestAllExtensions()
ext1 = proto1.Extensions[
unittest_pb2.repeated_nested_message_extension]
m = ext1.add()
m.bb = 222
m = ext1.add()
m.bb = 333
proto2 = unittest_pb2.TestAllExtensions()
ext2 = proto2.Extensions[
unittest_pb2.repeated_nested_message_extension]
m = ext2.add()
m.bb = 111
proto2.MergeFrom(proto1)
ext2 = proto2.Extensions[
unittest_pb2.repeated_nested_message_extension]
self.assertEqual(3, len(ext2))
self.assertEqual(111, ext2[0].bb)
self.assertEqual(222, ext2[1].bb)
self.assertEqual(333, ext2[2].bb)
def testMergeFromBug(self):
message1 = unittest_pb2.TestAllTypes()
message2 = unittest_pb2.TestAllTypes()
# Cause optional_nested_message to be instantiated within message1, even
# though it is not considered to be "present".
message1.optional_nested_message
self.assertFalse(message1.HasField('optional_nested_message'))
    # Merge into message2. This should not instantiate the field in message2.
message2.MergeFrom(message1)
self.assertFalse(message2.HasField('optional_nested_message'))
def testCopyFromSingularField(self):
# Test copy with just a singular field.
proto1 = unittest_pb2.TestAllTypes()
proto1.optional_int32 = 1
proto1.optional_string = 'important-text'
proto2 = unittest_pb2.TestAllTypes()
proto2.optional_string = 'value'
proto2.CopyFrom(proto1)
self.assertEqual(1, proto2.optional_int32)
self.assertEqual('important-text', proto2.optional_string)
def testCopyFromRepeatedField(self):
# Test copy with a repeated field.
proto1 = unittest_pb2.TestAllTypes()
proto1.repeated_int32.append(1)
proto1.repeated_int32.append(2)
proto2 = unittest_pb2.TestAllTypes()
proto2.repeated_int32.append(0)
proto2.CopyFrom(proto1)
self.assertEqual(1, proto2.repeated_int32[0])
self.assertEqual(2, proto2.repeated_int32[1])
def testCopyFromAllFields(self):
# With all fields set.
proto1 = unittest_pb2.TestAllTypes()
test_util.SetAllFields(proto1)
proto2 = unittest_pb2.TestAllTypes()
proto2.CopyFrom(proto1)
# Messages should be equal.
self.assertEqual(proto2, proto1)
# Serialized string should be equal too.
string1 = proto1.SerializeToString()
string2 = proto2.SerializeToString()
self.assertEqual(string1, string2)
def testCopyFromSelf(self):
proto1 = unittest_pb2.TestAllTypes()
proto1.repeated_int32.append(1)
proto1.optional_int32 = 2
proto1.optional_string = 'important-text'
proto1.CopyFrom(proto1)
self.assertEqual(1, proto1.repeated_int32[0])
self.assertEqual(2, proto1.optional_int32)
self.assertEqual('important-text', proto1.optional_string)
def testCopyFromBadType(self):
# The python implementation doesn't raise an exception in this
# case. In theory it should.
if api_implementation.Type() == 'python':
return
proto1 = unittest_pb2.TestAllTypes()
proto2 = unittest_pb2.TestAllExtensions()
self.assertRaises(TypeError, proto1.CopyFrom, proto2)
def testDeepCopy(self):
proto1 = unittest_pb2.TestAllTypes()
proto1.optional_int32 = 1
proto2 = copy.deepcopy(proto1)
self.assertEqual(1, proto2.optional_int32)
proto1.repeated_int32.append(2)
proto1.repeated_int32.append(3)
container = copy.deepcopy(proto1.repeated_int32)
self.assertEqual([2, 3], container)
# TODO(anuraag): Implement deepcopy for repeated composite / extension dict
def testClear(self):
proto = unittest_pb2.TestAllTypes()
# C++ implementation does not support lazy fields right now so leave it
# out for now.
if api_implementation.Type() == 'python':
test_util.SetAllFields(proto)
else:
test_util.SetAllNonLazyFields(proto)
# Clear the message.
proto.Clear()
self.assertEqual(proto.ByteSize(), 0)
empty_proto = unittest_pb2.TestAllTypes()
self.assertEqual(proto, empty_proto)
# Test if extensions which were set are cleared.
proto = unittest_pb2.TestAllExtensions()
test_util.SetAllExtensions(proto)
# Clear the message.
proto.Clear()
self.assertEqual(proto.ByteSize(), 0)
empty_proto = unittest_pb2.TestAllExtensions()
self.assertEqual(proto, empty_proto)
def testDisconnectingBeforeClear(self):
proto = unittest_pb2.TestAllTypes()
nested = proto.optional_nested_message
proto.Clear()
self.assertTrue(nested is not proto.optional_nested_message)
nested.bb = 23
self.assertTrue(not proto.HasField('optional_nested_message'))
self.assertEqual(0, proto.optional_nested_message.bb)
proto = unittest_pb2.TestAllTypes()
nested = proto.optional_nested_message
nested.bb = 5
foreign = proto.optional_foreign_message
foreign.c = 6
proto.Clear()
self.assertTrue(nested is not proto.optional_nested_message)
self.assertTrue(foreign is not proto.optional_foreign_message)
self.assertEqual(5, nested.bb)
self.assertEqual(6, foreign.c)
nested.bb = 15
foreign.c = 16
self.assertFalse(proto.HasField('optional_nested_message'))
self.assertEqual(0, proto.optional_nested_message.bb)
self.assertFalse(proto.HasField('optional_foreign_message'))
self.assertEqual(0, proto.optional_foreign_message.c)
def testOneOf(self):
proto = unittest_pb2.TestAllTypes()
proto.oneof_uint32 = 10
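    # Assigning to another member of the same oneof below implicitly
    # clears oneof_uint32, since oneof members share a single slot.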
proto.oneof_nested_message.bb = 11
self.assertEqual(11, proto.oneof_nested_message.bb)
self.assertFalse(proto.HasField('oneof_uint32'))
nested = proto.oneof_nested_message
proto.oneof_string = 'abc'
self.assertEqual('abc', proto.oneof_string)
self.assertEqual(11, nested.bb)
self.assertFalse(proto.HasField('oneof_nested_message'))
def assertInitialized(self, proto):
self.assertTrue(proto.IsInitialized())
# Neither method should raise an exception.
proto.SerializeToString()
proto.SerializePartialToString()
def assertNotInitialized(self, proto):
self.assertFalse(proto.IsInitialized())
self.assertRaises(message.EncodeError, proto.SerializeToString)
# "Partial" serialization doesn't care if message is uninitialized.
proto.SerializePartialToString()
def testIsInitialized(self):
# Trivial cases - all optional fields and extensions.
proto = unittest_pb2.TestAllTypes()
self.assertInitialized(proto)
proto = unittest_pb2.TestAllExtensions()
self.assertInitialized(proto)
# The case of uninitialized required fields.
proto = unittest_pb2.TestRequired()
self.assertNotInitialized(proto)
proto.a = proto.b = proto.c = 2
self.assertInitialized(proto)
# The case of uninitialized submessage.
proto = unittest_pb2.TestRequiredForeign()
self.assertInitialized(proto)
proto.optional_message.a = 1
self.assertNotInitialized(proto)
proto.optional_message.b = 0
proto.optional_message.c = 0
self.assertInitialized(proto)
# Uninitialized repeated submessage.
message1 = proto.repeated_message.add()
self.assertNotInitialized(proto)
message1.a = message1.b = message1.c = 0
self.assertInitialized(proto)
# Uninitialized repeated group in an extension.
proto = unittest_pb2.TestAllExtensions()
extension = unittest_pb2.TestRequired.multi
message1 = proto.Extensions[extension].add()
message2 = proto.Extensions[extension].add()
self.assertNotInitialized(proto)
message1.a = 1
message1.b = 1
message1.c = 1
self.assertNotInitialized(proto)
message2.a = 2
message2.b = 2
message2.c = 2
self.assertInitialized(proto)
# Uninitialized nonrepeated message in an extension.
proto = unittest_pb2.TestAllExtensions()
extension = unittest_pb2.TestRequired.single
proto.Extensions[extension].a = 1
self.assertNotInitialized(proto)
proto.Extensions[extension].b = 2
proto.Extensions[extension].c = 3
self.assertInitialized(proto)
# Try passing an errors list.
errors = []
proto = unittest_pb2.TestRequired()
self.assertFalse(proto.IsInitialized(errors))
self.assertEqual(errors, ['a', 'b', 'c'])
@unittest.skipIf(
api_implementation.Type() != 'cpp' or api_implementation.Version() != 2,
'Errors are only available from the most recent C++ implementation.')
def testFileDescriptorErrors(self):
file_name = 'test_file_descriptor_errors.proto'
package_name = 'test_file_descriptor_errors.proto'
file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
file_descriptor_proto.name = file_name
file_descriptor_proto.package = package_name
m1 = file_descriptor_proto.message_type.add()
m1.name = 'msg1'
# Compiles the proto into the C++ descriptor pool
descriptor.FileDescriptor(
file_name,
package_name,
serialized_pb=file_descriptor_proto.SerializeToString())
# Add a FileDescriptorProto that has duplicate symbols
another_file_name = 'another_test_file_descriptor_errors.proto'
file_descriptor_proto.name = another_file_name
m2 = file_descriptor_proto.message_type.add()
m2.name = 'msg2'
with self.assertRaises(TypeError) as cm:
descriptor.FileDescriptor(
another_file_name,
package_name,
serialized_pb=file_descriptor_proto.SerializeToString())
self.assertTrue(hasattr(cm, 'exception'), '%s not raised' %
getattr(cm.expected, '__name__', cm.expected))
self.assertIn('test_file_descriptor_errors.proto', str(cm.exception))
# Error message will say something about this definition being a
# duplicate, though we don't check the message exactly to avoid a
# dependency on the C++ logging code.
self.assertIn('test_file_descriptor_errors.msg1', str(cm.exception))
def testStringUTF8Encoding(self):
proto = unittest_pb2.TestAllTypes()
# Assignment of a unicode object to a field of type 'bytes' is not allowed.
self.assertRaises(TypeError,
setattr, proto, 'optional_bytes', u'unicode object')
# Check that the default value is of python's 'unicode' type.
self.assertEqual(type(proto.optional_string), six.text_type)
proto.optional_string = six.text_type('Testing')
self.assertEqual(proto.optional_string, str('Testing'))
# Assign a value of type 'str' which can be encoded in UTF-8.
proto.optional_string = str('Testing')
self.assertEqual(proto.optional_string, six.text_type('Testing'))
# Try to assign a 'bytes' object which contains non-UTF-8.
self.assertRaises(ValueError,
setattr, proto, 'optional_string', b'a\x80a')
# No exception: Assign already encoded UTF-8 bytes to a string field.
utf8_bytes = u'Тест'.encode('utf-8')
proto.optional_string = utf8_bytes
    # No exception: Assign a non-ascii unicode object.
proto.optional_string = u'Тест'
# No exception thrown (normal str assignment containing ASCII).
proto.optional_string = 'abc'
def testStringUTF8Serialization(self):
proto = message_set_extensions_pb2.TestMessageSet()
extension_message = message_set_extensions_pb2.TestMessageSetExtension2
extension = extension_message.message_set_extension
test_utf8 = u'Тест'
test_utf8_bytes = test_utf8.encode('utf-8')
# 'Test' in another language, using UTF-8 charset.
proto.Extensions[extension].str = test_utf8
# Serialize using the MessageSet wire format (this is specified in the
# .proto file).
serialized = proto.SerializeToString()
# Check byte size.
self.assertEqual(proto.ByteSize(), len(serialized))
raw = unittest_mset_pb2.RawMessageSet()
bytes_read = raw.MergeFromString(serialized)
self.assertEqual(len(serialized), bytes_read)
message2 = message_set_extensions_pb2.TestMessageSetExtension2()
self.assertEqual(1, len(raw.item))
    # Check that the type_id matches the extension's field number declared
    # in the .proto file.
self.assertEqual(raw.item[0].type_id, 98418634)
# Check the actual bytes on the wire.
self.assertTrue(raw.item[0].message.endswith(test_utf8_bytes))
bytes_read = message2.MergeFromString(raw.item[0].message)
self.assertEqual(len(raw.item[0].message), bytes_read)
self.assertEqual(type(message2.str), six.text_type)
self.assertEqual(message2.str, test_utf8)
# The pure Python API throws an exception on MergeFromString(),
# if any of the string fields of the message can't be UTF-8 decoded.
# The C++ implementation of the API has no way to check that on
# MergeFromString and thus has no way to throw the exception.
#
# The pure Python API always returns objects of type 'unicode' (UTF-8
    # encoded), or 'bytes' (in 7-bit ASCII).
badbytes = raw.item[0].message.replace(
test_utf8_bytes, len(test_utf8_bytes) * b'\xff')
unicode_decode_failed = False
try:
message2.MergeFromString(badbytes)
except UnicodeDecodeError:
unicode_decode_failed = True
string_field = message2.str
self.assertTrue(unicode_decode_failed or type(string_field) is bytes)
def testBytesInTextFormat(self):
proto = unittest_pb2.TestAllTypes(optional_bytes=b'\x00\x7f\x80\xff')
self.assertEqual(u'optional_bytes: "\\000\\177\\200\\377"\n',
six.text_type(proto))
def testEmptyNestedMessage(self):
proto = unittest_pb2.TestAllTypes()
proto.optional_nested_message.MergeFrom(
unittest_pb2.TestAllTypes.NestedMessage())
self.assertTrue(proto.HasField('optional_nested_message'))
proto = unittest_pb2.TestAllTypes()
proto.optional_nested_message.CopyFrom(
unittest_pb2.TestAllTypes.NestedMessage())
self.assertTrue(proto.HasField('optional_nested_message'))
proto = unittest_pb2.TestAllTypes()
bytes_read = proto.optional_nested_message.MergeFromString(b'')
self.assertEqual(0, bytes_read)
self.assertTrue(proto.HasField('optional_nested_message'))
proto = unittest_pb2.TestAllTypes()
proto.optional_nested_message.ParseFromString(b'')
self.assertTrue(proto.HasField('optional_nested_message'))
serialized = proto.SerializeToString()
proto2 = unittest_pb2.TestAllTypes()
self.assertEqual(
len(serialized),
proto2.MergeFromString(serialized))
self.assertTrue(proto2.HasField('optional_nested_message'))
def testSetInParent(self):
proto = unittest_pb2.TestAllTypes()
self.assertFalse(proto.HasField('optionalgroup'))
proto.optionalgroup.SetInParent()
self.assertTrue(proto.HasField('optionalgroup'))
def testPackageInitializationImport(self):
"""Test that we can import nested messages from their __init__.py.
    Such a setup is not trivial, since while __init__.py is being processed one
    can't refer to its submodules by name in code, so expressions like
    google.protobuf.internal.import_test_package.inner_pb2
    don't work. They do work in imports, so we have to assign an alias at
    import and then use that alias in generated code.
"""
# We import here since it's the import that used to fail, and we want
# the failure to have the right context.
# pylint: disable=g-import-not-at-top
from google.protobuf.internal import import_test_package
# pylint: enable=g-import-not-at-top
msg = import_test_package.myproto.Outer()
# Just check the default value.
self.assertEqual(57, msg.inner.value)
# Since we had so many tests for protocol buffer equality, we broke these out
# into separate TestCase classes.
class TestAllTypesEqualityTest(unittest.TestCase):
def setUp(self):
self.first_proto = unittest_pb2.TestAllTypes()
self.second_proto = unittest_pb2.TestAllTypes()
def testNotHashable(self):
self.assertRaises(TypeError, hash, self.first_proto)
def testSelfEquality(self):
self.assertEqual(self.first_proto, self.first_proto)
def testEmptyProtosEqual(self):
self.assertEqual(self.first_proto, self.second_proto)
class FullProtosEqualityTest(unittest.TestCase):
"""Equality tests using completely-full protos as a starting point."""
def setUp(self):
self.first_proto = unittest_pb2.TestAllTypes()
self.second_proto = unittest_pb2.TestAllTypes()
test_util.SetAllFields(self.first_proto)
test_util.SetAllFields(self.second_proto)
def testNotHashable(self):
self.assertRaises(TypeError, hash, self.first_proto)
def testNoneNotEqual(self):
self.assertNotEqual(self.first_proto, None)
self.assertNotEqual(None, self.second_proto)
def testNotEqualToOtherMessage(self):
third_proto = unittest_pb2.TestRequired()
self.assertNotEqual(self.first_proto, third_proto)
self.assertNotEqual(third_proto, self.second_proto)
def testAllFieldsFilledEquality(self):
self.assertEqual(self.first_proto, self.second_proto)
def testNonRepeatedScalar(self):
# Nonrepeated scalar field change should cause inequality.
self.first_proto.optional_int32 += 1
self.assertNotEqual(self.first_proto, self.second_proto)
# ...as should clearing a field.
self.first_proto.ClearField('optional_int32')
self.assertNotEqual(self.first_proto, self.second_proto)
def testNonRepeatedComposite(self):
# Change a nonrepeated composite field.
self.first_proto.optional_nested_message.bb += 1
self.assertNotEqual(self.first_proto, self.second_proto)
self.first_proto.optional_nested_message.bb -= 1
self.assertEqual(self.first_proto, self.second_proto)
# Clear a field in the nested message.
self.first_proto.optional_nested_message.ClearField('bb')
self.assertNotEqual(self.first_proto, self.second_proto)
self.first_proto.optional_nested_message.bb = (
self.second_proto.optional_nested_message.bb)
self.assertEqual(self.first_proto, self.second_proto)
# Remove the nested message entirely.
self.first_proto.ClearField('optional_nested_message')
self.assertNotEqual(self.first_proto, self.second_proto)
def testRepeatedScalar(self):
# Change a repeated scalar field.
self.first_proto.repeated_int32.append(5)
self.assertNotEqual(self.first_proto, self.second_proto)
self.first_proto.ClearField('repeated_int32')
self.assertNotEqual(self.first_proto, self.second_proto)
def testRepeatedComposite(self):
# Change value within a repeated composite field.
self.first_proto.repeated_nested_message[0].bb += 1
self.assertNotEqual(self.first_proto, self.second_proto)
self.first_proto.repeated_nested_message[0].bb -= 1
self.assertEqual(self.first_proto, self.second_proto)
# Add a value to a repeated composite field.
self.first_proto.repeated_nested_message.add()
self.assertNotEqual(self.first_proto, self.second_proto)
self.second_proto.repeated_nested_message.add()
self.assertEqual(self.first_proto, self.second_proto)
def testNonRepeatedScalarHasBits(self):
# Ensure that we test "has" bits as well as value for
# nonrepeated scalar field.
self.first_proto.ClearField('optional_int32')
self.second_proto.optional_int32 = 0
self.assertNotEqual(self.first_proto, self.second_proto)
def testNonRepeatedCompositeHasBits(self):
# Ensure that we test "has" bits as well as value for
# nonrepeated composite field.
self.first_proto.ClearField('optional_nested_message')
self.second_proto.optional_nested_message.ClearField('bb')
self.assertNotEqual(self.first_proto, self.second_proto)
self.first_proto.optional_nested_message.bb = 0
self.first_proto.optional_nested_message.ClearField('bb')
self.assertEqual(self.first_proto, self.second_proto)
class ExtensionEqualityTest(unittest.TestCase):
def testExtensionEquality(self):
first_proto = unittest_pb2.TestAllExtensions()
second_proto = unittest_pb2.TestAllExtensions()
self.assertEqual(first_proto, second_proto)
test_util.SetAllExtensions(first_proto)
self.assertNotEqual(first_proto, second_proto)
test_util.SetAllExtensions(second_proto)
self.assertEqual(first_proto, second_proto)
# Ensure that we check value equality.
first_proto.Extensions[unittest_pb2.optional_int32_extension] += 1
self.assertNotEqual(first_proto, second_proto)
first_proto.Extensions[unittest_pb2.optional_int32_extension] -= 1
self.assertEqual(first_proto, second_proto)
# Ensure that we also look at "has" bits.
first_proto.ClearExtension(unittest_pb2.optional_int32_extension)
second_proto.Extensions[unittest_pb2.optional_int32_extension] = 0
self.assertNotEqual(first_proto, second_proto)
first_proto.Extensions[unittest_pb2.optional_int32_extension] = 0
self.assertEqual(first_proto, second_proto)
# Ensure that differences in cached values
# don't matter if "has" bits are both false.
first_proto = unittest_pb2.TestAllExtensions()
second_proto = unittest_pb2.TestAllExtensions()
self.assertEqual(
0, first_proto.Extensions[unittest_pb2.optional_int32_extension])
self.assertEqual(first_proto, second_proto)
class MutualRecursionEqualityTest(unittest.TestCase):
def testEqualityWithMutualRecursion(self):
first_proto = unittest_pb2.TestMutualRecursionA()
second_proto = unittest_pb2.TestMutualRecursionA()
self.assertEqual(first_proto, second_proto)
first_proto.bb.a.bb.optional_int32 = 23
self.assertNotEqual(first_proto, second_proto)
second_proto.bb.a.bb.optional_int32 = 23
self.assertEqual(first_proto, second_proto)
class ByteSizeTest(unittest.TestCase):
def setUp(self):
self.proto = unittest_pb2.TestAllTypes()
self.extended_proto = more_extensions_pb2.ExtendedMessage()
self.packed_proto = unittest_pb2.TestPackedTypes()
self.packed_extended_proto = unittest_pb2.TestPackedExtensions()
def Size(self):
return self.proto.ByteSize()
def testEmptyMessage(self):
self.assertEqual(0, self.proto.ByteSize())
def testSizedOnKwargs(self):
# Use a separate message to ensure testing right after creation.
proto = unittest_pb2.TestAllTypes()
self.assertEqual(0, proto.ByteSize())
    proto_kwargs = unittest_pb2.TestAllTypes(optional_int64=1)
# One byte for the tag, one to encode varint 1.
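    # (optional_int64 is field number 2, so its tag varint is
    # (2 << 3) | 0 = 16, which fits in a single byte.)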
self.assertEqual(2, proto_kwargs.ByteSize())
def testVarints(self):
def Test(i, expected_varint_size):
self.proto.Clear()
self.proto.optional_int64 = i
# Add one to the varint size for the tag info
# for tag 1.
self.assertEqual(expected_varint_size + 1, self.Size())
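    # For reference: varints carry 7 payload bits per byte, so a
    # nonnegative value v needs max(1, ceil(v.bit_length() / 7)) bytes,
    # and negative int32/int64 values are sign-extended to 64 bits on
    # the wire, costing the full 10 bytes.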
Test(0, 1)
Test(1, 1)
for i, num_bytes in zip(range(7, 63, 7), range(1, 10000)):
Test((1 << i) - 1, num_bytes)
Test(-1, 10)
Test(-2, 10)
Test(-(1 << 63), 10)
def testStrings(self):
self.proto.optional_string = ''
# Need one byte for tag info (tag #14), and one byte for length.
self.assertEqual(2, self.Size())
self.proto.optional_string = 'abc'
# Need one byte for tag info (tag #14), and one byte for length.
self.assertEqual(2 + len(self.proto.optional_string), self.Size())
self.proto.optional_string = 'x' * 128
# Need one byte for tag info (tag #14), and TWO bytes for length.
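    # (The length prefix is itself a varint: lengths 0-127 fit in one
    # byte, lengths 128-16383 need two.)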
self.assertEqual(3 + len(self.proto.optional_string), self.Size())
def testOtherNumerics(self):
self.proto.optional_fixed32 = 1234
# One byte for tag and 4 bytes for fixed32.
self.assertEqual(5, self.Size())
self.proto = unittest_pb2.TestAllTypes()
self.proto.optional_fixed64 = 1234
# One byte for tag and 8 bytes for fixed64.
self.assertEqual(9, self.Size())
self.proto = unittest_pb2.TestAllTypes()
self.proto.optional_float = 1.234
# One byte for tag and 4 bytes for float.
self.assertEqual(5, self.Size())
self.proto = unittest_pb2.TestAllTypes()
self.proto.optional_double = 1.234
# One byte for tag and 8 bytes for float.
self.assertEqual(9, self.Size())
self.proto = unittest_pb2.TestAllTypes()
self.proto.optional_sint32 = 64
# One byte for tag and 2 bytes for zig-zag-encoded 64.
self.assertEqual(3, self.Size())
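    # (sint32 is zig-zag encoded: n -> (n << 1) ^ (n >> 31), so 64 maps
    # to 128, which needs a two-byte varint.)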
self.proto = unittest_pb2.TestAllTypes()
def testComposites(self):
# 3 bytes.
self.proto.optional_nested_message.bb = (1 << 14)
# Plus one byte for bb tag.
# Plus 1 byte for optional_nested_message serialized size.
# Plus two bytes for optional_nested_message tag.
self.assertEqual(3 + 1 + 1 + 2, self.Size())
def testGroups(self):
# 4 bytes.
self.proto.optionalgroup.a = (1 << 21)
# Plus two bytes for |a| tag.
# Plus 2 * two bytes for START_GROUP and END_GROUP tags.
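    # (A tag varint encodes (field_number << 3) | wire_type; optionalgroup
    # is field 16, so its START_GROUP and END_GROUP tags are 131 and 132,
    # two bytes each, and the inner |a| field (17) has tag 136, also two
    # bytes.)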
self.assertEqual(4 + 2 + 2*2, self.Size())
def testRepeatedScalars(self):
self.proto.repeated_int32.append(10) # 1 byte.
self.proto.repeated_int32.append(128) # 2 bytes.
# Also need 2 bytes for each entry for tag.
self.assertEqual(1 + 2 + 2*2, self.Size())
def testRepeatedScalarsExtend(self):
self.proto.repeated_int32.extend([10, 128]) # 3 bytes.
# Also need 2 bytes for each entry for tag.
self.assertEqual(1 + 2 + 2*2, self.Size())
def testRepeatedScalarsRemove(self):
self.proto.repeated_int32.append(10) # 1 byte.
self.proto.repeated_int32.append(128) # 2 bytes.
# Also need 2 bytes for each entry for tag.
self.assertEqual(1 + 2 + 2*2, self.Size())
self.proto.repeated_int32.remove(128)
self.assertEqual(1 + 2, self.Size())
def testRepeatedComposites(self):
# Empty message. 2 bytes tag plus 1 byte length.
foreign_message_0 = self.proto.repeated_nested_message.add()
    # 2 bytes tag plus 1 byte length plus 1 byte bb tag plus 1 byte int.
foreign_message_1 = self.proto.repeated_nested_message.add()
foreign_message_1.bb = 7
self.assertEqual(2 + 1 + 2 + 1 + 1 + 1, self.Size())
def testRepeatedCompositesDelete(self):
# Empty message. 2 bytes tag plus 1 byte length.
foreign_message_0 = self.proto.repeated_nested_message.add()
    # 2 bytes tag plus 1 byte length plus 1 byte bb tag plus 1 byte int.
foreign_message_1 = self.proto.repeated_nested_message.add()
foreign_message_1.bb = 9
self.assertEqual(2 + 1 + 2 + 1 + 1 + 1, self.Size())
    # 2 bytes tag plus 1 byte length plus 1 byte bb tag plus 1 byte int.
del self.proto.repeated_nested_message[0]
self.assertEqual(2 + 1 + 1 + 1, self.Size())
# Now add a new message.
foreign_message_2 = self.proto.repeated_nested_message.add()
foreign_message_2.bb = 12
    # 2 bytes tag plus 1 byte length plus 1 byte bb tag plus 1 byte int.
    # 2 bytes tag plus 1 byte length plus 1 byte bb tag plus 1 byte int.
self.assertEqual(2 + 1 + 1 + 1 + 2 + 1 + 1 + 1, self.Size())
    # 2 bytes tag plus 1 byte length plus 1 byte bb tag plus 1 byte int.
del self.proto.repeated_nested_message[1]
self.assertEqual(2 + 1 + 1 + 1, self.Size())
del self.proto.repeated_nested_message[0]
self.assertEqual(0, self.Size())
def testRepeatedGroups(self):
# 2-byte START_GROUP plus 2-byte END_GROUP.
group_0 = self.proto.repeatedgroup.add()
# 2-byte START_GROUP plus 2-byte |a| tag + 1-byte |a|
# plus 2-byte END_GROUP.
group_1 = self.proto.repeatedgroup.add()
group_1.a = 7
self.assertEqual(2 + 2 + 2 + 2 + 1 + 2, self.Size())
def testExtensions(self):
proto = unittest_pb2.TestAllExtensions()
self.assertEqual(0, proto.ByteSize())
extension = unittest_pb2.optional_int32_extension # Field #1, 1 byte.
proto.Extensions[extension] = 23
# 1 byte for tag, 1 byte for value.
self.assertEqual(2, proto.ByteSize())
def testCacheInvalidationForNonrepeatedScalar(self):
# Test non-extension.
self.proto.optional_int32 = 1
self.assertEqual(2, self.proto.ByteSize())
self.proto.optional_int32 = 128
self.assertEqual(3, self.proto.ByteSize())
self.proto.ClearField('optional_int32')
self.assertEqual(0, self.proto.ByteSize())
# Test within extension.
extension = more_extensions_pb2.optional_int_extension
self.extended_proto.Extensions[extension] = 1
self.assertEqual(2, self.extended_proto.ByteSize())
self.extended_proto.Extensions[extension] = 128
self.assertEqual(3, self.extended_proto.ByteSize())
self.extended_proto.ClearExtension(extension)
self.assertEqual(0, self.extended_proto.ByteSize())
def testCacheInvalidationForRepeatedScalar(self):
# Test non-extension.
self.proto.repeated_int32.append(1)
self.assertEqual(3, self.proto.ByteSize())
self.proto.repeated_int32.append(1)
self.assertEqual(6, self.proto.ByteSize())
self.proto.repeated_int32[1] = 128
self.assertEqual(7, self.proto.ByteSize())
self.proto.ClearField('repeated_int32')
self.assertEqual(0, self.proto.ByteSize())
# Test within extension.
extension = more_extensions_pb2.repeated_int_extension
repeated = self.extended_proto.Extensions[extension]
repeated.append(1)
self.assertEqual(2, self.extended_proto.ByteSize())
repeated.append(1)
self.assertEqual(4, self.extended_proto.ByteSize())
repeated[1] = 128
self.assertEqual(5, self.extended_proto.ByteSize())
self.extended_proto.ClearExtension(extension)
self.assertEqual(0, self.extended_proto.ByteSize())
def testCacheInvalidationForNonrepeatedMessage(self):
# Test non-extension.
self.proto.optional_foreign_message.c = 1
self.assertEqual(5, self.proto.ByteSize())
self.proto.optional_foreign_message.c = 128
self.assertEqual(6, self.proto.ByteSize())
self.proto.optional_foreign_message.ClearField('c')
self.assertEqual(3, self.proto.ByteSize())
self.proto.ClearField('optional_foreign_message')
self.assertEqual(0, self.proto.ByteSize())
if api_implementation.Type() == 'python':
# This is only possible in pure-Python implementation of the API.
child = self.proto.optional_foreign_message
self.proto.ClearField('optional_foreign_message')
child.c = 128
self.assertEqual(0, self.proto.ByteSize())
# Test within extension.
extension = more_extensions_pb2.optional_message_extension
child = self.extended_proto.Extensions[extension]
self.assertEqual(0, self.extended_proto.ByteSize())
child.foreign_message_int = 1
self.assertEqual(4, self.extended_proto.ByteSize())
child.foreign_message_int = 128
self.assertEqual(5, self.extended_proto.ByteSize())
self.extended_proto.ClearExtension(extension)
self.assertEqual(0, self.extended_proto.ByteSize())
def testCacheInvalidationForRepeatedMessage(self):
# Test non-extension.
child0 = self.proto.repeated_foreign_message.add()
self.assertEqual(3, self.proto.ByteSize())
self.proto.repeated_foreign_message.add()
self.assertEqual(6, self.proto.ByteSize())
child0.c = 1
self.assertEqual(8, self.proto.ByteSize())
self.proto.ClearField('repeated_foreign_message')
self.assertEqual(0, self.proto.ByteSize())
# Test within extension.
extension = more_extensions_pb2.repeated_message_extension
child_list = self.extended_proto.Extensions[extension]
child0 = child_list.add()
self.assertEqual(2, self.extended_proto.ByteSize())
child_list.add()
self.assertEqual(4, self.extended_proto.ByteSize())
child0.foreign_message_int = 1
self.assertEqual(6, self.extended_proto.ByteSize())
child0.ClearField('foreign_message_int')
self.assertEqual(4, self.extended_proto.ByteSize())
self.extended_proto.ClearExtension(extension)
self.assertEqual(0, self.extended_proto.ByteSize())
def testPackedRepeatedScalars(self):
self.assertEqual(0, self.packed_proto.ByteSize())
self.packed_proto.packed_int32.append(10) # 1 byte.
self.packed_proto.packed_int32.append(128) # 2 bytes.
# The tag is 2 bytes (the field number is 90), and the varint
# storing the length is 1 byte.
int_size = 1 + 2 + 3
self.assertEqual(int_size, self.packed_proto.ByteSize())
self.packed_proto.packed_double.append(4.2) # 8 bytes
self.packed_proto.packed_double.append(3.25) # 8 bytes
# 2 more tag bytes, 1 more length byte.
double_size = 8 + 8 + 3
self.assertEqual(int_size+double_size, self.packed_proto.ByteSize())
self.packed_proto.ClearField('packed_int32')
self.assertEqual(double_size, self.packed_proto.ByteSize())
def testPackedExtensions(self):
self.assertEqual(0, self.packed_extended_proto.ByteSize())
extension = self.packed_extended_proto.Extensions[
unittest_pb2.packed_fixed32_extension]
extension.extend([1, 2, 3, 4]) # 16 bytes
    # Two bytes for the tag plus one byte for the length.
self.assertEqual(19, self.packed_extended_proto.ByteSize())
# Issues to be sure to cover include:
# * Handling of unrecognized tags ("uninterpreted_bytes").
# * Handling of MessageSets.
# * Consistent ordering of tags in the wire format,
# including ordering between extensions and non-extension
# fields.
# * Consistent serialization of negative numbers, especially
# negative int32s.
# * Handling of empty submessages (with and without "has"
# bits set).
class SerializationTest(unittest.TestCase):
  def testSerializeEmptyMessage(self):
first_proto = unittest_pb2.TestAllTypes()
second_proto = unittest_pb2.TestAllTypes()
serialized = first_proto.SerializeToString()
self.assertEqual(first_proto.ByteSize(), len(serialized))
self.assertEqual(
len(serialized),
second_proto.MergeFromString(serialized))
self.assertEqual(first_proto, second_proto)
def testSerializeAllFields(self):
first_proto = unittest_pb2.TestAllTypes()
second_proto = unittest_pb2.TestAllTypes()
test_util.SetAllFields(first_proto)
serialized = first_proto.SerializeToString()
self.assertEqual(first_proto.ByteSize(), len(serialized))
self.assertEqual(
len(serialized),
second_proto.MergeFromString(serialized))
self.assertEqual(first_proto, second_proto)
def testSerializeAllExtensions(self):
first_proto = unittest_pb2.TestAllExtensions()
second_proto = unittest_pb2.TestAllExtensions()
test_util.SetAllExtensions(first_proto)
serialized = first_proto.SerializeToString()
self.assertEqual(
len(serialized),
second_proto.MergeFromString(serialized))
self.assertEqual(first_proto, second_proto)
def testSerializeWithOptionalGroup(self):
first_proto = unittest_pb2.TestAllTypes()
second_proto = unittest_pb2.TestAllTypes()
first_proto.optionalgroup.a = 242
serialized = first_proto.SerializeToString()
self.assertEqual(
len(serialized),
second_proto.MergeFromString(serialized))
self.assertEqual(first_proto, second_proto)
def testSerializeNegativeValues(self):
first_proto = unittest_pb2.TestAllTypes()
first_proto.optional_int32 = -1
first_proto.optional_int64 = -(2 << 40)
first_proto.optional_sint32 = -3
first_proto.optional_sint64 = -(4 << 40)
first_proto.optional_sfixed32 = -5
first_proto.optional_sfixed64 = -(6 << 40)
second_proto = unittest_pb2.TestAllTypes.FromString(
first_proto.SerializeToString())
self.assertEqual(first_proto, second_proto)
def testParseTruncated(self):
# This test is only applicable for the Python implementation of the API.
if api_implementation.Type() != 'python':
return
first_proto = unittest_pb2.TestAllTypes()
test_util.SetAllFields(first_proto)
serialized = first_proto.SerializeToString()
for truncation_point in range(len(serialized) + 1):
try:
second_proto = unittest_pb2.TestAllTypes()
unknown_fields = unittest_pb2.TestEmptyMessage()
pos = second_proto._InternalParse(serialized, 0, truncation_point)
# If we didn't raise an error then we read exactly the amount expected.
self.assertEqual(truncation_point, pos)
# Parsing to unknown fields should not throw if parsing to known fields
# did not.
try:
pos2 = unknown_fields._InternalParse(serialized, 0, truncation_point)
self.assertEqual(truncation_point, pos2)
except message.DecodeError:
self.fail('Parsing unknown fields failed when parsing known fields '
'did not.')
except message.DecodeError:
# Parsing unknown fields should also fail.
self.assertRaises(message.DecodeError, unknown_fields._InternalParse,
serialized, 0, truncation_point)
def testCanonicalSerializationOrder(self):
proto = more_messages_pb2.OutOfOrderFields()
    # The values assigned below match the fields' tag numbers. Even though
    # we're setting them in reverse tag order AND they're listed in reverse
    # tag order in the .proto file, they should nonetheless be serialized
    # in tag order.
proto.optional_sint32 = 5
proto.Extensions[more_messages_pb2.optional_uint64] = 4
proto.optional_uint32 = 3
proto.Extensions[more_messages_pb2.optional_int64] = 2
proto.optional_int32 = 1
serialized = proto.SerializeToString()
self.assertEqual(proto.ByteSize(), len(serialized))
d = _MiniDecoder(serialized)
ReadTag = d.ReadFieldNumberAndWireType
self.assertEqual((1, wire_format.WIRETYPE_VARINT), ReadTag())
self.assertEqual(1, d.ReadInt32())
self.assertEqual((2, wire_format.WIRETYPE_VARINT), ReadTag())
self.assertEqual(2, d.ReadInt64())
self.assertEqual((3, wire_format.WIRETYPE_VARINT), ReadTag())
self.assertEqual(3, d.ReadUInt32())
self.assertEqual((4, wire_format.WIRETYPE_VARINT), ReadTag())
self.assertEqual(4, d.ReadUInt64())
self.assertEqual((5, wire_format.WIRETYPE_VARINT), ReadTag())
self.assertEqual(5, d.ReadSInt32())
def testCanonicalSerializationOrderSameAsCpp(self):
# Copy of the same test we use for C++.
proto = unittest_pb2.TestFieldOrderings()
test_util.SetAllFieldsAndExtensions(proto)
serialized = proto.SerializeToString()
test_util.ExpectAllFieldsAndExtensionsInOrder(serialized)
def testMergeFromStringWhenFieldsAlreadySet(self):
first_proto = unittest_pb2.TestAllTypes()
first_proto.repeated_string.append('foobar')
first_proto.optional_int32 = 23
first_proto.optional_nested_message.bb = 42
serialized = first_proto.SerializeToString()
second_proto = unittest_pb2.TestAllTypes()
second_proto.repeated_string.append('baz')
second_proto.optional_int32 = 100
second_proto.optional_nested_message.bb = 999
bytes_parsed = second_proto.MergeFromString(serialized)
self.assertEqual(len(serialized), bytes_parsed)
# Ensure that we append to repeated fields.
self.assertEqual(['baz', 'foobar'], list(second_proto.repeated_string))
    # Ensure that we overwrite nonrepeated scalars.
self.assertEqual(23, second_proto.optional_int32)
# Ensure that we recursively call MergeFromString() on
# submessages.
self.assertEqual(42, second_proto.optional_nested_message.bb)
def testMessageSetWireFormat(self):
proto = message_set_extensions_pb2.TestMessageSet()
extension_message1 = message_set_extensions_pb2.TestMessageSetExtension1
extension_message2 = message_set_extensions_pb2.TestMessageSetExtension2
extension1 = extension_message1.message_set_extension
extension2 = extension_message2.message_set_extension
extension3 = message_set_extensions_pb2.message_set_extension3
proto.Extensions[extension1].i = 123
proto.Extensions[extension2].str = 'foo'
proto.Extensions[extension3].text = 'bar'
# Serialize using the MessageSet wire format (this is specified in the
# .proto file).
serialized = proto.SerializeToString()
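    # For reference, the MessageSet wire format wraps each extension in a
    # group: field 1 is the item group, field 2 (type_id) carries the
    # extension's field number, and field 3 (message) carries the
    # serialized payload. RawMessageSet below spells out that same layout
    # with ordinary fields, which is what lets us inspect the bytes.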
raw = unittest_mset_pb2.RawMessageSet()
self.assertEqual(False,
raw.DESCRIPTOR.GetOptions().message_set_wire_format)
self.assertEqual(
len(serialized),
raw.MergeFromString(serialized))
self.assertEqual(3, len(raw.item))
message1 = message_set_extensions_pb2.TestMessageSetExtension1()
self.assertEqual(
len(raw.item[0].message),
message1.MergeFromString(raw.item[0].message))
self.assertEqual(123, message1.i)
message2 = message_set_extensions_pb2.TestMessageSetExtension2()
self.assertEqual(
len(raw.item[1].message),
message2.MergeFromString(raw.item[1].message))
self.assertEqual('foo', message2.str)
message3 = message_set_extensions_pb2.TestMessageSetExtension3()
self.assertEqual(
len(raw.item[2].message),
message3.MergeFromString(raw.item[2].message))
self.assertEqual('bar', message3.text)
# Deserialize using the MessageSet wire format.
proto2 = message_set_extensions_pb2.TestMessageSet()
self.assertEqual(
len(serialized),
proto2.MergeFromString(serialized))
self.assertEqual(123, proto2.Extensions[extension1].i)
self.assertEqual('foo', proto2.Extensions[extension2].str)
self.assertEqual('bar', proto2.Extensions[extension3].text)
# Check byte size.
self.assertEqual(proto2.ByteSize(), len(serialized))
self.assertEqual(proto.ByteSize(), len(serialized))
def testMessageSetWireFormatUnknownExtension(self):
# Create a message using the message set wire format with an unknown
# message.
raw = unittest_mset_pb2.RawMessageSet()
# Add an item.
item = raw.item.add()
item.type_id = 98418603
message1 = message_set_extensions_pb2.TestMessageSetExtension1()
message1.i = 12345
item.message = message1.SerializeToString()
# Add a second, unknown extension.
item = raw.item.add()
item.type_id = 98418604
message1 = message_set_extensions_pb2.TestMessageSetExtension1()
message1.i = 12346
item.message = message1.SerializeToString()
# Add another unknown extension.
item = raw.item.add()
item.type_id = 98418605
message1 = message_set_extensions_pb2.TestMessageSetExtension2()
message1.str = 'foo'
item.message = message1.SerializeToString()
serialized = raw.SerializeToString()
# Parse message using the message set wire format.
proto = message_set_extensions_pb2.TestMessageSet()
self.assertEqual(
len(serialized),
proto.MergeFromString(serialized))
# Check that the message parsed well.
extension_message1 = message_set_extensions_pb2.TestMessageSetExtension1
extension1 = extension_message1.message_set_extension
self.assertEqual(12345, proto.Extensions[extension1].i)
def testUnknownFields(self):
proto = unittest_pb2.TestAllTypes()
test_util.SetAllFields(proto)
serialized = proto.SerializeToString()
# The empty message should be parsable with all of the fields
# unknown.
proto2 = unittest_pb2.TestEmptyMessage()
# Parsing this message should succeed.
self.assertEqual(
len(serialized),
proto2.MergeFromString(serialized))
# Now test with a int64 field set.
proto = unittest_pb2.TestAllTypes()
proto.optional_int64 = 0x0fffffffffffffff
serialized = proto.SerializeToString()
# The empty message should be parsable with all of the fields
# unknown.
proto2 = unittest_pb2.TestEmptyMessage()
# Parsing this message should succeed.
self.assertEqual(
len(serialized),
proto2.MergeFromString(serialized))
def _CheckRaises(self, exc_class, callable_obj, exception):
"""This method checks if the excpetion type and message are as expected."""
try:
callable_obj()
except exc_class as ex:
# Check if the exception message is the right one.
self.assertEqual(exception, str(ex))
return
else:
raise self.failureException('%s not raised' % str(exc_class))
def testSerializeUninitialized(self):
proto = unittest_pb2.TestRequired()
self._CheckRaises(
message.EncodeError,
proto.SerializeToString,
'Message protobuf_unittest.TestRequired is missing required fields: '
'a,b,c')
# Shouldn't raise exceptions.
partial = proto.SerializePartialToString()
proto2 = unittest_pb2.TestRequired()
self.assertFalse(proto2.HasField('a'))
# proto2 ParseFromString does not check that required fields are set.
proto2.ParseFromString(partial)
self.assertFalse(proto2.HasField('a'))
proto.a = 1
self._CheckRaises(
message.EncodeError,
proto.SerializeToString,
'Message protobuf_unittest.TestRequired is missing required fields: b,c')
# Shouldn't raise exceptions.
partial = proto.SerializePartialToString()
proto.b = 2
self._CheckRaises(
message.EncodeError,
proto.SerializeToString,
'Message protobuf_unittest.TestRequired is missing required fields: c')
# Shouldn't raise exceptions.
partial = proto.SerializePartialToString()
proto.c = 3
serialized = proto.SerializeToString()
# Shouldn't raise exceptions.
partial = proto.SerializePartialToString()
proto2 = unittest_pb2.TestRequired()
self.assertEqual(
len(serialized),
proto2.MergeFromString(serialized))
self.assertEqual(1, proto2.a)
self.assertEqual(2, proto2.b)
self.assertEqual(3, proto2.c)
self.assertEqual(
len(partial),
proto2.MergeFromString(partial))
self.assertEqual(1, proto2.a)
self.assertEqual(2, proto2.b)
self.assertEqual(3, proto2.c)
def testSerializeUninitializedSubMessage(self):
proto = unittest_pb2.TestRequiredForeign()
# Sub-message doesn't exist yet, so this succeeds.
proto.SerializeToString()
proto.optional_message.a = 1
self._CheckRaises(
message.EncodeError,
proto.SerializeToString,
'Message protobuf_unittest.TestRequiredForeign '
'is missing required fields: '
'optional_message.b,optional_message.c')
proto.optional_message.b = 2
proto.optional_message.c = 3
proto.SerializeToString()
proto.repeated_message.add().a = 1
proto.repeated_message.add().b = 2
self._CheckRaises(
message.EncodeError,
proto.SerializeToString,
'Message protobuf_unittest.TestRequiredForeign is missing required fields: '
'repeated_message[0].b,repeated_message[0].c,'
'repeated_message[1].a,repeated_message[1].c')
proto.repeated_message[0].b = 2
proto.repeated_message[0].c = 3
proto.repeated_message[1].a = 1
proto.repeated_message[1].c = 3
proto.SerializeToString()
def testSerializeAllPackedFields(self):
first_proto = unittest_pb2.TestPackedTypes()
second_proto = unittest_pb2.TestPackedTypes()
test_util.SetAllPackedFields(first_proto)
serialized = first_proto.SerializeToString()
self.assertEqual(first_proto.ByteSize(), len(serialized))
bytes_read = second_proto.MergeFromString(serialized)
self.assertEqual(second_proto.ByteSize(), bytes_read)
self.assertEqual(first_proto, second_proto)
def testSerializeAllPackedExtensions(self):
first_proto = unittest_pb2.TestPackedExtensions()
second_proto = unittest_pb2.TestPackedExtensions()
test_util.SetAllPackedExtensions(first_proto)
serialized = first_proto.SerializeToString()
bytes_read = second_proto.MergeFromString(serialized)
self.assertEqual(second_proto.ByteSize(), bytes_read)
self.assertEqual(first_proto, second_proto)
def testMergePackedFromStringWhenSomeFieldsAlreadySet(self):
first_proto = unittest_pb2.TestPackedTypes()
first_proto.packed_int32.extend([1, 2])
first_proto.packed_double.append(3.0)
serialized = first_proto.SerializeToString()
second_proto = unittest_pb2.TestPackedTypes()
second_proto.packed_int32.append(3)
second_proto.packed_double.extend([1.0, 2.0])
second_proto.packed_sint32.append(4)
self.assertEqual(
len(serialized),
second_proto.MergeFromString(serialized))
self.assertEqual([3, 1, 2], second_proto.packed_int32)
self.assertEqual([1.0, 2.0, 3.0], second_proto.packed_double)
self.assertEqual([4], second_proto.packed_sint32)
def testPackedFieldsWireFormat(self):
proto = unittest_pb2.TestPackedTypes()
proto.packed_int32.extend([1, 2, 150, 3]) # 1 + 1 + 2 + 1 bytes
proto.packed_double.extend([1.0, 1000.0]) # 8 + 8 bytes
proto.packed_float.append(2.0) # 4 bytes, will be before double
serialized = proto.SerializeToString()
self.assertEqual(proto.ByteSize(), len(serialized))
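    # Walk the raw bytes: each packed field is one tag/length/payload
    # record, and the fields appear in tag order (90, then 100, then 101).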
d = _MiniDecoder(serialized)
ReadTag = d.ReadFieldNumberAndWireType
self.assertEqual((90, wire_format.WIRETYPE_LENGTH_DELIMITED), ReadTag())
    self.assertEqual(1+1+2+1, d.ReadInt32())
self.assertEqual(1, d.ReadInt32())
self.assertEqual(2, d.ReadInt32())
self.assertEqual(150, d.ReadInt32())
self.assertEqual(3, d.ReadInt32())
self.assertEqual((100, wire_format.WIRETYPE_LENGTH_DELIMITED), ReadTag())
self.assertEqual(4, d.ReadInt32())
self.assertEqual(2.0, d.ReadFloat())
self.assertEqual((101, wire_format.WIRETYPE_LENGTH_DELIMITED), ReadTag())
self.assertEqual(8+8, d.ReadInt32())
self.assertEqual(1.0, d.ReadDouble())
self.assertEqual(1000.0, d.ReadDouble())
self.assertTrue(d.EndOfStream())
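  # Per the wire-format spec, parsers accept repeated primitive fields in
  # either packed or unpacked encoding, regardless of how the field is
  # declared; the two tests below check both directions of that
  # compatibility.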
def testParsePackedFromUnpacked(self):
unpacked = unittest_pb2.TestUnpackedTypes()
test_util.SetAllUnpackedFields(unpacked)
packed = unittest_pb2.TestPackedTypes()
serialized = unpacked.SerializeToString()
self.assertEqual(
len(serialized),
packed.MergeFromString(serialized))
expected = unittest_pb2.TestPackedTypes()
test_util.SetAllPackedFields(expected)
self.assertEqual(expected, packed)
def testParseUnpackedFromPacked(self):
packed = unittest_pb2.TestPackedTypes()
test_util.SetAllPackedFields(packed)
unpacked = unittest_pb2.TestUnpackedTypes()
serialized = packed.SerializeToString()
self.assertEqual(
len(serialized),
unpacked.MergeFromString(serialized))
expected = unittest_pb2.TestUnpackedTypes()
test_util.SetAllUnpackedFields(expected)
self.assertEqual(expected, unpacked)
def testFieldNumbers(self):
proto = unittest_pb2.TestAllTypes()
self.assertEqual(unittest_pb2.TestAllTypes.NestedMessage.BB_FIELD_NUMBER, 1)
self.assertEqual(unittest_pb2.TestAllTypes.OPTIONAL_INT32_FIELD_NUMBER, 1)
self.assertEqual(unittest_pb2.TestAllTypes.OPTIONALGROUP_FIELD_NUMBER, 16)
self.assertEqual(
unittest_pb2.TestAllTypes.OPTIONAL_NESTED_MESSAGE_FIELD_NUMBER, 18)
self.assertEqual(
unittest_pb2.TestAllTypes.OPTIONAL_NESTED_ENUM_FIELD_NUMBER, 21)
self.assertEqual(unittest_pb2.TestAllTypes.REPEATED_INT32_FIELD_NUMBER, 31)
self.assertEqual(unittest_pb2.TestAllTypes.REPEATEDGROUP_FIELD_NUMBER, 46)
self.assertEqual(
unittest_pb2.TestAllTypes.REPEATED_NESTED_MESSAGE_FIELD_NUMBER, 48)
self.assertEqual(
unittest_pb2.TestAllTypes.REPEATED_NESTED_ENUM_FIELD_NUMBER, 51)
def testExtensionFieldNumbers(self):
self.assertEqual(unittest_pb2.TestRequired.single.number, 1000)
self.assertEqual(unittest_pb2.TestRequired.SINGLE_FIELD_NUMBER, 1000)
self.assertEqual(unittest_pb2.TestRequired.multi.number, 1001)
self.assertEqual(unittest_pb2.TestRequired.MULTI_FIELD_NUMBER, 1001)
self.assertEqual(unittest_pb2.optional_int32_extension.number, 1)
self.assertEqual(unittest_pb2.OPTIONAL_INT32_EXTENSION_FIELD_NUMBER, 1)
self.assertEqual(unittest_pb2.optionalgroup_extension.number, 16)
self.assertEqual(unittest_pb2.OPTIONALGROUP_EXTENSION_FIELD_NUMBER, 16)
self.assertEqual(unittest_pb2.optional_nested_message_extension.number, 18)
self.assertEqual(
unittest_pb2.OPTIONAL_NESTED_MESSAGE_EXTENSION_FIELD_NUMBER, 18)
self.assertEqual(unittest_pb2.optional_nested_enum_extension.number, 21)
self.assertEqual(unittest_pb2.OPTIONAL_NESTED_ENUM_EXTENSION_FIELD_NUMBER,
21)
self.assertEqual(unittest_pb2.repeated_int32_extension.number, 31)
self.assertEqual(unittest_pb2.REPEATED_INT32_EXTENSION_FIELD_NUMBER, 31)
self.assertEqual(unittest_pb2.repeatedgroup_extension.number, 46)
self.assertEqual(unittest_pb2.REPEATEDGROUP_EXTENSION_FIELD_NUMBER, 46)
self.assertEqual(unittest_pb2.repeated_nested_message_extension.number, 48)
self.assertEqual(
unittest_pb2.REPEATED_NESTED_MESSAGE_EXTENSION_FIELD_NUMBER, 48)
self.assertEqual(unittest_pb2.repeated_nested_enum_extension.number, 51)
self.assertEqual(unittest_pb2.REPEATED_NESTED_ENUM_EXTENSION_FIELD_NUMBER,
51)
def testInitKwargs(self):
proto = unittest_pb2.TestAllTypes(
optional_int32=1,
optional_string='foo',
optional_bool=True,
optional_bytes=b'bar',
optional_nested_message=unittest_pb2.TestAllTypes.NestedMessage(bb=1),
optional_foreign_message=unittest_pb2.ForeignMessage(c=1),
optional_nested_enum=unittest_pb2.TestAllTypes.FOO,
optional_foreign_enum=unittest_pb2.FOREIGN_FOO,
repeated_int32=[1, 2, 3])
self.assertTrue(proto.IsInitialized())
self.assertTrue(proto.HasField('optional_int32'))
self.assertTrue(proto.HasField('optional_string'))
self.assertTrue(proto.HasField('optional_bool'))
self.assertTrue(proto.HasField('optional_bytes'))
self.assertTrue(proto.HasField('optional_nested_message'))
self.assertTrue(proto.HasField('optional_foreign_message'))
self.assertTrue(proto.HasField('optional_nested_enum'))
self.assertTrue(proto.HasField('optional_foreign_enum'))
self.assertEqual(1, proto.optional_int32)
self.assertEqual('foo', proto.optional_string)
self.assertEqual(True, proto.optional_bool)
self.assertEqual(b'bar', proto.optional_bytes)
self.assertEqual(1, proto.optional_nested_message.bb)
self.assertEqual(1, proto.optional_foreign_message.c)
self.assertEqual(unittest_pb2.TestAllTypes.FOO,
proto.optional_nested_enum)
self.assertEqual(unittest_pb2.FOREIGN_FOO, proto.optional_foreign_enum)
self.assertEqual([1, 2, 3], proto.repeated_int32)
def testInitArgsUnknownFieldName(self):
    def InitializeEmptyMessageWithExtraKeywordArg():
unused_proto = unittest_pb2.TestEmptyMessage(unknown='unknown')
self._CheckRaises(
ValueError,
        InitializeEmptyMessageWithExtraKeywordArg,
'Protocol message TestEmptyMessage has no "unknown" field.')
def testInitRequiredKwargs(self):
proto = unittest_pb2.TestRequired(a=1, b=1, c=1)
self.assertTrue(proto.IsInitialized())
self.assertTrue(proto.HasField('a'))
self.assertTrue(proto.HasField('b'))
self.assertTrue(proto.HasField('c'))
self.assertTrue(not proto.HasField('dummy2'))
self.assertEqual(1, proto.a)
self.assertEqual(1, proto.b)
self.assertEqual(1, proto.c)
def testInitRequiredForeignKwargs(self):
proto = unittest_pb2.TestRequiredForeign(
optional_message=unittest_pb2.TestRequired(a=1, b=1, c=1))
self.assertTrue(proto.IsInitialized())
self.assertTrue(proto.HasField('optional_message'))
self.assertTrue(proto.optional_message.IsInitialized())
self.assertTrue(proto.optional_message.HasField('a'))
self.assertTrue(proto.optional_message.HasField('b'))
self.assertTrue(proto.optional_message.HasField('c'))
self.assertTrue(not proto.optional_message.HasField('dummy2'))
self.assertEqual(unittest_pb2.TestRequired(a=1, b=1, c=1),
proto.optional_message)
self.assertEqual(1, proto.optional_message.a)
self.assertEqual(1, proto.optional_message.b)
self.assertEqual(1, proto.optional_message.c)
def testInitRepeatedKwargs(self):
proto = unittest_pb2.TestAllTypes(repeated_int32=[1, 2, 3])
self.assertTrue(proto.IsInitialized())
self.assertEqual(1, proto.repeated_int32[0])
self.assertEqual(2, proto.repeated_int32[1])
self.assertEqual(3, proto.repeated_int32[2])
class OptionsTest(unittest.TestCase):
def testMessageOptions(self):
proto = message_set_extensions_pb2.TestMessageSet()
self.assertEqual(True,
proto.DESCRIPTOR.GetOptions().message_set_wire_format)
proto = unittest_pb2.TestAllTypes()
self.assertEqual(False,
proto.DESCRIPTOR.GetOptions().message_set_wire_format)
def testPackedOptions(self):
proto = unittest_pb2.TestAllTypes()
proto.optional_int32 = 1
proto.optional_double = 3.0
for field_descriptor, _ in proto.ListFields():
self.assertEqual(False, field_descriptor.GetOptions().packed)
proto = unittest_pb2.TestPackedTypes()
proto.packed_int32.append(1)
proto.packed_double.append(3.0)
for field_descriptor, _ in proto.ListFields():
self.assertEqual(True, field_descriptor.GetOptions().packed)
self.assertEqual(descriptor.FieldDescriptor.LABEL_REPEATED,
field_descriptor.label)
class ClassAPITest(unittest.TestCase):
@unittest.skipIf(
api_implementation.Type() == 'cpp' and api_implementation.Version() == 2,
'C++ implementation requires a call to MakeDescriptor()')
def testMakeClassWithNestedDescriptor(self):
leaf_desc = descriptor.Descriptor('leaf', 'package.parent.child.leaf', '',
containing_type=None, fields=[],
nested_types=[], enum_types=[],
extensions=[])
child_desc = descriptor.Descriptor('child', 'package.parent.child', '',
containing_type=None, fields=[],
nested_types=[leaf_desc], enum_types=[],
extensions=[])
sibling_desc = descriptor.Descriptor('sibling', 'package.parent.sibling',
'', containing_type=None, fields=[],
nested_types=[], enum_types=[],
extensions=[])
parent_desc = descriptor.Descriptor('parent', 'package.parent', '',
containing_type=None, fields=[],
nested_types=[child_desc, sibling_desc],
enum_types=[], extensions=[])
message_class = reflection.MakeClass(parent_desc)
self.assertIn('child', message_class.__dict__)
self.assertIn('sibling', message_class.__dict__)
self.assertIn('leaf', message_class.child.__dict__)
def _GetSerializedFileDescriptor(self, name):
"""Get a serialized representation of a test FileDescriptorProto.
Args:
name: All calls to this must use a unique message name, to avoid
collisions in the cpp descriptor pool.
Returns:
A string containing the serialized form of a test FileDescriptorProto.
"""
file_descriptor_str = (
'message_type {'
' name: "' + name + '"'
' field {'
' name: "flat"'
' number: 1'
' label: LABEL_REPEATED'
' type: TYPE_UINT32'
' }'
' field {'
' name: "bar"'
' number: 2'
' label: LABEL_OPTIONAL'
' type: TYPE_MESSAGE'
' type_name: "Bar"'
' }'
' nested_type {'
' name: "Bar"'
' field {'
' name: "baz"'
' number: 3'
' label: LABEL_OPTIONAL'
' type: TYPE_MESSAGE'
' type_name: "Baz"'
' }'
' nested_type {'
' name: "Baz"'
' enum_type {'
' name: "deep_enum"'
' value {'
' name: "VALUE_A"'
' number: 0'
' }'
' }'
' field {'
' name: "deep"'
' number: 4'
' label: LABEL_OPTIONAL'
' type: TYPE_UINT32'
' }'
' }'
' }'
'}')
file_descriptor = descriptor_pb2.FileDescriptorProto()
text_format.Merge(file_descriptor_str, file_descriptor)
return file_descriptor.SerializeToString()
def testParsingFlatClassWithExplicitClassDeclaration(self):
"""Test that the generated class can parse a flat message."""
    # TODO(xiaofeng): This test fails with cpp implementation in the call
# of six.with_metaclass(). The other two callsites of with_metaclass
# in this file are both excluded from cpp test, so it might be expected
# to fail. Need someone more familiar with the python code to take a
# look at this.
if api_implementation.Type() != 'python':
return
file_descriptor = descriptor_pb2.FileDescriptorProto()
file_descriptor.ParseFromString(self._GetSerializedFileDescriptor('A'))
msg_descriptor = descriptor.MakeDescriptor(
file_descriptor.message_type[0])
class MessageClass(six.with_metaclass(reflection.GeneratedProtocolMessageType, message.Message)):
DESCRIPTOR = msg_descriptor
msg = MessageClass()
msg_str = (
'flat: 0 '
'flat: 1 '
'flat: 2 ')
text_format.Merge(msg_str, msg)
self.assertEqual(msg.flat, [0, 1, 2])
def testParsingFlatClass(self):
"""Test that the generated class can parse a flat message."""
file_descriptor = descriptor_pb2.FileDescriptorProto()
file_descriptor.ParseFromString(self._GetSerializedFileDescriptor('B'))
msg_descriptor = descriptor.MakeDescriptor(
file_descriptor.message_type[0])
msg_class = reflection.MakeClass(msg_descriptor)
msg = msg_class()
msg_str = (
'flat: 0 '
'flat: 1 '
'flat: 2 ')
text_format.Merge(msg_str, msg)
self.assertEqual(msg.flat, [0, 1, 2])
def testParsingNestedClass(self):
"""Test that the generated class can parse a nested message."""
file_descriptor = descriptor_pb2.FileDescriptorProto()
file_descriptor.ParseFromString(self._GetSerializedFileDescriptor('C'))
msg_descriptor = descriptor.MakeDescriptor(
file_descriptor.message_type[0])
msg_class = reflection.MakeClass(msg_descriptor)
msg = msg_class()
msg_str = (
'bar {'
' baz {'
' deep: 4'
' }'
'}')
text_format.Merge(msg_str, msg)
self.assertEqual(msg.bar.baz.deep, 4)
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "7dea6e54682db157235c45ea29be489f",
"timestamp": "",
"source": "github",
"line_count": 2943,
"max_line_length": 101,
"avg_line_length": 41.142711518858306,
"alnum_prop": 0.7021794967088691,
"repo_name": "heke123/chromium-crosswalk",
"id": "752f2f5d90cb6a750c9cd27cb0153d7db9165252",
"size": "122775",
"binary": false,
"copies": "14",
"ref": "refs/heads/master",
"path": "third_party/protobuf/python/google/protobuf/internal/reflection_test.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
import numpy as np
from sklearn.datasets import make_blobs
from sklearn.ensemble import RandomForestClassifier
from sklearn.calibration import CalibratedClassifierCV
from sklearn.metrics import log_loss
import pandas as pd
from ga_css_5prime import MatchUtils
import tempfile
from sklearn import preprocessing
np.random.seed(0)
CSS_5PRIME_FILE = 'data/dbass-prats/CrypticSpliceSite.tsv'
AUTH_5PRIME_FILE = 'data/hs3d/Exon-Intron_5prime/EI_true_9.tsv'
NEIGH_5PRIME_FILE = 'data/dbass-prats/NeighboringSpliceSite.tsv'
CSS_3PRIME_FILE = 'data/dbass/css_3prime.tsv'
AUTH_3PRIME_FILE = 'data/hs3d/Intron-Exon_3prime/authss_3prime.tsv'
def convert():
    le = preprocessing.LabelEncoder()
    return le
def loadTestData():
A5 = pd.read_csv(AUTH_5PRIME_FILE, sep='\t', header = None)
C5 = pd.read_csv(CSS_5PRIME_FILE, sep='\t', header = None)
N5 = pd.read_csv(NEIGH_5PRIME_FILE, sep='\t', header = None)
A5_label = pd.DataFrame(np.repeat(0, A5.shape[0]))
C5_label = pd.DataFrame(np.repeat(1, C5.shape[0]))
N5_label = pd.DataFrame(np.repeat(2, N5.shape[0]))
# A5 = pd.concat([A5_data, A5_label], axis = 1)
# C5 = pd.concat([A5_data, A5_label], axis = 1)
# N5 = pd.concat([A5_data, A5_label], axis = 1)
X_pos_test = pd.concat([A5, C5], axis=0)
X_neg_test = N5
X_pos_test = X_pos_test.as_matrix()
X_neg_test = X_neg_test.as_matrix()
return (X_pos_test, X_neg_test)
def extract_best_byfitness(perfMap, dataname): #neighbor/auth/css
bestGABase = None
bestFitness = -float('inf')
(retselect, retxover, retdataname, retM, retN, retbestGABase, retbestFitness) = (None, None, None, None, None, None, None)
for (select, xover), perf1 in perfMap.items():
select = select.upper().replace('_', '-')
xover = xover.upper().replace('CROSSOVER', '').replace('_', '')
dataPerfs = dict()
        for (M, N, dname), perf2 in perf1.items():
            k = (M, N)
            if dname not in dataPerfs:
                dataPerfs[dname] = dict()
            dataPerfs[dname][k] = perf2
dimPerfMap = dataPerfs[dataname]
for M,N in sorted(dimPerfMap):
dimPerf = dimPerfMap[(M,N)]
bestGens = MatchUtils.find_best_gens(dimPerf.bestGABase)
fitness = None
if bestGens:
fitness = bestGens[0]._bestFitness
if fitness > bestFitness:
bestFitness = fitness
bestGABase = dimPerf.bestGABase
(retselect, retxover, retdataname, retM, retN, retbestGABase, retbestFitness) = (select, xover, dataname, M, N, bestGABase, bestFitness)
return (retselect, retxover, retdataname, retM, retN, retbestGABase, retbestFitness)
def extractPopulation(gaBase):
ret = []
genIndices = set()
for gen in gaBase._generations:
if gen._genIndex not in genIndices:
genIndices.add(gen._genIndex)
ret.append(gen._bestSolution)
return ret
def extractTrainingData(perfMap):
datanames = {'auth':0, 'css':0, 'neighbor':1}
X_trains = []
y_trains = []
for dataname, label in datanames.items():
(select, xover, dataname, M, N, bestGABase, bestFitness) = extract_best_byfitness(perfMap, dataname)
population = extractPopulation(bestGABase)
poparr = ["\t".join(pop) for pop in population if pop]
popstr = "\n".join(poparr)
tmpFile = tempfile.mkstemp()
with open(tmpFile[1], 'w') as f:
f.write(popstr)
pdpop = pd.read_csv(tmpFile[1], sep='\t', header = None)
poplen = pdpop.shape[0]
labels = pd.DataFrame(np.repeat(label, poplen))
X_trains.append(pdpop)
y_trains.append(labels)
retX = pd.concat(X_trains, axis = 0)
retY = pd.concat(y_trains)
retX = retX.as_matrix()
retY = retY.as_matrix()
return (retX, retY)
def encode(le, data):
ret = [le.transform(d) for d in data]
return ret
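def _encoding_sketch():
    """Illustrative sketch, not part of the original pipeline: shows how the
    A/C/G/T alphabet is mapped to integers by encode() before training, using
    the same fit as in execute() below. The sample sequences are made up."""
    le = preprocessing.LabelEncoder()
    le.fit(['A', 'C', 'G', 'T'])
    # Each row of characters becomes integer labels,
    # e.g. ['A', 'T', 'G'] -> array([0, 3, 2]).
    return encode(le, [['A', 'T', 'G'], ['C', 'C', 'A']])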
# def execute_score(perfMap):
# (X_pos_test, X_neg_test) = loadTestData()
# (X_train, y_train) = extractTrainingData(perfMap)
# le = preprocessing.LabelEncoder()
# le.fit(['A', 'C', 'G', 'T'])
# X_test = encode(le, X_test)
# # y_test = encode(le, y_test)
# X_train = encode(le, X_train)
# # y_train = encode(le, y_train)
# clf = RandomForestClassifier(n_estimators=25)
# clf.fit(X_train, y_train)
# y_pos = clf.predict_proba(X_pos_test)
# y_neg = clf.predict_proba(X_neg_test)
# # y_predict = clf.predict(X_test)
# # score = log_loss(y_test, clf_probs)
# # print(score)
# return (y_predict, y_test, score)
def execute(perfMap):
(X_pos_test, X_neg_test) = loadTestData()
(X_train, y_train) = extractTrainingData(perfMap)
le = preprocessing.LabelEncoder()
le.fit(['A', 'C', 'G', 'T'])
X_train = encode(le, X_train)
X_pos_test = encode(le, X_pos_test)
X_neg_test = encode(le, X_neg_test)
clf = RandomForestClassifier(n_estimators=25)
clf.fit(X_train, y_train)
y_pos = clf.predict_proba(X_pos_test)
y_neg = clf.predict_proba(X_neg_test)
# y_predict = clf.predict(X_test)
# score = log_loss(y_test, clf_probs)
# print(score)
return (y_pos, y_neg)
def script(perfMap):
    (y_pos, y_neg) = execute(perfMap)
y_pos_str = [str(p) for p in y_pos]
pos_str = "\n".join(y_pos_str)
y_neg_str = [str(p) for p in y_neg]
neg_str = "\n".join(y_neg_str)
with open('negatives','w') as f:
f.write(neg_str)
with open('positives','w') as f:
f.write(pos_str)
|
{
"content_hash": "ca6e0fe28b8ff7686bce8afc01073276",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 150,
"avg_line_length": 32.05,
"alnum_prop": 0.672386895475819,
"repo_name": "tapomay/libgenetic",
"id": "aa5f9132b70faad90db6d3fcdff1997c0ca8c843",
"size": "5128",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src_python/bio_ga/ga_rf.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "135239"
},
{
"name": "Shell",
"bytes": "492"
}
],
"symlink_target": ""
}
|
from guizero import App, TextBox, Text, Slider, PushButton, Picture, Combo, CheckBox, ButtonGroup, Box
app = App(title="different sizes", width=700, height=700)
text = Text(app, "lets change some sizes", width=20, height=2)
text_box = TextBox(app, "some text", width=50)
slider = Slider(app, width=300, height=30)
button = PushButton(app, width=20, height=2)
pic = Picture(app, image="guizero.gif", width=400, height=50)
combo = Combo(app, ["martin", "laura", "rik"], width="fill", height="fill")
check = CheckBox(app, "tick me", width=20, height=3)
check.bg = "blue"
button_group = ButtonGroup(app, ["cheese", "onion", "crisps"], 1, width=20, height=9)
button_group.bg = "darkgrey"
box = Box(app, width=100, height=100)
box.border = True
box.bg = "red"
app.display()
|
{
"content_hash": "f954b07cddc618871141f1b907ad0d88",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 102,
"avg_line_length": 27.892857142857142,
"alnum_prop": 0.6850192061459667,
"repo_name": "lawsie/guizero",
"id": "a035cfa719c78ee8dc72e5d1f8f07415b50b9848",
"size": "781",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/changing_sizes.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "8697"
},
{
"name": "HTML",
"bytes": "14526"
},
{
"name": "JavaScript",
"bytes": "5837"
},
{
"name": "Python",
"bytes": "230438"
}
],
"symlink_target": ""
}
|
import os
import textwrap
from ..functional_test import TestCase
class GeneratorTest(TestCase):
def test_needy_variables(self):
empty_directory = os.path.join(self.path(), 'empty')
os.makedirs(empty_directory)
default_suffix = 'default-suffix'
with open(os.path.join(self.path(), 'needs.yaml'), 'w') as needs_file:
needs_file.write(textwrap.dedent('''
libraries:
mylib:
directory: {empty_directory}
build-directory-suffix: {{{{ suffix|default(\'{default_suffix}\') }}}}
project:
build-steps:
- echo noop
''').format(
empty_directory=empty_directory,
default_suffix=default_suffix,
))
def assert_with_suffix(suffix):
self.assertEqual(self.execute(['generate', 'jamfile', '-Dsuffix={}'.format(suffix)]), 0)
generated_file = os.path.join(self.needs_directory(), 'Jamfile')
with open(generated_file, 'r') as f:
contents = f.read()
print(contents)
self.assertTrue(suffix in contents)
self.assertFalse('foobar' in contents)
assert_with_suffix('suffix-foo')
assert_with_suffix('suffix-bar')
|
{
"content_hash": "1e6716f63cf560767cfcb78ec03e4a40",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 100,
"avg_line_length": 36.73684210526316,
"alnum_prop": 0.5372492836676218,
"repo_name": "ccbrown/needy",
"id": "b6f5e52904de991003ea961fb589d62ab31155e6",
"size": "1396",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "tests/functional/generators/test_generator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "236093"
},
{
"name": "Shell",
"bytes": "173"
}
],
"symlink_target": ""
}
|
from __future__ import print_function
import datetime
import threading
import xmlrpclib
from SimpleXMLRPCServer import SimpleXMLRPCServer
from xml.sax.saxutils import escape
import idaapi
import idautils
import idc
# Wait for any processing to get done
idaapi.autoWait()
# On Windows with NTFS filesystem a filepath with ':'
# is treated as NTFS ADS (Alternative Data Stream)
# and so saving file with such name fails
dt = datetime.datetime.now().isoformat().replace(':', '-')
# Save the database so nothing gets lost.
if idaapi.IDA_SDK_VERSION >= 700:
idaapi.save_database(idc.GetIdbPath() + '.' + dt)
else:
idc.SaveBase(idc.GetIdbPath() + '.' + dt)
DEBUG_MARSHALLING = False
def create_marshaller(use_format=None, just_to_str=False):
    assert use_format or just_to_str, 'Either pass a format to use or make it convert the value to str.'
def wrapper(_marshaller, value, appender):
if use_format:
marshalled = use_format % value
elif just_to_str:
marshalled = '<value><string>%s</string></value>' % escape(str(value))
if DEBUG_MARSHALLING:
print("Marshalled: '%s'" % marshalled)
appender(marshalled)
return wrapper
xmlrpclib.Marshaller.dispatch[type(0L)] = create_marshaller("<value><i8>%d</i8></value>")
xmlrpclib.Marshaller.dispatch[type(0)] = create_marshaller("<value><i8>%d</i8></value>")
xmlrpclib.Marshaller.dispatch[idaapi.cfuncptr_t] = create_marshaller(just_to_str=True)
host = '127.0.0.1'
port = 31337
orig_LineA = idc.LineA
def LineA(*a, **kw):
v = orig_LineA(*a, **kw)
if v and v.startswith('\x01\x04; '):
v = v[4:]
return v
idc.LineA = LineA
mutex = threading.Condition()
def wrap(f):
def wrapper(*a, **kw):
rv = []
error = []
def work():
try:
result = f(*a, **kw)
rv.append(result)
except Exception as e:
error.append(e)
with mutex:
flags = idaapi.MFF_WRITE
if f == idc.SetColor:
flags |= idaapi.MFF_NOWAIT
rv.append(None)
idaapi.execute_sync(work, flags)
if error:
msg = 'Failed on calling {}.{} with args: {}, kwargs: {}\nException: {}' \
.format(f.__module__, f.__name__, a, kw, str(error[0]))
print('[!!!] ERROR:', msg)
raise error[0]
return rv[0]
return wrapper
def register_module(module):
for name, function in module.__dict__.items():
if hasattr(function, '__call__'):
server.register_function(wrap(function), name)
def decompile(addr):
"""
Function that overwrites `idaapi.decompile` for xmlrpc so that instead
of throwing an exception on `idaapi.DecompilationFailure` it just returns `None`.
(so that we don't have to parse xmlrpc Fault's exception string on pwndbg side
as it differs between IDA versions).
"""
try:
return idaapi.decompile(addr)
except idaapi.DecompilationFailure:
return None
def versions():
"""Returns IDA & Python versions"""
import sys
return {
'python': sys.version,
'ida': idaapi.get_kernel_version(),
'hexrays': idaapi.get_hexrays_version() if idaapi.init_hexrays_plugin() else None
}
server = SimpleXMLRPCServer((host, port), logRequests=True, allow_none=True)
register_module(idc)
register_module(idautils)
register_module(idaapi)
server.register_function(lambda a: eval(a, globals(), locals()), 'eval')
server.register_function(decompile)  # overwrites idaapi/ida_hexrays.decompile
server.register_function(versions)
server.register_introspection_functions()
print('IDA Pro xmlrpc hosted on http://%s:%s' % (host, port))
print('Call `shutdown()` to shutdown the IDA Pro xmlrpc server.')
thread = threading.Thread(target=server.serve_forever)
thread.daemon = True
thread.start()
def shutdown():
global server
global thread
server.shutdown()
server.server_close()
del server
del thread
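def client_sketch():
    """Illustrative sketch, not part of the original script: how an external
    Python 2 process can drive this server once it is listening."""
    client = xmlrpclib.ServerProxy('http://%s:%s' % (host, port),
                                   allow_none=True)
    # Any function registered above (idc, idautils, idaapi, versions, ...)
    # can be called remotely.
    return client.versions()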
|
{
"content_hash": "02304ccb332c0a29b3f8577de7795ed4",
"timestamp": "",
"source": "github",
"line_count": 149,
"max_line_length": 105,
"avg_line_length": 27.28187919463087,
"alnum_prop": 0.6396063960639606,
"repo_name": "0xddaa/pwndbg",
"id": "fe7d3738e4bcb4b4bd0bd6fdd20dc6956dbdd008",
"size": "4111",
"binary": false,
"copies": "2",
"ref": "refs/heads/stable",
"path": "ida_script.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "584"
},
{
"name": "C",
"bytes": "113"
},
{
"name": "Makefile",
"bytes": "964"
},
{
"name": "Python",
"bytes": "1920581"
},
{
"name": "Shell",
"bytes": "5598"
}
],
"symlink_target": ""
}
|
"""Utility functions for writing decorators (which modify docstrings)."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
def get_qualified_name(function):
# Python 3
if hasattr(function, '__qualname__'):
return function.__qualname__
# Python 2
if hasattr(function, 'im_class'):
return function.im_class.__name__ + '.' + function.__name__
return function.__name__
def _normalize_docstring(docstring):
"""Normalizes the docstring.
  Replaces tabs with spaces, removes leading and trailing blank lines, and
removes any indentation.
Copied from PEP-257:
https://www.python.org/dev/peps/pep-0257/#handling-docstring-indentation
Args:
docstring: the docstring to normalize
Returns:
The normalized docstring
"""
if not docstring:
return ''
# Convert tabs to spaces (following the normal Python rules)
# and split into a list of lines:
lines = docstring.expandtabs().splitlines()
# Determine minimum indentation (first line doesn't count):
# (we use sys.maxsize because sys.maxint doesn't exist in Python 3)
indent = sys.maxsize
for line in lines[1:]:
stripped = line.lstrip()
if stripped:
indent = min(indent, len(line) - len(stripped))
# Remove indentation (first line is special):
trimmed = [lines[0].strip()]
if indent < sys.maxsize:
for line in lines[1:]:
trimmed.append(line[indent:].rstrip())
# Strip off trailing and leading blank lines:
while trimmed and not trimmed[-1]:
trimmed.pop()
while trimmed and not trimmed[0]:
trimmed.pop(0)
# Return a single string:
return '\n'.join(trimmed)
def add_notice_to_docstring(
doc, instructions, no_doc_str, suffix_str, notice):
"""Adds a deprecation notice to a docstring.
Args:
doc: The original docstring.
instructions: A string, describing how to fix the problem.
no_doc_str: The default value to use for `doc` if `doc` is empty.
suffix_str: Is added to the end of the first line.
notice: A list of strings. The main notice warning body.
Returns:
A new docstring, with the notice attached.
Raises:
ValueError: If `notice` is empty.
"""
if not doc:
lines = [no_doc_str]
else:
lines = _normalize_docstring(doc).splitlines()
lines[0] += ' ' + suffix_str
if not notice:
raise ValueError('The `notice` arg must not be empty.')
notice[0] = 'Warning: ' + notice[0]
notice = [''] + notice + ([instructions] if instructions else [])
if len(lines) > 1:
# Make sure that we keep our distance from the main body
if lines[1].strip():
notice.append('')
lines[1:1] = notice
else:
lines += notice
return '\n'.join(lines)
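def _add_notice_sketch():
  """Illustrative sketch, not part of the original module: composing a
  deprecation notice onto a one-line docstring with add_notice_to_docstring.

  The returned string is:

      Computes a value. (deprecated)

      Warning: THIS FUNCTION IS DEPRECATED.
      Use new_fn instead.
  """
  return add_notice_to_docstring(
      doc='Computes a value.',
      instructions='Use new_fn instead.',
      no_doc_str='DEPRECATED FUNCTION',
      suffix_str='(deprecated)',
      notice=['THIS FUNCTION IS DEPRECATED.'])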
def validate_callable(func, decorator_name):
if not hasattr(func, '__call__'):
raise ValueError(
'%s is not a function. If this is a property, make sure'
' @property appears before @%s in your source code:'
'\n\n@property\n@%s\ndef method(...)' % (
func, decorator_name, decorator_name))
class classproperty(object): # pylint: disable=invalid-name
"""Class property decorator.
Example usage:
class MyClass(object):
@classproperty
def value(cls):
return '123'
> print MyClass.value
123
"""
def __init__(self, func):
self._func = func
def __get__(self, owner_self, owner_cls):
return self._func(owner_cls)
class _CachedClassProperty(object):
"""Cached class property decorator.
Transforms a class method into a property whose value is computed once
and then cached as a normal attribute for the life of the class. Example
usage:
>>> class MyClass(object):
... @cached_classproperty
... def value(cls):
... print("Computing value")
... return '<property of %s>' % cls.__name__
>>> class MySubclass(MyClass):
... pass
>>> MyClass.value
Computing value
'<property of MyClass>'
>>> MyClass.value # uses cached value
'<property of MyClass>'
>>> MySubclass.value
Computing value
'<property of MySubclass>'
This decorator is similar to `functools.cached_property`, but it adds a
property to the class, not to individual instances.
"""
def __init__(self, func):
self._func = func
self._cache = {}
def __get__(self, obj, objtype):
if objtype not in self._cache:
self._cache[objtype] = self._func(objtype)
return self._cache[objtype]
def __set__(self, obj, value):
raise AttributeError('property %s is read-only' % self._func.__name__)
def __delete__(self, obj):
raise AttributeError('property %s is read-only' % self._func.__name__)
def cached_classproperty(func):
return _CachedClassProperty(func)
cached_classproperty.__doc__ = _CachedClassProperty.__doc__
|
{
"content_hash": "d7d5c14624902b7ab72bcc0f40570c04",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 75,
"avg_line_length": 26.66111111111111,
"alnum_prop": 0.6622212961033549,
"repo_name": "frreiss/tensorflow-fred",
"id": "f63f2e6055a1c1c119f83053769404e4b7ed76ad",
"size": "5489",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tensorflow/python/util/decorator_utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "6729"
},
{
"name": "Batchfile",
"bytes": "49527"
},
{
"name": "C",
"bytes": "871761"
},
{
"name": "C#",
"bytes": "8562"
},
{
"name": "C++",
"bytes": "79093233"
},
{
"name": "CMake",
"bytes": "6500"
},
{
"name": "Dockerfile",
"bytes": "110545"
},
{
"name": "Go",
"bytes": "1852128"
},
{
"name": "HTML",
"bytes": "4686483"
},
{
"name": "Java",
"bytes": "961600"
},
{
"name": "Jupyter Notebook",
"bytes": "549457"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "MLIR",
"bytes": "1644156"
},
{
"name": "Makefile",
"bytes": "62398"
},
{
"name": "Objective-C",
"bytes": "116558"
},
{
"name": "Objective-C++",
"bytes": "303063"
},
{
"name": "PHP",
"bytes": "20523"
},
{
"name": "Pascal",
"bytes": "3982"
},
{
"name": "Pawn",
"bytes": "18876"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "Python",
"bytes": "40003007"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Roff",
"bytes": "2472"
},
{
"name": "Ruby",
"bytes": "7464"
},
{
"name": "Shell",
"bytes": "681596"
},
{
"name": "Smarty",
"bytes": "34740"
},
{
"name": "Swift",
"bytes": "62814"
},
{
"name": "Vim Snippet",
"bytes": "58"
}
],
"symlink_target": ""
}
|
"""
parser.http package (imdb package).
This package provides the IMDbHTTPAccessSystem class used to access
IMDb's data through the web interface.
the imdb.IMDb function will return an instance of this class when
called with the 'accessSystem' argument set to "http" or "web"
or "html" (this is the default).
Copyright 2004-2012 Davide Alberani <da@erlug.linux.it>
2008 H. Turgut Uyar <uyar@tekir.org>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
import sys
import socket
import logging
from urllib import FancyURLopener, quote_plus
from codecs import lookup
from imdb import IMDbBase, imdbURL_movie_main, imdbURL_person_main, \
imdbURL_character_main, imdbURL_company_main, \
imdbURL_keyword_main, imdbURL_find, imdbURL_top250, \
imdbURL_bottom100
from imdb.utils import analyze_title
from imdb._exceptions import IMDbDataAccessError, IMDbParserError
import searchMovieParser
import searchPersonParser
import searchCharacterParser
import searchCompanyParser
import searchKeywordParser
import movieParser
import personParser
import characterParser
import companyParser
import topBottomParser
# Logger for miscellaneous functions.
_aux_logger = logging.getLogger('imdbpy.parser.http.aux')
IN_GAE = False
try:
import google.appengine
IN_GAE = True
_aux_logger.info('IMDbPY is running in the Google App Engine environment')
except ImportError:
pass
class _ModuleProxy:
"""A proxy to instantiate and access parsers."""
def __init__(self, module, defaultKeys=None, oldParsers=False,
useModule=None, fallBackToNew=False):
"""Initialize a proxy for the given module; defaultKeys, if set,
        must be a dictionary of values to set for instanced objects."""
if oldParsers or fallBackToNew:
_aux_logger.warn('The old set of parsers was removed; falling ' \
'back to the new parsers.')
self.useModule = useModule
if defaultKeys is None:
defaultKeys = {}
self._defaultKeys = defaultKeys
self._module = module
def __getattr__(self, name):
"""Called only when no look-up is found."""
_sm = self._module
# Read the _OBJECTS dictionary to build the asked parser.
if name in _sm._OBJECTS:
_entry = _sm._OBJECTS[name]
# Initialize the parser.
kwds = {}
if self.useModule:
kwds = {'useModule': self.useModule}
parserClass = _entry[0][0]
obj = parserClass(**kwds)
attrsToSet = self._defaultKeys.copy()
attrsToSet.update(_entry[1] or {})
# Set attribute to the object.
for key in attrsToSet:
setattr(obj, key, attrsToSet[key])
setattr(self, name, obj)
return obj
return getattr(_sm, name)
PY_VERSION = sys.version_info[:2]
# The cookies for the "adult" search.
# Please don't mess with these account.
# Old 'IMDbPY' account.
_IMDbPY_cookie_id = 'boM2bYxz9MCsOnH9gZ0S9QHs12NWrNdApxsls1Vb5/NGrNdjcHx3dUas10UASoAjVEvhAbGagERgOpNkAPvxdbfKwaV2ikEj9SzXY1WPxABmDKQwdqzwRbM+12NSeJFGUEx3F8as10WwidLzVshDtxaPIbP13NdjVS9UZTYqgTVGrNcT9vyXU1'
_IMDbPY_cookie_uu = '3M3AXsquTU5Gur/Svik+ewflPm5Rk2ieY3BIPlLjyK3C0Dp9F8UoPgbTyKiGtZp4x1X+uAUGKD7BM2g+dVd8eqEzDErCoYvdcvGLvVLAen1y08hNQtALjVKAe+1hM8g9QbNonlG1/t4S82ieUsBbrSIQbq1yhV6tZ6ArvSbA7rgHc8n5AdReyAmDaJ5Wm/ee3VDoCnGj/LlBs2ieUZNorhHDKK5Q=='
# 'imdbpy2010' account.
_imdbpy2010_cookie_id = 'QrCdxVi+L+WgqOLrQJJgBgRRXGInphxiBPU/YXSFDyExMFzCp6YcYgSVXyEUhS/xMID8wqemHGID4DlntwZ49vemP5UXsAxiJ4D6goSmHGIgNT9hMXBaRSF2vMS3phxB0bVfQiQlP1RxdrzhB6YcRHFASyIhQVowwXCKtDSlD2YhgRvxBsCKtGemHBKH9mxSI='
_imdbpy2010_cookie_uu = 'oiEo2yoJFCA2Zbn/o7Z1LAPIwotAu6QdALv3foDb1x5F/tdrFY63XkSfty4kntS8Y8jkHSDLt3406+d+JThEilPI0mtTaOQdA/t2/iErp22jaLdeVU5ya4PIREpj7HFdpzhEHadcIAngSER50IoHDpD6Bz4Qy3b+UIhE/hBbhz5Q63ceA2hEvhPo5B0FnrL9Q8jkWjDIbA0Au3d+AOtnXoCIRL4Q28c+UOtnXpP4RL4T6OQdA+6ijUCI5B0AW2d+UOtnXpPYRL4T6OQdA8jkTUOYlC0A=='
# old 'IMDbPYweb' account.
_old_IMDbPYweb_cookie_id = 'rH1jNAkjTlNXvHolvBVBsgaPICNZbNdjVjzFwzas9JRmusdjVoqBs/Hs12NR+1WFxEoR9bGKEDUg6sNlADqXwkas12N131Rwdb+UQNGKN8PWrNdjcdqBQVLq8mbGDHP3hqzxhbD692NQi9D0JjpBtRaPIbP1zNdjUOqENQYv1ADWrNcT9vyXU1'
_old_IMDbPYweb_cookie_uu = 'su4/m8cho4c6HP+W1qgq6wchOmhnF0w+lIWvHjRUPJ6nRA9sccEafjGADJ6hQGrMd4GKqLcz2X4z5+w+M4OIKnRn7FpENH7dxDQu3bQEHyx0ZEyeRFTPHfQEX03XF+yeN1dsPpcXaqjUZAw+lGRfXRQEfz3RIX9IgVEffdBAHw2wQXyf9xdMPrQELw0QNB8dsffsqcdQemjPB0w+moLcPh0JrKrHJ9hjBzdMPpcXTH7XRwwOk='
# old 'IMDbPYweb' account values (as of 2012-12-30)
_IMDbPYweb_cookie_id = 'BCYjtpb46Go0cMHAMewWZEauhwqPL7ASCPpPVNutu6BuayHZd0U6Dk3UAqVlEM8DHLDsSr02RGQn5ff3245-R4A130NAWJ_5yqXx7X-zJey8vQM8JKdv3rTUSEJznJQlojUW1Bije-Q0FXAixs4I0sePWhd_tA41i-9AF2q3lPmaksram6ilMhN9i3IPESW1PMbk'
_IMDbPYweb_cookie_uu = 'BCYttQjEMc-NyUdFUGxThidAnBo7wwalEzj4un9uzf2XoEjtqDhNfrH7bOSuwlRkMEQ11SNyTajl-b9Q-21m4HwYu0e3jXZrjYLXLYzFkrEroCDyUREqaTwPJPSjGtFmvlaVBZEZmsWpaxe18DT5KiygKyGPZKH78Xu4im6ba-Sd31WvbXHzP8KGXPpGjhhVuv7Dcv314HCWkE832Srf9ya-Uv0FdGAmYyLbIAXuxnvpYQd6oZ8-CYkSGLIqcKWdrf5S'
# 'IMDbPY2013' account
_IMDbPY2013_cookie_id = 'BCYmoyqSm2WglmOzG-SrFWSvVpxsTZOB0qEOOqmAwCBxCbaNgKOxd0DTKzUvt7t04Pya5gV2tUrpDmYxrc1Dr54DQj2UXI7QI35__M5-HI2KrbOI3PjDz6M-_U3HG8topMfN64R24tmBixoZhMYXVaEc556lf0Z4gQNJVYRANXvwytP5v1lpfeToRlu9aVJwN4kT'
_IMDbPY2013_cookie_uu = 'BCYquDS8Y2i8R1pJxS4nB77YrhjHHXeOea2Xl9KtZvE6RZKVfMvzTGU4Vl5-yxfPbgRSiFJasyf-hhPuVvXyaHlfeBjNlbFT8hz2HzFFkQ_SxKxq05J51gi7Fv4SaAws1M-i7zmQ1TRunfJqCVIYqPwIs2NO7s4_YDH2ZoISVGLgca8OY2K58HychOZB1oRWHVeAJNhLJMrCWJBuGRLCNnQK5X9tA0dPPntr2Ussy0ouul-N1GQz-8y5vda3JJ_C6xkwmHcA6JrOdOFO_HqMWjVSXuxGEdrXC919JM9H0vooVvKeVgAEJnTh2GiVlUJUoH3c'
# Currently used account.
_cookie_id = _IMDbPY2013_cookie_id
_cookie_uu = _IMDbPY2013_cookie_uu
class _FakeURLOpener(object):
"""Fake URLOpener object, used to return empty strings instead of
errors.
"""
def __init__(self, url, headers):
self.url = url
self.headers = headers
def read(self, *args, **kwds): return ''
def close(self, *args, **kwds): pass
def info(self, *args, **kwds): return self.headers
class IMDbURLopener(FancyURLopener):
"""Fetch web pages and handle errors."""
_logger = logging.getLogger('imdbpy.parser.http.urlopener')
def __init__(self, *args, **kwargs):
self._last_url = u''
FancyURLopener.__init__(self, *args, **kwargs)
# Headers to add to every request.
# XXX: IMDb's web server doesn't like urllib-based programs,
# so lets fake to be Mozilla.
# Wow! I'm shocked by my total lack of ethic! <g>
for header in ('User-Agent', 'User-agent', 'user-agent'):
self.del_header(header)
self.set_header('User-Agent', 'Mozilla/5.0')
self.set_header('Accept-Language', 'en-us,en;q=0.5')
# XXX: This class is used also to perform "Exact Primary
# [Title|Name]" searches, and so by default the cookie is set.
c_header = 'uu=%s; id=%s' % (_cookie_uu, _cookie_id)
self.set_header('Cookie', c_header)
def get_proxy(self):
"""Return the used proxy, or an empty string."""
return self.proxies.get('http', '')
def set_proxy(self, proxy):
"""Set the proxy."""
if not proxy:
if self.proxies.has_key('http'):
del self.proxies['http']
else:
if not proxy.lower().startswith('http://'):
proxy = 'http://%s' % proxy
self.proxies['http'] = proxy
def set_header(self, header, value, _overwrite=True):
"""Set a default header."""
if _overwrite:
self.del_header(header)
self.addheaders.append((header, value))
def get_header(self, header):
"""Return the first value of a header, or None
if not present."""
for index in xrange(len(self.addheaders)):
if self.addheaders[index][0] == header:
return self.addheaders[index][1]
return None
def del_header(self, header):
"""Remove a default header."""
for index in xrange(len(self.addheaders)):
if self.addheaders[index][0] == header:
del self.addheaders[index]
break
def retrieve_unicode(self, url, size=-1):
"""Retrieves the given URL, and returns a unicode string,
trying to guess the encoding of the data (assuming latin_1
by default)"""
encode = None
try:
if size != -1:
self.set_header('Range', 'bytes=0-%d' % size)
uopener = self.open(url)
kwds = {}
if PY_VERSION > (2, 3) and not IN_GAE:
kwds['size'] = size
content = uopener.read(**kwds)
self._last_url = uopener.url
# Maybe the server is so nice to tell us the charset...
server_encode = uopener.info().getparam('charset')
# Otherwise, look at the content-type HTML meta tag.
if server_encode is None and content:
begin_h = content.find('text/html; charset=')
if begin_h != -1:
end_h = content[19+begin_h:].find('"')
if end_h != -1:
server_encode = content[19+begin_h:19+begin_h+end_h]
if server_encode:
try:
if lookup(server_encode):
encode = server_encode
except (LookupError, ValueError, TypeError):
pass
uopener.close()
if size != -1:
self.del_header('Range')
self.close()
except IOError, e:
if size != -1:
# Ensure that the Range header is removed.
self.del_header('Range')
raise IMDbDataAccessError({'errcode': e.errno,
'errmsg': str(e.strerror),
'url': url,
'proxy': self.get_proxy(),
'exception type': 'IOError',
'original exception': e})
if encode is None:
encode = 'latin_1'
# The detection of the encoding is error prone...
self._logger.warn('Unable to detect the encoding of the retrieved '
'page [%s]; falling back to default latin1.', encode)
##print unicode(content, encode, 'replace').encode('utf8')
return unicode(content, encode, 'replace')
def http_error_default(self, url, fp, errcode, errmsg, headers):
if errcode == 404:
self._logger.warn('404 code returned for %s: %s (headers: %s)',
url, errmsg, headers)
return _FakeURLOpener(url, headers)
raise IMDbDataAccessError({'url': 'http:%s' % url,
'errcode': errcode,
'errmsg': errmsg,
'headers': headers,
'error type': 'http_error_default',
'proxy': self.get_proxy()})
def open_unknown(self, fullurl, data=None):
raise IMDbDataAccessError({'fullurl': fullurl,
'data': str(data),
'error type': 'open_unknown',
'proxy': self.get_proxy()})
def open_unknown_proxy(self, proxy, fullurl, data=None):
raise IMDbDataAccessError({'proxy': str(proxy),
'fullurl': fullurl,
'error type': 'open_unknown_proxy',
'data': str(data)})
class IMDbHTTPAccessSystem(IMDbBase):
"""The class used to access IMDb's data through the web."""
accessSystem = 'http'
_http_logger = logging.getLogger('imdbpy.parser.http')
def __init__(self, isThin=0, adultSearch=1, proxy=-1, oldParsers=False,
fallBackToNew=False, useModule=None, cookie_id=-1,
timeout=30, cookie_uu=None, *arguments, **keywords):
"""Initialize the access system."""
IMDbBase.__init__(self, *arguments, **keywords)
self.urlOpener = IMDbURLopener()
# When isThin is set, we're parsing the "maindetails" page
# of a movie (instead of the "combined" page) and movie/person
# references are not collected if no defaultModFunct is provided.
#
# NOTE: httpThin was removed since IMDbPY 4.8.
self.isThin = isThin
self._getRefs = True
self._mdparse = False
if isThin:
self._http_logger.warn('"httpThin" access system no longer ' +
'supported; "http" used automatically', exc_info=False)
self.isThin = 0
if self.accessSystem in ('httpThin', 'webThin', 'htmlThin'):
self.accessSystem = 'http'
self.set_timeout(timeout)
self.do_adult_search(adultSearch)
if cookie_id != -1:
if cookie_id is None:
self.del_cookies()
elif cookie_uu is not None:
self.set_cookies(cookie_id, cookie_uu)
if proxy != -1:
self.set_proxy(proxy)
if useModule is not None:
if not isinstance(useModule, (list, tuple)) and ',' in useModule:
useModule = useModule.split(',')
_def = {'_modFunct': self._defModFunct, '_as': self.accessSystem}
# Proxy objects.
self.smProxy = _ModuleProxy(searchMovieParser, defaultKeys=_def,
oldParsers=oldParsers, useModule=useModule,
fallBackToNew=fallBackToNew)
self.spProxy = _ModuleProxy(searchPersonParser, defaultKeys=_def,
oldParsers=oldParsers, useModule=useModule,
fallBackToNew=fallBackToNew)
self.scProxy = _ModuleProxy(searchCharacterParser, defaultKeys=_def,
oldParsers=oldParsers, useModule=useModule,
fallBackToNew=fallBackToNew)
self.scompProxy = _ModuleProxy(searchCompanyParser, defaultKeys=_def,
oldParsers=oldParsers, useModule=useModule,
fallBackToNew=fallBackToNew)
self.skProxy = _ModuleProxy(searchKeywordParser, defaultKeys=_def,
oldParsers=oldParsers, useModule=useModule,
fallBackToNew=fallBackToNew)
self.mProxy = _ModuleProxy(movieParser, defaultKeys=_def,
oldParsers=oldParsers, useModule=useModule,
fallBackToNew=fallBackToNew)
self.pProxy = _ModuleProxy(personParser, defaultKeys=_def,
oldParsers=oldParsers, useModule=useModule,
fallBackToNew=fallBackToNew)
self.cProxy = _ModuleProxy(characterParser, defaultKeys=_def,
oldParsers=oldParsers, useModule=useModule,
fallBackToNew=fallBackToNew)
self.compProxy = _ModuleProxy(companyParser, defaultKeys=_def,
oldParsers=oldParsers, useModule=useModule,
fallBackToNew=fallBackToNew)
self.topBottomProxy = _ModuleProxy(topBottomParser, defaultKeys=_def,
oldParsers=oldParsers, useModule=useModule,
fallBackToNew=fallBackToNew)
def _normalize_movieID(self, movieID):
"""Normalize the given movieID."""
try:
return '%07d' % int(movieID)
except ValueError, e:
raise IMDbParserError('invalid movieID "%s": %s' % (movieID, e))
def _normalize_personID(self, personID):
"""Normalize the given personID."""
try:
return '%07d' % int(personID)
except ValueError, e:
raise IMDbParserError('invalid personID "%s": %s' % (personID, e))
def _normalize_characterID(self, characterID):
"""Normalize the given characterID."""
try:
return '%07d' % int(characterID)
except ValueError, e:
raise IMDbParserError('invalid characterID "%s": %s' % \
(characterID, e))
def _normalize_companyID(self, companyID):
"""Normalize the given companyID."""
try:
return '%07d' % int(companyID)
except ValueError, e:
raise IMDbParserError('invalid companyID "%s": %s' % \
(companyID, e))
def get_imdbMovieID(self, movieID):
"""Translate a movieID in an imdbID; in this implementation
the movieID _is_ the imdbID.
"""
return movieID
def get_imdbPersonID(self, personID):
"""Translate a personID in an imdbID; in this implementation
the personID _is_ the imdbID.
"""
return personID
def get_imdbCharacterID(self, characterID):
"""Translate a characterID in an imdbID; in this implementation
the characterID _is_ the imdbID.
"""
return characterID
def get_imdbCompanyID(self, companyID):
"""Translate a companyID in an imdbID; in this implementation
the companyID _is_ the imdbID.
"""
return companyID
def get_proxy(self):
"""Return the used proxy or an empty string."""
return self.urlOpener.get_proxy()
def set_proxy(self, proxy):
"""Set the web proxy to use.
It should be a string like 'http://localhost:8080/'; if the
string is empty, no proxy will be used.
If set, the value of the environment variable HTTP_PROXY is
automatically used.
"""
self.urlOpener.set_proxy(proxy)
def set_timeout(self, timeout):
"""Set the default timeout, in seconds, of the connection."""
try:
timeout = int(timeout)
except Exception:
timeout = 0
if timeout <= 0:
timeout = None
socket.setdefaulttimeout(timeout)
def set_cookies(self, cookie_id, cookie_uu):
"""Set a cookie to access an IMDb's account."""
c_header = 'id=%s; uu=%s' % (cookie_id, cookie_uu)
self.urlOpener.set_header('Cookie', c_header)
def del_cookies(self):
"""Remove the used cookie."""
self.urlOpener.del_header('Cookie')
def do_adult_search(self, doAdult,
cookie_id=_cookie_id, cookie_uu=_cookie_uu):
"""If doAdult is true, 'adult' movies are included in the
search results; cookie_id and cookie_uu are optional
parameters to select a specific account (see your cookie
        or cookies.txt file)."""
if doAdult:
self.set_cookies(cookie_id, cookie_uu)
#c_header = 'id=%s; uu=%s' % (cookie_id, cookie_uu)
#self.urlOpener.set_header('Cookie', c_header)
else:
self.urlOpener.del_header('Cookie')
def _retrieve(self, url, size=-1, _noCookies=False):
"""Retrieve the given URL."""
##print url
_cookies = None
# XXX: quite obscene, but in some very limited
# cases (/ttXXXXXXX/epdate) if the cookies
# are set, a 500 error is returned.
if _noCookies:
_cookies = self.urlOpener.get_header('Cookie')
self.del_cookies()
self._http_logger.debug('fetching url %s (size: %d)', url, size)
try:
ret = self.urlOpener.retrieve_unicode(url, size=size)
finally:
if _noCookies and _cookies:
self.urlOpener.set_header('Cookie', _cookies)
return ret
def _get_search_content(self, kind, ton, results):
"""Retrieve the web page for a given search.
kind can be 'tt' (for titles), 'nm' (for names),
'char' (for characters) or 'co' (for companies).
ton is the title or the name to search.
results is the maximum number of results to be retrieved."""
if isinstance(ton, unicode):
try:
ton = ton.encode('utf-8')
except Exception, e:
try:
ton = ton.encode('iso8859-1')
except Exception, e:
pass
##params = 'q=%s&%s=on&mx=%s' % (quote_plus(ton), kind, str(results))
params = 'q=%s&s=%s&mx=%s' % (quote_plus(ton), kind, str(results))
if kind == 'ep':
params = params.replace('s=ep&', 's=tt&ttype=ep&', 1)
cont = self._retrieve(self.urls['find'] % params)
#print 'URL:', imdbURL_find % params
if cont.find('Your search returned more than') == -1 or \
cont.find("displayed the exact matches") == -1:
return cont
# The retrieved page contains no results, because too many
# titles or names contain the string we're looking for.
params = 'q=%s&ls=%s&lm=0' % (quote_plus(ton), kind)
size = 131072 + results * 512
return self._retrieve(self.urls['find'] % params, size=size)
def _search_movie(self, title, results):
# The URL of the query.
# XXX: To retrieve the complete results list:
# params = urllib.urlencode({'more': 'tt', 'q': title})
##params = urllib.urlencode({'tt': 'on','mx': str(results),'q': title})
##params = 'q=%s&tt=on&mx=%s' % (quote_plus(title), str(results))
##cont = self._retrieve(imdbURL_find % params)
cont = self._get_search_content('tt', title, results)
return self.smProxy.search_movie_parser.parse(cont, results=results)['data']
def _search_episode(self, title, results):
t_dict = analyze_title(title)
if t_dict['kind'] == 'episode':
title = t_dict['title']
cont = self._get_search_content('ep', title, results)
return self.smProxy.search_movie_parser.parse(cont, results=results)['data']
def get_movie_main(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'combined')
return self.mProxy.movie_parser.parse(cont, mdparse=self._mdparse)
def get_movie_full_credits(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'fullcredits')
return self.mProxy.movie_parser.parse(cont)
def get_movie_plot(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'plotsummary')
return self.mProxy.plot_parser.parse(cont, getRefs=self._getRefs)
def get_movie_awards(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'awards')
return self.mProxy.movie_awards_parser.parse(cont)
def get_movie_taglines(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'taglines')
return self.mProxy.taglines_parser.parse(cont)
def get_movie_keywords(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'keywords')
return self.mProxy.keywords_parser.parse(cont)
def get_movie_alternate_versions(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'alternateversions')
return self.mProxy.alternateversions_parser.parse(cont,
getRefs=self._getRefs)
def get_movie_crazy_credits(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'crazycredits')
return self.mProxy.crazycredits_parser.parse(cont,
getRefs=self._getRefs)
def get_movie_goofs(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'goofs')
return self.mProxy.goofs_parser.parse(cont, getRefs=self._getRefs)
def get_movie_quotes(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'quotes')
return self.mProxy.quotes_parser.parse(cont, getRefs=self._getRefs)
def get_movie_release_dates(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'releaseinfo')
ret = self.mProxy.releasedates_parser.parse(cont)
ret['info sets'] = ('release dates', 'akas')
return ret
get_movie_akas = get_movie_release_dates
get_movie_release_info = get_movie_release_dates
def get_movie_vote_details(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'ratings')
return self.mProxy.ratings_parser.parse(cont)
def get_movie_official_sites(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'officialsites')
return self.mProxy.officialsites_parser.parse(cont)
def get_movie_trivia(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'trivia')
return self.mProxy.trivia_parser.parse(cont, getRefs=self._getRefs)
def get_movie_connections(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'movieconnections')
return self.mProxy.connections_parser.parse(cont)
def get_movie_technical(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'technical')
return self.mProxy.tech_parser.parse(cont)
def get_movie_business(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'business')
return self.mProxy.business_parser.parse(cont, getRefs=self._getRefs)
def get_movie_literature(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'literature')
return self.mProxy.literature_parser.parse(cont)
def get_movie_locations(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'locations')
return self.mProxy.locations_parser.parse(cont)
def get_movie_soundtrack(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'soundtrack')
return self.mProxy.soundtrack_parser.parse(cont)
def get_movie_dvd(self, movieID):
self._http_logger.warn('dvd information no longer available', exc_info=False)
return {}
def get_movie_recommendations(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'recommendations')
return self.mProxy.rec_parser.parse(cont)
def get_movie_critic_reviews(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'criticreviews')
return self.mProxy.criticrev_parser.parse(cont)
def get_movie_external_reviews(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'externalreviews')
return self.mProxy.externalrev_parser.parse(cont)
def get_movie_newsgroup_reviews(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'newsgroupreviews')
return self.mProxy.newsgrouprev_parser.parse(cont)
def get_movie_misc_sites(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'miscsites')
return self.mProxy.misclinks_parser.parse(cont)
def get_movie_sound_clips(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'soundsites')
return self.mProxy.soundclips_parser.parse(cont)
def get_movie_video_clips(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'videosites')
return self.mProxy.videoclips_parser.parse(cont)
def get_movie_photo_sites(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'photosites')
return self.mProxy.photosites_parser.parse(cont)
def get_movie_news(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'news')
return self.mProxy.news_parser.parse(cont, getRefs=self._getRefs)
def get_movie_amazon_reviews(self, movieID):
self._http_logger.warn('amazon review no longer available', exc_info=False)
return {}
def get_movie_guests(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'epcast')
return self.mProxy.episodes_cast_parser.parse(cont)
get_movie_episodes_cast = get_movie_guests
def get_movie_merchandising_links(self, movieID):
self._http_logger.warn('merchandising links no longer available',
exc_info=False)
return {}
def _purge_seasons_data(self, data_d):
if '_current_season' in data_d['data']:
del data_d['data']['_current_season']
if '_seasons' in data_d['data']:
del data_d['data']['_seasons']
return data_d
def get_movie_episodes(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'episodes')
data_d = self.mProxy.season_episodes_parser.parse(cont)
        if not data_d or 'data' not in data_d:
return {}
_current_season = data_d['data'].get('_current_season', '')
_seasons = data_d['data'].get('_seasons') or []
data_d = self._purge_seasons_data(data_d)
data_d['data'].setdefault('episodes', {})
nr_eps = len(data_d['data']['episodes'].get(_current_season) or [])
for season in _seasons:
if season == _current_season:
continue
other_cont = self._retrieve(self.urls['movie_main'] % movieID + 'episodes?season=' + str(season))
other_d = self.mProxy.season_episodes_parser.parse(other_cont)
other_d = self._purge_seasons_data(other_d)
other_d['data'].setdefault('episodes', {})
            if not (other_d and other_d['data'] and other_d['data']['episodes'].get(season)):
continue
nr_eps += len(other_d['data']['episodes'].get(season) or [])
data_d['data']['episodes'][season] = other_d['data']['episodes'][season]
data_d['data']['number of episodes'] = nr_eps
return data_d
def get_movie_episodes_rating(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'epdate', _noCookies=True)
data_d = self.mProxy.eprating_parser.parse(cont)
# set movie['episode of'].movieID for every episode.
if data_d.get('data', {}).has_key('episodes rating'):
for item in data_d['data']['episodes rating']:
episode = item['episode']
episode['episode of'].movieID = movieID
return data_d
def get_movie_faqs(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'faq')
return self.mProxy.movie_faqs_parser.parse(cont, getRefs=self._getRefs)
def get_movie_airing(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'tvschedule')
return self.mProxy.airing_parser.parse(cont)
get_movie_tv_schedule = get_movie_airing
def get_movie_synopsis(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'synopsis')
return self.mProxy.synopsis_parser.parse(cont)
def get_movie_parents_guide(self, movieID):
cont = self._retrieve(self.urls['movie_main'] % movieID + 'parentalguide')
return self.mProxy.parentsguide_parser.parse(cont)
def _search_person(self, name, results):
# The URL of the query.
# XXX: To retrieve the complete results list:
# params = urllib.urlencode({'more': 'nm', 'q': name})
##params = urllib.urlencode({'nm': 'on', 'mx': str(results), 'q': name})
#params = 'q=%s&nm=on&mx=%s' % (quote_plus(name), str(results))
#cont = self._retrieve(imdbURL_find % params)
cont = self._get_search_content('nm', name, results)
return self.spProxy.search_person_parser.parse(cont, results=results)['data']
def get_person_main(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'maindetails')
ret = self.pProxy.maindetails_parser.parse(cont)
ret['info sets'] = ('main', 'filmography')
return ret
def get_person_filmography(self, personID):
return self.get_person_main(personID)
def get_person_biography(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'bio')
return self.pProxy.bio_parser.parse(cont, getRefs=self._getRefs)
def get_person_awards(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'awards')
return self.pProxy.person_awards_parser.parse(cont)
def get_person_other_works(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'otherworks')
return self.pProxy.otherworks_parser.parse(cont, getRefs=self._getRefs)
#def get_person_agent(self, personID):
# cont = self._retrieve(self.urls['person_main'] % personID + 'agent')
# return self.pProxy.agent_parser.parse(cont)
def get_person_publicity(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'publicity')
return self.pProxy.publicity_parser.parse(cont)
def get_person_official_sites(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'officialsites')
return self.pProxy.person_officialsites_parser.parse(cont)
def get_person_news(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'news')
return self.pProxy.news_parser.parse(cont)
def get_person_episodes(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'filmoseries')
return self.pProxy.person_series_parser.parse(cont)
def get_person_merchandising_links(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'forsale')
return self.pProxy.sales_parser.parse(cont)
def get_person_genres_links(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'filmogenre')
return self.pProxy.person_genres_parser.parse(cont)
def get_person_keywords_links(self, personID):
cont = self._retrieve(self.urls['person_main'] % personID + 'filmokey')
return self.pProxy.person_keywords_parser.parse(cont)
def _search_character(self, name, results):
cont = self._get_search_content('ch', name, results)
return self.scProxy.search_character_parser.parse(cont, results=results)['data']
def get_character_main(self, characterID):
cont = self._retrieve(self.urls['character_main'] % characterID)
ret = self.cProxy.character_main_parser.parse(cont)
ret['info sets'] = ('main', 'filmography')
return ret
get_character_filmography = get_character_main
def get_character_biography(self, characterID):
cont = self._retrieve(self.urls['character_main'] % characterID + 'bio')
return self.cProxy.character_bio_parser.parse(cont,
getRefs=self._getRefs)
def get_character_episodes(self, characterID):
cont = self._retrieve(self.urls['character_main'] % characterID +
'filmoseries')
return self.cProxy.character_series_parser.parse(cont)
def get_character_quotes(self, characterID):
cont = self._retrieve(self.urls['character_main'] % characterID + 'quotes')
return self.cProxy.character_quotes_parser.parse(cont,
getRefs=self._getRefs)
def _search_company(self, name, results):
cont = self._get_search_content('co', name, results)
url = self.urlOpener._last_url
return self.scompProxy.search_company_parser.parse(cont, url=url,
results=results)['data']
def get_company_main(self, companyID):
cont = self._retrieve(self.urls['company_main'] % companyID)
ret = self.compProxy.company_main_parser.parse(cont)
return ret
def _search_keyword(self, keyword, results):
        # XXX: the IMDb web server seems to have a serious problem with
        # non-ascii keywords.
# E.g.: http://akas.imdb.com/keyword/fianc%E9/
# will return a 500 Internal Server Error: Redirect Recursion.
keyword = keyword.encode('utf8', 'ignore')
try:
cont = self._get_search_content('kw', keyword, results)
except IMDbDataAccessError:
self._http_logger.warn('unable to search for keyword %s', keyword,
exc_info=True)
return []
return self.skProxy.search_keyword_parser.parse(cont, results=results)['data']
def _get_keyword(self, keyword, results):
keyword = keyword.encode('utf8', 'ignore')
try:
cont = self._retrieve(self.urls['keyword_main'] % keyword)
except IMDbDataAccessError:
self._http_logger.warn('unable to get keyword %s', keyword,
exc_info=True)
return []
return self.skProxy.search_moviekeyword_parser.parse(cont, results=results)['data']
def _get_top_bottom_movies(self, kind):
if kind == 'top':
parser = self.topBottomProxy.top250_parser
url = self.urls['top250']
elif kind == 'bottom':
parser = self.topBottomProxy.bottom100_parser
url = self.urls['bottom100']
else:
return []
cont = self._retrieve(url)
return parser.parse(cont)['data']
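def _usage_sketch():
    """Illustrative sketch, not part of the original module: as the package
    docstring notes, instances of this class are normally obtained through
    the imdb.IMDb factory function with the default 'http' access system."""
    import imdb
    ia = imdb.IMDb(accessSystem='http')
    results = ia.search_movie(u'the matrix')
    if not results:
        return None
    movie = results[0]
    ia.update(movie)  # fetches the 'main' info set via get_movie_main
    return movie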
|
{
"content_hash": "98508cf5e0acfcfebd46aac6fb7e628b",
"timestamp": "",
"source": "github",
"line_count": 843,
"max_line_length": 350,
"avg_line_length": 45.80189798339264,
"alnum_prop": 0.6171298334671467,
"repo_name": "boxed/CMi",
"id": "16f8518df3daeb08a5e82202253226306d5201e0",
"size": "38611",
"binary": false,
"copies": "20",
"ref": "refs/heads/master",
"path": "web_frontend/imdb/parser/http/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "13145"
},
{
"name": "CSS",
"bytes": "4207"
},
{
"name": "HTML",
"bytes": "11330"
},
{
"name": "JavaScript",
"bytes": "42021"
},
{
"name": "Objective-C",
"bytes": "143521"
},
{
"name": "Python",
"bytes": "4579559"
}
],
"symlink_target": ""
}
|
from boto import handler
import xml.sax
class Rule(object):
"""
    A Lifecycle rule for an S3 bucket.
:ivar id: Unique identifier for the rule. The value cannot be longer
than 255 characters.
:ivar prefix: Prefix identifying one or more objects to which the
rule applies.
:ivar status: If Enabled, the rule is currently being applied.
If Disabled, the rule is not currently being applied.
:ivar expiration: Indicates the lifetime, in days, of the objects
that are subject to the rule. The value must be a non-zero
positive integer.
"""
def __init__(self, id=None, prefix=None, status=None, expiration=None):
self.id = id
self.prefix = prefix
self.status = status
self.expiration = expiration
def __repr__(self):
return '<Rule: %s>' % self.id
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'ID':
self.id = value
elif name == 'Prefix':
self.prefix = value
elif name == 'Status':
self.status = value
elif name == 'Days':
self.expiration = int(value)
else:
setattr(self, name, value)
def to_xml(self):
s = '<Rule>'
s += '<ID>%s</ID>' % self.id
s += '<Prefix>%s</Prefix>' % self.prefix
s += '<Status>%s</Status>' % self.status
s += '<Expiration><Days>%d</Days></Expiration>' % self.expiration
s += '</Rule>'
return s
class Lifecycle(list):
"""
A container for the rules associated with a Lifecycle configuration.
"""
def startElement(self, name, attrs, connection):
if name == 'Rule':
rule = Rule()
self.append(rule)
return rule
return None
def endElement(self, name, value, connection):
setattr(self, name, value)
def to_xml(self):
"""
Returns a string containing the XML version of the Lifecycle
configuration as defined by S3.
"""
s = '<LifecycleConfiguration>'
for rule in self:
s += rule.to_xml()
s += '</LifecycleConfiguration>'
return s
def add_rule(self, id, prefix, status, expiration):
"""
Add a rule to this Lifecycle configuration. This only adds
the rule to the local copy. To install the new rule(s) on
the bucket, you need to pass this Lifecycle config object
to the configure_lifecycle method of the Bucket object.
:type id: str
:param id: Unique identifier for the rule. The value cannot be longer
than 255 characters.
:type prefix: str
        :param prefix: Prefix identifying one or more objects to which the
rule applies.
:type status: str
:param status: If 'Enabled', the rule is currently being applied.
If 'Disabled', the rule is not currently being applied.
:type expiration: int
:param expiration: Indicates the lifetime, in days, of the objects
that are subject to the rule. The value must be a non-zero
positive integer.
"""
rule = Rule(id, prefix, status, expiration)
self.append(rule)
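    # Usage sketch (assumes a boto S3 `bucket` obtained elsewhere, e.g. via
    # `conn.get_bucket(...)`):
    #
    #   lifecycle = Lifecycle()
    #   lifecycle.add_rule('expire-logs', 'logs/', 'Enabled', 30)
    #   bucket.configure_lifecycle(lifecycle)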
|
{
"content_hash": "5836634d0fe3780853afdc7292f4c1fc",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 77,
"avg_line_length": 31.710280373831775,
"alnum_prop": 0.5773651635720601,
"repo_name": "yyuu/botornado",
"id": "7ca6b61d9eeefc3ea6949dc3a935c6963d050fd3",
"size": "4493",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "boto/s3/lifecycle.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1974446"
}
],
"symlink_target": ""
}
|
"""Red Hat Enterprise Linux specific platform info."""
import platform
from gcimagebundlelib import linux
class RHEL(linux.LinuxPlatform):
"""Red Hat Enterprise Linux specific information."""
@staticmethod
def IsThisPlatform(root='/'):
(distribution, _, _) = platform.linux_distribution()
if distribution == 'Red Hat Enterprise Linux Server':
return True
return False
def __init__(self):
super(RHEL, self).__init__()
def GetPreferredFilesystemType(self):
    (_, version, _) = platform.linux_distribution()
if version.startswith('7'):
return 'xfs'
return 'ext4'
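# Detection sketch (hypothetical driver code; the image-bundling tool would
# normally probe each registered platform in turn):
#
#   if RHEL.IsThisPlatform():
#     plat = RHEL()
#     fs = plat.GetPreferredFilesystemType()  # 'xfs' on RHEL 7, else 'ext4'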
|
{
"content_hash": "fbd4e0a066c497009564fe24501302d2",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 60,
"avg_line_length": 23,
"alnum_prop": 0.679549114331723,
"repo_name": "tytso/compute-image-packages",
"id": "9ebf1ef5afad0b47ff9d25087b3c803569e90872",
"size": "1215",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "gcimagebundle/gcimagebundlelib/rhel.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "182649"
},
{
"name": "Shell",
"bytes": "53099"
}
],
"symlink_target": ""
}
|
import warnings
from ..base import BaseEstimator, TransformerMixin
from ..utils.validation import _allclose_dense_sparse
def _identity(X):
"""The identity function."""
return X
class FunctionTransformer(TransformerMixin, BaseEstimator):
"""Constructs a transformer from an arbitrary callable.
A FunctionTransformer forwards its X (and optionally y) arguments to a
user-defined function or function object and returns the result of this
function. This is useful for stateless transformations such as taking the
log of frequencies, doing custom scaling, etc.
Note: If a lambda is used as the function, then the resulting
transformer will not be pickleable.
.. versionadded:: 0.17
Read more in the :ref:`User Guide <function_transformer>`.
Parameters
----------
func : callable, default=None
The callable to use for the transformation. This will be passed
the same arguments as transform, with args and kwargs forwarded.
If func is None, then func will be the identity function.
inverse_func : callable, default=None
The callable to use for the inverse transformation. This will be
passed the same arguments as inverse transform, with args and
kwargs forwarded. If inverse_func is None, then inverse_func
will be the identity function.
validate : bool, default=False
Indicate that the input X array should be checked before calling
``func``. The possibilities are:
- If False, there is no input validation.
- If True, then X will be converted to a 2-dimensional NumPy array or
sparse matrix. If the conversion is not possible an exception is
raised.
.. versionchanged:: 0.22
The default of ``validate`` changed from True to False.
accept_sparse : bool, default=False
Indicate that func accepts a sparse matrix as input. If validate is
False, this has no effect. Otherwise, if accept_sparse is false,
sparse matrix inputs will cause an exception to be raised.
check_inverse : bool, default=True
        Whether to check that ``func`` followed by ``inverse_func`` leads to
the original inputs. It can be used for a sanity check, raising a
warning when the condition is not fulfilled.
.. versionadded:: 0.20
kw_args : dict, default=None
Dictionary of additional keyword arguments to pass to func.
.. versionadded:: 0.18
inv_kw_args : dict, default=None
Dictionary of additional keyword arguments to pass to inverse_func.
.. versionadded:: 0.18
Examples
--------
>>> import numpy as np
>>> from sklearn.preprocessing import FunctionTransformer
>>> transformer = FunctionTransformer(np.log1p)
>>> X = np.array([[0, 1], [2, 3]])
>>> transformer.transform(X)
array([[0. , 0.6931...],
[1.0986..., 1.3862...]])
"""
def __init__(
self,
func=None,
inverse_func=None,
*,
validate=False,
accept_sparse=False,
check_inverse=True,
kw_args=None,
inv_kw_args=None,
):
self.func = func
self.inverse_func = inverse_func
self.validate = validate
self.accept_sparse = accept_sparse
self.check_inverse = check_inverse
self.kw_args = kw_args
self.inv_kw_args = inv_kw_args
def _check_input(self, X):
if self.validate:
return self._validate_data(X, accept_sparse=self.accept_sparse)
return X
def _check_inverse_transform(self, X):
"""Check that func and inverse_func are the inverse."""
idx_selected = slice(None, None, max(1, X.shape[0] // 100))
X_round_trip = self.inverse_transform(self.transform(X[idx_selected]))
if not _allclose_dense_sparse(X[idx_selected], X_round_trip):
warnings.warn(
"The provided functions are not strictly"
" inverse of each other. If you are sure you"
" want to proceed regardless, set"
" 'check_inverse=False'.",
UserWarning,
)
def fit(self, X, y=None):
"""Fit transformer by checking X.
If ``validate`` is ``True``, ``X`` will be checked.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Input array.
Returns
-------
self
"""
X = self._check_input(X)
if self.check_inverse and not (self.func is None or self.inverse_func is None):
self._check_inverse_transform(X)
return self
def transform(self, X):
"""Transform X using the forward function.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Input array.
Returns
-------
X_out : array-like, shape (n_samples, n_features)
Transformed input.
"""
return self._transform(X, func=self.func, kw_args=self.kw_args)
def inverse_transform(self, X):
"""Transform X using the inverse function.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Input array.
Returns
-------
X_out : array-like, shape (n_samples, n_features)
Transformed input.
"""
return self._transform(X, func=self.inverse_func, kw_args=self.inv_kw_args)
def _transform(self, X, func=None, kw_args=None):
X = self._check_input(X)
if func is None:
func = _identity
return func(X, **(kw_args if kw_args else {}))
def _more_tags(self):
return {"no_validation": not self.validate, "stateless": True}
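# A short round-trip sketch of the class above (illustrative values):
#
#   import numpy as np
#   ft = FunctionTransformer(func=np.log1p, inverse_func=np.expm1,
#                            check_inverse=True)
#   X = np.array([[1.0, 2.0], [3.0, 4.0]])
#   Xt = ft.fit_transform(X)            # log1p applied element-wise
#   X_back = ft.inverse_transform(Xt)   # expm1 recovers the original input
#   assert np.allclose(X, X_back)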
|
{
"content_hash": "331c5dd27fb72bf3f040b70121ea7250",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 87,
"avg_line_length": 32.36666666666667,
"alnum_prop": 0.6048746996223824,
"repo_name": "amueller/scikit-learn",
"id": "345cc96bb1c2eef00b73a543a93f16f15c03835c",
"size": "5826",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "sklearn/preprocessing/_function_transformer.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "2232"
},
{
"name": "C",
"bytes": "41206"
},
{
"name": "C++",
"bytes": "146835"
},
{
"name": "Makefile",
"bytes": "1711"
},
{
"name": "Python",
"bytes": "9958394"
},
{
"name": "Shell",
"bytes": "44588"
}
],
"symlink_target": ""
}
|
from marshmallow import Schema, fields, post_load, EXCLUDE
from ..resource import Resource
from collections import namedtuple
class Activity(Resource):
"""
https://dev.chartmogul.com/v1.0/reference#list-customer-subscriptions
"""
_path = "/activities"
_root_key = 'entries'
_many = namedtuple('Activities', [_root_key, "has_more", "per_page"])
class _Schema(Schema):
activity_arr = fields.Number(data_key='activity-arr')
activity_mrr = fields.Number(data_key='activity-mrr')
activity_mrr_movement = fields.Number(data_key='activity-mrr-movement')
currency = fields.String()
date = fields.DateTime()
description = fields.String()
type = fields.String()
subscription_external_id = fields.String(data_key='subscription-external-id')
plan_external_id = fields.String(data_key='plan-external-id')
customer_name = fields.String(data_key='customer-name')
customer_uuid = fields.String(data_key='customer-uuid')
customer_external_id = fields.String(data_key='customer-external-id')
billing_connector_uuid = fields.String(data_key='billing-connector-uuid')
uuid = fields.String(data_key='uuid')
@post_load
def make(self, data, **kwargs):
return Activity(**data)
_schema = _Schema(unknown=EXCLUDE)
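    # Deserialization sketch (hypothetical payload; keys follow the data_key
    # mapping declared above):
    #
    #   entry = Activity._schema.load(
    #       {'activity-mrr': 100, 'type': 'new_biz', 'currency': 'USD'})
    #   entry.activity_mrr  # -> 100.0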
|
{
"content_hash": "fb5e458aae025b76bd863df08008c852",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 85,
"avg_line_length": 40.14705882352941,
"alnum_prop": 0.6593406593406593,
"repo_name": "chartmogul/chartmogul-python",
"id": "1e599a24751b4c2a5e44acad96a679107b59b18c",
"size": "1365",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "chartmogul/api/activity.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "141728"
}
],
"symlink_target": ""
}
|
"""
Generating and counting primes.
"""
import random
from bisect import bisect
from primetest import isprime
from sympy.core.numbers import integer_nthroot
# Using arrays for sieving instead of lists greatly reduces
# memory consumption
from array import array as _array
def _arange(a, b):
ar = _array('l', [0]*(b-a))
for i, e in enumerate(xrange(a, b)):
ar[i] = e
return ar
class Sieve:
"""An infinite list of prime numbers, implemented as a dynamically
growing sieve of Eratosthenes. When a lookup is requested involving
a number that has not been sieved, the sieve is automatically
extended up to that number."""
_list = _array('l', [2, 3, 5, 7, 11, 13])
def __repr__(self):
return "<Sieve with %i primes sieved: 2, 3, 5, ... %i, %i>" % \
(len(self._list), self._list[-2], self._list[-1])
def extend(self, N):
"""Grow the sieve to cover all numbers <= N."""
if N <= self._list[-1]:
return
        # We need to sieve against all bases up to sqrt(N). If there
# are too few, extend the list recursively.
maxbase = int(N**0.5)+1
self.extend(maxbase)
# Create a new sieve starting from N**0.5
begin = self._list[-1] + 1
newsieve = _arange(begin, N+1)
# Now eliminate all multiples of primes in [2, N**0.5]
for p in self.primerange(2, maxbase):
# Start counting at a multiple of p, offsetting
# the index to account for the new sieve's base index
startindex = (-begin) % p
for i in xrange(startindex, len(newsieve), p):
newsieve[i] = 0
# Merge the sieves
self._list += _array('l', [x for x in newsieve if x])
def extend_to_no(self, n):
"""Extend to include (at least) the nth prime number"""
while len(self._list) < n:
self.extend(int(self._list[-1] * 1.5))
def primerange(self, a, b):
"""Generate all prime numbers in the range [a, b)."""
assert a <= b
if b < 2:
return
a = max(2, a)
self.extend(b)
i = self.search(a)[1]
maxi = len(self._list) + 1
while i < maxi:
p = self._list[i-1]
if p < b:
yield p
i += 1
else:
return
def search(self, n):
"""For n >= 2, return the tightest a, b such that
self[a] <= n <= self[b]"""
assert n >= 2
if n > self._list[-1]:
self.extend(n)
b = bisect(self._list, n)
if self._list[b-1] == n:
return b, b
else:
return b, b+1
def __contains__(self, n):
if n < 2:
return False
a, b = self.search(n)
return a == b
def __getitem__(self, n):
"""Return the nth prime number"""
self.extend_to_no(n)
return self._list[n-1]
# Generate a global object for repeated use in trial division etc
sieve = Sieve()
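# A quick sketch of the sieve in use (results follow from the definitions
# above):
#
#   sieve[5]                          # -> 11, the 5th prime
#   13 in sieve                       # -> True
#   list(sieve.primerange(10, 20))    # -> [11, 13, 17, 19]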
def prime(n):
""" Return the nth prime, with the primes indexed as prime(1) = 2,
prime(2) = 3, etc.... The nth prime is approximately n*log(n) and
can never be larger than 2**n.
Reference: http://primes.utm.edu/glossary/xpage/BertrandsPostulate.html
"""
assert n > 0
return sieve[n]
def primepi(n):
""" Return the value of the prime counting function pi(n) = the number
of prime numbers less than or equal to n. The number n need not
necessarily be an integer.
"""
if n < 2:
return 0
else:
n = int(n)
return sieve.search(n)[0]
def nextprime(n, i=1):
""" Return the ith prime greater than n.
Potential primes are located at 6*j +/- 1.
>>> from sympy import nextprime
>>> [(i, nextprime(i)) for i in range(10, 15)]
[(10, 11), (11, 13), (12, 13), (13, 17), (14, 17)]
>>> nextprime(2, i=2) # the 2nd prime after 2
5
"""
if i > 1:
pr = n
j = 1
while 1:
pr = nextprime(pr)
j += 1
if j > i:
break
return pr
n = int(n)
if n < 2:
return 2
if n < 7:
return {2: 3, 3: 5, 4: 5, 5: 7, 6: 7}[n]
nn = 6*(n//6)
if nn == n:
n += 1
if isprime(n):
return n
n += 4
elif n - nn == 5:
n += 2
if isprime(n):
return n
n += 4
else:
n = nn + 5
while 1:
if isprime(n):
return n
n += 2
if isprime(n):
return n
n += 4
def prevprime(n):
""" Return the largest prime smaller than n.
Potential primes are located at 6*j +/- 1.
>>> from sympy import prevprime
>>> [(i, prevprime(i)) for i in range(10, 15)]
[(10, 7), (11, 7), (12, 11), (13, 11), (14, 13)]
"""
n = int(n)
if n < 3:
raise ValueError("no preceding primes")
if n < 8:
return {3: 2, 4: 3, 5: 3, 6: 5, 7: 5}[n]
nn = 6*(n//6)
if n - nn <= 1:
n = nn - 1
if isprime(n):
return n
n -= 4
else:
n = nn + 1
while 1:
if isprime(n):
return n
n -= 2
if isprime(n):
return n
n -= 4
def primerange(a, b):
    """ Generate all prime numbers in the range [a, b).
    Some famous conjectures about the occurrence of primes in a given
    range are [1]:
    - Twin primes: though often not, the following will give 2 primes an
      infinite (oo) number of times:
primerange(6*n - 1, 6*n + 2)
- Legendre's: the following always yields at least one prime
primerange(n**2, (n+1)**2+1)
- Bertrand's (proven): there is always a prime in the range
primerange(n, 2*n)
- Brocard's: there are at least four primes in the range
primerange(prime(n)**2, prime(n+1)**2)
The average gap between primes is log(n) [2];
the gap between primes can be arbitrarily large since sequences of
composite numbers are arbitrarily large, e.g. the numbers in the sequence
n!+2, n!+3 ... n!+n are all composite.
References:
[1] http://en.wikipedia.org/wiki/Prime_number
[2] http://primes.utm.edu/notes/gaps.html
"""
assert a <= b
a -= 1
while 1:
a = nextprime(a)
if a < b:
yield a
else:
return
def randprime(a, b):
""" Return a random prime number in the range [a, b).
Bertrand's postulate assures that
randprime(a, 2*a) will always succeed for a > 1.
Reference: http://en.wikipedia.org/wiki/Bertrand's_postulate
"""
n = random.randint(a-1, b)
p = nextprime(n)
if p >= b:
p = prevprime(b)
if p < a:
raise ValueError("no primes exist in the specified range")
return p
def primorial(n, nth=True):
""" Returns the product of either a) the first n primes (default) or
b) the primes less than or equal to n (when `nth`=False).
>>> from sympy.ntheory.generate import primorial, randprime, primerange
>>> from sympy import factorint, Mul, primefactors
>>> primorial(4) # the first 4 primes are 2, 3, 5, 7
210
>>> primorial(4, nth=0) # primes <= 4 are 2 and 3
6
>>> primorial(1)
2
>>> primorial(1, nth=0)
1
One can argue that the primes are infinite since if you take
a set of primes and multiply them together (e.g. the primorial) and
then add or subtract 1, the result cannot be divided by any of the
original factors, hence either 1 or more primes must divide this
product of primes.
>>> factorint(primorial(4) + 1)
{211: 1}
>>> factorint(primorial(4) - 1)
{11: 1, 19: 1}
>>> p = list(primerange(10, 20))
>>> sorted(set(primefactors(Mul(*p) + 1)).difference(set(p)))
[2, 5, 31, 149]
"""
if n < 1:
raise ValueError("primorial argument must be >= 1")
p = 1
if nth:
for i in range(1, n + 1):
p *= prime(i)
else:
for i in primerange(2, n + 1):
p *= i
return p
def cycle_length(f, x0, nmax=None, values=False):
"""For a given iterated sequence, return a generator that gives
the length of the iterated cycle (lambda) and the length of terms
before the cycle begins (mu); if ``values`` is True then the
terms of the sequence will be returned instead.
Note: more than the first lambda + mu terms may be returned and this
is the cost of cycle detection with Brent's method; there are, however,
    generally fewer terms calculated than would have been calculated if the
proper ending point were determined, e.g. by using Floyd's method.
>>> from sympy.ntheory.generate import cycle_length
>>> from random import Random
This will yield successive values of i <-- func(i):
>>> def iter(func, i):
... while 1:
... ii = func(i)
... yield ii
... i = ii
...
A function is defined:
>>> func = lambda i: (i**2 + 1) % 51
    and given a seed of 4 and the mu and lambda terms calculated:
>>> cycle_length(func, 4).next()
(6, 2)
We can see what is meant by looking at the output:
>>> n = cycle_length(func, 4, values=True)
>>> list(ni for ni in n)
[17, 35, 2, 5, 26, 14, 44, 50, 2, 5, 26, 14]
\_______________/
6 values after
the first 2
If a sequence is suspected of being longer than you might wish, ``nmax``
    can be used to exit early (in which case mu will be returned as None):
>>> cycle_length(func, 4, nmax = 4).next()
(4, None)
>>> [ni for ni in cycle_length(func, 4, nmax = 4, values=True)]
[17, 35, 2, 5]
Code modified from:
http://en.wikipedia.org/wiki/Cycle_detection.
"""
# main phase: search successive powers of two
power = lam = 1
tortoise, hare = x0, f(x0) # f(x0) is the element/node next to x0.
i = 0
while tortoise != hare and (not nmax or i < nmax):
i += 1
if power == lam: # time to start a new power of two?
tortoise = hare
power *= 2
lam = 0
if values:
yield hare
hare = f(hare)
lam += 1
if nmax and i == nmax:
if values:
return
else:
yield nmax, None
return
if not values:
# Find the position of the first repetition of length lambda
mu = 0
tortoise = hare = x0
for i in range(lam):
hare = f(hare)
while tortoise != hare:
tortoise = f(tortoise)
hare = f(hare)
mu += 1
if mu:
mu -= 1
yield lam, mu
|
{
"content_hash": "ee27eae6a38fc992d8a2e1008c726c52",
"timestamp": "",
"source": "github",
"line_count": 389,
"max_line_length": 81,
"avg_line_length": 28.632390745501286,
"alnum_prop": 0.5216376369186568,
"repo_name": "tarballs-are-good/sympy",
"id": "d489a7c083b1ae2738ba8d5f57ed3483703596b8",
"size": "11138",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "sympy/ntheory/generate.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from assopy import models as amodels
from conference import models as cmodels
from p3 import models
class Command(BaseCommand):
    """
    Sync each AssopyUser's data (photo, birthday, phone, homepage and
    twitter handle) into the corresponding AttendeeProfile and P3Profile.
    """
@transaction.atomic
def handle(self, *args, **options):
for u in amodels.AssopyUser.objects.all().select_related('user'):
print(u.name())
try:
profile = cmodels.AttendeeProfile.objects.get(user=u.user)
except cmodels.AttendeeProfile.DoesNotExist:
profile = cmodels.AttendeeProfile.objects.getOrCreateForUser(u.user)
if u.photo:
try:
profile.image.save(u.photo.name, u.photo.file, save=False)
except IOError:
pass
profile.birthday = u.birthday
profile.phone = u.phone
profile.personal_homepage = u.www
profile.save()
try:
p3p = profile.p3_profile
except models.P3Profile.DoesNotExist:
p3p = models.P3Profile(profile=profile)
p3p.twitter = u.twitter
if not u.photo:
url = u.photo_url()
if 'gravatar.com' in url:
p3p.image_gravatar = True
else:
p3p.image_url = url
p3p.save()
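# Invoked like any other Django management command, e.g.:
#
#   python manage.py sync_profiles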
|
{
"content_hash": "a4bc8be92dee199b5ddc1cd12c820cf2",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 84,
"avg_line_length": 34.58536585365854,
"alnum_prop": 0.5557122708039492,
"repo_name": "EuroPython/epcon",
"id": "7828b7fe4c60f3b84410f998d39f99e5e5d94e8f",
"size": "1419",
"binary": false,
"copies": "1",
"ref": "refs/heads/ep2021",
"path": "p3/management/commands/sync_profiles.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "6475"
},
{
"name": "Dockerfile",
"bytes": "609"
},
{
"name": "HTML",
"bytes": "412025"
},
{
"name": "JavaScript",
"bytes": "421281"
},
{
"name": "Makefile",
"bytes": "4679"
},
{
"name": "Python",
"bytes": "991334"
},
{
"name": "Shell",
"bytes": "1182"
}
],
"symlink_target": ""
}
|
"""
@file shuffle_lines.py
@brief shuffle lines in a file
@author ChenglongChen
"""
import sys
import random
def main():
# collect argvs
    seed = int(sys.argv[1])
file_in = sys.argv[2]
file_out = sys.argv[3]
# read
with open(file_in) as in_:
lines = in_.readlines()
# shuffle
random.seed(seed)
random.shuffle(lines)
# write
with open(file_out, "w") as out_:
for line in lines:
out_.write(line)
if __name__ == "__main__":
main()
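# Command-line usage, following the argv handling above:
#
#   python shuffle_lines.py <seed> <input_file> <output_file>
#   e.g.: python shuffle_lines.py 2015 train.csv train_shuffled.csv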
|
{
"content_hash": "33756e915e6cc1c130132eca2c02af96",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 37,
"avg_line_length": 16.142857142857142,
"alnum_prop": 0.5893805309734513,
"repo_name": "ChenglongChen/caffe-windows",
"id": "469fd11903e9dfa96cea103ec2b58539f1b49146",
"size": "588",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/kaggle-bowl/caffe_windows/utils/shuffle_lines.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "333"
},
{
"name": "C",
"bytes": "200"
},
{
"name": "C++",
"bytes": "2172671"
},
{
"name": "CMake",
"bytes": "8635"
},
{
"name": "Cuda",
"bytes": "125124"
},
{
"name": "Matlab",
"bytes": "9721"
},
{
"name": "Protocol Buffer",
"bytes": "34168"
},
{
"name": "Python",
"bytes": "210562"
},
{
"name": "Shell",
"bytes": "1920"
}
],
"symlink_target": ""
}
|
"""Blueprint for the /packages endpoint.
If a package is *not* in the ``PRIVATE_EGGS`` set, pryvate will redirect
the client to another CheeseShop. However, if a package *is* in
``PRIVATE_EGGS`` but the ``filename`` is not available, it will return
a ``404`` response; otherwise it will return a ``200`` response with the
contents of ``filename``.
"""
|
{
"content_hash": "400862c3a65f16e0180ce11fbdacdac7",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 72,
"avg_line_length": 43.875,
"alnum_prop": 0.7236467236467237,
"repo_name": "Dinoshauer/pryvate",
"id": "ecee2806e4660f774c85f7c1645f7fa976141123",
"size": "351",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pryvate/blueprints/packages/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "386"
},
{
"name": "Python",
"bytes": "17807"
}
],
"symlink_target": ""
}
|
"""
A reader for the simple .pry config file format. The format is as follows:
base = basedirectory
coverage = coveragedirectory
exclude = newline separated
paths excluded
from coverage
The special _magic flag is needed to allow pry to run coverage analysis on
itself.
"""
import ConfigParser, cStringIO, os.path
class Config:
_valid = set(["base", "coverage", "exclude", "_magic"])
def __init__(self, path):
self.path = path
if os.path.isfile(path):
self.c = ConfigParser.SafeConfigParser()
data = open(path).read()
# You know, Python is my favourite language and all, but its
# standard library can be utterly moronic
io = cStringIO.StringIO()
io.write("[pry]\n")
io.write(data)
io.reset()
self.c.readfp(io)
options = set(self.c.options("pry"))
if not self._valid.issuperset(options):
bad = options - self._valid
bad = ",".join(list(bad))
raise ValueError, "Unknown options in config file: %s"%bad
items = dict(self.c.items("pry"))
self.base = items.get("base", "..").strip()
self.coverage = items.get("coverage", "..").strip()
if items.has_key("_magic"):
self._magic = True
else:
self._magic = False
ex = items.get("exclude", ".")
ex = ex.split("\n")
self.exclude = [i.strip() for i in ex]
else:
self.base = ".."
self.coverage = ".."
self.exclude = ["."]
self._magic = False
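# Example .pry file this class would parse (a sketch of the format from the
# module docstring; continuation lines are indented, ConfigParser-style):
#
#   base = ..
#   coverage = ./mylib
#   exclude = ./test
#       ./scripts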
|
{
"content_hash": "dec8c1c49a8c8059086b71856006949c",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 79,
"avg_line_length": 36.395833333333336,
"alnum_prop": 0.5082999427590155,
"repo_name": "cortesi/pry",
"id": "495fcd13a12a2dbc5545f358fa8facc11117f764",
"size": "1747",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "libpry/config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "104838"
},
{
"name": "Shell",
"bytes": "321"
}
],
"symlink_target": ""
}
|
import random
from django import template
register = template.Library()
def _lower_and_upper(a, b):
a = a or 0
if a and not b:
b = 0
elif not (a or b):
b = 1
return min(a, b), max(a, b)
@register.simple_tag()
def rand_float(a=None, b=None):
lower, upper = _lower_and_upper(a, b)
return lower + (upper - lower) * random.random()
@register.simple_tag()
def rand_int(a=None, b=None):
return random.randint(*_lower_and_upper(a, b))
@register.simple_tag()
def rand_from(*args):
if not args:
        raise ValueError("At least one argument must be given to choose from")
return random.choice(args)
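# Template usage sketch (the tag library takes its name from this module,
# "random"):
#
#   {% load random %}
#   {% rand_int 1 10 %}          (integer in [1, 10])
#   {% rand_float 0 1 %}         (float in [0.0, 1.0))
#   {% rand_from "a" "b" "c" %}  (one of the given choices)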
|
{
"content_hash": "c715b72b85acb1beb79a35290e3fd80e",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 77,
"avg_line_length": 19.264705882352942,
"alnum_prop": 0.6244274809160305,
"repo_name": "OmegaDroid/gtfo",
"id": "c6f3c02cea2a7db4a125c33d8368d58873d05458",
"size": "655",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gtfo/gtfo_filters/templatetags/random.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "6763"
},
{
"name": "Python",
"bytes": "26978"
},
{
"name": "Shell",
"bytes": "6706"
}
],
"symlink_target": ""
}
|
logger.info("Loading 2 objects to table teams_team...")
# fields: id, ref, name
loader.save(create_teams_team(1,u'E',['Eupen', '', '']))
loader.save(create_teams_team(2,u'S',['St. Vith', '', '']))
loader.flush_deferred_objects()
|
{
"content_hash": "e26d669bddadbaa0dfdc038534576388",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 59,
"avg_line_length": 38.333333333333336,
"alnum_prop": 0.6478260869565218,
"repo_name": "lino-framework/book",
"id": "e3d2d31af2866296e853ea6765cf5e65fe6a2a6c",
"size": "254",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lino_book/projects/lydia/tests/dumps/18.12.0/teams_team.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "HTML",
"bytes": "3668"
},
{
"name": "JavaScript",
"bytes": "7140"
},
{
"name": "Python",
"bytes": "991438"
},
{
"name": "Shell",
"bytes": "989"
}
],
"symlink_target": ""
}
|
from django import forms
class AppForm(forms.Form):
name = forms.CharField(max_length=100,
help_text=u'Enter a name for the App')
description = forms.CharField(max_length=200,
help_text=u'Enter a description',
widget=forms.Textarea)
author = forms.CharField(max_length=200,
help_text="Author for the App")
version = forms.IntegerField()
file = forms.FileField()
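# View-side usage sketch (hypothetical view code; the form carries an upload,
# so both POST data and FILES must be bound):
#
#   form = AppForm(request.POST, request.FILES)
#   if form.is_valid():
#       name = form.cleaned_data['name']
#       uploaded = form.cleaned_data['file']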
|
{
"content_hash": "615088babe4a3452260939b9c7877e64",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 49,
"avg_line_length": 30.866666666666667,
"alnum_prop": 0.7084233261339092,
"repo_name": "creativepsyco/panex-web",
"id": "442084130c86e4f4bd096af5040e1e5dfa5a050e",
"size": "463",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "58516"
},
{
"name": "Python",
"bytes": "29003"
}
],
"symlink_target": ""
}
|
"""
Support for exposing a templated binary sensor.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/binary_sensor.template/
"""
import logging
import voluptuous as vol
from homeassistant.core import callback
from homeassistant.components.binary_sensor import (
BinarySensorDevice, ENTITY_ID_FORMAT, PLATFORM_SCHEMA,
DEVICE_CLASSES_SCHEMA)
from homeassistant.const import (
ATTR_FRIENDLY_NAME, ATTR_ENTITY_ID, CONF_VALUE_TEMPLATE,
CONF_ICON_TEMPLATE, CONF_ENTITY_PICTURE_TEMPLATE,
CONF_SENSORS, CONF_DEVICE_CLASS, EVENT_HOMEASSISTANT_START, MATCH_ALL)
from homeassistant.exceptions import TemplateError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import async_generate_entity_id
from homeassistant.helpers.event import (
async_track_state_change, async_track_same_state)
_LOGGER = logging.getLogger(__name__)
CONF_DELAY_ON = 'delay_on'
CONF_DELAY_OFF = 'delay_off'
SENSOR_SCHEMA = vol.Schema({
vol.Required(CONF_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_ICON_TEMPLATE): cv.template,
vol.Optional(CONF_ENTITY_PICTURE_TEMPLATE): cv.template,
vol.Optional(ATTR_FRIENDLY_NAME): cv.string,
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_DELAY_ON):
vol.All(cv.time_period, cv.positive_timedelta),
vol.Optional(CONF_DELAY_OFF):
vol.All(cv.time_period, cv.positive_timedelta),
})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_SENSORS): cv.schema_with_slug_keys(SENSOR_SCHEMA),
})
async def async_setup_platform(hass, config, async_add_entities,
discovery_info=None):
"""Set up template binary sensors."""
sensors = []
for device, device_config in config[CONF_SENSORS].items():
value_template = device_config[CONF_VALUE_TEMPLATE]
icon_template = device_config.get(CONF_ICON_TEMPLATE)
entity_picture_template = device_config.get(
CONF_ENTITY_PICTURE_TEMPLATE)
entity_ids = set()
manual_entity_ids = device_config.get(ATTR_ENTITY_ID)
invalid_templates = []
for tpl_name, template in (
(CONF_VALUE_TEMPLATE, value_template),
(CONF_ICON_TEMPLATE, icon_template),
(CONF_ENTITY_PICTURE_TEMPLATE, entity_picture_template),
):
if template is None:
continue
template.hass = hass
if manual_entity_ids is not None:
continue
template_entity_ids = template.extract_entities()
if template_entity_ids == MATCH_ALL:
entity_ids = MATCH_ALL
# Cut off _template from name
invalid_templates.append(tpl_name[:-9])
elif entity_ids != MATCH_ALL:
entity_ids |= set(template_entity_ids)
if manual_entity_ids is not None:
entity_ids = manual_entity_ids
elif entity_ids != MATCH_ALL:
entity_ids = list(entity_ids)
if invalid_templates:
_LOGGER.warning(
'Template binary sensor %s has no entity ids configured to'
                ' track, nor were we able to extract the entities to track'
' from the %s template(s). This entity will only be able'
' to be updated manually.',
device, ', '.join(invalid_templates))
friendly_name = device_config.get(ATTR_FRIENDLY_NAME, device)
device_class = device_config.get(CONF_DEVICE_CLASS)
delay_on = device_config.get(CONF_DELAY_ON)
delay_off = device_config.get(CONF_DELAY_OFF)
sensors.append(
BinarySensorTemplate(
hass, device, friendly_name, device_class, value_template,
icon_template, entity_picture_template, entity_ids,
delay_on, delay_off)
)
if not sensors:
_LOGGER.error("No sensors added")
return False
async_add_entities(sensors)
return True
class BinarySensorTemplate(BinarySensorDevice):
"""A virtual binary sensor that triggers from another sensor."""
def __init__(self, hass, device, friendly_name, device_class,
value_template, icon_template, entity_picture_template,
entity_ids, delay_on, delay_off):
"""Initialize the Template binary sensor."""
self.hass = hass
self.entity_id = async_generate_entity_id(
ENTITY_ID_FORMAT, device, hass=hass)
self._name = friendly_name
self._device_class = device_class
self._template = value_template
self._state = None
self._icon_template = icon_template
self._entity_picture_template = entity_picture_template
self._icon = None
self._entity_picture = None
self._entities = entity_ids
self._delay_on = delay_on
self._delay_off = delay_off
async def async_added_to_hass(self):
"""Register callbacks."""
@callback
def template_bsensor_state_listener(entity, old_state, new_state):
"""Handle the target device state changes."""
self.async_check_state()
@callback
def template_bsensor_startup(event):
"""Update template on startup."""
if self._entities != MATCH_ALL:
# Track state change only for valid templates
async_track_state_change(
self.hass, self._entities, template_bsensor_state_listener)
self.async_check_state()
self.hass.bus.async_listen_once(
EVENT_HOMEASSISTANT_START, template_bsensor_startup)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def icon(self):
"""Return the icon to use in the frontend, if any."""
return self._icon
@property
def entity_picture(self):
"""Return the entity_picture to use in the frontend, if any."""
return self._entity_picture
@property
def is_on(self):
"""Return true if sensor is on."""
return self._state
@property
def device_class(self):
"""Return the sensor class of the sensor."""
return self._device_class
@property
def should_poll(self):
"""No polling needed."""
return False
@callback
def _async_render(self):
"""Get the state of template."""
state = None
try:
state = (self._template.async_render().lower() == 'true')
except TemplateError as ex:
if ex.args and ex.args[0].startswith(
"UndefinedError: 'None' has no attribute"):
# Common during HA startup - so just a warning
_LOGGER.warning("Could not render template %s, "
"the state is unknown", self._name)
return
_LOGGER.error("Could not render template %s: %s", self._name, ex)
for property_name, template in (
('_icon', self._icon_template),
('_entity_picture', self._entity_picture_template)):
if template is None:
continue
try:
setattr(self, property_name, template.async_render())
except TemplateError as ex:
friendly_property_name = property_name[1:].replace('_', ' ')
if ex.args and ex.args[0].startswith(
"UndefinedError: 'None' has no attribute"):
# Common during HA startup - so just a warning
_LOGGER.warning('Could not render %s template %s,'
' the state is unknown.',
friendly_property_name, self._name)
else:
_LOGGER.error('Could not render %s template %s: %s',
friendly_property_name, self._name, ex)
return state
return state
@callback
def async_check_state(self):
"""Update the state from the template."""
state = self._async_render()
        # return if the state doesn't change or is invalid
if state is None or state == self.state:
return
@callback
def set_state():
"""Set state of template binary sensor."""
self._state = state
self.async_schedule_update_ha_state()
# state without delay
if (state and not self._delay_on) or \
(not state and not self._delay_off):
set_state()
return
period = self._delay_on if state else self._delay_off
async_track_same_state(
self.hass, period, set_state, entity_ids=self._entities,
async_check_same_func=lambda *args: self._async_render() == state)
async def async_update(self):
"""Force update of the state from the template."""
self.async_check_state()
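# Example configuration.yaml entry for this platform (a sketch derived from
# the schema above; the entity and template are illustrative):
#
#   binary_sensor:
#     - platform: template
#       sensors:
#         sun_up:
#           friendly_name: "Sun is up"
#           value_template: "{{ states('sun.sun') == 'above_horizon' }}"
#           delay_off:
#             seconds: 5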
|
{
"content_hash": "89a1736d9701f14a628be8ef1c436026",
"timestamp": "",
"source": "github",
"line_count": 253,
"max_line_length": 79,
"avg_line_length": 36.26482213438735,
"alnum_prop": 0.5937874659400545,
"repo_name": "nugget/home-assistant",
"id": "605ab24a26417091eec01ce5e09294025f71a0b8",
"size": "9175",
"binary": false,
"copies": "4",
"ref": "refs/heads/dev",
"path": "homeassistant/components/binary_sensor/template.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1175"
},
{
"name": "Dockerfile",
"bytes": "1081"
},
{
"name": "HCL",
"bytes": "826"
},
{
"name": "Python",
"bytes": "14492390"
},
{
"name": "Ruby",
"bytes": "745"
},
{
"name": "Shell",
"bytes": "17526"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('home', '0006_auto_20161101_0622'),
]
operations = [
migrations.AlterField(
model_name='movie',
name='actors',
field=models.ManyToManyField(blank=True, to='home.Actor'),
),
migrations.AlterField(
model_name='movie',
name='director',
field=models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, to='home.Director'),
),
migrations.AlterField(
model_name='movie',
name='genre',
field=models.CharField(blank=True, choices=[('action', 'ACTION'), ('comedy', 'COMEDY'), ('romance', 'ROMANCE'), ('thriller', 'THRILLER'), ('drama', 'DRAMA')], default='drama', max_length=30),
),
migrations.AlterField(
model_name='movie',
name='release_date',
field=models.DateField(blank=True),
),
]
|
{
"content_hash": "ab2ecb2f57efae734ab56dc3de0046d6",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 203,
"avg_line_length": 32.44117647058823,
"alnum_prop": 0.5738893925657298,
"repo_name": "huzaifafaruqui/Movies-Website",
"id": "8facb5f9eab42b53da86e36cda38c4e62676433d",
"size": "1173",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lmdb/home/migrations/0007_auto_20161101_0623.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "60303"
},
{
"name": "HTML",
"bytes": "34536"
},
{
"name": "JavaScript",
"bytes": "96495"
},
{
"name": "Python",
"bytes": "23213"
}
],
"symlink_target": ""
}
|
import variadic, go
############### Non Variadic ##############
nonvarResult = variadic.NonVariFunc(1, go.Slice_int([2,3,4]),5)
print("NonVariadic 1+[2+3+4]+5 = %d" % nonvarResult)
############### Variadic Over Int ##############
varResult = variadic.VariFunc(1,2,3,4,5)
print("Variadic 1+2+3+4+5 = %d" % varResult)
############### Variadic Over Struct ##############
varStructResult = variadic.VariStructFunc(variadic.NewIntStrUct(1), variadic.NewIntStrUct(2), variadic.NewIntStrUct(3))
print("Variadic Struct s(1)+s(2)+s(3) = %d" % varStructResult)
############### Variadic Over InterFace ##############
varInterFaceResult = variadic.VariInterFaceFunc(variadic.NewIntStrUct(1), variadic.NewIntStrUct(2), variadic.NewIntStrUct(3))
print("Variadic InterFace i(1)+i(2)+i(3) = %d" % varInterFaceResult)
############### Final ##############
if isinstance(varResult, int):
print("Type OK")
else:
print("Type Not OK")
|
{
"content_hash": "018ad05f59117dd3ce47ac9b6a83b40e",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 125,
"avg_line_length": 40,
"alnum_prop": 0.6206521739130435,
"repo_name": "go-python/gopy",
"id": "b53f80f4b31eced624ac0b25bfcdbd5b138d5045",
"size": "1084",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_examples/variadic/test.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "119"
},
{
"name": "Go",
"bytes": "261530"
},
{
"name": "Makefile",
"bytes": "1839"
},
{
"name": "Python",
"bytes": "34989"
}
],
"symlink_target": ""
}
|
"""
SoftLayer.tests.CLI.modules.vs.vs_create_tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:license: MIT, see LICENSE for more details.
"""
import sys
import tempfile
from unittest import mock
from SoftLayer.fixtures import SoftLayer_Product_Package
from SoftLayer import testing
class VirtCreateTests(testing.TestCase):
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['vs', 'create',
'--cpu=2',
'--domain=example.com',
'--hostname=host',
'--os=UBUNTU_LATEST',
'--memory=1',
'--network=100',
'--billing=hourly',
'--datacenter=dal05',
'--tag=dev',
'--tag=green'])
self.assert_no_fail(result)
self.assertIn('"guid": "1a2b3c-1701"', result.output)
self.assert_called_with('SoftLayer_Product_Order', 'placeOrder')
args = ({'datacenter': {'name': 'dal05'},
'domain': 'example.com',
'hourlyBillingFlag': True,
'localDiskFlag': True,
'maxMemory': 1024,
'hostname': 'host',
'startCpus': 2,
'operatingSystemReferenceCode': 'UBUNTU_LATEST',
'networkComponents': [{'maxSpeed': 100}],
'supplementalCreateObjectOptions': {'bootMode': None}},)
self.assert_called_with('SoftLayer_Virtual_Guest', 'generateOrderTemplate', args=args)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_vlan_subnet(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['vs', 'create',
'--cpu=2',
'--domain=example.com',
'--hostname=host',
'--os=UBUNTU_LATEST',
'--memory=1',
'--billing=hourly',
'--datacenter=dal05',
'--vlan-private=577940',
'--subnet-private=478700',
'--vlan-public=1639255',
'--subnet-public=297614',
'--tag=dev',
'--tag=green'])
self.assert_no_fail(result)
self.assertIn('"guid": "1a2b3c-1701"', result.output)
self.assert_called_with('SoftLayer_Product_Order', 'placeOrder')
args = ({
'startCpus': 2,
'maxMemory': 1024,
'hostname': 'host',
'domain': 'example.com',
'localDiskFlag': True,
'hourlyBillingFlag': True,
'supplementalCreateObjectOptions': {'bootMode': None},
'operatingSystemReferenceCode': 'UBUNTU_LATEST',
'datacenter': {'name': 'dal05'},
'primaryBackendNetworkComponent': {
'networkVlan': {
'id': 577940,
'primarySubnet': {'id': 478700}
}
},
'primaryNetworkComponent': {
'networkVlan': {
'id': 1639255,
'primarySubnet': {'id': 297614}
}
}
},)
self.assert_called_with('SoftLayer_Virtual_Guest', 'generateOrderTemplate', args=args)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_by_router(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['vs', 'create',
'--cpu=2',
'--domain=example.com',
'--hostname=host',
'--os=UBUNTU_LATEST',
'--memory=1',
'--billing=hourly',
'--datacenter=dal05',
'--router-private=577940',
'--router-public=1639255',
'--tag=dev',
'--tag=green'])
self.assert_no_fail(result)
self.assert_called_with('SoftLayer_Product_Order', 'placeOrder')
args = ({
'startCpus': 2,
'maxMemory': 1024,
'hostname': 'host',
'domain': 'example.com',
'localDiskFlag': True,
'hourlyBillingFlag': True,
'supplementalCreateObjectOptions': {'bootMode': None},
'operatingSystemReferenceCode': 'UBUNTU_LATEST',
'datacenter': {'name': 'dal05'},
'primaryBackendNetworkComponent': {
'router': {
'id': 577940
}
},
'primaryNetworkComponent': {
'router': {
'id': 1639255
}
}
},)
self.assert_called_with('SoftLayer_Virtual_Guest', 'generateOrderTemplate', args=args)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_with_wait_ready(self, confirm_mock):
mock = self.set_mock('SoftLayer_Virtual_Guest', 'getObject')
mock.return_value = {
"provisionDate": "2018-06-10T12:00:00-05:00",
"id": 100
}
confirm_mock.return_value = True
result = self.run_command(['vs', 'create',
'--cpu=2',
'--domain=example.com',
'--hostname=host',
'--os=UBUNTU_LATEST',
'--memory=1',
'--network=100',
'--billing=hourly',
'--datacenter=dal05',
'--wait=1'])
self.assert_no_fail(result)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_with_wait_not_ready(self, confirm_mock):
mock = self.set_mock('SoftLayer_Virtual_Guest', 'getObject')
mock.return_value = {
"ready": False,
"guid": "1a2b3c-1701",
"id": 100,
"created": "2018-06-10 12:00:00"
}
confirm_mock.return_value = True
result = self.run_command(['vs', 'create',
'--cpu=2',
'--domain=example.com',
'--hostname=host',
'--os=UBUNTU_LATEST',
'--memory=1',
'--network=100',
'--billing=hourly',
'--datacenter=dal05',
'--wait=1'])
self.assertEqual(result.exit_code, 1)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_with_integer_image_id(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['vs', 'create',
'--cpu=2',
'--domain=example.com',
'--hostname=host',
'--image=12345',
'--memory=1',
'--network=100',
'--billing=hourly',
'--datacenter=dal05'])
self.assert_no_fail(result)
self.assertIn('"guid": "1a2b3c-1701"', result.output)
self.assert_called_with('SoftLayer_Product_Order', 'placeOrder')
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_with_integer_image_guid(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['vs', 'create',
'--cpu=2',
'--domain=example.com',
'--hostname=host',
'--image=aaaa1111bbbb2222',
'--memory=1',
'--network=100',
'--billing=hourly',
'--datacenter=dal05'])
self.assert_no_fail(result)
self.assertIn('"guid": "1a2b3c-1701"', result.output)
args = ({
'startCpus': 2,
'maxMemory': 1024,
'hostname': 'host',
'domain': 'example.com',
'localDiskFlag': True,
'hourlyBillingFlag': True,
'supplementalCreateObjectOptions': {'bootMode': None},
'blockDeviceTemplateGroup': {'globalIdentifier': 'aaaa1111bbbb2222'},
'datacenter': {'name': 'dal05'},
'networkComponents': [{'maxSpeed': 100}]
},)
self.assert_called_with('SoftLayer_Virtual_Guest', 'generateOrderTemplate', args=args)
self.assert_called_with('SoftLayer_Product_Order', 'placeOrder')
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_with_flavor(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['vs', 'create',
'--domain=example.com',
'--hostname=host',
'--os=UBUNTU_LATEST',
'--network=100',
'--billing=hourly',
'--datacenter=dal05',
'--flavor=B1_1X2X25'])
self.assert_no_fail(result)
self.assertIn('"guid": "1a2b3c-1701"', result.output)
self.assert_called_with('SoftLayer_Product_Order', 'placeOrder')
args = ({'datacenter': {'name': 'dal05'},
'domain': 'example.com',
'hourlyBillingFlag': True,
'hostname': 'host',
'startCpus': None,
'maxMemory': None,
'localDiskFlag': None,
'supplementalCreateObjectOptions': {
'bootMode': None,
'flavorKeyName': 'B1_1X2X25'},
'operatingSystemReferenceCode': 'UBUNTU_LATEST',
'networkComponents': [{'maxSpeed': 100}]},)
self.assert_called_with('SoftLayer_Virtual_Guest', 'generateOrderTemplate', args=args)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_with_flavor_and_memory(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['vs', 'create',
'--domain=example.com',
'--hostname=host',
'--os=UBUNTU_LATEST',
'--network=100',
'--datacenter=TEST00',
'--flavor=BL_1X2X25',
'--memory=2048MB'])
self.assertEqual(result.exit_code, 2)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_with_dedicated_and_flavor(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['vs', 'create',
'--domain=example.com',
'--hostname=host',
'--os=UBUNTU_LATEST',
'--network=100',
'--datacenter=TEST00',
'--dedicated',
'--flavor=BL_1X2X25'])
self.assertEqual(result.exit_code, 2)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_with_hostid_and_flavor(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['vs', 'create',
'--domain=example.com',
'--hostname=host',
'--os=UBUNTU_LATEST',
'--network=100',
'--datacenter=dal05',
'--host-id=100',
'--flavor=BL_1X2X25'])
self.assertEqual(result.exit_code, 2)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_with_flavor_and_cpu(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['vs', 'create',
'--domain=example.com',
'--hostname=host',
'--os=UBUNTU_LATEST',
'--network=100',
'--datacenter=TEST00',
'--flavor=BL_1X2X25',
'--cpu=2'])
self.assertEqual(result.exit_code, 2)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_with_host_id(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['vs', 'create',
'--cpu=2',
'--domain=example.com',
'--hostname=host',
'--os=UBUNTU_LATEST',
'--memory=1',
'--network=100',
'--billing=hourly',
'--datacenter=dal05',
'--dedicated',
'--host-id=123'])
self.assert_no_fail(result)
self.assertIn('"guid": "1a2b3c-1701"', result.output)
# Argument testing Example
order_call = self.calls('SoftLayer_Product_Order', 'placeOrder')
order_args = getattr(order_call[0], 'args')[0]
self.assertEqual(123, order_args['hostId'])
self.assert_called_with('SoftLayer_Product_Order', 'placeOrder')
template_args = ({
'startCpus': 2,
'maxMemory': 1024,
'hostname': 'host',
'domain': 'example.com',
'localDiskFlag': True,
'hourlyBillingFlag': True,
'supplementalCreateObjectOptions': {'bootMode': None},
'dedicatedHost': {'id': 123},
'operatingSystemReferenceCode': 'UBUNTU_LATEST',
'datacenter': {'name': 'dal05'},
'networkComponents': [{'maxSpeed': 100}]
},)
self.assert_called_with('SoftLayer_Virtual_Guest', 'generateOrderTemplate', args=template_args)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_like(self, confirm_mock):
mock = self.set_mock('SoftLayer_Virtual_Guest', 'getObject')
mock.return_value = {
'hostname': 'vs-test-like',
'domain': 'test.sftlyr.ws',
'maxCpu': 2,
'maxMemory': 1024,
'datacenter': {'name': 'dal05'},
'networkComponents': [{'maxSpeed': 100}],
'dedicatedAccountHostOnlyFlag': False,
'privateNetworkOnlyFlag': False,
'billingItem': {'orderItem': {'preset': {}}},
'operatingSystem': {'softwareLicense': {
'softwareDescription': {'referenceCode': 'UBUNTU_LATEST'}
}},
'hourlyBillingFlag': False,
'localDiskFlag': True,
'userData': {}
}
confirm_mock.return_value = True
result = self.run_command(['vs', 'create',
'--like=123',
'--san',
'--billing=hourly'])
self.assert_no_fail(result)
self.assertIn('"guid": "1a2b3c-1701"', result.output)
self.assert_called_with('SoftLayer_Product_Order', 'placeOrder')
args = ({'datacenter': {'name': 'dal05'},
'domain': 'test.sftlyr.ws',
'hourlyBillingFlag': True,
'hostname': 'vs-test-like',
'startCpus': 2,
'maxMemory': 1024,
'localDiskFlag': False,
'operatingSystemReferenceCode': 'UBUNTU_LATEST',
'networkComponents': [{'maxSpeed': 100}],
'supplementalCreateObjectOptions': {'bootMode': None}},)
self.assert_called_with('SoftLayer_Virtual_Guest', 'generateOrderTemplate', args=args)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_like_tags(self, confirm_mock):
mock = self.set_mock('SoftLayer_Virtual_Guest', 'getObject')
mock.return_value = {
'hostname': 'vs-test-like',
'domain': 'test.sftlyr.ws',
'maxCpu': 2,
'maxMemory': 1024,
'datacenter': {'name': 'dal05'},
'networkComponents': [{'maxSpeed': 100}],
'dedicatedAccountHostOnlyFlag': False,
'privateNetworkOnlyFlag': False,
'billingItem': {'orderItem': {'preset': {}}},
'operatingSystem': {'softwareLicense': {
'softwareDescription': {'referenceCode': 'UBUNTU_LATEST'}
}},
'hourlyBillingFlag': False,
'localDiskFlag': True,
'userData': {},
'tagReferences': [{'tag': {'name': 'production'}}],
}
confirm_mock.return_value = True
result = self.run_command(['vs', 'create',
'--like=123',
'--san',
'--billing=hourly'])
self.assert_no_fail(result)
self.assertIn('"guid": "1a2b3c-1701"', result.output)
self.assert_called_with('SoftLayer_Product_Order', 'placeOrder')
_args = ('production',)
self.assert_called_with('SoftLayer_Virtual_Guest', 'setTags', identifier=1234567, args=_args)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_like_image(self, confirm_mock):
mock = self.set_mock('SoftLayer_Virtual_Guest', 'getObject')
mock.return_value = {
'hostname': 'vs-test-like',
'domain': 'test.sftlyr.ws',
'maxCpu': 2,
'maxMemory': 1024,
'datacenter': {'name': 'dal05'},
'networkComponents': [{'maxSpeed': 100}],
'dedicatedAccountHostOnlyFlag': False,
'privateNetworkOnlyFlag': False,
'billingItem': {'orderItem': {'preset': {}}},
'blockDeviceTemplateGroup': {'globalIdentifier': 'aaa1xxx1122233'},
'hourlyBillingFlag': False,
'localDiskFlag': True,
'userData': {},
}
confirm_mock.return_value = True
result = self.run_command(['vs', 'create',
'--like=123',
'--san',
'--billing=hourly'])
self.assert_no_fail(result)
self.assertIn('"guid": "1a2b3c-1701"', result.output)
self.assert_called_with('SoftLayer_Product_Order', 'placeOrder')
args = ({'datacenter': {'name': 'dal05'},
'domain': 'test.sftlyr.ws',
'hourlyBillingFlag': True,
'hostname': 'vs-test-like',
'startCpus': 2,
'maxMemory': 1024,
'localDiskFlag': False,
'blockDeviceTemplateGroup': {'globalIdentifier': 'aaa1xxx1122233'},
'networkComponents': [{'maxSpeed': 100}],
'supplementalCreateObjectOptions': {'bootMode': None}},)
self.assert_called_with('SoftLayer_Virtual_Guest', 'generateOrderTemplate', args=args)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_like_flavor(self, confirm_mock):
mock = self.set_mock('SoftLayer_Virtual_Guest', 'getObject')
mock.return_value = {
'hostname': 'vs-test-like',
'domain': 'test.sftlyr.ws',
'maxCpu': 2,
'maxMemory': 1024,
'datacenter': {'name': 'dal05'},
'networkComponents': [{'maxSpeed': 100}],
'dedicatedAccountHostOnlyFlag': False,
'privateNetworkOnlyFlag': False,
'billingItem': {'orderItem': {'preset': {'keyName': 'B1_1X2X25'}}},
'operatingSystem': {'softwareLicense': {
'softwareDescription': {'referenceCode': 'UBUNTU_LATEST'}
}},
'hourlyBillingFlag': True,
'localDiskFlag': False,
'userData': {}
}
confirm_mock.return_value = True
result = self.run_command(['vs', 'create', '--like=123'])
self.assert_no_fail(result)
self.assertIn('"guid": "1a2b3c-1701"', result.output)
self.assert_called_with('SoftLayer_Product_Order', 'placeOrder')
args = ({'datacenter': {'name': 'dal05'},
'domain': 'test.sftlyr.ws',
'hourlyBillingFlag': True,
'hostname': 'vs-test-like',
'startCpus': None,
'maxMemory': None,
'localDiskFlag': None,
'supplementalCreateObjectOptions': {
'bootMode': None,
'flavorKeyName': 'B1_1X2X25'},
'operatingSystemReferenceCode': 'UBUNTU_LATEST',
'networkComponents': [{'maxSpeed': 100}]},)
self.assert_called_with('SoftLayer_Virtual_Guest', 'generateOrderTemplate', args=args)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_like_transient(self, confirm_mock):
mock = self.set_mock('SoftLayer_Virtual_Guest', 'getObject')
mock.return_value = {
'hostname': 'vs-test-like',
'domain': 'test.sftlyr.ws',
'datacenter': {'name': 'dal05'},
'networkComponents': [{'maxSpeed': 100}],
'dedicatedAccountHostOnlyFlag': False,
'privateNetworkOnlyFlag': False,
'billingItem': {'orderItem': {'preset': {'keyName': 'B1_1X2X25'}}},
'operatingSystem': {'softwareLicense': {
'softwareDescription': {'referenceCode': 'UBUNTU_LATEST'}
}},
'hourlyBillingFlag': True,
'localDiskFlag': False,
'transientGuestFlag': True,
'userData': {}
}
confirm_mock.return_value = True
result = self.run_command(['vs', 'create', '--like=123'])
self.assert_no_fail(result)
self.assertIn('"guid": "1a2b3c-1701"', result.output)
self.assert_called_with('SoftLayer_Product_Order', 'placeOrder')
args = ({'datacenter': {'name': 'dal05'},
'domain': 'test.sftlyr.ws',
'hourlyBillingFlag': True,
'hostname': 'vs-test-like',
'startCpus': None,
'maxMemory': None,
'localDiskFlag': None,
'transientGuestFlag': True,
'supplementalCreateObjectOptions': {
'bootMode': None,
'flavorKeyName': 'B1_1X2X25'},
'operatingSystemReferenceCode': 'UBUNTU_LATEST',
'networkComponents': [{'maxSpeed': 100}]},)
self.assert_called_with('SoftLayer_Virtual_Guest', 'generateOrderTemplate', args=args)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_vs_test(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['vs', 'create', '--test', '--hostname', 'TEST',
'--domain', 'TESTING', '--cpu', '1',
'--memory', '2048MB', '--datacenter',
'TEST00', '--os', 'UBUNTU_LATEST'])
self.assertEqual(result.exit_code, 0)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_vs_flavor_test(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['vs', 'create', '--test', '--hostname', 'TEST',
'--domain', 'TESTING', '--flavor', 'B1_2X8X25',
'--datacenter', 'TEST00', '--os', 'UBUNTU_LATEST'])
self.assert_no_fail(result)
self.assertEqual(result.exit_code, 0)
def test_create_vs_bad_memory(self):
result = self.run_command(['vs', 'create', '--hostname', 'TEST',
'--domain', 'TESTING', '--cpu', '1',
'--memory', '2034MB', '--flavor',
'B1_2X8X25', '--datacenter', 'TEST00'])
self.assertEqual(2, result.exit_code)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_vs_transient(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['vs', 'create', '--hostname', 'TEST',
'--domain', 'TESTING', '--flavor',
'B1_2X8X25', '--datacenter', 'TEST00',
'--transient', '--os', 'UBUNTU_LATEST'])
self.assert_no_fail(result)
self.assertEqual(0, result.exit_code)
def test_create_vs_bad_transient_monthly(self):
result = self.run_command(['vs', 'create', '--hostname', 'TEST',
'--domain', 'TESTING', '--flavor',
'B1_2X8X25', '--datacenter', 'TEST00',
'--transient', '--billing', 'monthly',
'--os', 'UBUNTU_LATEST'])
self.assertEqual(2, result.exit_code)
def test_create_vs_bad_transient_dedicated(self):
result = self.run_command(['vs', 'create', '--hostname', 'TEST',
'--domain', 'TESTING', '--flavor',
'B1_2X8X25', '--datacenter', 'TEST00',
'--transient', '--dedicated',
'--os', 'UBUNTU_LATEST'])
self.assertEqual(2, result.exit_code)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_with_ipv6(self, confirm_mock):
amock = self.set_mock('SoftLayer_Product_Package', 'getItems')
amock.return_value = SoftLayer_Product_Package.getItems_1_IPV6_ADDRESS
result = self.run_command(['vs', 'create', '--test', '--hostname', 'TEST',
'--domain', 'TESTING', '--flavor', 'B1_2X8X25',
'--datacenter', 'TEST00', '--os', 'UBUNTU_LATEST', '--ipv6'])
self.assert_no_fail(result)
self.assertEqual(result.exit_code, 0)
self.assert_called_with('SoftLayer_Product_Order', 'verifyOrder')
args = ({
'startCpus': None,
'maxMemory': None,
'hostname': 'TEST',
'domain': 'TESTING',
'localDiskFlag': None,
'hourlyBillingFlag': True,
'supplementalCreateObjectOptions': {
'bootMode': None,
'flavorKeyName': 'B1_2X8X25'
},
'operatingSystemReferenceCode': 'UBUNTU_LATEST',
'datacenter': {
'name': 'TEST00'
}
},
)
self.assert_called_with('SoftLayer_Virtual_Guest', 'generateOrderTemplate', args=args)
self.assertEqual([], self.calls('SoftLayer_Virtual_Guest', 'setTags'))
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_with_ipv6_no_test(self, confirm_mock):
confirm_mock.return_value = True
amock = self.set_mock('SoftLayer_Product_Package', 'getItems')
amock.return_value = SoftLayer_Product_Package.getItems_1_IPV6_ADDRESS
result = self.run_command(['vs', 'create', '--hostname', 'TEST',
'--domain', 'TESTING', '--flavor', 'B1_2X8X25',
'--datacenter', 'TEST00', '--os', 'UBUNTU_LATEST', '--ipv6'])
self.assert_no_fail(result)
self.assertEqual(result.exit_code, 0)
self.assert_called_with('SoftLayer_Product_Order', 'placeOrder')
self.assertEqual([], self.calls('SoftLayer_Virtual_Guest', 'setTags'))
@mock.patch('SoftLayer.CLI.formatting.no_going_back')
def test_create_with_ipv6_no_prices(self, confirm_mock):
"""Test makes sure create fails if ipv6 price cannot be found.
Since its hard to test if the price ids gets added to placeOrder call,
this test juse makes sure that code block isn't being skipped
"""
confirm_mock.return_value = True
amock = self.set_mock('SoftLayer_Product_Package', 'getItems')
amock.return_value = SoftLayer_Product_Package.getItemsVS
result = self.run_command(['vs', 'create', '--test', '--hostname', 'TEST',
'--domain', 'TESTING', '--flavor', 'B1_2X8X25',
'--datacenter', 'TEST00', '--os', 'UBUNTU_LATEST',
'--ipv6'])
self.assertEqual(result.exit_code, 1)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_vs_no_confirm(self, confirm_mock):
confirm_mock.return_value = False
result = self.run_command(['vs', 'create', '--hostname', 'TEST',
'--domain', 'TESTING', '--flavor', 'B1_2X8X25',
'--datacenter', 'TEST00', '--os', 'UBUNTU_LATEST'])
self.assertEqual(result.exit_code, 2)
def test_create_vs_export(self):
        if sys.platform.startswith("win"):
self.skipTest("Test doesn't work in Windows")
with tempfile.NamedTemporaryFile() as config_file:
result = self.run_command(['vs', 'create', '--hostname', 'TEST', '--export', config_file.name,
'--domain', 'TESTING', '--flavor', 'B1_2X8X25',
'--datacenter', 'TEST00', '--os', 'UBUNTU_LATEST'])
self.assert_no_fail(result)
self.assertIn('Successfully exported options to a template file.', result.output)
contents = config_file.read().decode("utf-8")
self.assertIn('hostname=TEST', contents)
self.assertIn('flavor=B1_2X8X25', contents)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create_with_userdata(self, confirm_mock):
result = self.run_command(['vs', 'create', '--hostname', 'TEST', '--domain', 'TESTING',
'--flavor', 'B1_2X8X25', '--datacenter', 'TEST00', '--os', 'UBUNTU_LATEST',
'--userdata', 'This is my user data ok'])
self.assert_no_fail(result)
expected_guest = [
{
'domain': 'test.local',
'hostname': 'test',
'userData': [{'value': 'This is my user data ok'}]
}
]
# Returns a list of API calls that hit SL_Product_Order::placeOrder
api_call = self.calls('SoftLayer_Product_Order', 'placeOrder')
        # The placeOrder args are huge and mostly irrelevant here, so only
        # the virtualGuests entry is checked.
self.assertEqual(api_call[0].args[0]['virtualGuests'], expected_guest)
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_check_for_closing(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['vs', 'create', '--hostname', 'TEST', '--domain', 'TESTING',
'--flavor', 'B1_2X8X25', '--datacenter', 'ams01', '--os', 'UBUNTU_LATEST'])
self.assert_no_fail(result)
self.assertIn('Warning: Closed soon: ams01', result.output)
result = self.run_command(['vs', 'create', '--hostname', 'TEST', '--domain', 'TESTING',
'--flavor', 'B1_2X8X25', '--datacenter', 'mex01', '--os', 'UBUNTU_LATEST'])
self.assert_no_fail(result)
self.assertNotIn('Warning: Closed soon: mex01', result.output)
|
{
"content_hash": "0f8e1212b01bbbe586e2262720514919",
"timestamp": "",
"source": "github",
"line_count": 736,
"max_line_length": 110,
"avg_line_length": 44.650815217391305,
"alnum_prop": 0.4783495116088002,
"repo_name": "allmightyspiff/softlayer-python",
"id": "468bc5339299357e5ced8ac57c498f0cf04b6602",
"size": "32863",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/CLI/modules/vs/vs_create_tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "DIGITAL Command Language",
"bytes": "854"
},
{
"name": "Makefile",
"bytes": "7458"
},
{
"name": "Python",
"bytes": "2657752"
}
],
"symlink_target": ""
}
|
import sys
from neutronclient.common import exceptions
from neutronclient.neutron.v2_0.vpn import ipsec_site_connection
from neutronclient.tests.unit import test_cli20
class CLITestV20IPsecSiteConnectionJSON(test_cli20.CLITestV20Base):
# TODO(pcm): Remove, once peer-cidr is deprecated completely
def test_create_ipsec_site_connection_all_params_using_peer_cidrs(self):
# ipsecsite-connection-create all params using peer CIDRs.
resource = 'ipsec_site_connection'
cmd = ipsec_site_connection.CreateIPsecSiteConnection(
test_cli20.MyApp(sys.stdout), None
)
tenant_id = 'mytenant_id'
name = 'connection1'
my_id = 'my_id'
peer_address = '192.168.2.10'
peer_id = '192.168.2.10'
psk = 'abcd'
mtu = '1500'
initiator = 'bi-directional'
vpnservice_id = 'vpnservice_id'
ikepolicy_id = 'ikepolicy_id'
ipsecpolicy_id = 'ipsecpolicy_id'
peer_cidrs = ['192.168.3.0/24', '192.168.2.0/24']
admin_state = True
description = 'my-vpn-connection'
dpd = 'action=restart,interval=30,timeout=120'
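        # --dpd takes comma-separated key=value pairs; the CLI should parse
        # them into the nested dict asserted via extra_body below.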
args = ['--tenant-id', tenant_id,
'--peer-address', peer_address, '--peer-id', peer_id,
'--psk', psk, '--initiator', initiator,
'--vpnservice-id', vpnservice_id,
'--ikepolicy-id', ikepolicy_id, '--name', name,
'--ipsecpolicy-id', ipsecpolicy_id, '--mtu', mtu,
'--description', description,
'--peer-cidr', '192.168.3.0/24',
'--peer-cidr', '192.168.2.0/24',
'--dpd', dpd]
position_names = ['name', 'tenant_id', 'admin_state_up',
'peer_address', 'peer_id', 'peer_cidrs',
'psk', 'mtu', 'initiator', 'description',
'vpnservice_id', 'ikepolicy_id',
'ipsecpolicy_id']
position_values = [name, tenant_id, admin_state, peer_address,
peer_id, peer_cidrs, psk, mtu,
initiator, description,
vpnservice_id, ikepolicy_id, ipsecpolicy_id]
extra_body = {
'dpd': {
'action': 'restart',
'interval': 30,
'timeout': 120,
},
}
self._test_create_resource(resource, cmd, name, my_id, args,
position_names, position_values,
extra_body=extra_body)
def test_create_ipsec_site_conn_all_params(self):
# ipsecsite-connection-create all params using endpoint groups.
resource = 'ipsec_site_connection'
cmd = ipsec_site_connection.CreateIPsecSiteConnection(
test_cli20.MyApp(sys.stdout), None
)
tenant_id = 'mytenant_id'
name = 'connection1'
my_id = 'my_id'
peer_address = '192.168.2.10'
peer_id = '192.168.2.10'
psk = 'abcd'
mtu = '1500'
initiator = 'bi-directional'
vpnservice_id = 'vpnservice_id'
ikepolicy_id = 'ikepolicy_id'
ipsecpolicy_id = 'ipsecpolicy_id'
local_ep_group = 'local-epg'
peer_ep_group = 'peer-epg'
admin_state = True
description = 'my-vpn-connection'
dpd = 'action=restart,interval=30,timeout=120'
args = ['--tenant-id', tenant_id,
'--peer-address', peer_address, '--peer-id', peer_id,
'--psk', psk, '--initiator', initiator,
'--vpnservice-id', vpnservice_id,
'--ikepolicy-id', ikepolicy_id, '--name', name,
'--ipsecpolicy-id', ipsecpolicy_id, '--mtu', mtu,
'--description', description,
'--local-ep-group', local_ep_group,
'--peer-ep-group', peer_ep_group,
'--dpd', dpd]
position_names = ['name', 'tenant_id', 'admin_state_up',
'peer_address', 'peer_id', 'psk', 'mtu',
'local_ep_group_id', 'peer_ep_group_id',
'initiator', 'description',
'vpnservice_id', 'ikepolicy_id',
'ipsecpolicy_id']
position_values = [name, tenant_id, admin_state, peer_address,
peer_id, psk, mtu, local_ep_group,
peer_ep_group, initiator, description,
vpnservice_id, ikepolicy_id, ipsecpolicy_id]
extra_body = {
'dpd': {
'action': 'restart',
'interval': 30,
'timeout': 120,
},
}
self._test_create_resource(resource, cmd, name, my_id, args,
position_names, position_values,
extra_body=extra_body)
def test_create_ipsec_site_connection_with_limited_params(self):
# ipsecsite-connection-create with limited params.
resource = 'ipsec_site_connection'
cmd = ipsec_site_connection.CreateIPsecSiteConnection(
test_cli20.MyApp(sys.stdout), None
)
tenant_id = 'mytenant_id'
my_id = 'my_id'
peer_address = '192.168.2.10'
peer_id = '192.168.2.10'
psk = 'abcd'
mtu = '1500'
initiator = 'bi-directional'
vpnservice_id = 'vpnservice_id'
ikepolicy_id = 'ikepolicy_id'
ipsecpolicy_id = 'ipsecpolicy_id'
local_ep_group = 'local-epg'
peer_ep_group = 'peer-epg'
admin_state = True
args = ['--tenant-id', tenant_id,
'--peer-address', peer_address,
'--peer-id', peer_id,
'--psk', psk,
'--vpnservice-id', vpnservice_id,
'--ikepolicy-id', ikepolicy_id,
'--ipsecpolicy-id', ipsecpolicy_id,
'--local-ep-group', local_ep_group,
'--peer-ep-group', peer_ep_group]
position_names = ['tenant_id', 'admin_state_up',
'peer_address', 'peer_id',
'local_ep_group_id', 'peer_ep_group_id',
'psk', 'mtu', 'initiator',
'vpnservice_id', 'ikepolicy_id',
'ipsecpolicy_id']
position_values = [tenant_id, admin_state, peer_address, peer_id,
local_ep_group, peer_ep_group, psk, mtu, initiator,
vpnservice_id, ikepolicy_id, ipsecpolicy_id]
self._test_create_resource(resource, cmd, None, my_id, args,
position_names, position_values)
def _test_create_failure(self, additional_args=None, expected_exc=None):
# Helper to test failure of IPSec site-to-site creation failure.
resource = 'ipsec_site_connection'
cmd = ipsec_site_connection.CreateIPsecSiteConnection(
test_cli20.MyApp(sys.stdout), None
)
tenant_id = 'mytenant_id'
my_id = 'my_id'
peer_address = '192.168.2.10'
peer_id = '192.168.2.10'
psk = 'abcd'
mtu = '1500'
initiator = 'bi-directional'
vpnservice_id = 'vpnservice_id'
ikepolicy_id = 'ikepolicy_id'
ipsecpolicy_id = 'ipsecpolicy_id'
admin_state = True
args = ['--tenant-id', tenant_id,
'--peer-address', peer_address,
'--peer-id', peer_id,
'--psk', psk,
'--vpnservice-id', vpnservice_id,
'--ikepolicy-id', ikepolicy_id,
'--ipsecpolicy-id', ipsecpolicy_id]
if additional_args is not None:
args += additional_args
position_names = ['tenant_id', 'admin_state_up', 'peer_address',
'peer_id', 'psk', 'mtu', 'initiator',
'local_ep_group_id', 'peer_ep_group_id',
'vpnservice_id', 'ikepolicy_id', 'ipsecpolicy_id']
position_values = [tenant_id, admin_state, peer_address, peer_id, psk,
mtu, initiator, None, None, vpnservice_id,
ikepolicy_id, ipsecpolicy_id]
if not expected_exc:
expected_exc = exceptions.CommandError
self.assertRaises(expected_exc,
self._test_create_resource,
resource, cmd, None, my_id, args,
position_names, position_values)
def test_fail_create_with_invalid_mtu(self):
        # ipsecsite-connection-create with an invalid MTU value.
bad_mtu = ['--mtu', '67']
self._test_create_failure(bad_mtu)
def test_fail_create_with_invalid_dpd_keys(self):
bad_dpd_key = ['--dpd', 'act=restart,interval=30,time=120']
self._test_create_failure(bad_dpd_key, SystemExit)
def test_fail_create_with_invalid_dpd_values(self):
bad_dpd_values = ['--dpd', 'action=hold,interval=30,timeout=-1']
self._test_create_failure(bad_dpd_values)
def test_fail_create_missing_endpoint_groups_or_cidr(self):
# Must provide either endpoint groups or peer cidrs.
self._test_create_failure()
def test_fail_create_missing_peer_endpoint_group(self):
        # Fails if we don't have both endpoint groups - missing peer.
self._test_create_failure(['--local-ep-group', 'local-epg'])
def test_fail_create_missing_local_endpoint_group(self):
        # Fails if we don't have both endpoint groups - missing local.
self._test_create_failure(['--peer-ep-group', 'peer-epg'])
def test_fail_create_when_both_endpoints_and_peer_cidr(self):
# Cannot intermix endpoint groups and peer CIDRs for create.
additional_args = ['--local-ep-group', 'local-epg',
'--peer-ep-group', 'peer-epg',
'--peer-cidr', '10.2.0.0/24']
self._test_create_failure(additional_args)
def test_list_ipsec_site_connection(self):
# ipsecsite-connection-list.
resources = "ipsec_site_connections"
cmd = ipsec_site_connection.ListIPsecSiteConnection(
test_cli20.MyApp(sys.stdout), None
)
self._test_list_resources(resources, cmd, True)
def test_list_ipsec_site_connection_pagination(self):
# ipsecsite-connection-list.
resources = "ipsec_site_connections"
cmd = ipsec_site_connection.ListIPsecSiteConnection(
test_cli20.MyApp(sys.stdout), None
)
self._test_list_resources_with_pagination(resources, cmd)
def test_list_ipsec_site_connection_sort(self):
# ipsecsite-connection-list.
# --sort-key name --sort-key id --sort-key asc --sort-key desc
resources = "ipsec_site_connections"
cmd = ipsec_site_connection.ListIPsecSiteConnection(
test_cli20.MyApp(sys.stdout), None
)
self._test_list_resources(resources, cmd,
sort_key=["name", "id"],
sort_dir=["asc", "desc"])
def test_list_ipsec_site_connection_limit(self):
# ipsecsite-connection-list -P.
resources = "ipsec_site_connections"
cmd = ipsec_site_connection.ListIPsecSiteConnection(
test_cli20.MyApp(sys.stdout), None
)
self._test_list_resources(resources, cmd, page_size=1000)
def test_delete_ipsec_site_connection(self):
# ipsecsite-connection-delete my-id.
resource = 'ipsec_site_connection'
cmd = ipsec_site_connection.DeleteIPsecSiteConnection(
test_cli20.MyApp(sys.stdout), None
)
my_id = 'my-id'
args = [my_id]
self._test_delete_resource(resource, cmd, my_id, args)
def test_update_ipsec_site_connection(self):
        # ipsecsite-connection-update myid --name myname --tags a b.
resource = 'ipsec_site_connection'
cmd = ipsec_site_connection.UpdateIPsecSiteConnection(
test_cli20.MyApp(sys.stdout), None
)
self._test_update_resource(resource, cmd, 'myid',
['myid', '--name', 'Branch-new',
'--tags', 'a', 'b'],
{'name': 'Branch-new',
'tags': ['a', 'b'], })
def test_show_ipsec_site_connection_id(self):
        # ipsecsite-connection-show test_id.
resource = 'ipsec_site_connection'
cmd = ipsec_site_connection.ShowIPsecSiteConnection(
test_cli20.MyApp(sys.stdout), None
)
args = ['--fields', 'id', self.test_id]
self._test_show_resource(resource, cmd, self.test_id, args, ['id'])
def test_show_ipsec_site_connection_id_name(self):
# ipsecsite-connection-show."""
resource = 'ipsec_site_connection'
cmd = ipsec_site_connection.ShowIPsecSiteConnection(
test_cli20.MyApp(sys.stdout), None
)
args = ['--fields', 'id', '--fields', 'name', self.test_id]
self._test_show_resource(resource, cmd, self.test_id,
args, ['id', 'name'])
|
{
"content_hash": "30745a27b38b68992fc42315b6ad3667",
"timestamp": "",
"source": "github",
"line_count": 315,
"max_line_length": 78,
"avg_line_length": 42.27936507936508,
"alnum_prop": 0.5363417930620213,
"repo_name": "rackerlabs/rackspace-python-neutronclient",
"id": "8ead88f2a9f0cbc56ec433f77cb8b197b66e2741",
"size": "13987",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "neutronclient/tests/unit/vpn/test_cli20_ipsec_site_connection.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1041700"
},
{
"name": "Shell",
"bytes": "9346"
}
],
"symlink_target": ""
}
|
from app import db
from hashlib import md5
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
nickname = db.Column(db.String(64), index=True, unique=True)
email = db.Column(db.String(120), index=True, unique=True)
posts = db.relationship('Post', backref='author', lazy='dynamic')
about_me = db.Column(db.String(140))
last_seen = db.Column(db.DateTime)
@property
def is_authenticated(self):
return True
@property
def is_active(self):
return True
@property
def is_anonymous(self):
return False
def get_id(self):
try:
return unicode(self.id) # python 2
except NameError:
return str(self.id) # python 3
def avatar(self, size):
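        # e.g. http://www.gravatar.com/avatar/<md5-of-email>?d=mm&s=128
        # ('d=mm' falls back to the "mystery man" placeholder image).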
return 'http://www.gravatar.com/avatar/%s?d=mm&s=%d' % (md5(self.email.encode('utf-8')).hexdigest(), size)
@staticmethod
def make_unique_nickname(nickname):
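        # Appends an increasing suffix until the name is free: if 'susan'
        # and 'susan1' are taken, this returns 'susan2'.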
if User.query.filter_by(nickname=nickname).first() is None:
return nickname
version = 1
while True:
new_nickname = nickname + str(version)
if User.query.filter_by(nickname=new_nickname).first() is None:
break
version += 1
return new_nickname
def __repr__(self):
return '<User %r>' % (self.nickname)
class Post(db.Model):
id = db.Column(db.Integer, primary_key=True)
body = db.Column(db.String(140))
timestamp = db.Column(db.DateTime)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
def __repr__(self):
return '<Post %r>' % (self.body)
|
{
"content_hash": "7cec855b2127743a064a74c14a30bc5f",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 114,
"avg_line_length": 28.54385964912281,
"alnum_prop": 0.6004917025199754,
"repo_name": "jtara1/SimpleFlaskWebsite",
"id": "d66a01bda4b103d7e0fe76255a3a432efd2f2981",
"size": "1627",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "4014"
},
{
"name": "Python",
"bytes": "11871"
}
],
"symlink_target": ""
}
|
import numpy as np
import sys
import os
import random
import time
import json
import cPickle
import math
from scipy import io
import matplotlib
from matplotlib import pylab
import matplotlib.pyplot as plt
import matplotlib.patches as patches
from collections import Counter, defaultdict
import nltk
from nltk.stem import WordNetLemmatizer
import tensorflow as tf
from slim.data import dataset_data_provider
from slim.data import dataset
from slim.data import tfexample_decoder
from slim.datasets import datasets
from slim import queues
with open('relationships.json') as f:
data_rel = json.load(f)
white_list = ['hold', 'rid', 'carry', 'eat', 'watch',
'look', 'fly', 'swing', 'pull', 'hit', 'touch', 'throw', 'cast',
'cut', 'read', 'catch', 'talk', 'drink', 'look',
'swim', 'push', 'feed', 'graze', 'reflect', 'kick',
'float', 'perch', 'brush', 'reach', 'pet', 'talk', 'serve',
'sew', 'sniff', 'chase', 'lick', 'swing', 'hug',
'lift', 'splash', 'spray']
def preprocess_relation(x):
    x = [WordNetLemmatizer().lemmatize(token, 'v')
for token in x.split(' ')]
for xx in x:
if xx in white_list:
return xx
return ''
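# e.g. preprocess_relation('holds') -> 'hold' (lemmatized verb on the white
# list), while preprocess_relation('standing on') -> '' since neither lemma
# is white-listed.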
nltk.data.path = ['/home/akolesnikov/nltk_data']
result = {}
for entry in data_rel:
cur = entry['image_id'], []
for r in entry['relationships']:
predicate = preprocess_relation(r['predicate'].strip().lower())
if predicate:
try:
cur[1].append({'predicate': predicate,
'object_bbox': (r['object']['y'], r['object']['x'],
r['object']['h'], r['object']['w']),
'subject_bbox': (r['subject']['y'], r['subject']['x'],
r['subject']['h'], r['subject']['w']),
'object_name': str(r['object']['name']),
'subject_name': str(r['subject']['name'])})
            except Exception:
                # Skip malformed relationship entries (missing bbox fields).
                continue
if cur[1]:
result[cur[0]] = cur[1]
predicate_list = [e['predicate'] for d in result.values() for e in d]
# Inspect predicate frequencies, most common first (notebook-style output).
print(sorted(Counter(predicate_list).items(), key=lambda x: -x[1]))
class ImageReader(object):
"""Helper class that provides TensorFlow image coding utilities."""
def __init__(self):
# Initializes function that decodes RGB JPEG data.
self._decode_jpeg_data = tf.placeholder(dtype=tf.string)
self._decode_jpeg = tf.image.decode_jpeg(self._decode_jpeg_data, channels=3)
def read_image_dims(self, sess, image_data):
image = self.decode_jpeg(sess, image_data)
return image.shape[0], image.shape[1]
def read_image_and_dims(self, sess, image_data):
image = self.decode_jpeg(sess, image_data)
return image, image.shape[0], image.shape[1]
def decode_jpeg(self, sess, image_data):
image = sess.run(self._decode_jpeg,
feed_dict={self._decode_jpeg_data: image_data})
assert len(image.shape) == 3
assert image.shape[2] == 3
return image
def int64_feature(values):
if not isinstance(values, (tuple, list)):
values = [values]
return tf.train.Feature(int64_list=tf.train.Int64List(value=values))
def bytes_feature(values):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[values]))
def float_feature(values):
if not isinstance(values, (tuple, list)):
values = [values]
return tf.train.Feature(float_list=tf.train.FloatList(value=values))
chars = range(ord("a"), ord("z") + 1) + [ord(" "), ord("X")]
ord_map = defaultdict(int)
ord_map.update(dict(zip(chars, range(1, len(chars) + 1))))
def ord_caption(cap):
if len(cap) < 24:
cap += "X" * (24 - len(cap))
cap = cap[:24]
return str(cap), np.array([ord_map[ord(x)] for x in cap]).astype("int64")
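# Worked example: ord_caption('hold') pads to 24 chars with 'X' and maps each
# char through ord_map ('a'..'z' -> 1..26, ' ' -> 27, 'X' -> 28), returning
# ('holdXXXXXXXXXXXXXXXXXXXX', [8, 15, 12, 4, 28, 28, ...]).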
def get_dataset_filename(dataset_dir, split_name, shard_id, num_shards):
output_filename = 'VG-%s_%05d-of-%05d.tfrecord' % (split_name, shard_id,
num_shards)
return os.path.join(dataset_dir, output_filename)
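# e.g. get_dataset_filename('/data', 'train', 3, 100)
#   -> '/data/VG-train_00003-of-00100.tfrecord'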
def image_to_tfexample(image_data, shape, num_relations,
relations_raw, relations, relations_label, bboxes,
objects, subjects):
return tf.train.Example(features=tf.train.Features(feature={
'image/encoded': bytes_feature(image_data),
'image/shape': int64_feature(shape),
'image/relations/num': int64_feature(num_relations),
'image/relations/predicates_raw': bytes_feature(relations_raw),
'image/relations/predicates': int64_feature(relations),
'image/relations/predicates_label': int64_feature(relations_label),
'image/relations/bboxes': int64_feature(bboxes),
'image/relations/objects': int64_feature(objects),
'image/relations/subjects': int64_feature(subjects),
}))
d = '/home/akolesnikov/VG/'
def get_image_list(split):
train_list = set([int(l.strip()[:-4]) for l in
open(os.path.join(d, 'image_lists',
'image_%s_list' % split)).readlines()])
train_files = [os.path.join(d, 'images', str(i) + '.jpg') for i in result.keys()
if i in train_list]
return train_files
train_files = get_image_list('train')
val_files = get_image_list('val')
test_files = get_image_list('test')
print(len(train_files), len(val_files), len(test_files))
dataset_dir = '/home/akolesnikov/VG/binary/'
for split, image_filenames, num_shards in [('test', test_files, 10),
('val', val_files, 10),
('train', train_files, 100)]:
dataset_pickle = []
num_images = len(image_filenames)
num_per_shard = int(math.ceil(num_images / float(num_shards)))
with tf.Graph().as_default():
image_reader = ImageReader()
for shard_id in xrange(num_shards):
with tf.Session() as sess:
output_filename = get_dataset_filename(dataset_dir, split, shard_id,
num_shards)
print("Processing %s" % output_filename)
print(output_filename)
with tf.python_io.TFRecordWriter(output_filename) as tfrecord_writer:
start_ndx = shard_id * num_per_shard
end_ndx = min((shard_id+1) * num_per_shard, num_images)
for i in xrange(start_ndx, end_ndx):
filename = image_filenames[i]
# load the image
image_data = tf.gfile.FastGFile(filename, 'r').read()
image, height, width = image_reader.read_image_and_dims(sess,
image_data)
image_id = int(os.path.basename(filename)[:-4])
bboxes = [val
for rel in result[image_id]
for val in rel['object_bbox'] + rel['subject_bbox']]
caps_raw = [ord_caption(x['predicate'])[0]
for x in result[image_id]]
caps = [ord_caption(x['predicate'])[1]
for x in result[image_id]]
caps_index = [white_list.index(x['predicate'])
for x in result[image_id]]
objects = [ord_caption(x['object_name'])[1]
for x in result[image_id]]
subjects = [ord_caption(x['subject_name'])[1]
for x in result[image_id]]
num_relations = len(result[image_id])
caps_raw = str(''.join(caps_raw))
caps = list(np.hstack(caps))
objects = list(np.hstack(objects))
subjects = list(np.hstack(subjects))
example = image_to_tfexample(image_data, [height, width],
num_relations, caps_raw, caps, caps_index,
bboxes,
objects, subjects)
dataset_pickle.append([os.path.basename(filename),
caps_index,
bboxes])
# write to stream
tfrecord_writer.write(example.SerializeToString())
cPickle.dump(dataset_pickle, open('/home/akolesnikov/'
'VG/pickle/%s.pickle' % split, 'w'),
protocol=2)
|
{
"content_hash": "08ac198f2e59bb66f0a677ad01c48e9c",
"timestamp": "",
"source": "github",
"line_count": 237,
"max_line_length": 83,
"avg_line_length": 34.79324894514768,
"alnum_prop": 0.5685180693669658,
"repo_name": "google/VRD",
"id": "1e1b5e33c8214bdde65629d199a6b32a57989243",
"size": "8822",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "preprocess_ipynb.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "49448"
}
],
"symlink_target": ""
}
|
import unittest
from os.path import join as joinpath
import ffmpymedia
from tests import TEST_FILE_PATH
class TestMediaUse(unittest.TestCase):
def test_compare_two_files(self):
# User1 wants to compare two media files to see if their stream layouts are the same.
# First he passes the same file to the API to see if they compare as the same
filename1 = filename2 = joinpath(TEST_FILE_PATH, 'SIN001 Sinuca.mp4')
file1 = ffmpymedia.MediaFile.parse_file(filename1)
file2 = ffmpymedia.MediaFile.parse_file(filename2)
self.assertTrue(file1 == file2)
        # Then he wants to be sure and see that the difference between the two files is {}
self.assertEqual(ffmpymedia.MediaFile.parse_file(filename1).difference(ffmpymedia.MediaFile.parse_file(filename2)), {})
        # Then he decides to try two different files to be sure different files are treated differently
filename3 = joinpath(TEST_FILE_PATH, 'COLB001 Color Bar.mp4')
file3 = ffmpymedia.MediaFile.parse_file(filename3)
self.assertFalse(file1 == file3)
# As he is very curious, he then wants to see the difference between the files
self.assertNotEqual(ffmpymedia.MediaFile.parse_file(filename1).difference(ffmpymedia.MediaFile.parse_file(filename3)), {})
# After all these comparisons, he decided to take a look at the streams of each file.
        print(repr(file1))
        print(repr(file2))
        print(repr(file3))
def test_media_analyser(self):
        # Developer1 wishes to test the MediaAnalyser API functionality.
# With that in mind, he decides to try out all 4 API calls from this helper class.
filename1 = filename2 = joinpath(TEST_FILE_PATH, 'SIN001 Sinuca.mp4')
filename3 = joinpath(TEST_FILE_PATH, 'COLB001 Color Bar.mp4')
template1 = ffmpymedia.MediaFileTemplate(**{'format_name': 'mov,mp4,m4a,3gp,3g2,mj2', 'duration': '12.0', 'metadata': None, 'start_time': '0.000000', 'streams': [{'type': 'video', 'height': '1080', 'bitrate': '2574', 'metadata': {'handler_name': 'VideoHandler'}, 'codec': 'h264', 'index': '0', 'disposition': {'lyrics': 0, 'default': 1, 'clean_effects': 0, 'karaoke': 0, 'hearing_impaired': 0, 'visual_impaired': 0, 'forced': 0, 'comment': 0, 'dub': 0, 'original': 0, 'attached_pic': 0}, 'codec_tag': '0x31637661', 'codec_tag_string': 'avc1', 'width': '1920', 'sample_aspect_ratio': '1:1', 'pixel_format': 'yuv420p', 'reported_frame_rate': '25', 'display_aspect_ratio': '16:9', 'container_time_base': '12800', 'average_frame_rate': '25', 'codec_time_base': '50', 'language': 'und', 'profile': 'High', 'codec_long_name': 'H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10'}], 'filename': '/home/flaviopontes/PycharmProjects/ffmpymedia/test_files/SIN001 Sinuca.mp4', 'bit_rate': '2577000'})
template3 = ffmpymedia.MediaFileTemplate(**{'format_name': 'mov,mp4,m4a,3gp,3g2,mj2', 'duration': '12.0', 'metadata': None, 'start_time': '0.000000', 'streams': [{'type': 'video', 'height': '1080', 'bitrate': '2574', 'metadata': {'handler_name': 'VideoHandler'}, 'codec': 'h264', 'index': '0', 'disposition': {'lyrics': 0, 'default': 1, 'clean_effects': 0, 'karaoke': 0, 'hearing_impaired': 0, 'visual_impaired': 0, 'forced': 0, 'comment': 0, 'dub': 0, 'original': 0, 'attached_pic': 0}, 'codec_tag': '0x31637661', 'codec_tag_string': 'avc1', 'width': '1920', 'sample_aspect_ratio': '1:1', 'pixel_format': 'yuv420p', 'reported_frame_rate': '25', 'display_aspect_ratio': '16:9', 'container_time_base': '12800', 'average_frame_rate': '25', 'codec_time_base': '50', 'language': 'und', 'profile': 'High', 'codec_long_name': 'H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10'}], 'filename': '/home/flaviopontes/PycharmProjects/ffmpymedia/test_files/SIN001 Sinuca.mp4', 'bit_rate': '2577000'})
self.assertFalse(ffmpymedia.MediaAnalyser.compare_media_file_with_template(filename1, template1))
|
{
"content_hash": "d5a1d601982c212bc54c1c1e3d187094",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 988,
"avg_line_length": 85.67391304347827,
"alnum_prop": 0.6691195128140066,
"repo_name": "flaviocpontes/ffmpymedia",
"id": "deb07be72390f2147631738e63f8d050ef7b7569",
"size": "3989",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_media_functional.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "169236"
}
],
"symlink_target": ""
}
|
import os
from gppylib.test.unit.gp_unittest import GpTestCase, run_tests
from gppylib.commands import gp
from gppylib.db import dbconn
class Context(object):
filename = os.path.join(gp.get_masterdatadir(), 'gpexpand.status')
dbname = os.getenv('PGDATABASE', 'postgres')
dburl = dbconn.DbURL(dbname=dbname)
conn = dbconn.connect(dburl)
day = 0
ctx = Context()
def get_gpexpand_status():
st = gp.get_gpexpand_status()
st.dbname = ctx.dbname
return st.get_status()
def insert_status(status):
ctx.day += 1
dbconn.execSQL(ctx.conn, '''
INSERT INTO gpexpand.status VALUES
( '{status}', date '2001-01-01' + interval '{day} day');
'''.format(status=status, day=ctx.day))
ctx.conn.commit()
def leave_phase1(func):
def wrapper(*args, **kwargs):
try:
os.unlink(ctx.filename)
except OSError:
pass
return func(*args, **kwargs)
return wrapper
def leave_phase2(func):
def wrapper(*args, **kwargs):
dbconn.execSQL(ctx.conn, '''
DROP SCHEMA IF EXISTS gpexpand CASCADE;
''')
ctx.conn.commit()
return func(*args, **kwargs)
return wrapper
def drop_table(name):
def decorator(func):
def wrapper(*args, **kwargs):
dbconn.execSQL(ctx.conn, '''
DROP TABLE IF EXISTS {name};
'''.format(name=name))
ctx.conn.commit()
return func(*args, **kwargs)
return wrapper
return decorator
def start_redistribution(func):
def wrapper(*args, **kwargs):
insert_status('EXPANSION STARTED')
return func(*args, **kwargs)
return wrapper
def stop_redistribution(func):
def wrapper(*args, **kwargs):
insert_status('EXPANSION STOPPED')
return func(*args, **kwargs)
return wrapper
def expanding_table(name):
def decorator(func):
def wrapper(*args, **kwargs):
dbconn.execSQL(ctx.conn, '''
UPDATE gpexpand.status_detail SET STATUS='IN PROGRESS'
WHERE fq_name='{name}';
'''.format(name=name))
ctx.conn.commit()
return func(*args, **kwargs)
return wrapper
return decorator
def expanded_table(name):
def decorator(func):
def wrapper(*args, **kwargs):
dbconn.execSQL(ctx.conn, '''
UPDATE gpexpand.status_detail SET STATUS='COMPLETED'
WHERE fq_name='{name}';
'''.format(name=name))
ctx.conn.commit()
return func(*args, **kwargs)
return wrapper
return decorator
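# These decorators stack to simulate expansion states; their side effects run
# top-to-bottom at call time, e.g.
#   @leave_phase1                   (delete the phase-1 status file)
#   @start_redistribution           (insert 'EXPANSION STARTED')
#   @expanding_table('public.t1')   (mark t1 IN PROGRESS)
# puts the cluster in mid-phase-2 redistribution before the test body runs.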
class GpExpandUtils(GpTestCase):
def setUp(self):
ctx.day = 1
dbconn.execSQL(ctx.conn, '''
DROP SCHEMA IF EXISTS gpexpand CASCADE;
CREATE SCHEMA gpexpand;
CREATE TABLE gpexpand.status (status text, updated timestamp);
CREATE TABLE gpexpand.status_detail (
dbname text,
fq_name text,
schema_oid oid,
table_oid oid,
distribution_policy smallint[],
distribution_policy_names text,
distribution_policy_coloids text,
distribution_policy_type text,
root_partition_name text,
storage_options text,
rank int,
status text,
expansion_started timestamp,
expansion_finished timestamp,
source_bytes numeric
);
INSERT INTO gpexpand.status VALUES
( 'SETUP', '2001-01-01' ),
( 'SETUP DONE', '2001-01-02' );
INSERT INTO gpexpand.status_detail (dbname, fq_name, rank, status) VALUES
('fake_db', 'public.t1', 2, 'NOT STARTED'),
('fake_db', 'public.t2', 2, 'NOT STARTED');
        ''')
ctx.conn.commit()
with open(ctx.filename, 'w') as f:
f.write('''UNINITIALIZED:None
EXPANSION_PREPARE_STARTED:<filename>
BUILD_SEGMENT_TEMPLATE_STARTED:<filename>
BUILD_SEGMENT_TEMPLATE_DONE:None
BUILD_SEGMENTS_STARTED:<filename>
BUILD_SEGMENTS_DONE:<number>
UPDATE_CATALOG_STARTED:<filename>
UPDATE_CATALOG_DONE:None
SETUP_EXPANSION_SCHEMA_STARTED:None
SETUP_EXPANSION_SCHEMA_DONE:None
PREPARE_EXPANSION_SCHEMA_STARTED:None
PREPARE_EXPANSION_SCHEMA_DONE:None
EXPANSION_PREPARE_DONE:None
''')
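        # The file above mimics a completed phase 1 (segment build); its mere
        # presence is what makes the status query report phase 1.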
@leave_phase1
@leave_phase2
def tearDown(self):
pass
@leave_phase1
@leave_phase2
def test_when_no_expansion(self):
st = get_gpexpand_status()
self.assertEqual(st.phase, 0)
self.assertEqual(st.status, 'NO EXPANSION DETECTED')
self.assertEqual(len(st.uncompleted), 0)
self.assertEqual(len(st.inprogress), 0)
self.assertEqual(len(st.completed), 0)
st.get_progress()
self.assertEqual(st.phase, 0)
self.assertEqual(st.status, 'NO EXPANSION DETECTED')
self.assertEqual(len(st.uncompleted), 0)
self.assertEqual(len(st.inprogress), 0)
self.assertEqual(len(st.completed), 0)
def test_phase1_with_empty_status(self):
with open(ctx.filename, 'w'):
pass
st = get_gpexpand_status()
self.assertEqual(st.phase, 1)
self.assertEqual(st.status, 'UNKNOWN PHASE1 STATUS')
def test_phase1_with_normal_status(self):
st = get_gpexpand_status()
self.assertEqual(st.phase, 1)
self.assertEqual(st.status, 'EXPANSION_PREPARE_DONE')
@leave_phase1
@drop_table('gpexpand.status_detail')
def test_phase2_when_missing_status_detail(self):
st = get_gpexpand_status()
st.get_progress()
self.assertEqual(st.phase, 2)
self.assertEqual(st.status, 'SETUP DONE')
self.assertEqual(len(st.uncompleted), 0)
self.assertEqual(len(st.inprogress), 0)
self.assertEqual(len(st.completed), 0)
@leave_phase1
def test_phase2_when_setup_done(self):
st = get_gpexpand_status()
st.get_progress()
self.assertEqual(st.phase, 2)
self.assertEqual(st.status, 'SETUP DONE')
self.assertEqual(len(st.uncompleted), 2)
self.assertEqual(len(st.inprogress), 0)
self.assertEqual(len(st.completed), 0)
@leave_phase1
@start_redistribution
@expanding_table('public.t1')
def test_phase2_when_expanding_first_table(self):
st = get_gpexpand_status()
st.get_progress()
self.assertEqual(st.phase, 2)
self.assertEqual(st.status, 'EXPANSION STARTED')
self.assertEqual(len(st.uncompleted), 1)
self.assertEqual(len(st.inprogress), 1)
self.assertEqual(len(st.completed), 0)
@leave_phase1
@start_redistribution
@expanded_table('public.t1')
def test_phase2_when_expanded_first_table(self):
st = get_gpexpand_status()
st.get_progress()
self.assertEqual(st.phase, 2)
self.assertEqual(st.status, 'EXPANSION STARTED')
self.assertEqual(len(st.uncompleted), 1)
self.assertEqual(len(st.inprogress), 0)
self.assertEqual(len(st.completed), 1)
@leave_phase1
@start_redistribution
@expanded_table('public.t1')
@expanding_table('public.t2')
def test_phase2_when_expanding_last_table(self):
st = get_gpexpand_status()
st.get_progress()
self.assertEqual(st.phase, 2)
self.assertEqual(st.status, 'EXPANSION STARTED')
self.assertEqual(len(st.uncompleted), 0)
self.assertEqual(len(st.inprogress), 1)
self.assertEqual(len(st.completed), 1)
@leave_phase1
@start_redistribution
@expanded_table('public.t1')
@expanded_table('public.t2')
@stop_redistribution
def test_phase2_when_expanded_last_table(self):
st = get_gpexpand_status()
st.get_progress()
self.assertEqual(st.phase, 2)
self.assertEqual(st.status, 'EXPANSION STOPPED')
self.assertEqual(len(st.uncompleted), 0)
self.assertEqual(len(st.inprogress), 0)
self.assertEqual(len(st.completed), 2)
if __name__ == '__main__':
run_tests()
|
{
"content_hash": "b89d465bb714f8062fea6f9eaec99f06",
"timestamp": "",
"source": "github",
"line_count": 266,
"max_line_length": 85,
"avg_line_length": 30.909774436090224,
"alnum_prop": 0.6059352955485283,
"repo_name": "jmcatamney/gpdb",
"id": "a63f304dd1e2ff0a23e6199de77b4ba3e90b37e1",
"size": "8375",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "gpMgmt/bin/gppylib/test/unit/test_unit_gpexpand_status.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "3724"
},
{
"name": "Awk",
"bytes": "836"
},
{
"name": "Batchfile",
"bytes": "12854"
},
{
"name": "C",
"bytes": "42498841"
},
{
"name": "C++",
"bytes": "14366259"
},
{
"name": "CMake",
"bytes": "38452"
},
{
"name": "Csound Score",
"bytes": "223"
},
{
"name": "DTrace",
"bytes": "3873"
},
{
"name": "Dockerfile",
"bytes": "11932"
},
{
"name": "Emacs Lisp",
"bytes": "3488"
},
{
"name": "Fortran",
"bytes": "14863"
},
{
"name": "GDB",
"bytes": "576"
},
{
"name": "Gherkin",
"bytes": "335208"
},
{
"name": "HTML",
"bytes": "53484"
},
{
"name": "JavaScript",
"bytes": "23969"
},
{
"name": "Lex",
"bytes": "229556"
},
{
"name": "M4",
"bytes": "111147"
},
{
"name": "Makefile",
"bytes": "496239"
},
{
"name": "Objective-C",
"bytes": "38376"
},
{
"name": "PLpgSQL",
"bytes": "8009512"
},
{
"name": "Perl",
"bytes": "798767"
},
{
"name": "PowerShell",
"bytes": "422"
},
{
"name": "Python",
"bytes": "3000118"
},
{
"name": "Raku",
"bytes": "698"
},
{
"name": "Roff",
"bytes": "32437"
},
{
"name": "Ruby",
"bytes": "77585"
},
{
"name": "SCSS",
"bytes": "339"
},
{
"name": "Shell",
"bytes": "451713"
},
{
"name": "XS",
"bytes": "6983"
},
{
"name": "Yacc",
"bytes": "674092"
},
{
"name": "sed",
"bytes": "1231"
}
],
"symlink_target": ""
}
|
import unittest
from webkitpy.common.net.networktransaction import NetworkTransaction, NetworkTimeout
from webkitpy.common.system.logtesting import LoggingTestCase
from webkitpy.thirdparty.autoinstalled.mechanize import HTTPError
class NetworkTransactionTest(LoggingTestCase):
exception = Exception("Test exception")
def test_success(self):
transaction = NetworkTransaction()
self.assertEqual(transaction.run(lambda: 42), 42)
def _raise_exception(self):
raise self.exception
def test_exception(self):
transaction = NetworkTransaction()
did_process_exception = False
did_throw_exception = True
try:
transaction.run(lambda: self._raise_exception())
did_throw_exception = False
except Exception, e:
did_process_exception = True
self.assertEqual(e, self.exception)
self.assertTrue(did_throw_exception)
self.assertTrue(did_process_exception)
def _raise_500_error(self):
self._run_count += 1
if self._run_count < 3:
raise HTTPError("http://example.com/", 500, "internal server error", None, None)
return 42
def _raise_404_error(self):
raise HTTPError("http://foo.com/", 404, "not found", None, None)
def test_retry(self):
self._run_count = 0
transaction = NetworkTransaction(initial_backoff_seconds=0)
self.assertEqual(transaction.run(lambda: self._raise_500_error()), 42)
self.assertEqual(self._run_count, 3)
self.assertLog(['WARNING: Received HTTP status 500 loading "http://example.com/". '
'Retrying in 0 seconds...\n',
'WARNING: Received HTTP status 500 loading "http://example.com/". '
'Retrying in 0.0 seconds...\n'])
def test_convert_404_to_None(self):
transaction = NetworkTransaction(convert_404_to_None=True)
self.assertEqual(transaction.run(lambda: self._raise_404_error()), None)
def test_timeout(self):
self._run_count = 0
transaction = NetworkTransaction(initial_backoff_seconds=60*60, timeout_seconds=60)
did_process_exception = False
did_throw_exception = True
try:
transaction.run(lambda: self._raise_500_error())
did_throw_exception = False
except NetworkTimeout, e:
did_process_exception = True
self.assertTrue(did_throw_exception)
self.assertTrue(did_process_exception)
|
{
"content_hash": "e28014f83a27c64aea77f8420ab79232",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 92,
"avg_line_length": 38.98461538461538,
"alnum_prop": 0.6428571428571429,
"repo_name": "Xperia-Nicki/android_platform_sony_nicki",
"id": "c4cd4e0b8cfb93c6226e14adc6626bea7c0ff6e2",
"size": "4066",
"binary": false,
"copies": "15",
"ref": "refs/heads/master",
"path": "external/webkit/Tools/Scripts/webkitpy/common/net/networktransaction_unittest.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Ada",
"bytes": "89080"
},
{
"name": "Assembly",
"bytes": "212775"
},
{
"name": "Awk",
"bytes": "19252"
},
{
"name": "C",
"bytes": "68667466"
},
{
"name": "C#",
"bytes": "55625"
},
{
"name": "C++",
"bytes": "54670920"
},
{
"name": "CLIPS",
"bytes": "12224"
},
{
"name": "CSS",
"bytes": "283405"
},
{
"name": "D",
"bytes": "1931"
},
{
"name": "Java",
"bytes": "4882"
},
{
"name": "JavaScript",
"bytes": "19597804"
},
{
"name": "Objective-C",
"bytes": "5849156"
},
{
"name": "PHP",
"bytes": "17224"
},
{
"name": "Pascal",
"bytes": "42411"
},
{
"name": "Perl",
"bytes": "1632149"
},
{
"name": "Prolog",
"bytes": "214621"
},
{
"name": "Python",
"bytes": "3493321"
},
{
"name": "R",
"bytes": "290"
},
{
"name": "Ruby",
"bytes": "78743"
},
{
"name": "Scilab",
"bytes": "554"
},
{
"name": "Shell",
"bytes": "265637"
},
{
"name": "TypeScript",
"bytes": "45459"
},
{
"name": "XSLT",
"bytes": "11219"
}
],
"symlink_target": ""
}
|
"""Keras implementation of the Layer-wise Adaptive Rate Scaling (LARS) optimizer.
Original paper: [Large batch training of convolutional networks]
(https://arxiv.org/pdf/1708.03888.pdf).
Code adapted from Algorithm 1 in [Large Batch Optimization for Deep Learning:
Training BERT in 76 minutes](https://arxiv.org/abs/1904.00962):
https://github.com/tensorflow/addons/blob/v0.10.0/tensorflow_addons/optimizers/lamb.py
"""
import re
from typing import List, Optional, Union
import numpy as np
import tensorflow as tf
FloatTensorLike = Union[tf.Tensor, float, np.float16, np.float32, np.float64]
class LARS(tf.keras.optimizers.Optimizer):
"""Optimizer that implements Layer-wise Adaptive Rate Scaling (LARS)."""
def __init__(self,
learning_rate: Union[FloatTensorLike] = 0.001,
momentum: FloatTensorLike = 0.9,
weight_decay_rate: FloatTensorLike = 0.0,
epsilon: FloatTensorLike = 0.001,
exclude_from_weight_decay: Optional[List[str]] = None,
exclude_from_layer_adaptation: Optional[List[str]] = None,
name: str = 'LARS',
**kwargs):
"""Construct a new LARS optimizer.
Args:
learning_rate: A `Tensor` or floating point value representing the
learning rate of the optimizer.
momentum: A `float` value or a constant `float` tensor representing the
momentum parameter.
weight_decay_rate: A `float` value representing the weight decay rate.
epsilon: A `float` value used for numerical stability.
exclude_from_weight_decay: A list of regex patterns of
variables excluded from weight decay. Variables whose name contain
a substring matching the pattern will be excluded.
exclude_from_layer_adaptation: A list of regex patterns of
variables excluded from layer adaptation. Variables whose name
contain a substring matching the pattern will be excluded. If not
provided, this will default to the same value as
`exclude_from_weight_decay`.
name: Optional name for the operations created when applying
gradients. Defaults to 'LARS'.
**kwargs: keyword arguments. Allowed to be {`clipnorm`, `clipvalue`,
`lr`, `decay`}. `clipnorm` is clip gradients by norm; `clipvalue`
is clip gradients by value, `decay` is included for backward
compatibility to allow time inverse decay of learning rate. The kwarg
`lr` is included for backward compatibility, it is recommended to use
`learning_rate` instead.
"""
super().__init__(name, **kwargs)
# We handle L2 regularization/weight decay generically, via a
# 'weight_decay_rate' hyperparameter (distinct from the default
# Keras learning rate decay optionally supplied via the 'decay' kwarg).
self._set_hyper('weight_decay_rate', weight_decay_rate)
self._set_hyper('epsilon', epsilon)
self._set_hyper('decay', self._initial_decay) # Keras default, not used
self._set_hyper('learning_rate', kwargs.get('lr', learning_rate))
self._set_hyper('momentum', momentum)
self.exclude_from_weight_decay = exclude_from_weight_decay
if exclude_from_layer_adaptation:
self.exclude_from_layer_adaptation = exclude_from_layer_adaptation
else:
self.exclude_from_layer_adaptation = exclude_from_weight_decay
def _create_slots(self, var_list):
# Create slots for the first moment.
for var in var_list:
self.add_slot(var, 'm')
def _resource_apply_dense(self, grad, var, apply_state=None):
var_dtype = var.dtype.base_dtype
lr = self._get_hyper('learning_rate', var_dtype)
momentum = self._get_hyper('momentum', var_dtype)
weight_decay_rate = self._get_hyper('weight_decay_rate', var_dtype)
epsilon = self._get_hyper('epsilon', var_dtype)
var_name = self._get_variable_name(var.name)
# m_t = beta * m_{t-1} + (1 - beta) * (g_t + lambda * x_t)
m = self.get_slot(var, 'm')
grad_with_decay = grad
if self._do_use_weight_decay(var_name):
grad_with_decay += weight_decay_rate * var
scaled_grad_with_decay = grad_with_decay * (1 - momentum)
m_t = m.assign(
m * momentum + scaled_grad_with_decay, use_locking=self._use_locking)
ratio = 1.0
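    # Layer-wise adaptation ("trust ratio"): scale the step by
    # ||w|| / (||m_t|| + eps), with tf.where guards so that zero-norm
    # variables or updates fall back to a ratio of 1.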
if self._do_layer_adaptation(var_name):
w_norm = tf.norm(var, ord=2)
m_norm = tf.norm(m_t, ord=2)
ratio = tf.where(
tf.greater(w_norm, 0),
tf.where(tf.greater(m_norm, 0), (w_norm / (m_norm + epsilon)), 1.0),
1.0,
)
var_update = var - ratio * lr * m_t
return var.assign(var_update, use_locking=self._use_locking).op
def _resource_apply_sparse(self, grad, var, indices, apply_state=None):
var_dtype = var.dtype.base_dtype
lr = self._get_hyper('learning_rate', var_dtype)
momentum = self._get_hyper('momentum', var_dtype)
weight_decay_rate = self._get_hyper('weight_decay_rate', var_dtype)
epsilon = self._get_hyper('epsilon', var_dtype)
var_name = self._get_variable_name(var.name)
# m_t = beta * m_{t-1} + (1 - beta) * (g_t + lambda * x_t)
m = self.get_slot(var, 'm')
grad_with_decay = grad
if self._do_use_weight_decay(var_name):
grad_with_decay += weight_decay_rate * var
scaled_grad_with_decay = grad_with_decay * (1 - momentum)
m_t = m.assign(m * momentum, use_locking=self._use_locking)
with tf.control_dependencies([m_t]):
m_t = self._resource_scatter_add(m, indices, scaled_grad_with_decay)
ratio = 1.0
if self._do_layer_adaptation(var_name):
w_norm = tf.norm(var, ord=2)
m_norm = tf.norm(m_t, ord=2)
ratio = tf.where(
tf.greater(w_norm, 0),
tf.where(tf.greater(m_norm, 0), (w_norm / (m_norm + epsilon)), 1.0),
1.0,
)
var_update = var.assign_sub(
ratio * lr * m_t, use_locking=self._use_locking)
return tf.group(*[var_update, m_t])
def get_config(self):
config = super().get_config()
config.update({
'learning_rate':
self._serialize_hyperparameter('learning_rate'),
'weight_decay_rate':
self._serialize_hyperparameter('weight_decay_rate'),
'decay':
self._serialize_hyperparameter('decay'),
'momentum':
self._serialize_hyperparameter('momentum'),
'epsilon':
self._serialize_hyperparameter('epsilon'),
})
return config
def _do_use_weight_decay(self, param_name):
"""Whether to use L2 weight decay for `param_name`."""
if self.exclude_from_weight_decay:
for r in self.exclude_from_weight_decay:
if re.search(r, param_name) is not None:
return False
return True
def _do_layer_adaptation(self, param_name):
"""Whether to do layer-wise learning rate adaptation for `param_name`."""
if self.exclude_from_layer_adaptation:
for r in self.exclude_from_layer_adaptation:
if re.search(r, param_name) is not None:
return False
return True
def _get_variable_name(self, param_name):
"""Get the variable name from the tensor name."""
m = re.match('^(.*):\\d+$', param_name)
if m is not None:
param_name = m.group(1)
return param_name
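# --- Usage sketch (illustrative; not part of the original module) ---
# A minimal smoke test, assuming a TF2 release where the legacy
# `tf.keras.optimizers.Optimizer` base class subclassed above is the default.
# The model, data and hyperparameter values below are arbitrary assumptions,
# not recommendations from the paper.
if __name__ == '__main__':
  model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
  model.compile(
      optimizer=LARS(learning_rate=0.1, momentum=0.9, weight_decay_rate=1e-4,
                     # Bias terms are commonly excluded from weight decay.
                     exclude_from_weight_decay=['bias']),
      loss='mse')
  x = np.random.rand(32, 4).astype('float32')
  y = np.random.rand(32, 1).astype('float32')
  model.fit(x, y, epochs=1, verbose=0)  # one LARS update per batch
  print('loss after one epoch:', model.evaluate(x, y, verbose=0))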
|
{
"content_hash": "2ba09c5930a13ab822c72a7741b33994",
"timestamp": "",
"source": "github",
"line_count": 179,
"max_line_length": 86,
"avg_line_length": 40.74301675977654,
"alnum_prop": 0.6412998765939942,
"repo_name": "google-research/public-data-in-dpfl",
"id": "4b48cf2c7b699f637507c79d2a8a973eb9b779e4",
"size": "7889",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "utils/optimizers/lars.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "828921"
},
{
"name": "Shell",
"bytes": "2569"
},
{
"name": "Starlark",
"bytes": "9413"
}
],
"symlink_target": ""
}
|
"""Training and Predicting Cifar10 with Mutant Networks.
The networks mutate their architecture using genetic algorithms.
Author: Lucas David -- <ld492@drexel.edu>
Licence: MIT License 2016 (c)
"""
import logging
import artificial as art
import numpy as np
import tensorflow as tf
from artificial.utils.experiments import arg_parser, ExperimentSet, Experiment
from keras.datasets import cifar10
from keras.preprocessing.image import ImageDataGenerator
import mutant
class Cifar10MutantEnvironment(mutant.Environment):
def build(self):
tf.logging.info('building environment...')
tf.logging.info('|-loading data...')
(X, y), (X_test, y_test) = cifar10.load_data()
X = X.astype('float32') / 255
X_test = X_test.astype('float32') / 255
g = ImageDataGenerator(
featurewise_center=True,
featurewise_std_normalization=True,
rotation_range=15,
width_shift_range=0.1,
height_shift_range=0.1,
horizontal_flip=True)
tf.logging.info('|-fitting image generator...')
g.fit(X)
tf.logging.info('|-defining data sets...')
self.dataset_ = g.flow(X, y, batch_size=self.consts.batch_size,
shuffle=self.consts.shuffle_train)
self.test_dataset_ = self.val_dataset_ = (X_test, y_test)
tf.logging.info('building complete')
return self
class ClimbOverCifar10Experiment(Experiment):
env_ = None
def setup(self):
consts = self.consts
# Settings for logging.
verbosity_level = logging.INFO if consts.verbose else logging.WARNING
for m in ('artificial', 'tensorflow', 'connoisseur'):
logger = logging.getLogger(m)
logger.setLevel(verbosity_level)
logger.addHandler(logging.FileHandler(consts.log_file))
np.random.seed(consts.seed)
# Create mutation environment.
e = Cifar10MutantEnvironment(optimizer='adam', consts=consts)
e.agents = [
mutant.Agent(search=art.searches.local.HillClimbing,
environment=e,
**consts.agent_params)
]
initial_architecture = e.architect_.validate({
mutant.Codes.Conv2D: [
e.architect_.random_layer(mutant.Codes.Conv2D)
for _ in range(4)
],
mutant.Codes.Dense: [
e.architect_.random_layer(mutant.Codes.Dense)
for _ in range(2)
],
})
initial_state = mutant.MutantNetwork(initial_architecture)
e.current_state = e.initial_state = initial_state
self.env_ = e
def run(self):
try:
self.env_.live(n_cycles=1)
finally:
answer = self.env_.current_state
if answer:
tf.logging.info('train and validation loss after %i epochs: '
'(%s, %s)', self.consts.n_epochs,
answer.loss_, answer.validation_loss_)
if __name__ == '__main__':
print(__doc__, flush=True)
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('tensorflow').propagate = False
(ExperimentSet(ClimbOverCifar10Experiment)
.load_from_json(arg_parser.parse_args().constants)
.run())
|
{
"content_hash": "6ecf597b4d28fa304d1c0362410727b3",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 78,
"avg_line_length": 31.514018691588785,
"alnum_prop": 0.5954922894424673,
"repo_name": "Comp-UFSCar/neural-networks-2",
"id": "ee59b627dabe65c3f9d1307a3cd82f758adbce99",
"size": "3372",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mutant-networks/experiments/cifar-hill-climbing/experiment.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "54063"
},
{
"name": "TeX",
"bytes": "459762"
}
],
"symlink_target": ""
}
|
import ast
import sys
import os
import math
class Nb_classifier:
def __init__(self, model):
self.model = model
def classify(self, file_path):
contents = open(file_path, "r").read()
delimiters = ['.', ',', '$', '(', ')', '!', '\n', '"', ':', ';', '!', '?', "'", '&', '%', '=', '/', '@']
delimiters = delimiters + ['[', ']', '~']
delimiters = delimiters + ['+', '/', '*', '-']
delimiters = delimiters + ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
for delimiter in delimiters:
contents = contents.replace(delimiter, ' ')
tokens = contents.split(' ')
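        # Score each class in log space: log prior (class reviews over total
        # reviews) plus per-token log likelihoods. Summing logs avoids the
        # floating-point underflow that multiplying tiny probabilities causes.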
negative_truthful = self.model['negative_truthful']
negative_truthful_probability = math.log(negative_truthful['reviews_count'] / negative_truthful['total_reviews_count'])
for item in tokens:
if(item in negative_truthful['dictionary']):
negative_truthful_probability = negative_truthful_probability + math.log(negative_truthful['dictionary'][item] / negative_truthful['total_tokens_count'])
positive_truthful = self.model['positive_truthful']
positive_truthful_probability = math.log(positive_truthful['reviews_count'] / positive_truthful['total_reviews_count'])
for item in tokens:
if(item in positive_truthful['dictionary']):
positive_truthful_probability = positive_truthful_probability + math.log(positive_truthful['dictionary'][item] / positive_truthful['total_tokens_count'])
negative_deceptive = self.model['negative_deceptive']
negative_deceptive_probability = math.log(negative_deceptive['reviews_count'] / negative_deceptive['total_reviews_count'])
for item in tokens:
if(item in negative_deceptive['dictionary']):
negative_deceptive_probability = negative_deceptive_probability + math.log(negative_deceptive['dictionary'][item] / negative_deceptive['total_tokens_count'])
positive_deceptive = self.model['positive_deceptive']
positive_deceptive_probability = math.log(positive_deceptive['reviews_count'] / positive_deceptive['total_reviews_count'])
for item in tokens:
if(item in positive_deceptive['dictionary']):
positive_deceptive_probability = positive_deceptive_probability + math.log(positive_deceptive['dictionary'][item] / positive_deceptive['total_tokens_count'])
        # Pick the class with the highest log score; ties resolve in the same
        # priority order as the original comparison chain.
        scores = [
            (negative_truthful_probability, 'truthful negative '),
            (positive_truthful_probability, 'truthful positive '),
            (negative_deceptive_probability, 'deceptive negative '),
            (positive_deceptive_probability, 'deceptive positive '),
        ]
        best = max(scores, key=lambda s: s[0])
        return best[1] + file_path
def run_classifier(nb_classifier, input_path, out):
if(os.path.isfile(input_path)):
out.append(nb_classifier.classify(input_path))
return
for item in os.listdir(input_path):
sub_path = input_path + "/" + item
if(os.path.isfile(sub_path)):
out.append(nb_classifier.classify(sub_path))
else:
run_classifier(nb_classifier, sub_path, out)
def write_to_file(output_file, out):
for item in out:
output_file.write(item + "\n")
model_file = open("nbmodel.txt", "r")
input_path = sys.argv[1]
output_file = open("nboutput.txt", "w")
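# Expected shape of nbmodel.txt: a Python dict literal readable by
# ast.literal_eval. Keys are inferred from the lookups in classify();
# the counts below are purely illustrative:
# {'negative_truthful': {'reviews_count': 320, 'total_reviews_count': 1280,
#                        'total_tokens_count': 54000,
#                        'dictionary': {'room': 41, 'dirty': 17}},
#  'positive_truthful': {...}, 'negative_deceptive': {...},
#  'positive_deceptive': {...}}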
model = ast.literal_eval(model_file.read())
nb_classifier = Nb_classifier(model)
out = []
run_classifier(nb_classifier, input_path, out)
write_to_file(output_file, out)
|
{
"content_hash": "19177d55095cbe240de7380dffc64b36",
"timestamp": "",
"source": "github",
"line_count": 83,
"max_line_length": 217,
"avg_line_length": 55.2289156626506,
"alnum_prop": 0.6472513089005235,
"repo_name": "vswamy/NLP",
"id": "653bc8803a6d149a9d0c46220077988ca69dca6d",
"size": "4584",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "homework2/nbclassify3.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "13286"
}
],
"symlink_target": ""
}
|
import datetime
import mock
from oslo_config import cfg
from oslo_utils import timeutils
import testtools
from manila.common import constants
from manila import context
from manila import db
from manila.db.sqlalchemy import api as sqa_api
from manila.db.sqlalchemy import models as sqa_models
from manila import exception
from manila import quota
from manila import share
from manila import test
from manila.tests import db_utils
CONF = cfg.CONF
class QuotaIntegrationTestCase(test.TestCase):
def setUp(self):
super(QuotaIntegrationTestCase, self).setUp()
self.flags(quota_shares=2,
quota_gigabytes=20)
self.user_id = 'admin'
self.project_id = 'admin'
self.create_share = lambda size=10: (
db_utils.create_share(user_id=self.user_id,
project_id=self.project_id,
size=size,
status=constants.STATUS_AVAILABLE)
)
self.context = context.RequestContext(self.user_id,
self.project_id,
is_admin=True)
@testtools.skip("SQLAlchemy sqlite insert bug")
def test_too_many_shares(self):
share_ids = []
for i in range(CONF.quota_shares):
share_ref = self.create_share()
share_ids.append(share_ref['id'])
self.assertRaises(exception.QuotaError,
share.API().create,
self.context, 'nfs', 10, '', '', None)
for share_id in share_ids:
db.share_delete(self.context, share_id)
@testtools.skip("SQLAlchemy sqlite insert bug")
def test_too_many_gigabytes(self):
share_ids = []
share_ref = self.create_share(size=20)
share_ids.append(share_ref['id'])
self.assertRaises(exception.QuotaError,
share.API().create,
self.context, 'cifs', 10, '', '', None)
for share_id in share_ids:
db.share_delete(self.context, share_id)
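def _example_over_quota_check(current_shares, quota_shares):
    # Hedged paraphrase (illustration only, not manila's implementation) of
    # the condition the two skipped tests above expect to trip: once a
    # project already holds `quota_shares` shares, one more create must
    # raise exception.QuotaError.
    return current_shares + 1 > quota_shares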
class FakeContext(object):
    """Minimal stand-in for a RequestContext, just enough for quota code."""
def __init__(self, project_id, quota_class):
self.is_admin = False
self.user_id = 'fake_user'
self.project_id = project_id
self.quota_class = quota_class
self.read_deleted = 'no'
def elevated(self):
elevated = self.__class__(self.project_id, self.quota_class)
elevated.is_admin = True
return elevated
class FakeDriver(object):
    """Quota-driver test double that records every call in ``self.called``."""
def __init__(self, by_project=None, by_class=None, reservations=None):
self.called = []
self.by_project = by_project or {}
self.by_class = by_class or {}
self.reservations = reservations or []
def get_by_project(self, context, project_id, resource):
self.called.append(('get_by_project', context, project_id, resource))
try:
return self.by_project[project_id][resource]
except KeyError:
raise exception.ProjectQuotaNotFound(project_id=project_id)
def get_by_class(self, context, quota_class, resource):
self.called.append(('get_by_class', context, quota_class, resource))
try:
return self.by_class[quota_class][resource]
except KeyError:
raise exception.QuotaClassNotFound(class_name=quota_class)
def get_defaults(self, context, resources):
self.called.append(('get_defaults', context, resources))
return resources
def get_class_quotas(self, context, resources, quota_class,
defaults=True):
self.called.append(('get_class_quotas', context, resources,
quota_class, defaults))
return resources
def get_project_quotas(self, context, resources, project_id,
quota_class=None, defaults=True, usages=True,
remains=False):
self.called.append(('get_project_quotas', context, resources,
project_id, quota_class, defaults, usages,
remains))
return resources
def limit_check(self, context, resources, values, project_id=None,
user_id=None):
self.called.append(('limit_check', context, resources,
values, project_id, user_id))
def reserve(self, context, resources, deltas, expire=None,
project_id=None, user_id=None):
self.called.append(('reserve', context, resources, deltas,
expire, project_id, user_id))
return self.reservations
def commit(self, context, reservations, project_id=None, user_id=None):
self.called.append(('commit', context, reservations, project_id,
user_id))
def rollback(self, context, reservations, project_id=None, user_id=None):
self.called.append(('rollback', context, reservations, project_id,
user_id))
def destroy_all_by_project_and_user(self, context, project_id, user_id):
self.called.append(('destroy_all_by_project_and_user', context,
project_id, user_id))
def destroy_all_by_project(self, context, project_id):
self.called.append(('destroy_all_by_project', context, project_id))
def expire(self, context):
self.called.append(('expire', context))
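def _example_spy_usage():
    # Hedged illustration (never called by the tests): FakeDriver is a
    # hand-rolled spy -- each method appends a tuple of its arguments to
    # self.called, so a test can assert on the exact call sequence later.
    driver = FakeDriver()
    context = FakeContext('demo_project', None)
    driver.expire(context)
    assert driver.called == [('expire', context)]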
class BaseResourceTestCase(test.TestCase):
def test_no_flag(self):
resource = quota.BaseResource('test_resource')
self.assertEqual('test_resource', resource.name)
self.assertIsNone(resource.flag)
self.assertEqual(-1, resource.default)
def test_with_flag(self):
# We know this flag exists, so use it...
self.flags(quota_shares=10)
resource = quota.BaseResource('test_resource', 'quota_shares')
self.assertEqual('test_resource', resource.name)
self.assertEqual('quota_shares', resource.flag)
self.assertEqual(10, resource.default)
def test_with_flag_no_quota(self):
self.flags(quota_shares=-1)
resource = quota.BaseResource('test_resource', 'quota_shares')
self.assertEqual('test_resource', resource.name)
self.assertEqual('quota_shares', resource.flag)
self.assertEqual(-1, resource.default)
def test_quota_no_project_no_class(self):
self.flags(quota_shares=10)
resource = quota.BaseResource('test_resource', 'quota_shares')
driver = FakeDriver()
context = FakeContext(None, None)
quota_value = resource.quota(driver, context)
self.assertEqual(10, quota_value)
def test_quota_with_project_no_class(self):
self.flags(quota_shares=10)
resource = quota.BaseResource('test_resource', 'quota_shares')
driver = FakeDriver(
by_project=dict(
test_project=dict(test_resource=15), ))
context = FakeContext('test_project', None)
quota_value = resource.quota(driver, context)
self.assertEqual(15, quota_value)
def test_quota_no_project_with_class(self):
self.flags(quota_shares=10)
resource = quota.BaseResource('test_resource', 'quota_shares')
driver = FakeDriver(
by_class=dict(
test_class=dict(test_resource=20), ))
context = FakeContext(None, 'test_class')
quota_value = resource.quota(driver, context)
self.assertEqual(20, quota_value)
def test_quota_with_project_with_class(self):
self.flags(quota_shares=10)
resource = quota.BaseResource('test_resource', 'quota_shares')
driver = FakeDriver(by_project=dict(
test_project=dict(test_resource=15), ),
by_class=dict(test_class=dict(test_resource=20), ))
context = FakeContext('test_project', 'test_class')
quota_value = resource.quota(driver, context)
self.assertEqual(15, quota_value)
def test_quota_override_project_with_class(self):
self.flags(quota_shares=10)
resource = quota.BaseResource('test_resource', 'quota_shares')
driver = FakeDriver(by_project=dict(
test_project=dict(test_resource=15),
override_project=dict(test_resource=20), ))
context = FakeContext('test_project', 'test_class')
quota_value = resource.quota(driver, context,
project_id='override_project')
self.assertEqual(20, quota_value)
def test_quota_with_project_override_class(self):
self.flags(quota_shares=10)
resource = quota.BaseResource('test_resource', 'quota_shares')
driver = FakeDriver(by_class=dict(
test_class=dict(test_resource=15),
override_class=dict(test_resource=20), ))
context = FakeContext('test_project', 'test_class')
quota_value = resource.quota(driver, context,
quota_class='override_class')
self.assertEqual(20, quota_value)
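def _example_quota_precedence():
    # Hedged summary of the lookup order the tests above pin down: an explicit
    # per-project quota beats a per-class quota, which beats the flag default.
    resource = quota.BaseResource('test_resource', 'quota_shares')
    driver = FakeDriver(by_project={'p1': {'test_resource': 15}},
                        by_class={'c1': {'test_resource': 20}})
    return resource.quota(driver, FakeContext('p1', 'c1'))  # -> 15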
class QuotaEngineTestCase(test.TestCase):
def test_init(self):
quota_obj = quota.QuotaEngine()
self.assertEqual({}, quota_obj._resources)
self.assertIsInstance(quota_obj._driver, quota.DbQuotaDriver)
def test_init_override_string(self):
quota_obj = quota.QuotaEngine(
quota_driver_class='manila.tests.test_quota.FakeDriver')
self.assertEqual({}, quota_obj._resources)
self.assertIsInstance(quota_obj._driver, FakeDriver)
def test_init_override_obj(self):
quota_obj = quota.QuotaEngine(quota_driver_class=FakeDriver)
self.assertEqual({}, quota_obj._resources)
self.assertEqual(FakeDriver, quota_obj._driver)
def test_register_resource(self):
quota_obj = quota.QuotaEngine()
resource = quota.AbsoluteResource('test_resource')
quota_obj.register_resource(resource)
self.assertEqual(dict(test_resource=resource), quota_obj._resources)
def test_register_resources(self):
quota_obj = quota.QuotaEngine()
resources = [
quota.AbsoluteResource('test_resource1'),
quota.AbsoluteResource('test_resource2'),
quota.AbsoluteResource('test_resource3'), ]
quota_obj.register_resources(resources)
self.assertEqual(dict(test_resource1=resources[0],
test_resource2=resources[1],
test_resource3=resources[2], ),
quota_obj._resources)
def test_sync_predeclared(self):
quota_obj = quota.QuotaEngine()
def spam(*args, **kwargs):
pass
resource = quota.ReservableResource('test_resource', spam)
quota_obj.register_resource(resource)
self.assertEqual(spam, resource.sync)
def test_sync_multi(self):
quota_obj = quota.QuotaEngine()
def spam(*args, **kwargs):
pass
resources = [
quota.ReservableResource('test_resource1', spam),
quota.ReservableResource('test_resource2', spam),
quota.ReservableResource('test_resource3', spam),
quota.ReservableResource('test_resource4', spam), ]
quota_obj.register_resources(resources[:2])
self.assertEqual(spam, resources[0].sync)
self.assertEqual(spam, resources[1].sync)
self.assertEqual(spam, resources[2].sync)
self.assertEqual(spam, resources[3].sync)
def test_get_by_project(self):
context = FakeContext('test_project', 'test_class')
driver = FakeDriver(
by_project=dict(
test_project=dict(test_resource=42)))
quota_obj = quota.QuotaEngine(quota_driver_class=driver)
result = quota_obj.get_by_project(context, 'test_project',
'test_resource')
self.assertEqual([('get_by_project',
context,
'test_project',
'test_resource'), ], driver.called)
self.assertEqual(42, result)
def test_get_by_class(self):
context = FakeContext('test_project', 'test_class')
driver = FakeDriver(
by_class=dict(
test_class=dict(test_resource=42)))
quota_obj = quota.QuotaEngine(quota_driver_class=driver)
result = quota_obj.get_by_class(context, 'test_class', 'test_resource')
self.assertEqual([('get_by_class',
context,
'test_class',
'test_resource'), ], driver.called)
self.assertEqual(42, result)
def _make_quota_obj(self, driver):
quota_obj = quota.QuotaEngine(quota_driver_class=driver)
resources = [
quota.AbsoluteResource('test_resource4'),
quota.AbsoluteResource('test_resource3'),
quota.AbsoluteResource('test_resource2'),
quota.AbsoluteResource('test_resource1'), ]
quota_obj.register_resources(resources)
return quota_obj
def test_get_defaults(self):
context = FakeContext(None, None)
driver = FakeDriver()
quota_obj = self._make_quota_obj(driver)
result = quota_obj.get_defaults(context)
self.assertEqual([('get_defaults',
context,
quota_obj._resources), ],
driver.called)
self.assertEqual(quota_obj._resources, result)
def test_get_class_quotas(self):
context = FakeContext(None, None)
driver = FakeDriver()
quota_obj = self._make_quota_obj(driver)
result1 = quota_obj.get_class_quotas(context, 'test_class')
result2 = quota_obj.get_class_quotas(context, 'test_class', False)
self.assertEqual([
('get_class_quotas',
context,
quota_obj._resources,
'test_class', True),
('get_class_quotas',
context, quota_obj._resources,
'test_class', False), ], driver.called)
self.assertEqual(quota_obj._resources, result1)
self.assertEqual(quota_obj._resources, result2)
def test_get_project_quotas(self):
context = FakeContext(None, None)
driver = FakeDriver()
quota_obj = self._make_quota_obj(driver)
result1 = quota_obj.get_project_quotas(context, 'test_project')
result2 = quota_obj.get_project_quotas(context, 'test_project',
quota_class='test_class',
defaults=False,
usages=False)
self.assertEqual([
('get_project_quotas',
context,
quota_obj._resources,
'test_project',
None,
True,
True,
False),
('get_project_quotas',
context,
quota_obj._resources,
'test_project',
'test_class',
False,
False,
False), ],
driver.called)
self.assertEqual(quota_obj._resources, result1)
self.assertEqual(quota_obj._resources, result2)
def test_count_no_resource(self):
context = FakeContext(None, None)
driver = FakeDriver()
quota_obj = self._make_quota_obj(driver)
self.assertRaises(exception.QuotaResourceUnknown,
quota_obj.count, context, 'test_resource5',
True, foo='bar')
def test_count_wrong_resource(self):
context = FakeContext(None, None)
driver = FakeDriver()
quota_obj = self._make_quota_obj(driver)
self.assertRaises(exception.QuotaResourceUnknown,
quota_obj.count, context, 'test_resource1',
True, foo='bar')
def test_count(self):
def fake_count(context, *args, **kwargs):
self.assertEqual((True,), args)
self.assertEqual(dict(foo='bar'), kwargs)
return 5
context = FakeContext(None, None)
driver = FakeDriver()
quota_obj = self._make_quota_obj(driver)
quota_obj.register_resource(quota.CountableResource('test_resource5',
fake_count))
result = quota_obj.count(context, 'test_resource5', True, foo='bar')
self.assertEqual(5, result)
def test_limit_check(self):
context = FakeContext(None, None)
driver = FakeDriver()
quota_obj = self._make_quota_obj(driver)
quota_obj.limit_check(context, test_resource1=4, test_resource2=3,
test_resource3=2, test_resource4=1)
self.assertEqual([
('limit_check',
context,
quota_obj._resources,
dict(
test_resource1=4,
test_resource2=3,
test_resource3=2,
test_resource4=1,),
None, None), ],
driver.called)
def test_reserve(self):
context = FakeContext(None, None)
driver = FakeDriver(reservations=['resv-01',
'resv-02',
'resv-03',
'resv-04', ])
quota_obj = self._make_quota_obj(driver)
result1 = quota_obj.reserve(context, test_resource1=4,
test_resource2=3, test_resource3=2,
test_resource4=1)
result2 = quota_obj.reserve(context, expire=3600,
test_resource1=1, test_resource2=2,
test_resource3=3, test_resource4=4)
result3 = quota_obj.reserve(context, project_id='fake_project',
test_resource1=1, test_resource2=2,
test_resource3=3, test_resource4=4)
self.assertEqual([
('reserve',
context,
quota_obj._resources,
dict(
test_resource1=4,
test_resource2=3,
test_resource3=2,
test_resource4=1, ),
None,
None,
None),
('reserve',
context,
quota_obj._resources,
dict(
test_resource1=1,
test_resource2=2,
test_resource3=3,
test_resource4=4, ),
3600,
None,
None),
('reserve',
context,
quota_obj._resources,
dict(
test_resource1=1,
test_resource2=2,
test_resource3=3,
test_resource4=4, ),
None,
'fake_project', None), ],
driver.called)
self.assertEqual(['resv-01',
'resv-02',
'resv-03',
'resv-04', ], result1)
self.assertEqual(['resv-01',
'resv-02',
'resv-03',
'resv-04', ], result2)
self.assertEqual(['resv-01',
'resv-02',
'resv-03',
'resv-04', ], result3)
def test_commit(self):
context = FakeContext(None, None)
driver = FakeDriver()
quota_obj = self._make_quota_obj(driver)
quota_obj.commit(context, ['resv-01', 'resv-02', 'resv-03'])
self.assertEqual([('commit',
context,
['resv-01',
'resv-02',
'resv-03'],
None, None), ], driver.called)
def test_rollback(self):
context = FakeContext(None, None)
driver = FakeDriver()
quota_obj = self._make_quota_obj(driver)
quota_obj.rollback(context, ['resv-01', 'resv-02', 'resv-03'])
self.assertEqual([('rollback',
context,
['resv-01',
'resv-02',
'resv-03'],
None, None), ], driver.called)
def test_destroy_all_by_project_and_user(self):
context = FakeContext(None, None)
driver = FakeDriver()
quota_obj = self._make_quota_obj(driver)
quota_obj.destroy_all_by_project_and_user(context,
'test_project', 'fake_user')
self.assertEqual([
('destroy_all_by_project_and_user', context, 'test_project',
'fake_user'), ], driver.called)
def test_destroy_all_by_project(self):
context = FakeContext(None, None)
driver = FakeDriver()
quota_obj = self._make_quota_obj(driver)
quota_obj.destroy_all_by_project(context, 'test_project')
self.assertEqual([('destroy_all_by_project',
context,
'test_project'), ], driver.called)
def test_expire(self):
context = FakeContext(None, None)
driver = FakeDriver()
quota_obj = self._make_quota_obj(driver)
quota_obj.expire(context)
self.assertEqual([('expire', context), ], driver.called)
def test_resources(self):
quota_obj = self._make_quota_obj(None)
self.assertEqual(['test_resource1', 'test_resource2',
'test_resource3', 'test_resource4'],
quota_obj.resources)
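def _example_reservation_lifecycle():
    # Hedged sketch of the flow the engine tests above drive: reserve()
    # returns opaque reservation ids, which a caller later commits on
    # success or rolls back on failure.
    engine = quota.QuotaEngine(quota_driver_class=FakeDriver(
        reservations=['resv-demo']))
    engine.register_resource(quota.AbsoluteResource('demo_resource'))
    context = FakeContext('demo_project', None)
    reservations = engine.reserve(context, demo_resource=1)
    try:
        engine.commit(context, reservations)
    except Exception:
        engine.rollback(context, reservations)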
class DbQuotaDriverTestCase(test.TestCase):
expected_all_context = {
"shares": {"limit": 10, "in_use": 2, "reserved": 0, },
"gigabytes": {"limit": 50, "in_use": 10, "reserved": 0, },
"snapshot_gigabytes": {"limit": 50, "in_use": 20, "reserved": 0, },
"snapshots": {"limit": 10, "in_use": 4, "reserved": 0, },
"share_networks": {"limit": 10, "in_use": 0, "reserved": 0, },
}
def setUp(self):
super(DbQuotaDriverTestCase, self).setUp()
self.flags(
quota_shares=10, quota_snapshots=10, quota_gigabytes=1000,
quota_snapshot_gigabytes=1000, reservation_expire=86400,
until_refresh=0, max_age=0)
self.driver = quota.DbQuotaDriver()
self.calls = []
self.patcher = mock.patch.object(timeutils, 'utcnow')
self.mock_utcnow = self.patcher.start()
self.mock_utcnow.return_value = datetime.datetime.utcnow()
def tearDown(self):
self.patcher.stop()
super(DbQuotaDriverTestCase, self).tearDown()
def test_get_defaults(self):
context = FakeContext('test_project', 'test_class')
# Use our pre-defined resources
result = self.driver.get_defaults(context, quota.QUOTAS._resources)
expected = {
"shares": 10,
"gigabytes": 1000,
"snapshot_gigabytes": 1000,
"snapshots": 10,
"share_networks": 10,
}
self.assertEqual(expected, result)
def _stub_quota_class_get_all_by_name(self):
# Stub out quota_class_get_all_by_name
def fake_qcgabn(context, quota_class):
self.calls.append('quota_class_get_all_by_name')
self.assertEqual('test_class', quota_class)
return dict(gigabytes=500, shares=10, snapshot_gigabytes=50)
self.mock_object(db, 'quota_class_get_all_by_name', fake_qcgabn)
def test_get_class_quotas(self):
self._stub_quota_class_get_all_by_name()
result = self.driver.get_class_quotas(None, quota.QUOTAS._resources,
'test_class')
self.assertEqual(['quota_class_get_all_by_name'], self.calls)
expected = {
"shares": 10,
"gigabytes": 500,
"snapshot_gigabytes": 50,
"snapshots": 10,
"share_networks": 10,
}
self.assertEqual(expected, result)
def test_get_class_quotas_no_defaults(self):
self._stub_quota_class_get_all_by_name()
result = self.driver.get_class_quotas(None, quota.QUOTAS._resources,
'test_class', False)
self.assertEqual(['quota_class_get_all_by_name'], self.calls)
self.assertEqual(
dict(shares=10, gigabytes=500, snapshot_gigabytes=50), result)
def _stub_get_by_project_and_user(self):
def fake_qgabpu(context, project_id, user_id):
self.calls.append('quota_get_all_by_project_and_user')
self.assertEqual('test_project', project_id)
self.assertEqual('fake_user', user_id)
return dict(
shares=10, gigabytes=50, snapshots=10, snapshot_gigabytes=50,
reserved=0)
def fake_qgabp(context, project_id):
self.calls.append('quota_get_all_by_project')
self.assertEqual('test_project', project_id)
return dict(
shares=10, gigabytes=50, snapshots=10, snapshot_gigabytes=50,
reserved=0)
def fake_qugabpu(context, project_id, user_id):
self.calls.append('quota_usage_get_all_by_project_and_user')
self.assertEqual('test_project', project_id)
self.assertEqual('fake_user', user_id)
return dict(
shares=dict(in_use=2, reserved=0),
gigabytes=dict(in_use=10, reserved=0),
snapshots=dict(in_use=4, reserved=0),
snapshot_gigabytes=dict(in_use=20, reserved=0),
)
self.mock_object(db, 'quota_get_all_by_project_and_user', fake_qgabpu)
self.mock_object(db, 'quota_get_all_by_project', fake_qgabp)
self.mock_object(db, 'quota_usage_get_all_by_project_and_user',
fake_qugabpu)
self._stub_quota_class_get_all_by_name()
def test_get_user_quotas(self):
self._stub_get_by_project_and_user()
result = self.driver.get_user_quotas(
FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources, 'test_project', 'fake_user')
self.assertEqual([
'quota_get_all_by_project_and_user',
'quota_get_all_by_project',
'quota_usage_get_all_by_project_and_user',
'quota_class_get_all_by_name', ], self.calls)
self.assertEqual(self.expected_all_context, result)
def _stub_get_by_project(self):
def fake_qgabp(context, project_id):
self.calls.append('quota_get_all_by_project')
self.assertEqual('test_project', project_id)
return dict(
shares=10, gigabytes=50, snapshot_gigabytes=50, reserved=0)
def fake_qugabp(context, project_id):
self.calls.append('quota_usage_get_all_by_project')
self.assertEqual('test_project', project_id)
return dict(
shares=dict(in_use=2, reserved=0),
snapshots=dict(in_use=4, reserved=0),
snapshot_gigabytes=dict(in_use=20, reserved=0),
gigabytes=dict(in_use=10, reserved=0))
self.mock_object(db, 'quota_get_all_by_project', fake_qgabp)
self.mock_object(db, 'quota_usage_get_all_by_project', fake_qugabp)
self._stub_quota_class_get_all_by_name()
def test_get_project_quotas(self):
self._stub_get_by_project()
result = self.driver.get_project_quotas(
FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources, 'test_project')
self.assertEqual(['quota_get_all_by_project',
'quota_usage_get_all_by_project',
'quota_class_get_all_by_name', ], self.calls)
self.assertEqual(self.expected_all_context, result)
def test_get_project_quotas_with_remains(self):
self._stub_get_by_project()
result = self.driver.get_project_quotas(
FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources, 'test_project', remains=True)
for result_key in result:
self.assertIn("remains", result[result_key])
def test_get_user_quotas_alt_context_no_class(self):
self._stub_get_by_project_and_user()
result = self.driver.get_user_quotas(
FakeContext('other_project', None),
quota.QUOTAS._resources, 'test_project', 'fake_user')
self.assertEqual([
'quota_get_all_by_project_and_user',
'quota_get_all_by_project',
'quota_usage_get_all_by_project_and_user', ], self.calls)
self.assertEqual(self.expected_all_context, result)
def test_get_project_quotas_alt_context_no_class(self):
self._stub_get_by_project()
result = self.driver.get_project_quotas(
FakeContext('other_project', None),
quota.QUOTAS._resources, 'test_project')
self.assertEqual(['quota_get_all_by_project',
'quota_usage_get_all_by_project', ], self.calls)
self.assertEqual(self.expected_all_context, result)
def test_get_user_quotas_alt_context_with_class(self):
self._stub_get_by_project_and_user()
result = self.driver.get_user_quotas(
FakeContext('other_project', 'other_class'),
quota.QUOTAS._resources, 'test_project', 'fake_user',
quota_class='test_class')
self.assertEqual([
'quota_get_all_by_project_and_user',
'quota_get_all_by_project',
'quota_usage_get_all_by_project_and_user',
'quota_class_get_all_by_name', ], self.calls)
self.assertEqual(self.expected_all_context, result)
def test_get_project_quotas_alt_context_with_class(self):
self._stub_get_by_project()
result = self.driver.get_project_quotas(
FakeContext('other_project', 'other_class'),
quota.QUOTAS._resources, 'test_project', quota_class='test_class')
self.assertEqual(['quota_get_all_by_project',
'quota_usage_get_all_by_project',
'quota_class_get_all_by_name', ], self.calls)
self.assertEqual(self.expected_all_context, result)
def test_get_user_quotas_no_defaults(self):
self._stub_get_by_project_and_user()
result = self.driver.get_user_quotas(
FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources, 'test_project', 'fake_user',
defaults=False)
self.assertEqual([
'quota_get_all_by_project_and_user',
'quota_get_all_by_project',
'quota_usage_get_all_by_project_and_user',
'quota_class_get_all_by_name', ], self.calls)
expected = {
"shares": {"limit": 10, "in_use": 2, "reserved": 0, },
"gigabytes": {"limit": 50, "in_use": 10, "reserved": 0, },
"snapshot_gigabytes": {"limit": 50, "in_use": 20, "reserved": 0, },
"snapshots": {"limit": 10, "in_use": 4, "reserved": 0, },
}
self.assertEqual(expected, result)
def test_get_project_quotas_no_defaults(self):
self._stub_get_by_project()
result = self.driver.get_project_quotas(
FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources, 'test_project', defaults=False)
self.assertEqual(['quota_get_all_by_project',
'quota_usage_get_all_by_project',
'quota_class_get_all_by_name', ], self.calls)
expected = {
"shares": {"limit": 10, "in_use": 2, "reserved": 0, },
"gigabytes": {"limit": 50, "in_use": 10, "reserved": 0, },
"snapshot_gigabytes": {"limit": 50, "in_use": 20, "reserved": 0, },
}
self.assertEqual(expected, result)
def test_get_user_quotas_no_usages(self):
self._stub_get_by_project_and_user()
result = self.driver.get_user_quotas(
FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources, 'test_project', 'fake_user', usages=False)
self.assertEqual([
'quota_get_all_by_project_and_user',
'quota_get_all_by_project',
'quota_class_get_all_by_name', ], self.calls)
expected = {
"shares": {"limit": 10, },
"gigabytes": {"limit": 50, },
"snapshot_gigabytes": {"limit": 50, },
"snapshots": {"limit": 10, },
"share_networks": {"limit": 10, },
}
        self.assertEqual(expected, result)
def test_get_project_quotas_no_usages(self):
self._stub_get_by_project()
result = self.driver.get_project_quotas(
FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources, 'test_project', usages=False)
self.assertEqual(['quota_get_all_by_project',
'quota_class_get_all_by_name', ], self.calls)
expected = {
"shares": {"limit": 10, },
"gigabytes": {"limit": 50, },
"snapshot_gigabytes": {"limit": 50, },
"snapshots": {"limit": 10, },
"share_networks": {"limit": 10, },
}
self.assertEqual(expected, result)
def _stub_get_settable_quotas(self):
def fake_get_project_quotas(context, resources, project_id,
quota_class=None, defaults=True,
usages=True, remains=False):
self.calls.append('get_project_quotas')
result = {}
for k, v in resources.items():
remains = v.default
in_use = 0
result[k] = {'limit': v.default, 'in_use': in_use,
'reserved': 0, 'remains': remains}
return result
def fake_get_user_quotas(context, resources, project_id, user_id,
quota_class=None, defaults=True,
usages=True):
self.calls.append('get_user_quotas')
result = {}
for k, v in resources.items():
in_use = 0
result[k] = {'limit': v.default,
'in_use': in_use, 'reserved': 0}
return result
def fake_qgabpau(context, project_id, user_id):
self.calls.append('quota_get_all_by_project_and_user')
return {'shares': 2}
self.mock_object(self.driver, 'get_project_quotas',
fake_get_project_quotas)
self.mock_object(self.driver, 'get_user_quotas',
fake_get_user_quotas)
self.mock_object(db, 'quota_get_all_by_project_and_user',
fake_qgabpau)
def test_get_settable_quotas_with_user(self):
self._stub_get_settable_quotas()
result = self.driver.get_settable_quotas(
FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources, 'test_project', user_id='test_user')
self.assertEqual([
'get_project_quotas',
'get_user_quotas',
'quota_get_all_by_project_and_user', ], self.calls)
expected = {
"shares": {"minimum": 0, "maximum": 12, },
"gigabytes": {"minimum": 0, "maximum": 1000, },
"snapshot_gigabytes": {"minimum": 0, "maximum": 1000, },
"snapshots": {"minimum": 0, "maximum": 10, },
"share_networks": {"minimum": 0, "maximum": 10, },
}
self.assertEqual(expected, result)
def test_get_settable_quotas_without_user(self):
self._stub_get_settable_quotas()
result = self.driver.get_settable_quotas(
FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources, 'test_project')
self.assertEqual(['get_project_quotas', ], self.calls)
expected = {
"shares": {"minimum": 0, "maximum": -1, },
"gigabytes": {"minimum": 0, "maximum": -1, },
"snapshot_gigabytes": {"minimum": 0, "maximum": -1, },
"snapshots": {"minimum": 0, "maximum": -1, },
"share_networks": {"minimum": 0, "maximum": -1, },
}
self.assertEqual(expected, result)
def _stub_get_project_quotas(self):
def fake_get_project_quotas(context, resources, project_id,
quota_class=None, defaults=True,
usages=True):
self.calls.append('get_project_quotas')
return {k: dict(limit=v.default)
for k, v in resources.items()}
self.mock_object(self.driver, 'get_project_quotas',
fake_get_project_quotas)
def test_get_quotas_has_sync_unknown(self):
self._stub_get_project_quotas()
self.assertRaises(exception.QuotaResourceUnknown,
self.driver._get_quotas,
None, quota.QUOTAS._resources,
['unknown'], True)
self.assertEqual([], self.calls)
def test_get_quotas_no_sync_unknown(self):
self._stub_get_project_quotas()
self.assertRaises(exception.QuotaResourceUnknown,
self.driver._get_quotas,
None, quota.QUOTAS._resources,
['unknown'], False)
self.assertEqual([], self.calls)
def test_get_quotas_has_sync_no_sync_resource(self):
self._stub_get_project_quotas()
self.assertRaises(exception.QuotaResourceUnknown,
self.driver._get_quotas,
None, quota.QUOTAS._resources,
['metadata_items'], True)
self.assertEqual([], self.calls)
def test_get_quotas_no_sync_has_sync_resource(self):
self._stub_get_project_quotas()
self.assertRaises(exception.QuotaResourceUnknown,
self.driver._get_quotas,
None, quota.QUOTAS._resources,
['shares'], False)
self.assertEqual([], self.calls)
def test_get_quotas_has_sync(self):
self._stub_get_project_quotas()
result = self.driver._get_quotas(FakeContext('test_project',
'test_class'),
quota.QUOTAS._resources,
['shares', 'gigabytes'],
True)
self.assertEqual(['get_project_quotas'], self.calls)
self.assertEqual(dict(shares=10, gigabytes=1000, ), result)
def _stub_quota_reserve(self):
def fake_quota_reserve(context, resources, quotas, user_quotas,
deltas, expire, until_refresh, max_age,
project_id=None, user_id=None):
self.calls.append(('quota_reserve', expire, until_refresh,
max_age))
return ['resv-1', 'resv-2', 'resv-3']
self.mock_object(db, 'quota_reserve', fake_quota_reserve)
def test_reserve_bad_expire(self):
self._stub_get_project_quotas()
self._stub_quota_reserve()
self.assertRaises(exception.InvalidReservationExpiration,
self.driver.reserve,
FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources,
dict(shares=2), expire='invalid')
self.assertEqual([], self.calls)
def test_reserve_default_expire(self):
self._stub_get_project_quotas()
self._stub_quota_reserve()
result = self.driver.reserve(FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources,
dict(shares=2))
expire = timeutils.utcnow() + datetime.timedelta(seconds=86400)
self.assertEqual(['get_project_quotas',
('quota_reserve', expire, 0, 0), ], self.calls)
self.assertEqual(['resv-1', 'resv-2', 'resv-3'], result)
def test_reserve_int_expire(self):
self._stub_get_project_quotas()
self._stub_quota_reserve()
result = self.driver.reserve(FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources,
dict(shares=2), expire=3600)
expire = timeutils.utcnow() + datetime.timedelta(seconds=3600)
self.assertEqual(['get_project_quotas',
('quota_reserve', expire, 0, 0), ], self.calls)
self.assertEqual(['resv-1', 'resv-2', 'resv-3'], result)
def test_reserve_timedelta_expire(self):
self._stub_get_project_quotas()
self._stub_quota_reserve()
expire_delta = datetime.timedelta(seconds=60)
result = self.driver.reserve(FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources,
dict(shares=2), expire=expire_delta)
expire = timeutils.utcnow() + expire_delta
self.assertEqual(['get_project_quotas',
('quota_reserve', expire, 0, 0), ], self.calls)
self.assertEqual(['resv-1', 'resv-2', 'resv-3'], result)
def test_reserve_datetime_expire(self):
self._stub_get_project_quotas()
self._stub_quota_reserve()
expire = timeutils.utcnow() + datetime.timedelta(seconds=120)
result = self.driver.reserve(FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources,
dict(shares=2), expire=expire)
self.assertEqual(['get_project_quotas',
('quota_reserve', expire, 0, 0), ], self.calls)
self.assertEqual(['resv-1', 'resv-2', 'resv-3'], result)
def test_reserve_until_refresh(self):
self._stub_get_project_quotas()
self._stub_quota_reserve()
self.flags(until_refresh=500)
expire = timeutils.utcnow() + datetime.timedelta(seconds=120)
result = self.driver.reserve(FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources,
dict(shares=2), expire=expire)
self.assertEqual(['get_project_quotas',
('quota_reserve', expire, 500, 0), ], self.calls)
self.assertEqual(['resv-1', 'resv-2', 'resv-3'], result)
def test_reserve_max_age(self):
self._stub_get_project_quotas()
self._stub_quota_reserve()
self.flags(max_age=86400)
expire = timeutils.utcnow() + datetime.timedelta(seconds=120)
result = self.driver.reserve(FakeContext('test_project', 'test_class'),
quota.QUOTAS._resources,
dict(shares=2), expire=expire)
self.assertEqual(['get_project_quotas',
('quota_reserve', expire, 0, 86400), ], self.calls)
self.assertEqual(['resv-1', 'resv-2', 'resv-3'], result)
def _stub_quota_delete_all_by_project(self):
def fake_quota_delete_all_by_project(context, project_id):
self.calls.append(('quota_destroy_all_by_project', project_id))
return None
self.mock_object(sqa_api, 'quota_destroy_all_by_project',
fake_quota_delete_all_by_project)
def test_delete_by_project(self):
self._stub_quota_delete_all_by_project()
self.driver.destroy_all_by_project(FakeContext('test_project',
'test_class'),
'test_project')
        self.assertEqual([('quota_destroy_all_by_project',
                           'test_project'), ], self.calls)
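def _example_expire_forms():
    # Hedged recap of the reserve() expire handling asserted above: an int is
    # taken as a number of seconds, a timedelta is added to utcnow(), a
    # datetime is used as-is, and None falls back to CONF.reservation_expire;
    # anything else raises InvalidReservationExpiration.
    now = timeutils.utcnow()
    return (now + datetime.timedelta(seconds=3600),   # expire=3600
            now + datetime.timedelta(seconds=60),     # expire=timedelta(seconds=60)
            now + datetime.timedelta(
                seconds=CONF.reservation_expire))     # expire=None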
class FakeSession(object):
    """No-op stand-in for a SQLAlchemy session (context manager included)."""
def begin(self):
return self
def add(self, instance):
pass
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
return False
class FakeUsage(sqa_models.QuotaUsage):
    """QuotaUsage model double whose save() is a no-op."""
def save(self, *args, **kwargs):
pass
class QuotaReserveSqlAlchemyTestCase(test.TestCase):
# manila.db.sqlalchemy.api.quota_reserve is so complex it needs its
# own test case, and since it's a quota manipulator, this is the
# best place to put it...
def setUp(self):
super(QuotaReserveSqlAlchemyTestCase, self).setUp()
self.sync_called = set()
def make_sync(res_name):
def sync(context, project_id, user_id, session):
self.sync_called.add(res_name)
if res_name in self.usages:
if self.usages[res_name].in_use < 0:
return {res_name: 2}
else:
return {res_name: self.usages[res_name].in_use - 1}
return {res_name: 0}
return sync
self.resources = {}
for res_name in ('shares', 'gigabytes'):
method_name = '_sync_%s' % res_name
sqa_api.QUOTA_SYNC_FUNCTIONS[method_name] = make_sync(res_name)
res = quota.ReservableResource(res_name, '_sync_%s' % res_name)
self.resources[res_name] = res
self.expire = timeutils.utcnow() + datetime.timedelta(seconds=3600)
self.usages = {}
self.usages_created = {}
self.reservations_created = {}
def fake_get_session():
return FakeSession()
def fake_get_project_quota_usages(context, session, project_id):
return self.usages.copy()
def fake_get_user_quota_usages(context, session, project_id, user_id):
return self.usages.copy()
def fake_quota_usage_create(context, project_id, user_id, resource,
in_use, reserved, until_refresh,
session=None, save=True):
quota_usage_ref = self._make_quota_usage(
project_id, user_id, resource, in_use, reserved, until_refresh,
timeutils.utcnow(), timeutils.utcnow())
self.usages_created[resource] = quota_usage_ref
return quota_usage_ref
def fake_reservation_create(context, uuid, usage_id, project_id,
user_id, resource, delta, expire,
session=None):
reservation_ref = self._make_reservation(
uuid, usage_id, project_id, user_id, resource, delta, expire,
timeutils.utcnow(), timeutils.utcnow())
self.reservations_created[resource] = reservation_ref
return reservation_ref
self.mock_object(sqa_api, 'get_session', fake_get_session)
self.mock_object(sqa_api, '_get_project_quota_usages',
fake_get_project_quota_usages)
self.mock_object(sqa_api, '_get_user_quota_usages',
fake_get_user_quota_usages)
self.mock_object(sqa_api, '_quota_usage_create',
fake_quota_usage_create)
self.mock_object(sqa_api, '_reservation_create',
fake_reservation_create)
self.patcher = mock.patch.object(timeutils, 'utcnow')
self.mock_utcnow = self.patcher.start()
self.mock_utcnow.return_value = datetime.datetime.utcnow()
def tearDown(self):
self.patcher.stop()
super(QuotaReserveSqlAlchemyTestCase, self).tearDown()
def _make_quota_usage(self, project_id, user_id, resource, in_use,
reserved, until_refresh, created_at, updated_at):
quota_usage_ref = FakeUsage()
quota_usage_ref.id = len(self.usages) + len(self.usages_created)
quota_usage_ref.project_id = project_id
quota_usage_ref.resource = resource
quota_usage_ref.in_use = in_use
quota_usage_ref.reserved = reserved
quota_usage_ref.until_refresh = until_refresh
quota_usage_ref.created_at = created_at
quota_usage_ref.updated_at = updated_at
quota_usage_ref.deleted_at = None
quota_usage_ref.deleted = False
return quota_usage_ref
def init_usage(self, project_id, user_id, resource, in_use, reserved,
until_refresh=None, created_at=None, updated_at=None):
if created_at is None:
created_at = timeutils.utcnow()
if updated_at is None:
updated_at = timeutils.utcnow()
quota_usage_ref = self._make_quota_usage(project_id, user_id,
resource, in_use,
reserved, until_refresh,
created_at, updated_at)
self.usages[resource] = quota_usage_ref
def compare_usage(self, usage_dict, expected):
for usage in expected:
resource = usage['resource']
for key, value in usage.items():
actual = getattr(usage_dict[resource], key)
self.assertEqual(value, actual,
"%s != %s on usage for resource %s" %
(value, actual, resource))
def _make_reservation(self, uuid, usage_id, project_id, user_id, resource,
delta, expire, created_at, updated_at):
reservation_ref = sqa_models.Reservation()
reservation_ref.id = len(self.reservations_created)
reservation_ref.uuid = uuid
reservation_ref.usage_id = usage_id
reservation_ref.project_id = project_id
reservation_ref.resource = resource
reservation_ref.delta = delta
reservation_ref.expire = expire
reservation_ref.created_at = created_at
reservation_ref.updated_at = updated_at
reservation_ref.deleted_at = None
reservation_ref.deleted = False
return reservation_ref
def compare_reservation(self, reservations, expected):
reservations = set(reservations)
for resv in expected:
resource = resv['resource']
resv_obj = self.reservations_created[resource]
self.assertIn(resv_obj.uuid, reservations)
reservations.discard(resv_obj.uuid)
for key, value in resv.items():
actual = getattr(resv_obj, key)
self.assertEqual(value, actual,
"%s != %s on reservation for resource %s" %
(value, actual, resource))
self.assertEqual(0, len(reservations))
def test_quota_reserve_create_usages(self):
context = FakeContext('test_project', 'test_class')
quotas = dict(shares=5,
gigabytes=10 * 1024, )
deltas = dict(shares=2,
gigabytes=2 * 1024, )
result = sqa_api.quota_reserve(context, self.resources, quotas,
quotas, deltas, self.expire, 0, 0)
self.assertEqual(set(['shares', 'gigabytes']), self.sync_called)
self.compare_usage(self.usages_created,
[dict(resource='shares',
project_id='test_project',
in_use=0,
reserved=2,
until_refresh=None),
dict(resource='gigabytes',
project_id='test_project',
in_use=0,
reserved=2 * 1024,
until_refresh=None), ])
self.compare_reservation(
result,
[dict(resource='shares',
usage_id=self.usages_created['shares'],
project_id='test_project',
delta=2),
dict(resource='gigabytes',
usage_id=self.usages_created['gigabytes'],
delta=2 * 1024), ])
def test_quota_reserve_negative_in_use(self):
self.init_usage('test_project', 'test_user', 'shares', -1, 0,
until_refresh=1)
self.init_usage('test_project', 'test_user', 'gigabytes', -1, 0,
until_refresh=1)
context = FakeContext('test_project', 'test_class')
quotas = dict(shares=5,
gigabytes=10 * 1024, )
deltas = dict(shares=2,
gigabytes=2 * 1024, )
result = sqa_api.quota_reserve(context, self.resources, quotas,
quotas, deltas, self.expire, 5, 0)
self.assertEqual(set(['shares', 'gigabytes']), self.sync_called)
self.compare_usage(self.usages, [dict(resource='shares',
project_id='test_project',
in_use=2,
reserved=2,
until_refresh=5),
dict(resource='gigabytes',
project_id='test_project',
in_use=2,
reserved=2 * 1024,
until_refresh=5), ])
self.assertEqual({}, self.usages_created)
self.compare_reservation(result,
[dict(resource='shares',
usage_id=self.usages['shares'],
project_id='test_project',
delta=2),
dict(resource='gigabytes',
usage_id=self.usages['gigabytes'],
delta=2 * 1024), ])
def test_quota_reserve_until_refresh(self):
self.init_usage('test_project', 'test_user', 'shares', 3, 0,
until_refresh=1)
self.init_usage('test_project', 'test_user', 'gigabytes', 3, 0,
until_refresh=1)
context = FakeContext('test_project', 'test_class')
quotas = dict(shares=5, gigabytes=10 * 1024, )
deltas = dict(shares=2, gigabytes=2 * 1024, )
result = sqa_api.quota_reserve(context, self.resources, quotas,
quotas, deltas, self.expire, 5, 0)
self.assertEqual(set(['shares', 'gigabytes']), self.sync_called)
self.compare_usage(self.usages, [dict(resource='shares',
project_id='test_project',
in_use=2,
reserved=2,
until_refresh=5),
dict(resource='gigabytes',
project_id='test_project',
in_use=2,
reserved=2 * 1024,
until_refresh=5), ])
self.assertEqual({}, self.usages_created)
self.compare_reservation(result,
[dict(resource='shares',
usage_id=self.usages['shares'],
project_id='test_project',
delta=2),
dict(resource='gigabytes',
usage_id=self.usages['gigabytes'],
delta=2 * 1024), ])
def test_quota_reserve_max_age(self):
max_age = 3600
record_created = (timeutils.utcnow() -
datetime.timedelta(seconds=max_age))
self.init_usage('test_project', 'test_user', 'shares', 3, 0,
created_at=record_created, updated_at=record_created)
self.init_usage('test_project', 'test_user', 'gigabytes', 3, 0,
created_at=record_created, updated_at=record_created)
context = FakeContext('test_project', 'test_class')
quotas = dict(shares=5, gigabytes=10 * 1024, )
deltas = dict(shares=2, gigabytes=2 * 1024, )
result = sqa_api.quota_reserve(context, self.resources, quotas,
quotas, deltas, self.expire, 0,
max_age)
self.assertEqual(set(['shares', 'gigabytes']), self.sync_called)
self.compare_usage(self.usages, [dict(resource='shares',
project_id='test_project',
in_use=2,
reserved=2,
until_refresh=None),
dict(resource='gigabytes',
project_id='test_project',
in_use=2,
reserved=2 * 1024,
until_refresh=None), ])
self.assertEqual({}, self.usages_created)
self.compare_reservation(result,
[dict(resource='shares',
usage_id=self.usages['shares'],
project_id='test_project',
delta=2),
dict(resource='gigabytes',
usage_id=self.usages['gigabytes'],
delta=2 * 1024), ])
def test_quota_reserve_no_refresh(self):
self.init_usage('test_project', 'test_user', 'shares', 3, 0)
self.init_usage('test_project', 'test_user', 'gigabytes', 3, 0)
context = FakeContext('test_project', 'test_class')
quotas = dict(shares=5, gigabytes=10 * 1024, )
deltas = dict(shares=2, gigabytes=2 * 1024, )
result = sqa_api.quota_reserve(context, self.resources, quotas,
quotas, deltas, self.expire, 0, 0)
self.assertEqual(set([]), self.sync_called)
self.compare_usage(self.usages, [dict(resource='shares',
project_id='test_project',
in_use=3,
reserved=2,
until_refresh=None),
dict(resource='gigabytes',
project_id='test_project',
in_use=3,
reserved=2 * 1024,
until_refresh=None), ])
self.assertEqual({}, self.usages_created)
self.compare_reservation(result,
[dict(resource='shares',
usage_id=self.usages['shares'],
project_id='test_project',
delta=2),
dict(resource='gigabytes',
usage_id=self.usages['gigabytes'],
delta=2 * 1024), ])
def test_quota_reserve_unders(self):
self.init_usage('test_project', 'test_user', 'shares', 1, 0)
self.init_usage('test_project', 'test_user', 'gigabytes', 1 * 1024, 0)
context = FakeContext('test_project', 'test_class')
quotas = dict(shares=5, gigabytes=10 * 1024, )
deltas = dict(shares=-2, gigabytes=-2 * 1024, )
result = sqa_api.quota_reserve(context, self.resources, quotas,
quotas, deltas, self.expire, 0, 0)
self.assertEqual(set([]), self.sync_called)
self.compare_usage(self.usages, [dict(resource='shares',
project_id='test_project',
in_use=1,
reserved=0,
until_refresh=None),
dict(resource='gigabytes',
project_id='test_project',
in_use=1 * 1024,
reserved=0,
until_refresh=None), ])
self.assertEqual({}, self.usages_created)
self.compare_reservation(result,
[dict(resource='shares',
usage_id=self.usages['shares'],
project_id='test_project',
delta=-2),
dict(resource='gigabytes',
usage_id=self.usages['gigabytes'],
delta=-2 * 1024), ])
def test_quota_reserve_overs(self):
self.init_usage('test_project', 'test_user', 'shares', 4, 0)
self.init_usage('test_project', 'test_user', 'gigabytes', 10 * 1024,
0)
context = FakeContext('test_project', 'test_class')
quotas = dict(shares=5, gigabytes=10 * 1024, )
deltas = dict(shares=2, gigabytes=2 * 1024, )
self.assertRaises(exception.OverQuota,
sqa_api.quota_reserve,
context, self.resources, quotas, quotas,
deltas, self.expire, 0, 0)
self.assertEqual(set([]), self.sync_called)
self.compare_usage(self.usages, [dict(resource='shares',
project_id='test_project',
in_use=4,
reserved=0,
until_refresh=None),
dict(resource='gigabytes',
project_id='test_project',
in_use=10 * 1024,
reserved=0,
until_refresh=None), ])
self.assertEqual({}, self.usages_created)
self.assertEqual({}, self.reservations_created)
def test_quota_reserve_reduction(self):
self.init_usage('test_project', 'test_user', 'shares', 10, 0)
self.init_usage('test_project', 'test_user', 'gigabytes', 20 * 1024,
0)
context = FakeContext('test_project', 'test_class')
quotas = dict(shares=5, gigabytes=10 * 1024, )
deltas = dict(shares=-2, gigabytes=-2 * 1024, )
result = sqa_api.quota_reserve(context, self.resources, quotas,
quotas, deltas, self.expire, 0, 0)
self.assertEqual(set([]), self.sync_called)
self.compare_usage(self.usages, [dict(resource='shares',
project_id='test_project',
in_use=10,
reserved=0,
until_refresh=None),
dict(resource='gigabytes',
project_id='test_project',
in_use=20 * 1024,
reserved=0,
until_refresh=None), ])
self.assertEqual({}, self.usages_created)
self.compare_reservation(result,
[dict(resource='shares',
usage_id=self.usages['shares'],
project_id='test_project',
delta=-2),
dict(resource='gigabytes',
usage_id=self.usages['gigabytes'],
project_id='test_project',
delta=-2 * 1024), ])
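def _example_needs_refresh(usage, until_refresh, max_age, now):
    # Hedged paraphrase (illustration only, not the real sqa_api code) of the
    # conditions the tests above show will force a usage re-sync inside
    # quota_reserve: a negative in_use, an exhausted until_refresh counter,
    # or a record older than max_age; otherwise stored usage is trusted.
    if usage.in_use < 0:
        return True
    if until_refresh and usage.until_refresh is not None \
            and usage.until_refresh <= 0:
        return True
    if max_age and (now - usage.updated_at).total_seconds() >= max_age:
        return True
    return False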
|
{
"content_hash": "4e49162e61c58d534c9cab43b2a669eb",
"timestamp": "",
"source": "github",
"line_count": 1523,
"max_line_length": 79,
"avg_line_length": 42.78135259356533,
"alnum_prop": 0.5176499478175456,
"repo_name": "vponomaryov/manila",
"id": "af28fcda0c0eee1b8b02033057072c182cb7bc96",
"size": "65888",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "manila/tests/test_quota.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "953"
},
{
"name": "Python",
"bytes": "9697997"
},
{
"name": "Shell",
"bytes": "103800"
}
],
"symlink_target": ""
}
|
"""Tests for the stats_store classes."""
from grr.lib import aff4
from grr.lib import data_store
from grr.lib import flags
from grr.lib import rdfvalue
from grr.lib import stats
from grr.lib import test_lib
from grr.lib import timeseries
from grr.lib.aff4_objects import stats_store
class StatsStoreTest(test_lib.AFF4ObjectTest):
def setUp(self):
super(StatsStoreTest, self).setUp()
self.process_id = "some_pid"
self.stats_store = aff4.FACTORY.Create(
None, "StatsStore", mode="w", token=self.token)
def testCountersAreWrittenToDataStore(self):
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
sync=True)
row = data_store.DB.ResolvePrefix("aff4:/stats_store/some_pid",
"",
token=self.token)
counter = [x for x in row if x[0] == "aff4:stats_store/counter"]
self.assertTrue(counter)
stored_value = stats_store.StatsStoreValue(
value_type=stats.MetricMetadata.ValueType.INT,
int_value=1)
self.assertEqual(counter[0], ("aff4:stats_store/counter",
stored_value.SerializeToString(),
42))
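  def _example_row_layout(self):
    # Hedged helper (never invoked by the tests): spells out the row layout
    # the assertion above relies on -- each metric written by WriteStats
    # becomes one (attribute, serialized StatsStoreValue, timestamp) triple
    # under the aff4:/stats_store/<process_id> row.
    value = stats_store.StatsStoreValue(
        value_type=stats.MetricMetadata.ValueType.INT, int_value=1)
    return ("aff4:stats_store/counter", value.SerializeToString(), 42)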
def testCountersWithFieldsAreWrittenToDataStore(self):
stats.STATS.RegisterCounterMetric("counter", fields=[("source", str)])
stats.STATS.IncrementCounter("counter", fields=["http"])
stats.STATS.IncrementCounter("counter", delta=2, fields=["rpc"])
self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
sync=True)
row = data_store.DB.ResolvePrefix("aff4:/stats_store/some_pid",
"",
token=self.token)
    # Two per-field values should be written instead of a single
    # unqualified counter.
    values = [stats_store.StatsStoreValue(x[1]) for x in row
              if x[0] == "aff4:stats_store/counter"]
    self.assertEqual(len(values), 2)
http_field_value = stats_store.StatsStoreFieldValue(
field_type=stats.MetricFieldDefinition.FieldType.STR,
str_value="http")
rpc_field_value = stats_store.StatsStoreFieldValue(
field_type=stats.MetricFieldDefinition.FieldType.STR,
str_value="rpc")
# Check that counter with source=http is written.
http_counter = [x for x in values
if x.fields_values == [http_field_value]]
self.assertTrue(http_counter)
self.assertEqual(http_counter[0].value_type,
stats.MetricMetadata.ValueType.INT)
self.assertEqual(http_counter[0].int_value, 1)
# Check that counter with source=rpc is written.
rpc_counter = [x for x in values
if x.fields_values == [rpc_field_value]]
self.assertTrue(rpc_counter)
self.assertEqual(rpc_counter[0].value_type,
stats.MetricMetadata.ValueType.INT)
self.assertEqual(rpc_counter[0].int_value, 2)
def testEventMetricsAreWrittenToDataStore(self):
stats.STATS.RegisterEventMetric("foo_event")
stats.STATS.RecordEvent("foo_event", 5)
stats.STATS.RecordEvent("foo_event", 15)
self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
sync=True)
row = data_store.DB.ResolvePrefix("aff4:/stats_store/some_pid",
"",
token=self.token)
values = [stats_store.StatsStoreValue(x[1]) for x in row
if x[0] == "aff4:stats_store/foo_event"]
self.assertEqual(len(values), 1)
stored_value = values[0]
self.assertEqual(stored_value.value_type,
stats.MetricMetadata.ValueType.DISTRIBUTION)
self.assertEqual(stored_value.distribution_value.count, 2)
self.assertEqual(stored_value.distribution_value.sum, 20)
def testEventMetricsWithFieldsAreWrittenToDataStore(self):
stats.STATS.RegisterEventMetric("foo_event", fields=[("source", str)])
stats.STATS.RecordEvent("foo_event", 5, fields=["http"])
stats.STATS.RecordEvent("foo_event", 15, fields=["rpc"])
self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
sync=True)
row = data_store.DB.ResolvePrefix("aff4:/stats_store/some_pid",
"",
token=self.token)
values = [stats_store.StatsStoreValue(x[1]) for x in row
if x[0] == "aff4:stats_store/foo_event"]
self.assertEqual(len(values), 2)
http_field_value = stats_store.StatsStoreFieldValue(
field_type=stats.MetricFieldDefinition.FieldType.STR,
str_value="http")
rpc_field_value = stats_store.StatsStoreFieldValue(
field_type=stats.MetricFieldDefinition.FieldType.STR,
str_value="rpc")
# Check that distribution with source=http is written.
http_events = [x for x in values
if x.fields_values == [http_field_value]]
self.assertTrue(http_events)
self.assertEqual(http_events[0].value_type,
stats.MetricMetadata.ValueType.DISTRIBUTION)
self.assertEqual(http_events[0].distribution_value.count, 1)
self.assertEqual(http_events[0].distribution_value.sum, 5)
# Check that distribution with source=rpc is written.
rpc_events = [x for x in values
if x.fields_values == [rpc_field_value]]
self.assertTrue(rpc_events)
self.assertEqual(rpc_events[0].value_type,
stats.MetricMetadata.ValueType.DISTRIBUTION)
self.assertEqual(rpc_events[0].distribution_value.count, 1)
self.assertEqual(rpc_events[0].distribution_value.sum, 15)
def testStringGaugeValuesAreWrittenToDataStore(self):
stats.STATS.RegisterGaugeMetric("str_gauge", str)
stats.STATS.SetGaugeValue("str_gauge", "some_value")
self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
sync=True)
row = data_store.DB.ResolvePrefix("aff4:/stats_store/some_pid",
"",
token=self.token)
counter = [x for x in row if x[0] == "aff4:stats_store/str_gauge"]
self.assertTrue(counter)
stored_value = stats_store.StatsStoreValue(
value_type=stats.MetricMetadata.ValueType.STR,
str_value="some_value")
self.assertEqual(counter[0], ("aff4:stats_store/str_gauge",
stored_value.SerializeToString(),
42))
def testIntGaugeValuesAreWrittenToDataStore(self):
stats.STATS.RegisterGaugeMetric("int_gauge", int)
stats.STATS.SetGaugeValue("int_gauge", 4242)
self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
sync=True)
row = data_store.DB.ResolvePrefix("aff4:/stats_store/some_pid",
"",
token=self.token)
counter = [x for x in row if x[0] == "aff4:stats_store/int_gauge"]
self.assertTrue(counter)
stored_value = stats_store.StatsStoreValue(
value_type=stats.MetricMetadata.ValueType.INT,
int_value=4242)
self.assertEqual(counter[0], ("aff4:stats_store/int_gauge",
stored_value.SerializeToString(),
42))
def testLaterValuesDoNotOverridePrevious(self):
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
sync=True)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(process_id=self.process_id, timestamp=43,
sync=True)
row = data_store.DB.ResolvePrefix("aff4:/stats_store/some_pid",
"",
token=self.token)
counters = [x for x in row if x[0] == "aff4:stats_store/counter"]
self.assertEqual(len(counters), 2)
counters = sorted(counters, key=lambda x: x[2])
stored_value = stats_store.StatsStoreValue(
value_type=stats.MetricMetadata.ValueType.INT,
int_value=1)
self.assertEqual(counters[0], ("aff4:stats_store/counter",
stored_value.SerializeToString(),
42))
stored_value = stats_store.StatsStoreValue(
value_type=stats.MetricMetadata.ValueType.INT,
int_value=2)
self.assertEqual(counters[1], ("aff4:stats_store/counter",
stored_value.SerializeToString(),
43))
def testValuesAreFetchedCorrectly(self):
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.RegisterGaugeMetric("int_gauge", int)
stats.STATS.SetGaugeValue("int_gauge", 4242)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
sync=True)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(process_id=self.process_id, timestamp=43,
sync=True)
stats_history = self.stats_store.ReadStats(
process_id=self.process_id,
timestamp=self.stats_store.ALL_TIMESTAMPS)
self.assertEqual(stats_history["counter"], [(1, 42), (2, 43)])
self.assertEqual(stats_history["int_gauge"], [(4242, 42), (4242, 43)])
def testFetchedValuesCanBeLimitedByTimeRange(self):
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.RegisterGaugeMetric("int_gauge", int)
stats.STATS.SetGaugeValue("int_gauge", 4242)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
sync=True)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(process_id=self.process_id, timestamp=43,
sync=True)
stats_history = self.stats_store.ReadStats(process_id=self.process_id,
timestamp=(0, 42))
self.assertEqual(stats_history["counter"], [(1, 42)])
self.assertEqual(stats_history["int_gauge"], [(4242, 42)])
def testFetchedValuesCanBeLimitedByName(self):
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.RegisterGaugeMetric("int_gauge", int)
stats.STATS.SetGaugeValue("int_gauge", 4242)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
sync=True)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(process_id=self.process_id, timestamp=43,
sync=True)
stats_history = self.stats_store.ReadStats(process_id=self.process_id,
metric_name="counter")
self.assertEqual(stats_history["counter"], [(1, 42), (2, 43)])
self.assertTrue("int_gauge" not in stats_history)
def testDeleteStatsInTimeRangeWorksCorrectly(self):
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.RegisterGaugeMetric("int_gauge", int)
stats.STATS.SetGaugeValue("int_gauge", 4242)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
sync=True)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(process_id=self.process_id, timestamp=44,
sync=True)
self.stats_store.DeleteStats(process_id=self.process_id, timestamp=(0, 43),
sync=True)
stats_history = self.stats_store.ReadStats(process_id=self.process_id)
self.assertEqual(stats_history["counter"], [(2, 44)])
self.assertEqual(stats_history["int_gauge"], [(4242, 44)])
def testDeleteStatsInTimeRangeWorksCorrectlyWithFields(self):
stats.STATS.RegisterCounterMetric("counter", fields=[("source", str)])
stats.STATS.IncrementCounter("counter", fields=["http"])
self.stats_store.WriteStats(process_id=self.process_id, timestamp=42,
sync=True)
stats.STATS.IncrementCounter("counter", fields=["http"])
stats.STATS.IncrementCounter("counter", fields=["rpc"])
self.stats_store.WriteStats(process_id=self.process_id, timestamp=44,
sync=True)
self.stats_store.DeleteStats(process_id=self.process_id, timestamp=(0, 43),
sync=True)
stats_history = self.stats_store.ReadStats(process_id=self.process_id)
self.assertEqual(stats_history["counter"]["http"], [(2, 44)])
self.assertEqual(stats_history["counter"]["rpc"], [(1, 44)])
def testReturnsListOfAllUsedProcessIds(self):
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.RegisterGaugeMetric("int_gauge", int)
self.stats_store.WriteStats(process_id="pid1", sync=True)
self.stats_store.WriteStats(process_id="pid2", sync=True)
self.assertEqual(sorted(self.stats_store.ListUsedProcessIds()),
["pid1", "pid2"])
def testMultiReadStatsWorksCorrectly(self):
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(process_id="pid1", timestamp=42, sync=True)
self.stats_store.WriteStats(process_id="pid2", timestamp=42, sync=True)
self.stats_store.WriteStats(process_id="pid2", timestamp=43, sync=True)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(process_id="pid1", timestamp=43, sync=True)
results = self.stats_store.MultiReadStats()
self.assertEqual(sorted(results.keys()), ["pid1", "pid2"])
self.assertEqual(results["pid1"]["counter"], [(1, 42), (2, 43)])
self.assertEqual(results["pid2"]["counter"], [(1, 42), (1, 43)])
def testMultiReadStatsLimitsResultsByTimeRange(self):
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(process_id="pid1", timestamp=42, sync=True)
self.stats_store.WriteStats(process_id="pid2", timestamp=42, sync=True)
self.stats_store.WriteStats(process_id="pid2", timestamp=44, sync=True)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(process_id="pid1", timestamp=44, sync=True)
results = self.stats_store.MultiReadStats(
timestamp=(43, 100))
self.assertEqual(sorted(results.keys()), ["pid1", "pid2"])
self.assertEqual(results["pid1"]["counter"], [(2, 44)])
self.assertEqual(results["pid2"]["counter"], [(1, 44)])
def testReadMetadataReturnsAllUsedMetadata(self):
# Register metrics
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.RegisterCounterMetric("counter_with_fields",
fields=[("source", str)])
stats.STATS.RegisterEventMetric("events")
stats.STATS.RegisterEventMetric("events_with_fields",
fields=[("source", str)])
stats.STATS.RegisterGaugeMetric("str_gauge", str)
stats.STATS.RegisterGaugeMetric("str_gauge_with_fields", str,
fields=[("task", int)])
# Check that there are no metadata for registered metrics.
metadata = self.stats_store.ReadMetadata(process_id=self.process_id)
self.assertFalse("counter" in metadata)
self.assertFalse("counter_with_fields" in metadata)
self.assertFalse("events" in metadata)
self.assertFalse("events_with_fields" in metadata)
self.assertFalse("str_gauge" in metadata)
self.assertFalse("str_gauge_with_fields" in metadata)
# Write stats to the data store. Metadata should be
# written as well.
self.stats_store.WriteStats(process_id=self.process_id,
timestamp=42, sync=True)
# Check that metadata were written into the store.
metadata = self.stats_store.ReadMetadata(process_id=self.process_id)
# Field definitions used in assertions below.
source_field_def = stats.MetricFieldDefinition(
field_name="source",
field_type=stats.MetricFieldDefinition.FieldType.STR)
task_field_def = stats.MetricFieldDefinition(
field_name="task",
field_type=stats.MetricFieldDefinition.FieldType.INT)
self.assertTrue("counter" in metadata)
self.assertEqual(metadata["counter"].varname, "counter")
self.assertEqual(metadata["counter"].metric_type, stats.MetricType.COUNTER)
self.assertEqual(metadata["counter"].value_type,
stats.MetricMetadata.ValueType.INT)
self.assertListEqual(list(metadata["counter"].fields_defs), [])
self.assertTrue("counter_with_fields" in metadata)
self.assertEqual(metadata["counter_with_fields"].varname,
"counter_with_fields")
self.assertEqual(metadata["counter_with_fields"].metric_type,
stats.MetricType.COUNTER)
self.assertEqual(metadata["counter_with_fields"].value_type,
stats.MetricMetadata.ValueType.INT)
self.assertListEqual(list(metadata["counter_with_fields"].fields_defs),
[source_field_def])
self.assertTrue("events" in metadata)
self.assertEqual(metadata["events"].varname, "events")
self.assertEqual(metadata["events"].metric_type, stats.MetricType.EVENT)
self.assertEqual(metadata["events"].value_type,
stats.MetricMetadata.ValueType.DISTRIBUTION)
self.assertListEqual(list(metadata["events"].fields_defs), [])
self.assertTrue("events_with_fields" in metadata)
self.assertEqual(metadata["events_with_fields"].varname,
"events_with_fields")
self.assertEqual(metadata["events_with_fields"].metric_type,
stats.MetricType.EVENT)
self.assertEqual(metadata["events_with_fields"].value_type,
stats.MetricMetadata.ValueType.DISTRIBUTION)
self.assertListEqual(list(metadata["events_with_fields"].fields_defs),
[source_field_def])
self.assertTrue("str_gauge" in metadata)
self.assertEqual(metadata["str_gauge"].varname, "str_gauge")
self.assertEqual(metadata["str_gauge"].metric_type, stats.MetricType.GAUGE)
self.assertEqual(metadata["str_gauge"].value_type,
stats.MetricMetadata.ValueType.STR)
self.assertListEqual(list(metadata["str_gauge"].fields_defs), [])
self.assertTrue("str_gauge_with_fields" in metadata)
self.assertEqual(metadata["str_gauge_with_fields"].varname,
"str_gauge_with_fields")
self.assertEqual(metadata["str_gauge_with_fields"].metric_type,
stats.MetricType.GAUGE)
self.assertEqual(metadata["str_gauge_with_fields"].value_type,
stats.MetricMetadata.ValueType.STR)
self.assertListEqual(list(metadata["str_gauge_with_fields"].fields_defs),
[task_field_def])
def testMultiReadMetadataReturnsAllUsedMetadata(self):
stats.STATS.RegisterCounterMetric("counter")
# Check that there are no metadata for registered metrics.
metadata_by_id = self.stats_store.MultiReadMetadata(
process_ids=["pid1", "pid2"])
self.assertFalse("counter" in metadata_by_id["pid1"])
self.assertFalse("counter" in metadata_by_id["pid2"])
# Write stats to the data store. Metadata should be
# written as well.
self.stats_store.WriteStats(process_id="pid1",
timestamp=42, sync=True)
# Now metadata should be found only for the pid1.
metadata_by_id = self.stats_store.MultiReadMetadata(
process_ids=["pid1", "pid2"])
self.assertTrue("counter" in metadata_by_id["pid1"])
self.assertFalse("counter" in metadata_by_id["pid2"])
# Write stats for the pid2 and check again.
self.stats_store.WriteStats(process_id="pid2",
timestamp=42, sync=True)
metadata_by_id = self.stats_store.MultiReadMetadata(
process_ids=["pid1", "pid2"])
self.assertTrue("counter" in metadata_by_id["pid1"])
self.assertTrue("counter" in metadata_by_id["pid2"])
class StatsStoreDataQueryTest(test_lib.AFF4ObjectTest):
"""Tests for StatsStoreDataQuery class."""
def setUp(self):
super(StatsStoreDataQueryTest, self).setUp()
self.process_id = "some_pid"
self.stats_store = aff4.FACTORY.Create(
None, "StatsStore", mode="w", token=self.token)
def testUsingInCallNarrowsQuerySpace(self):
# Create sample data.
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.RegisterCounterMetric("counter_with_fields",
fields=[("source", str)])
stats.STATS.IncrementCounter("counter")
stats.STATS.IncrementCounter("counter_with_fields",
fields=["http"])
stats.STATS.IncrementCounter("counter_with_fields",
fields=["rpc"])
# Write to data store.
self.stats_store.WriteStats(process_id=self.process_id,
timestamp=42, sync=True)
# Read them back and apply queries with In() and InAll() calls.
stats_data = self.stats_store.ReadStats(process_id=self.process_id)
query = stats_store.StatsStoreDataQuery(stats_data)
self.assertEqual(query.In("counter").SeriesCount(), 1)
query = stats_store.StatsStoreDataQuery(stats_data)
self.assertEqual(query.In("counter_with_fields").InAll().SeriesCount(), 2)
query = stats_store.StatsStoreDataQuery(stats_data)
self.assertEqual(query.In("counter_with_fields").In("http").SeriesCount(),
1)
def testInCallAcceptsRegularExpressions(self):
# Initialize and write test data.
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id="pid1",
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(0),
sync=True)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id="pid1",
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(90),
sync=True)
self.stats_store.WriteStats(
process_id="pid2",
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(90),
sync=True)
stats_data = self.stats_store.MultiReadStats(process_ids=["pid1", "pid2"])
query = stats_store.StatsStoreDataQuery(stats_data)
self.assertEqual(query.In("pid1").In("counter").SeriesCount(), 1)
query = stats_store.StatsStoreDataQuery(stats_data)
self.assertEqual(query.In("pid2").In("counter").SeriesCount(), 1)
query = stats_store.StatsStoreDataQuery(stats_data)
self.assertEqual(query.In("pid.*").In("counter").SeriesCount(), 2)
def testInTimeRangeLimitsQueriesByTime(self):
# Initialize and write test data.
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(42),
sync=True)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(100),
sync=True)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(140),
sync=True)
# Read data back.
stats_data = self.stats_store.ReadStats(process_id=self.process_id)
# Check that InTimeRange works as expected.
query = stats_store.StatsStoreDataQuery(stats_data)
ts = query.In("counter").TakeValue().InTimeRange(
rdfvalue.RDFDatetime().FromSecondsFromEpoch(80),
rdfvalue.RDFDatetime().FromSecondsFromEpoch(120)).ts
self.assertListEqual(ts.data, [[2, 100 * 1e6]])
def testInTimeRangeRaisesIfAppliedBeforeTakeMethod(self):
stats_data = self.stats_store.ReadStats(process_id=self.process_id)
query = stats_store.StatsStoreDataQuery(stats_data)
self.assertRaises(RuntimeError, query.In("counter").InTimeRange,
rdfvalue.RDFDatetime().FromSecondsFromEpoch(80),
rdfvalue.RDFDatetime().FromSecondsFromEpoch(120))
def testTakeValueUsesPlainValuesToBuildTimeSeries(self):
# Initialize and write test data.
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(42),
sync=True)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(100),
sync=True)
# Read data back.
stats_data = self.stats_store.ReadStats(process_id=self.process_id)
# Get time series generated with TakeValue().
query = stats_store.StatsStoreDataQuery(stats_data)
ts = query.In("counter").TakeValue().ts
self.assertListEqual(ts.data, [[1, 42 * 1e6], [2, 100 * 1e6]])
def testTakeValueRaisesIfDistributionIsEncountered(self):
# Initialize and write test data.
stats.STATS.RegisterEventMetric("events")
stats.STATS.RecordEvent("events", 42)
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(42),
sync=True)
# Read data back.
stats_data = self.stats_store.ReadStats(process_id=self.process_id)
query = stats_store.StatsStoreDataQuery(stats_data)
self.assertRaises(ValueError, query.In("events").TakeValue)
def testTakeDistributionCountUsesDistributionCountsToBuildTimeSeries(self):
# Initialize and write test data.
stats.STATS.RegisterEventMetric("events")
stats.STATS.RecordEvent("events", 42)
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(42),
sync=True)
stats.STATS.RecordEvent("events", 43)
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(100),
sync=True)
# Read data back.
stats_data = self.stats_store.ReadStats(process_id=self.process_id)
query = stats_store.StatsStoreDataQuery(stats_data)
ts = query.In("events").TakeDistributionCount().ts
self.assertListEqual(ts.data, [[1, 42 * 1e6], [2, 100 * 1e6]])
def testTakeDistributionCountRaisesIfPlainValueIsEncountered(self):
# Initialize and write test data.
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(42),
sync=True)
# Read data back.
stats_data = self.stats_store.ReadStats(process_id=self.process_id)
query = stats_store.StatsStoreDataQuery(stats_data)
self.assertRaises(ValueError, query.In("counter").TakeDistributionCount)
def testTakeDistributionSumUsesDistributionSumsToBuildTimeSeries(self):
# Initialize and write test data.
stats.STATS.RegisterEventMetric("events")
stats.STATS.RecordEvent("events", 42)
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(42),
sync=True)
stats.STATS.RecordEvent("events", 43)
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(100),
sync=True)
# Read data back.
stats_data = self.stats_store.ReadStats(process_id=self.process_id)
query = stats_store.StatsStoreDataQuery(stats_data)
ts = query.In("events").TakeDistributionSum().ts
self.assertListEqual(ts.data, [[42, 42 * 1e6], [85, 100 * 1e6]])
def testTakeDistributionSumRaisesIfPlainValueIsEncountered(self):
# Initialize and write test data.
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(42),
sync=True)
# Read data back.
stats_data = self.stats_store.ReadStats(process_id=self.process_id)
query = stats_store.StatsStoreDataQuery(stats_data)
self.assertRaises(ValueError, query.In("counter").TakeDistributionSum)
def testNormalize(self):
# Initialize and write test data.
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(0),
sync=True)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(15),
sync=True)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(45),
sync=True)
# Read data back.
stats_data = self.stats_store.ReadStats(process_id=self.process_id)
query = stats_store.StatsStoreDataQuery(stats_data)
ts = query.In("counter").TakeValue().Normalize(
rdfvalue.Duration("30s"), 0, rdfvalue.Duration("1m")).ts
    self.assertListEqual(ts.data, [[1.5, 0 * 1e6], [3.0, 30 * 1e6]])
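    # (Sketch of the binning above: with a 30-second bin width over [0s, 60s),
    # the first bin averages the samples at t=0s and t=15s, (1 + 2) / 2 = 1.5,
    # and the second bin contains only the t=45s sample, giving 3.0.)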
def testNormalizeFillsGapsInTimeSeries(self):
# Initialize and write test data.
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(0),
sync=True)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(120),
sync=True)
# Read data back.
stats_data = self.stats_store.ReadStats(process_id=self.process_id)
query = stats_store.StatsStoreDataQuery(stats_data)
ts = query.In("counter").TakeValue().Normalize(
rdfvalue.Duration("30s"), 0, rdfvalue.Duration("130s")).ts
self.assertListEqual(ts.data, [[1.0, 0], [None, 30 * 1e6], [None, 60 * 1e6],
[None, 90 * 1e6], [2.0, 120 * 1e6]])
def testNormalizeRaisesIfAppliedBeforeTakeMethod(self):
stats_data = self.stats_store.ReadStats(process_id=self.process_id)
query = stats_store.StatsStoreDataQuery(stats_data)
self.assertRaises(RuntimeError, query.In("counter").Normalize, 15, 0, 60)
def testAggregateViaSumAggregatesMultipleTimeSeriesIntoOne(self):
# Initialize and write test data.
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id="pid1",
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(0),
sync=True)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id="pid2",
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(0),
sync=True)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id="pid1",
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(90),
sync=True)
self.stats_store.WriteStats(
process_id="pid2",
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(90),
sync=True)
stats_data = self.stats_store.MultiReadStats(process_ids=["pid1", "pid2"])
query = stats_store.StatsStoreDataQuery(stats_data)
ts = query.In("pid.*").In("counter").TakeValue().Normalize(
rdfvalue.Duration("30s"),
0,
rdfvalue.Duration("2m"),
mode=timeseries.NORMALIZE_MODE_COUNTER).AggregateViaSum().ts
# We expect 2 time series in the query:
# 1970-01-01 00:00:00 1
# 1970-01-01 00:00:30 1
# 1970-01-01 00:01:00 1
# 1970-01-01 00:01:30 3
#
# and:
# 1970-01-01 00:00:00 2
# 1970-01-01 00:00:30 2
# 1970-01-01 00:01:00 2
# 1970-01-01 00:01:30 3
#
# Therefore we expect the sum to look like:
# 1970-01-01 00:00:00 3
# 1970-01-01 00:00:30 3
# 1970-01-01 00:01:00 3
# 1970-01-01 00:01:30 6
self.assertAlmostEqual(ts.data[0][0], 3)
self.assertAlmostEqual(ts.data[1][0], 3)
self.assertAlmostEqual(ts.data[2][0], 3)
self.assertAlmostEqual(ts.data[3][0], 6)
self.assertListEqual([t for _, t in ts.data],
[0.0 * 1e6, 30.0 * 1e6, 60.0 * 1e6, 90.0 * 1e6])
def testMakeIncreasingHandlesValuesResets(self):
# Initialize and write test data.
stats.STATS.RegisterCounterMetric("counter")
self.stats_store.WriteStats(
process_id="pid1",
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(0),
sync=True)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id="pid1",
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(30),
sync=True)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id="pid1",
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(60),
sync=True)
stats.STATS.RegisterCounterMetric("counter")
self.stats_store.WriteStats(
process_id="pid1",
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(90),
sync=True)
    # We've reset the counter after the 60-second write, so we get the
    # following time series:
# 1970-01-01 00:00:00 0
# 1970-01-01 00:00:30 1
# 1970-01-01 00:01:00 2
# 1970-01-01 00:01:30 0
stats_data = self.stats_store.ReadStats(process_id="pid1")
query = stats_store.StatsStoreDataQuery(stats_data)
ts = query.In("counter").TakeValue().ts
self.assertAlmostEqual(ts.data[0][0], 0)
self.assertAlmostEqual(ts.data[1][0], 1)
self.assertAlmostEqual(ts.data[2][0], 2)
self.assertAlmostEqual(ts.data[3][0], 0)
    # MakeIncreasing() detects the reset and increments values that follow
# the reset point:
# 1970-01-01 00:00:00 0
# 1970-01-01 00:00:30 1
# 1970-01-01 00:01:00 2
# 1970-01-01 00:01:30 2
ts = query.MakeIncreasing().ts
self.assertAlmostEqual(ts.data[0][0], 0)
self.assertAlmostEqual(ts.data[1][0], 1)
self.assertAlmostEqual(ts.data[2][0], 2)
self.assertAlmostEqual(ts.data[3][0], 2)
def testSeriesCountReturnsNumberOfDataSeriesInCurrentQuery(self):
# Initialize and write test data.
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id="pid1",
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(0),
sync=True)
self.stats_store.WriteStats(
process_id="pid2",
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(90),
sync=True)
stats_data = self.stats_store.MultiReadStats(process_ids=["pid1", "pid2"])
query = stats_store.StatsStoreDataQuery(stats_data)
self.assertEqual(query.In("pid.*").SeriesCount(), 2)
query = stats_store.StatsStoreDataQuery(stats_data)
self.assertEqual(query.In("pid1").In("counter").SeriesCount(), 1)
query = stats_store.StatsStoreDataQuery(stats_data)
self.assertEqual(query.In("pid.*").In("counter").SeriesCount(), 2)
def testRate(self):
# Initialize and write test data.
stats.STATS.RegisterCounterMetric("counter")
for i in range(5):
for _ in range(i):
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(10 * i),
sync=True)
# Read data back.
stats_data = self.stats_store.ReadStats(process_id=self.process_id)
# Get time series generated with TakeValue().
query = stats_store.StatsStoreDataQuery(stats_data)
ts = query.In("counter").TakeValue().Normalize(
rdfvalue.Duration("10s"), 0, rdfvalue.Duration("50s")).Rate().ts
    # We expect the following time series:
# 1970-01-01 00:00:00 0
# 1970-01-01 00:00:10 1
# 1970-01-01 00:00:20 3
# 1970-01-01 00:00:30 6
# 1970-01-01 00:00:40 10
#
    # Therefore, after applying Rate(), we expect one data point fewer,
    # since Rate() computes per-second deltas between consecutive samples:
    # 1970-01-01 00:00:00 0.1
    # 1970-01-01 00:00:10 0.2
    # 1970-01-01 00:00:20 0.3
    # 1970-01-01 00:00:30 0.4
self.assertListEqual(ts.data, [[0.1, 0], [0.2, 10 * 1e6],
[0.30000000000000004, 20 * 1e6], [0.4, 30 *
1e6]])
def testScaleAppliesScaleFunctionToSingleTimeSerie(self):
# Initialize and write test data.
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(42),
sync=True)
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(100),
sync=True)
# Read data back.
stats_data = self.stats_store.ReadStats(process_id=self.process_id)
# Get time series generated with TakeValue().
query = stats_store.StatsStoreDataQuery(stats_data)
ts = query.In("counter").TakeValue().Scale(3).ts
self.assertListEqual(ts.data, [[3, 42 * 1e6], [6, 100 * 1e6]])
def testMeanReturnsZeroIfQueryHasNoTimeSeries(self):
# Read data back.
stats_data = self.stats_store.ReadStats(process_id=self.process_id)
# Get time series generated with TakeValue().
query = stats_store.StatsStoreDataQuery(stats_data)
self.assertEqual(query.In("counter").TakeValue().Mean(), 0)
def testMeanRaisesIfCalledOnMultipleTimeSeries(self):
# Initialize and write test data.
stats.STATS.RegisterCounterMetric("counter")
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id="pid1",
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(0),
sync=True)
self.stats_store.WriteStats(
process_id="pid2",
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(90),
sync=True)
stats_data = self.stats_store.MultiReadStats(process_ids=["pid1", "pid2"])
query = stats_store.StatsStoreDataQuery(stats_data)
self.assertRaises(RuntimeError,
query.In("pid.*").In("counter").TakeValue().Mean)
def testMeanReducesTimeSerieToSingleNumber(self):
# Initialize and write test data.
stats.STATS.RegisterCounterMetric("counter")
for i in range(5):
stats.STATS.IncrementCounter("counter")
self.stats_store.WriteStats(
process_id=self.process_id,
timestamp=rdfvalue.RDFDatetime().FromSecondsFromEpoch(10 * i),
sync=True)
# Read data back.
stats_data = self.stats_store.ReadStats(process_id=self.process_id)
# Get time series generated with TakeValue().
query = stats_store.StatsStoreDataQuery(stats_data)
self.assertAlmostEqual(query.In("counter").TakeValue().Mean(), 3)
def main(argv):
test_lib.main(argv)
if __name__ == "__main__":
flags.StartMain(main)
|
{
"content_hash": "ae528de6fcf05faa8d61d1c0044d464a",
"timestamp": "",
"source": "github",
"line_count": 1013,
"max_line_length": 80,
"avg_line_length": 39.454096742349456,
"alnum_prop": 0.6563164610803913,
"repo_name": "pombredanne/grr",
"id": "79e940786bd31fc8a4475a030c50a847a8383d6a",
"size": "39989",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/aff4_objects/stats_store_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "227"
},
{
"name": "Batchfile",
"bytes": "15671"
},
{
"name": "C",
"bytes": "10598"
},
{
"name": "C++",
"bytes": "304580"
},
{
"name": "CMake",
"bytes": "3228"
},
{
"name": "CSS",
"bytes": "13093"
},
{
"name": "Groff",
"bytes": "444"
},
{
"name": "HTML",
"bytes": "103301"
},
{
"name": "JavaScript",
"bytes": "224916"
},
{
"name": "Makefile",
"bytes": "4304"
},
{
"name": "Protocol Buffer",
"bytes": "219063"
},
{
"name": "Python",
"bytes": "5356619"
},
{
"name": "Ruby",
"bytes": "5103"
},
{
"name": "Shell",
"bytes": "48368"
},
{
"name": "Standard ML",
"bytes": "8172"
}
],
"symlink_target": ""
}
|
"""Implementing communication to MySQL servers
"""
import sys
import socket
import logging
import os
import weakref
from collections import deque
import constants
import conversion
import protocol
import errors
import utils
import cursor
logger = logging.getLogger('myconnpy')
class MySQLBaseSocket(object):
"""Base class for MySQL Connections subclasses.
    Should not be used directly; instead, subclass it and override the
    open_connection() method. Examples of subclasses are
MySQLTCPSocket
MySQLUnixSocket
"""
def __init__(self):
self.sock = None # holds the socket connection
self.connection_timeout = None
self.buffer = deque()
self.recvsize = 1024*8
def open_connection(self):
pass
def close_connection(self):
try:
self.sock.close()
except:
pass
def get_address(self):
pass
def send(self, buf):
"""Send packets over the socket
"""
pktlen = len(buf)
try:
while pktlen:
pktlen -= self.sock.send(buf)
except Exception, e:
raise errors.OperationalError('%s' % e)
def recv(self):
"""Receive packets from the socket
"""
try:
return self.buffer.popleft()
except IndexError:
pass
pktnr = -1
try:
buf = self.sock.recv(self.recvsize)
while buf:
totalsize = len(buf)
if pktnr == -1 and totalsize > 4:
pktsize = utils.intread(buf[0:3])
pktnr = utils.intread(buf[3])
if pktnr > -1 and totalsize >= pktsize+4:
size = pktsize+4
self.buffer.append(buf[0:size])
buf = buf[size:]
pktnr = -1
if len(buf) == 0:
break
elif len(buf) < pktsize+4:
buf += self.sock.recv(self.recvsize)
except socket.timeout, e:
raise errors.InterfaceError(errno=2013)
except socket.error, e:
raise errors.InterfaceError(errno=2055,
values=dict(socketaddr=self.get_address(),errno=e.errno))
except:
raise
try:
return self.buffer.popleft()
except IndexError, e:
pass
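    # Illustrative sketch (not part of the original module): the MySQL wire
    # protocol frames each packet as a 3-byte payload length plus a 1-byte
    # sequence number, followed by the payload. recv() above slices the raw
    # TCP stream on exactly those boundaries, e.g. for
    # raw = '\x01\x00\x00\x00\x0a' (length=1, seq=0, payload='\x0a'):
    #
    #   pktsize = utils.intread(raw[0:3])   # -> 1
    #   pktnr = utils.intread(raw[3])       # -> 0
    #   packet = raw[0:pktsize + 4]         # header + payload, 5 bytes
    #
    # utils.intread() is assumed here to decode little-endian integers,
    # which is the byte order the MySQL packet header uses.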
def set_connection_timeout(self, timeout):
self.connection_timeout = timeout
class MySQLUnixSocket(MySQLBaseSocket):
"""Opens a connection through the UNIX socket of the MySQL Server."""
def __init__(self, unix_socket='/tmp/mysql.sock'):
MySQLBaseSocket.__init__(self)
self.unix_socket = unix_socket
def get_address(self):
return self.unix_socket
def open_connection(self):
"""Opens a UNIX socket and checks the MySQL handshake."""
try:
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.sock.settimeout(self.connection_timeout)
self.sock.connect(self.unix_socket)
except socket.error, e:
try:
m = e.errno
except:
m = e
raise errors.InterfaceError(errno=2002,
values=dict(socketaddr=self.get_address(),errno=m))
except StandardError, e:
raise errors.InterfaceError('%s' % e)
class MySQLTCPSocket(MySQLBaseSocket):
"""Opens a TCP connection to the MySQL Server."""
def __init__(self, host='127.0.0.1', port=3306):
MySQLBaseSocket.__init__(self)
self.server_host = host
self.server_port = port
def get_address(self):
return "%s:%s" % (self.server_host,self.server_port)
def open_connection(self):
"""Opens a TCP Connection and checks the MySQL handshake."""
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(self.connection_timeout)
self.sock.connect( (self.server_host, self.server_port) )
except socket.error, e:
try:
m = e.errno
except:
m = e
raise errors.InterfaceError(errno=2003,
values=dict(socketaddr=self.get_address(),errno=m))
except StandardError, e:
raise errors.InterfaceError('%s' % e)
except:
raise
class MySQLConnection(object):
"""MySQL"""
def __init__(self, *args, **kwargs):
"""Initializing"""
self.conn = None # Holding the connection
self.protocol = None
self.converter = None
self.cursors = []
self.client_flags = constants.ClientFlag.get_default()
self._charset = 33
self._username = ''
self._database = ''
self._server_host = '127.0.0.1'
self._server_port = 3306
self._unix_socket = None
self.client_host = ''
self.client_port = 0
self.affected_rows = 0
self.server_status = 0
self.warning_count = 0
self.field_count = 0
self.insert_id = 0
self.info_msg = ''
self.use_unicode = True
self.get_warnings = False
self.raise_on_warnings = False
self.connection_timeout = None
self.buffered = False
self.unread_result = False
self.raw = False
if len(kwargs) > 0:
self.connect(*args, **kwargs)
def connect(self, database=None, user='', password='',
host='127.0.0.1', port=3306, unix_socket=None,
use_unicode=True, charset='utf8', collation=None,
autocommit=False,
time_zone=None, sql_mode=None,
get_warnings=False, raise_on_warnings=False,
connection_timeout=None, client_flags=0,
buffered=False, raw=False,
passwd=None, db=None, connect_timeout=None, dsn=None):
if db and not database:
database = db
if passwd and not password:
password = passwd
if connect_timeout and not connection_timeout:
connection_timeout = connect_timeout
if dsn is not None:
            raise errors.NotSupportedError(
                "Data source name is not supported")
self._server_host = host
self._server_port = port
self._unix_socket = unix_socket
if database is not None:
self._database = database.strip()
else:
self._database = None
self._username = user
self.set_warnings(get_warnings,raise_on_warnings)
self.connection_timeout = connection_timeout
self.buffered = buffered
self.raw = raw
self.use_unicode = use_unicode
self.set_client_flags(client_flags)
self._charset = constants.CharacterSet.get_charset_info(charset)[0]
if user or password:
self.set_login(user, password)
self.disconnect()
self._open_connection(username=user, password=password, database=database,
client_flags=self.client_flags, charset=charset)
self._post_connection(time_zone=time_zone, sql_mode=sql_mode,
collation=collation)
def _get_connection(self, prtcls=None):
"""Get connection based on configuration
        This method will return the appropriate connection object using
the connection parameters.
Returns subclass of MySQLBaseSocket.
"""
conn = None
if self.unix_socket and os.name != 'nt':
conn = MySQLUnixSocket(unix_socket=self.unix_socket)
else:
conn = MySQLTCPSocket(host=self.server_host,
port=self.server_port)
conn.set_connection_timeout(self.connection_timeout)
return conn
def _open_connection(self, username=None, password=None, database=None,
client_flags=None, charset=None):
"""Opens the connection
Open the connection, check the MySQL version, and set the
protocol.
"""
try:
self.protocol = protocol.MySQLProtocol(self._get_connection())
self.protocol.do_handshake()
version = self.protocol.server_version
if version < (4,1):
raise errors.InterfaceError(
"MySQL Version %s is not supported." % version)
self.protocol.do_auth(username, password, database, client_flags,
self._charset)
(self._charset, self.charset_name, c) = \
constants.CharacterSet.get_charset_info(charset)
except:
raise
def _post_connection(self, time_zone=None, autocommit=False,
sql_mode=None, collation=None):
"""Post connection session setup
Should be called after a connection was established"""
self.set_converter_class(conversion.MySQLConverter)
try:
if collation is not None:
self.collation = collation
self.autocommit = autocommit
if time_zone is not None:
self.time_zone = time_zone
if sql_mode is not None:
self.sql_mode = sql_mode
except:
raise
def is_connected(self):
"""
Check whether we are connected to the MySQL server.
"""
return self.protocol.cmd_ping()
ping = is_connected
def disconnect(self):
"""
Disconnect from the MySQL server.
"""
if not self.protocol:
return
if self.protocol.conn.sock is not None:
self.protocol.cmd_quit()
try:
self.protocol.conn.close_connection()
except:
pass
self.protocol = None
def set_converter_class(self, convclass):
"""
Set the converter class to be used. This should be a class overloading
methods and members of conversion.MySQLConverter.
"""
self.converter_class = convclass
self.converter = convclass(self.charset_name, self.use_unicode)
def get_server_version(self):
"""Returns the server version as a tuple"""
try:
return self.protocol.server_version
except:
pass
return None
def get_server_info(self):
"""Returns the server version as a string"""
return self.protocol.server_version_original
@property
def connection_id(self):
"""MySQL connection ID"""
threadid = None
try:
threadid = self.protocol.server_threadid
except:
pass
return threadid
def set_login(self, username=None, password=None):
"""Set login information for MySQL
Set the username and/or password for the user connecting to
the MySQL Server.
"""
if username is not None:
self.username = username.strip()
else:
self.username = ''
if password is not None:
self.password = password.strip()
else:
self.password = ''
def set_unicode(self, value=True):
"""Toggle unicode mode
Set whether we return string fields as unicode or not.
Default is True.
"""
self.use_unicode = value
if self.converter:
self.converter.set_unicode(value)
def set_charset(self, charset):
try:
(idx, charset_name, c) = \
constants.CharacterSet.get_charset_info(charset)
self._execute_query("SET NAMES '%s'" % charset_name)
except:
raise
else:
self._charset = idx
self.charset_name = charset_name
self.converter.set_charset(charset_name)
def get_charset(self):
return self._info_query(
"SELECT @@session.character_set_connection")[0]
charset = property(get_charset, set_charset,
doc="Character set for this connection")
def set_collation(self, collation):
try:
self._execute_query(
"SET @@session.collation_connection = '%s'" % collation)
except:
raise
def get_collation(self):
return self._info_query(
"SELECT @@session.collation_connection")[0]
collation = property(get_collation, set_collation,
doc="Collation for this connection")
def set_warnings(self, fetch=False, raise_on_warnings=False):
"""Set how to handle warnings coming from MySQL
        Set whether we should get warnings whenever an operation produced some.
If you set raise_on_warnings to True, any warning will be raised
as a DataError exception.
"""
if raise_on_warnings is True:
self.get_warnings = True
self.raise_on_warnings = True
else:
self.get_warnings = fetch
self.raise_on_warnings = False
def set_client_flags(self, flags):
"""Set the client flags
The flags-argument can be either an int or a list (or tuple) of
ClientFlag-values. If it is an integer, it will set client_flags
to flags.
        If flags is a list (or tuple), each positive flag will be set,
        and each negative flag will be unset.
set_client_flags([ClientFlag.FOUND_ROWS,-ClientFlag.LONG_FLAG])
Returns self.client_flags
"""
if isinstance(flags,int) and flags > 0:
            self.client_flags = flags
else:
if isinstance(flags,(tuple,list)):
for f in flags:
if f < 0:
self.unset_client_flag(abs(f))
else:
self.set_client_flag(f)
return self.client_flags
def set_client_flag(self, flag):
if flag > 0:
self.client_flags |= flag
def unset_client_flag(self, flag):
if flag > 0:
self.client_flags &= ~flag
def isset_client_flag(self, flag):
if (self.client_flags & flag) > 0:
return True
return False
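    # Usage sketch (hypothetical flag names, taken from the
    # set_client_flags() docstring above; assumes constants.ClientFlag
    # defines them):
    #
    #   cnx = MySQLConnection()
    #   cnx.set_client_flags([constants.ClientFlag.FOUND_ROWS,
    #                         -constants.ClientFlag.LONG_FLAG])
    #   cnx.isset_client_flag(constants.ClientFlag.FOUND_ROWS)  # -> True
    #   cnx.isset_client_flag(constants.ClientFlag.LONG_FLAG)   # -> False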
@property
def user(self):
"""User used while connecting to MySQL"""
return self._username
@property
def server_host(self):
"""MySQL server IP address or name"""
return self._server_host
@property
def server_port(self):
"MySQL server TCP/IP port"
return self._server_port
@property
def unix_socket(self):
"MySQL Unix socket file location"
return self._unix_socket
def set_database(self, value):
try:
self.protocol.cmd_query("USE %s" % value)
except:
raise
def get_database(self):
"""Get the current database"""
return self._info_query("SELECT DATABASE()")[0]
database = property(get_database, set_database,
doc="Current database")
def set_time_zone(self, value):
try:
self.protocol.cmd_query("SET @@session.time_zone = %s" % value)
except:
raise
def get_time_zone(self):
return self._info_query("SELECT @@session.time_zone")[0]
time_zone = property(get_time_zone, set_time_zone,
doc="time_zone value for current MySQL session")
def set_sql_mode(self, value):
try:
self.protocol.cmd_query("SET @@session.sql_mode = %s" % value)
except:
raise
def get_sql_mode(self):
return self._info_query("SELECT @@session.sql_mode")[0]
sql_mode = property(get_sql_mode, set_sql_mode,
doc="sql_mode value for current MySQL session")
def set_autocommit(self, value):
try:
if value:
s = 'ON'
else:
s = 'OFF'
self._execute_query("SET @@session.autocommit = %s" % s)
except:
raise
def get_autocommit(self):
value = self._info_query("SELECT @@session.autocommit")[0]
if value == 1:
return True
return False
autocommit = property(get_autocommit, set_autocommit,
doc="autocommit value for current MySQL session")
def close(self):
del self.cursors[:]
self.disconnect()
def remove_cursor(self, c):
try:
self.cursors.remove(c)
except ValueError:
raise errors.ProgrammingError(
"Cursor could not be removed.")
def cursor(self, buffered=None, raw=None, cursor_class=None):
"""Instantiates and returns a cursor
By default, MySQLCursor is returned. Depending on the options
        while connecting, a buffered and/or raw cursor is instantiated
        instead.
        It is possible to also give a custom cursor through the
        cursor_class parameter, but it needs to be a subclass of
mysql.connector.cursor.CursorBase.
Returns a cursor-object
"""
if cursor_class is not None:
if not issubclass(cursor_class, cursor.CursorBase):
raise errors.ProgrammingError(
"Cursor class needs be subclass of cursor.CursorBase")
c = (cursor_class)(self)
else:
buffered = buffered or self.buffered
raw = raw or self.raw
t = 0
if buffered is True:
t |= 1
if raw is True:
t |= 2
types = {
0 : cursor.MySQLCursor,
1 : cursor.MySQLCursorBuffered,
2 : cursor.MySQLCursorRaw,
3 : cursor.MySQLCursorBufferedRaw,
}
c = (types[t])(self)
if c not in self.cursors:
self.cursors.append(c)
return c
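    # (Note on the dispatch above: the two cursor options form a 2-bit key,
    # with buffered setting bit 0 and raw setting bit 1; for example,
    # buffered=True with raw=True yields t == 3 and therefore a
    # cursor.MySQLCursorBufferedRaw instance.)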
def commit(self):
"""Commit current transaction"""
self._execute_query("COMMIT")
def rollback(self):
"""Rollback current transaction"""
self._execute_query("ROLLBACK")
def _execute_query(self, query):
if self.unread_result is True:
raise errors.InternalError("Unread result found.")
self.protocol.cmd_query(query)
def _info_query(self, query):
try:
cur = self.cursor(buffered=True)
cur.execute(query)
row = cur.fetchone()
cur.close()
except:
raise
return row
|
{
"content_hash": "17c83bfb08863cb19bfa18dd78e0c293",
"timestamp": "",
"source": "github",
"line_count": 589,
"max_line_length": 82,
"avg_line_length": 31.39049235993209,
"alnum_prop": 0.5621721023311158,
"repo_name": "ekristen/mythboxee",
"id": "a00dc409a4cac735880c57715397ff7711805f20",
"size": "19603",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "mysql/connector/connection.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "683837"
}
],
"symlink_target": ""
}
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'Gloss.tjspeculate'
db.delete_column('dictionary_gloss', 'tjspeculate')
def backwards(self, orm):
# Adding field 'Gloss.tjspeculate'
db.add_column('dictionary_gloss', 'tjspeculate',
self.gf('django.db.models.fields.TextField')(null=True, blank=True),
keep_default=False)
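    # (The forwards/backwards pair above follows the usual South contract:
    # forwards() drops the column and backwards() restores it. self.gf() is
    # South's shorthand for resolving a field class from its dotted path.)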
models = {
'dictionary.definition': {
'Meta': {'ordering': "['gloss']", 'object_name': 'Definition'},
'count': ('django.db.models.fields.IntegerField', [], {}),
'gloss': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dictionary.Gloss']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'text': ('django.db.models.fields.TextField', [], {})
},
'dictionary.dialect': {
'Meta': {'ordering': "['language', 'name']", 'object_name': 'Dialect'},
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dictionary.Language']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
'dictionary.gloss': {
'BookProb': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'InMainBook': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'InSuppBook': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'Meta': {'ordering': "['idgloss']", 'object_name': 'Gloss'},
'NotBkDBOnly': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'Palm_orientation': ('django.db.models.fields.CharField', [], {'max_length': '10', 'blank': 'True'}),
'SpecialCore': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'StemSN': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'annotation_idgloss': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'aslgloss': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'asloantf': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'asltf': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'blend': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'blendtf': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'bslgloss': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'bslloantf': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'bsltf': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'comp': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'compound': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'comptf': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'dialect': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['dictionary.Dialect']", 'symmetrical': 'False'}),
'domhndsh': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'final_domhndsh': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'final_loc': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'final_subhndsh': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'idgloss': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'inCD': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'inWeb': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'inittext': ('django.db.models.fields.CharField', [], {'max_length': "'50'", 'blank': 'True'}),
'inittf': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'isNew': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'language': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['dictionary.Language']", 'symmetrical': 'False'}),
'locprim': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'locsecond': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'morph': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'queries': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'sedefinetf': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'segloss': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'sense': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'sn': ('django.db.models.fields.IntegerField', [], {'unique': 'True', 'null': 'True', 'blank': 'True'}),
'subhndsh': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True', 'blank': 'True'})
},
'dictionary.keyword': {
'Meta': {'ordering': "['text']", 'object_name': 'Keyword'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'})
},
'dictionary.language': {
'Meta': {'ordering': "['name']", 'object_name': 'Language'},
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
'dictionary.relation': {
'Meta': {'ordering': "['source']", 'object_name': 'Relation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'relation_sources'", 'to': "orm['dictionary.Gloss']"}),
'target': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'relation_targets'", 'to': "orm['dictionary.Gloss']"})
},
'dictionary.translation': {
'Meta': {'ordering': "['gloss', 'index']", 'object_name': 'Translation'},
'gloss': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dictionary.Gloss']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'index': ('django.db.models.fields.IntegerField', [], {}),
'translation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dictionary.Keyword']"})
}
}
complete_apps = ['dictionary']
|
{
"content_hash": "a6eb79d75e7ad404715e416900372e2d",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 146,
"avg_line_length": 74.14814814814815,
"alnum_prop": 0.5462037962037962,
"repo_name": "Signbank/BSL-signbank",
"id": "a005b5d0a4ffc69f0d1792818b43a9345b02ab08",
"size": "8032",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "signbank/dictionary/migrations/0027_auto__del_field_gloss_tjspeculate.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "1C Enterprise",
"bytes": "1846"
},
{
"name": "CSS",
"bytes": "480831"
},
{
"name": "HTML",
"bytes": "244006"
},
{
"name": "JavaScript",
"bytes": "1011248"
},
{
"name": "PHP",
"bytes": "1052"
},
{
"name": "Python",
"bytes": "986206"
}
],
"symlink_target": ""
}
|
import diventi.accounts.models
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('accounts', '0134_auto_20190512_1601'),
]
operations = [
migrations.AlterModelManagers(
name='diventiuser',
managers=[
('objects', diventi.accounts.models.DiventiUserManager()),
],
),
]
|
{
"content_hash": "a5ed5b916611c8de57173a8cfdfb993c",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 74,
"avg_line_length": 22.38888888888889,
"alnum_prop": 0.5831265508684863,
"repo_name": "flavoi/diventi",
"id": "97f68ca4de9144ffb94ad74c3f266c9f5e9a0d03",
"size": "452",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "diventi/accounts/migrations/0135_auto_20190512_1602.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "385265"
},
{
"name": "Procfile",
"bytes": "46"
},
{
"name": "Python",
"bytes": "826530"
}
],
"symlink_target": ""
}
|
import logging
import classad
from ornithology import *
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
@action
def submitJobInputFailureAP(default_condor):
return default_condor.submit(
{
"log": "job_ap_input.log",
"executable": "/bin/sleep",
"arguments": "0",
"transfer_executable": "false",
"should_transfer_files": "yes",
"transfer_input_files": "not_there_in",
}
)
@action
def jobInputFailureAP(submitJobInputFailureAP):
assert submitJobInputFailureAP.wait(condition=ClusterState.all_held,timeout=60)
return submitJobInputFailureAP.query()[0]
@action
def submitJobOutputFailureAP(default_condor):
return default_condor.submit(
{
"log": "job_ap_output.log",
"executable": "/bin/sleep",
"arguments": "0",
"transfer_executable": "false",
"should_transfer_files": "yes",
"transfer_input_files": "/bin/true",
"transfer_output_files": "true",
"transfer_output_remaps": classad.quote("true=/not_there_dir/blah"),
}
)
@action
def jobOutputFailureAP(submitJobOutputFailureAP):
assert submitJobOutputFailureAP.wait(condition=ClusterState.all_held,timeout=60)
return submitJobOutputFailureAP.query()[0]
@action
def submitJobInputFailureEP(default_condor):
return default_condor.submit(
{
"log": "job_ep_input.log",
"executable": "/bin/sleep",
"arguments": "0",
"transfer_executable": "false",
"should_transfer_files": "yes",
"transfer_input_files": "http://neversslxxx.com/index.html",
}
)
@action
def jobInputFailureEP(submitJobInputFailureEP):
assert submitJobInputFailureEP.wait(condition=ClusterState.all_held,timeout=60)
return submitJobInputFailureEP.query()[0]
@action
def submitJobOutputFailureEP(default_condor):
return default_condor.submit(
{
"log": "job_ep_output.log",
"executable": "/bin/sleep",
"arguments": "0",
"transfer_executable": "false",
"should_transfer_files": "yes",
"transfer_output_files": "not_there_out",
}
)
@action
def jobOutputFailureEP(submitJobOutputFailureEP):
assert submitJobOutputFailureEP.wait(condition=ClusterState.all_held,timeout=60)
return submitJobOutputFailureEP.query()[0]
class TestXferHoldCodes:
    def test_submit_all(self, submitJobInputFailureAP, submitJobOutputFailureAP,
                        submitJobInputFailureEP, submitJobOutputFailureEP):
assert True
def test_jobInputFailureAP(self, jobInputFailureAP):
assert jobInputFailureAP["HoldReasonCode"] == 13
assert "Transfer input files failure at access point" in jobInputFailureAP["HoldReason"]
def test_jobOutputFailureAP(self, jobOutputFailureAP):
assert jobOutputFailureAP["HoldReasonCode"] == 12
assert "Transfer output files failure at access point" in jobOutputFailureAP["HoldReason"]
def test_jobInputFailureEP(self, jobInputFailureEP):
assert jobInputFailureEP["HoldReasonCode"] == 13
assert "Transfer input files failure at execution point" in jobInputFailureEP["HoldReason"]
def test_jobOutputFailureEP(self, jobOutputFailureEP):
assert jobOutputFailureEP["HoldReasonCode"] == 12
assert "Transfer output files failure at execution point" in jobOutputFailureEP["HoldReason"]
|
{
"content_hash": "095b834578940281aeb92c7c33570876",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 125,
"avg_line_length": 33.786407766990294,
"alnum_prop": 0.6755747126436782,
"repo_name": "htcondor/htcondor",
"id": "8a4e4bbb09492ab146a31efc59767d931f9a6b3d",
"size": "3503",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "src/condor_tests/test_xfer_hold_codes.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Ada",
"bytes": "71055"
},
{
"name": "Awk",
"bytes": "9454"
},
{
"name": "Batchfile",
"bytes": "146264"
},
{
"name": "C",
"bytes": "1651049"
},
{
"name": "C++",
"bytes": "31790435"
},
{
"name": "CMake",
"bytes": "468527"
},
{
"name": "CSS",
"bytes": "9738"
},
{
"name": "Dockerfile",
"bytes": "75955"
},
{
"name": "Fortran",
"bytes": "1279"
},
{
"name": "HTML",
"bytes": "59724"
},
{
"name": "Java",
"bytes": "43977"
},
{
"name": "JavaScript",
"bytes": "130293"
},
{
"name": "M4",
"bytes": "20440"
},
{
"name": "Makefile",
"bytes": "68811"
},
{
"name": "Perl",
"bytes": "3761627"
},
{
"name": "PowerShell",
"bytes": "5412"
},
{
"name": "Python",
"bytes": "1593654"
},
{
"name": "Roff",
"bytes": "2353"
},
{
"name": "Shell",
"bytes": "579393"
},
{
"name": "VBScript",
"bytes": "8734"
},
{
"name": "Yacc",
"bytes": "13532"
}
],
"symlink_target": ""
}
|
import os
# contents of: remotecmd.py
def simple(arg):
return arg + 1
def listdir(path):
return os.listdir(path)
if __name__ == "__channelexec__":
for item in channel:
channel.send(eval(item))
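# Caller-side sketch (not part of this file; assumes execnet is available).
# The "__channelexec__" loop above eval()s whatever source strings the master
# sends, so a gateway can drive it like this:
#
#   import execnet
#   import remotecmd
#   gw = execnet.makegateway()
#   channel = gw.remote_exec(remotecmd)
#   channel.send('simple(41)')   # evaluated remotely by the loop above
#   channel.receive()            # -> 42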
|
{
"content_hash": "b207dde856e2dd7a182c621b023dd6ab",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 33,
"avg_line_length": 15.571428571428571,
"alnum_prop": 0.6238532110091743,
"repo_name": "alfredodeza/execnet",
"id": "5d3fd99594f7b9009a1c7d5f6b273877bab2d8ba",
"size": "242",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "doc/example/remotecmd.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "322"
},
{
"name": "Python",
"bytes": "201446"
}
],
"symlink_target": ""
}
|
"""Zookeeper Serializers, Deserializers, and NamedTuple objects"""
from collections import namedtuple
import struct
import six
from kazoo.exceptions import EXCEPTIONS
from kazoo.protocol.states import ZnodeStat
from kazoo.security import ACL
from kazoo.security import Id
# Struct objects with formats compiled
bool_struct = struct.Struct('B')
int_struct = struct.Struct('!i')
int_int_struct = struct.Struct('!ii')
int_int_long_struct = struct.Struct('!iiq')
int_long_int_long_struct = struct.Struct('!iqiq')
long_struct = struct.Struct('!q')
multiheader_struct = struct.Struct('!iBi')
reply_header_struct = struct.Struct('!iqi')
stat_struct = struct.Struct('!qqqqiiiqiiq')
def read_string(buffer, offset):
"""Reads an int specified buffer into a string and returns the
string and the new offset in the buffer"""
length = int_struct.unpack_from(buffer, offset)[0]
offset += int_struct.size
if length < 0:
return None, offset
else:
index = offset
offset += length
return buffer[index:index + length].decode('utf-8'), offset
def read_acl(bytes, offset):
perms = int_struct.unpack_from(bytes, offset)[0]
offset += int_struct.size
scheme, offset = read_string(bytes, offset)
id, offset = read_string(bytes, offset)
return ACL(perms, Id(scheme, id)), offset
def write_string(bytes):
if not bytes:
return int_struct.pack(-1)
else:
utf8_str = bytes.encode('utf-8')
return int_struct.pack(len(utf8_str)) + utf8_str
def write_buffer(bytes):
if bytes is None:
return int_struct.pack(-1)
else:
return int_struct.pack(len(bytes)) + bytes
def read_buffer(bytes, offset):
length = int_struct.unpack_from(bytes, offset)[0]
offset += int_struct.size
if length < 0:
return None, offset
else:
index = offset
offset += length
return bytes[index:index + length], offset
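# Round-trip sketch for the length-prefixed encoding implemented above: each
# string or buffer is written as a 4-byte big-endian length followed by the
# raw bytes, with a length of -1 standing in for None.
#
#   data = write_string(u'/zk/node')     # -> b'\x00\x00\x00\x08/zk/node'
#   text, offset = read_string(data, 0)  # -> (u'/zk/node', 12)
#   assert write_buffer(None) == int_struct.pack(-1)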
class Close(namedtuple('Close', '')):
type = -11
@classmethod
def serialize(cls):
return b''
CloseInstance = Close()
class Ping(namedtuple('Ping', '')):
type = 11
@classmethod
def serialize(cls):
return b''
PingInstance = Ping()
class Connect(namedtuple('Connect', 'protocol_version last_zxid_seen'
' time_out session_id passwd read_only')):
type = None
def serialize(self):
b = bytearray()
b.extend(int_long_int_long_struct.pack(
self.protocol_version, self.last_zxid_seen, self.time_out,
self.session_id))
b.extend(write_buffer(self.passwd))
b.extend([1 if self.read_only else 0])
return b
@classmethod
def deserialize(cls, bytes, offset):
proto_version, timeout, session_id = int_int_long_struct.unpack_from(
bytes, offset)
offset += int_int_long_struct.size
password, offset = read_buffer(bytes, offset)
try:
read_only = bool_struct.unpack_from(bytes, offset)[0] == 1
offset += bool_struct.size
except struct.error:
read_only = False
return cls(proto_version, 0, timeout, session_id, password,
read_only), offset
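# Wire-layout note (sketch): a connect request packs protocol_version (int32),
# last_zxid_seen (int64), time_out (int32) and session_id (int64), all
# big-endian per the '!iqiq' format above, followed by the length-prefixed
# password buffer and a single read_only byte.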
class Create(namedtuple('Create', 'path data acl flags')):
type = 1
def serialize(self):
b = bytearray()
b.extend(write_string(self.path))
b.extend(write_buffer(self.data))
b.extend(int_struct.pack(len(self.acl)))
for acl in self.acl:
b.extend(int_struct.pack(acl.perms) +
write_string(acl.id.scheme) + write_string(acl.id.id))
b.extend(int_struct.pack(self.flags))
return b
@classmethod
def deserialize(cls, bytes, offset):
return read_string(bytes, offset)[0]
class Delete(namedtuple('Delete', 'path version')):
type = 2
def serialize(self):
b = bytearray()
b.extend(write_string(self.path))
b.extend(int_struct.pack(self.version))
return b
@classmethod
def deserialize(self, bytes, offset):
return True
class Exists(namedtuple('Exists', 'path watcher')):
type = 3
def serialize(self):
b = bytearray()
b.extend(write_string(self.path))
b.extend([1 if self.watcher else 0])
return b
@classmethod
def deserialize(cls, bytes, offset):
stat = ZnodeStat._make(stat_struct.unpack_from(bytes, offset))
return stat if stat.czxid != -1 else None
class GetData(namedtuple('GetData', 'path watcher')):
type = 4
def serialize(self):
b = bytearray()
b.extend(write_string(self.path))
b.extend([1 if self.watcher else 0])
return b
@classmethod
def deserialize(cls, bytes, offset):
data, offset = read_buffer(bytes, offset)
stat = ZnodeStat._make(stat_struct.unpack_from(bytes, offset))
return data, stat
class SetData(namedtuple('SetData', 'path data version')):
type = 5
def serialize(self):
b = bytearray()
b.extend(write_string(self.path))
b.extend(write_buffer(self.data))
b.extend(int_struct.pack(self.version))
return b
@classmethod
def deserialize(cls, bytes, offset):
return ZnodeStat._make(stat_struct.unpack_from(bytes, offset))
class GetACL(namedtuple('GetACL', 'path')):
type = 6
def serialize(self):
return bytearray(write_string(self.path))
@classmethod
def deserialize(cls, bytes, offset):
count = int_struct.unpack_from(bytes, offset)[0]
offset += int_struct.size
if count == -1: # pragma: nocover
return []
acls = []
for c in range(count):
acl, offset = read_acl(bytes, offset)
acls.append(acl)
stat = ZnodeStat._make(stat_struct.unpack_from(bytes, offset))
return acls, stat
class SetACL(namedtuple('SetACL', 'path acls version')):
type = 7
def serialize(self):
b = bytearray()
b.extend(write_string(self.path))
b.extend(int_struct.pack(len(self.acls)))
for acl in self.acls:
b.extend(int_struct.pack(acl.perms) +
write_string(acl.id.scheme) + write_string(acl.id.id))
b.extend(int_struct.pack(self.version))
return b
@classmethod
def deserialize(cls, bytes, offset):
return ZnodeStat._make(stat_struct.unpack_from(bytes, offset))
class GetChildren(namedtuple('GetChildren', 'path watcher')):
type = 8
def serialize(self):
b = bytearray()
b.extend(write_string(self.path))
b.extend([1 if self.watcher else 0])
return b
@classmethod
def deserialize(cls, bytes, offset):
count = int_struct.unpack_from(bytes, offset)[0]
offset += int_struct.size
if count == -1: # pragma: nocover
return []
children = []
for c in range(count):
child, offset = read_string(bytes, offset)
children.append(child)
return children
class Sync(namedtuple('Sync', 'path')):
type = 9
def serialize(self):
return write_string(self.path)
@classmethod
def deserialize(cls, buffer, offset):
return read_string(buffer, offset)[0]
class GetChildren2(namedtuple('GetChildren2', 'path watcher')):
type = 12
def serialize(self):
b = bytearray()
b.extend(write_string(self.path))
b.extend([1 if self.watcher else 0])
return b
@classmethod
def deserialize(cls, bytes, offset):
count = int_struct.unpack_from(bytes, offset)[0]
offset += int_struct.size
if count == -1: # pragma: nocover
return []
children = []
for c in range(count):
child, offset = read_string(bytes, offset)
children.append(child)
stat = ZnodeStat._make(stat_struct.unpack_from(bytes, offset))
return children, stat
class CheckVersion(namedtuple('CheckVersion', 'path version')):
type = 13
def serialize(self):
b = bytearray()
b.extend(write_string(self.path))
b.extend(int_struct.pack(self.version))
return b
class Transaction(namedtuple('Transaction', 'operations')):
type = 14
def serialize(self):
b = bytearray()
for op in self.operations:
b.extend(MultiHeader(op.type, False, -1).serialize() +
op.serialize())
return b + multiheader_struct.pack(-1, True, -1)
@classmethod
def deserialize(cls, bytes, offset):
header = MultiHeader(None, False, None)
results = []
response = None
while not header.done:
if header.type == Create.type:
response, offset = read_string(bytes, offset)
elif header.type == Delete.type:
response = True
elif header.type == SetData.type:
response = ZnodeStat._make(
stat_struct.unpack_from(bytes, offset))
offset += stat_struct.size
elif header.type == CheckVersion.type:
response = True
elif header.type == -1:
err = int_struct.unpack_from(bytes, offset)[0]
offset += int_struct.size
response = EXCEPTIONS[err]()
if response:
results.append(response)
header, offset = MultiHeader.deserialize(bytes, offset)
return results
@staticmethod
def unchroot(client, response):
resp = []
for result in response:
if isinstance(result, six.string_types):
resp.append(client.unchroot(result))
else:
resp.append(result)
return resp
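# Hedged framing note (illustrative, not from the kazoo sources): for a
# transaction of [Create(...), Delete(...)], serialize() above emits
#   MultiHeader(1, False, -1) + <create payload> +
#   MultiHeader(2, False, -1) + <delete payload> +
#   MultiHeader(-1, True, -1)
# i.e. each op is prefixed with a header carrying its type, and the stream
# ends with a done-marker header of type -1.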
class Create2(namedtuple('Create2', 'path data acl flags')):
type = 15
def serialize(self):
b = bytearray()
b.extend(write_string(self.path))
b.extend(write_buffer(self.data))
b.extend(int_struct.pack(len(self.acl)))
for acl in self.acl:
b.extend(int_struct.pack(acl.perms) +
write_string(acl.id.scheme) + write_string(acl.id.id))
b.extend(int_struct.pack(self.flags))
return b
@classmethod
def deserialize(cls, bytes, offset):
path, offset = read_string(bytes, offset)
stat = ZnodeStat._make(stat_struct.unpack_from(bytes, offset))
return path, stat
class Reconfig(namedtuple('Reconfig',
'joining leaving new_members config_id')):
type = 16
def serialize(self):
b = bytearray()
b.extend(write_string(self.joining))
b.extend(write_string(self.leaving))
b.extend(write_string(self.new_members))
b.extend(long_struct.pack(self.config_id))
return b
@classmethod
def deserialize(cls, bytes, offset):
data, offset = read_buffer(bytes, offset)
stat = ZnodeStat._make(stat_struct.unpack_from(bytes, offset))
return data, stat
class Auth(namedtuple('Auth', 'auth_type scheme auth')):
type = 100
def serialize(self):
return (int_struct.pack(self.auth_type) + write_string(self.scheme) +
write_string(self.auth))
class SASL(namedtuple('SASL', 'challenge')):
type = 102
def serialize(self):
b = bytearray()
b.extend(write_buffer(self.challenge))
return b
@classmethod
def deserialize(cls, bytes, offset):
challenge, offset = read_buffer(bytes, offset)
return challenge, offset
class Watch(namedtuple('Watch', 'type state path')):
@classmethod
def deserialize(cls, bytes, offset):
"""Given bytes and the current bytes offset, return the
type, state, path, and new offset"""
type, state = int_int_struct.unpack_from(bytes, offset)
offset += int_int_struct.size
path, offset = read_string(bytes, offset)
return cls(type, state, path), offset
class ReplyHeader(namedtuple('ReplyHeader', 'xid, zxid, err')):
@classmethod
def deserialize(cls, bytes, offset):
"""Given bytes and the current bytes offset, return a
:class:`ReplyHeader` instance and the new offset"""
new_offset = offset + reply_header_struct.size
return cls._make(
reply_header_struct.unpack_from(bytes, offset)), new_offset
class MultiHeader(namedtuple('MultiHeader', 'type done err')):
def serialize(self):
b = bytearray()
b.extend(int_struct.pack(self.type))
b.extend([1 if self.done else 0])
b.extend(int_struct.pack(self.err))
return b
@classmethod
def deserialize(cls, bytes, offset):
t, done, err = multiheader_struct.unpack_from(bytes, offset)
offset += multiheader_struct.size
return cls(t, done == 1, err), offset
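# ---------------------------------------------------------------------------
# Hedged usage sketch, not part of the kazoo sources: a small round-trip
# through the record types above. It assumes the module-level struct helpers
# referenced earlier (int_struct, multiheader_struct, write_string, ...) are
# defined, as they are in the full kazoo/protocol/serialization.py.
if __name__ == '__main__':
    # Frame a Delete request the way the client would.
    payload = Delete(path='/some/node', version=-1).serialize()
    assert isinstance(payload, bytearray)
    # Serialize a multi-op header and read it back.
    raw = bytes(MultiHeader(Delete.type, False, -1).serialize())
    header, offset = MultiHeader.deserialize(raw, 0)
    assert header.type == Delete.type and not header.done
    assert offset == multiheader_struct.size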
|
{
"content_hash": "970c83abe5053aca7bf74f5f86796232",
"timestamp": "",
"source": "github",
"line_count": 449,
"max_line_length": 77,
"avg_line_length": 28.870824053452115,
"alnum_prop": 0.6043354161845252,
"repo_name": "kawamon/hue",
"id": "80fa4d1048205e07ecb034dbd1d5e62272d17c61",
"size": "12963",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/kazoo-2.8.0/kazoo/protocol/serialization.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "962"
},
{
"name": "ActionScript",
"bytes": "1133"
},
{
"name": "Ada",
"bytes": "99"
},
{
"name": "Assembly",
"bytes": "5786"
},
{
"name": "AutoHotkey",
"bytes": "720"
},
{
"name": "Batchfile",
"bytes": "118907"
},
{
"name": "C",
"bytes": "3196521"
},
{
"name": "C#",
"bytes": "83"
},
{
"name": "C++",
"bytes": "308860"
},
{
"name": "COBOL",
"bytes": "4"
},
{
"name": "CSS",
"bytes": "1050129"
},
{
"name": "Cirru",
"bytes": "520"
},
{
"name": "Clojure",
"bytes": "794"
},
{
"name": "CoffeeScript",
"bytes": "403"
},
{
"name": "ColdFusion",
"bytes": "86"
},
{
"name": "Common Lisp",
"bytes": "632"
},
{
"name": "D",
"bytes": "324"
},
{
"name": "Dart",
"bytes": "489"
},
{
"name": "Dockerfile",
"bytes": "10981"
},
{
"name": "Eiffel",
"bytes": "375"
},
{
"name": "Elixir",
"bytes": "692"
},
{
"name": "Elm",
"bytes": "487"
},
{
"name": "Emacs Lisp",
"bytes": "411907"
},
{
"name": "Erlang",
"bytes": "487"
},
{
"name": "Forth",
"bytes": "979"
},
{
"name": "FreeMarker",
"bytes": "1017"
},
{
"name": "G-code",
"bytes": "521"
},
{
"name": "GLSL",
"bytes": "512"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Gherkin",
"bytes": "699"
},
{
"name": "Go",
"bytes": "7312"
},
{
"name": "Groovy",
"bytes": "1080"
},
{
"name": "HTML",
"bytes": "24999718"
},
{
"name": "Haskell",
"bytes": "512"
},
{
"name": "Haxe",
"bytes": "447"
},
{
"name": "HiveQL",
"bytes": "43"
},
{
"name": "Io",
"bytes": "140"
},
{
"name": "JSONiq",
"bytes": "4"
},
{
"name": "Java",
"bytes": "471854"
},
{
"name": "JavaScript",
"bytes": "28075556"
},
{
"name": "Julia",
"bytes": "210"
},
{
"name": "Jupyter Notebook",
"bytes": "73168"
},
{
"name": "LSL",
"bytes": "2080"
},
{
"name": "Lean",
"bytes": "213"
},
{
"name": "Lex",
"bytes": "264449"
},
{
"name": "Liquid",
"bytes": "1883"
},
{
"name": "LiveScript",
"bytes": "5747"
},
{
"name": "Lua",
"bytes": "78382"
},
{
"name": "M4",
"bytes": "1377"
},
{
"name": "MATLAB",
"bytes": "203"
},
{
"name": "Makefile",
"bytes": "269655"
},
{
"name": "Mako",
"bytes": "3614942"
},
{
"name": "Mask",
"bytes": "597"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "Nix",
"bytes": "2212"
},
{
"name": "OCaml",
"bytes": "539"
},
{
"name": "Objective-C",
"bytes": "2672"
},
{
"name": "OpenSCAD",
"bytes": "333"
},
{
"name": "PHP",
"bytes": "662"
},
{
"name": "PLSQL",
"bytes": "31565"
},
{
"name": "PLpgSQL",
"bytes": "6006"
},
{
"name": "Pascal",
"bytes": "1412"
},
{
"name": "Perl",
"bytes": "4327"
},
{
"name": "PigLatin",
"bytes": "371"
},
{
"name": "PowerShell",
"bytes": "3204"
},
{
"name": "Python",
"bytes": "76440000"
},
{
"name": "R",
"bytes": "2445"
},
{
"name": "Roff",
"bytes": "95764"
},
{
"name": "Ruby",
"bytes": "1098"
},
{
"name": "Rust",
"bytes": "495"
},
{
"name": "Scala",
"bytes": "1541"
},
{
"name": "Scheme",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "190718"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "TSQL",
"bytes": "10013"
},
{
"name": "Tcl",
"bytes": "899"
},
{
"name": "TeX",
"bytes": "165743"
},
{
"name": "Thrift",
"bytes": "317058"
},
{
"name": "TypeScript",
"bytes": "1607"
},
{
"name": "VBA",
"bytes": "2884"
},
{
"name": "VBScript",
"bytes": "938"
},
{
"name": "VHDL",
"bytes": "830"
},
{
"name": "Vala",
"bytes": "485"
},
{
"name": "Verilog",
"bytes": "274"
},
{
"name": "Vim Snippet",
"bytes": "226931"
},
{
"name": "XQuery",
"bytes": "114"
},
{
"name": "XSLT",
"bytes": "521413"
},
{
"name": "Yacc",
"bytes": "2133855"
}
],
"symlink_target": ""
}
|
"""empty message
Revision ID: 35099fc974d2
Revises: ae00e7974dca
Create Date: 2017-05-19 17:01:48.878196
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '35099fc974d2'
down_revision = 'ae00e7974dca'
branch_labels = None
depends_on = None
def upgrade():
op.drop_column('node', 'wipe_root_disk_next_boot')
op.drop_column('node', 'root_partition_size_sectors')
op.drop_column('node', 'root_disk')
def downgrade():
op.add_column('node', sa.Column('root_disk', sa.VARCHAR(length=80), autoincrement=False, nullable=False))
op.add_column('node', sa.Column('root_partition_size_sectors', sa.INTEGER(), autoincrement=False, nullable=True))
op.add_column('node', sa.Column('wipe_root_disk_next_boot', sa.BOOLEAN(), autoincrement=False, nullable=False))
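# Hedged usage note (standard Alembic CLI, not part of this revision file):
# with this revision on the version path, `alembic upgrade head` applies the
# column drops above and `alembic downgrade ae00e7974dca` re-adds the columns.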
|
{
"content_hash": "ff4e91c6d6b68e49ababc26054412ea7",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 117,
"avg_line_length": 29.642857142857142,
"alnum_prop": 0.7192771084337349,
"repo_name": "nailgun/seedbox",
"id": "652f515638fabbdc29fa03743668cd7f3bcdf458",
"size": "830",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/seedbox/migrations/versions/35099fc974d2_.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "13334"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "131526"
},
{
"name": "Shell",
"bytes": "747"
}
],
"symlink_target": ""
}
|
from openpyxl.descriptors.serialisable import Serialisable
from openpyxl.descriptors import (
Typed,
Float,
Set,
Alias
)
from openpyxl.descriptors.excel import ExtensionList
from openpyxl.descriptors.nested import (
NestedNoneSet,
NestedSet,
NestedBool,
NestedFloat,
)
from .data_source import NumDataSource
from .shapes import GraphicalProperties
class ErrorBars(Serialisable):
tagname = "errBars"
errDir = NestedNoneSet(values=(['x', 'y']))
direction = Alias("errDir")
errBarType = NestedSet(values=(['both', 'minus', 'plus']))
style = Alias("errBarType")
errValType = NestedSet(values=(['cust', 'fixedVal', 'percentage', 'stdDev', 'stdErr']))
size = Alias("errValType")
noEndCap = NestedBool(nested=True, allow_none=True)
plus = Typed(expected_type=NumDataSource, allow_none=True)
minus = Typed(expected_type=NumDataSource, allow_none=True)
val = NestedFloat(allow_none=True)
spPr = Typed(expected_type=GraphicalProperties, allow_none=True)
graphicalProperties = Alias("spPr")
extLst = Typed(expected_type=ExtensionList, allow_none=True)
__elements__ = ('errDir','errBarType', 'errValType', 'noEndCap','minus', 'plus', 'val', 'spPr')
def __init__(self,
errDir=None,
errBarType="both",
errValType="fixedVal",
noEndCap=None,
plus=None,
minus=None,
val=None,
spPr=None,
extLst=None,
):
self.errDir = errDir
self.errBarType = errBarType
self.errValType = errValType
self.noEndCap = noEndCap
self.plus = plus
self.minus = minus
self.val = val
self.spPr = spPr
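# Hedged usage sketch (illustrative, not part of openpyxl): fixed-value
# y-error bars built from the class above; the asserts exercise the Alias
# descriptors, which map the XML-ish names to friendlier ones.
if __name__ == '__main__':
    bars = ErrorBars(errDir='y', errValType='fixedVal', val=0.5)
    assert bars.direction == 'y'
    assert bars.size == 'fixedVal'
    assert bars.style == 'both'   # errBarType default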
|
{
"content_hash": "c13b98c7dd34da7d629c5a90074a6593",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 99,
"avg_line_length": 29.933333333333334,
"alnum_prop": 0.6158129175946548,
"repo_name": "kawamon/hue",
"id": "942509f197fa5baceb84d0c49c08024ad58ced4d",
"size": "1832",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/openpyxl-2.6.4/openpyxl/chart/error_bar.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "962"
},
{
"name": "ActionScript",
"bytes": "1133"
},
{
"name": "Ada",
"bytes": "99"
},
{
"name": "Assembly",
"bytes": "5786"
},
{
"name": "AutoHotkey",
"bytes": "720"
},
{
"name": "Batchfile",
"bytes": "118907"
},
{
"name": "C",
"bytes": "3196521"
},
{
"name": "C#",
"bytes": "83"
},
{
"name": "C++",
"bytes": "308860"
},
{
"name": "COBOL",
"bytes": "4"
},
{
"name": "CSS",
"bytes": "1050129"
},
{
"name": "Cirru",
"bytes": "520"
},
{
"name": "Clojure",
"bytes": "794"
},
{
"name": "CoffeeScript",
"bytes": "403"
},
{
"name": "ColdFusion",
"bytes": "86"
},
{
"name": "Common Lisp",
"bytes": "632"
},
{
"name": "D",
"bytes": "324"
},
{
"name": "Dart",
"bytes": "489"
},
{
"name": "Dockerfile",
"bytes": "10981"
},
{
"name": "Eiffel",
"bytes": "375"
},
{
"name": "Elixir",
"bytes": "692"
},
{
"name": "Elm",
"bytes": "487"
},
{
"name": "Emacs Lisp",
"bytes": "411907"
},
{
"name": "Erlang",
"bytes": "487"
},
{
"name": "Forth",
"bytes": "979"
},
{
"name": "FreeMarker",
"bytes": "1017"
},
{
"name": "G-code",
"bytes": "521"
},
{
"name": "GLSL",
"bytes": "512"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Gherkin",
"bytes": "699"
},
{
"name": "Go",
"bytes": "7312"
},
{
"name": "Groovy",
"bytes": "1080"
},
{
"name": "HTML",
"bytes": "24999718"
},
{
"name": "Haskell",
"bytes": "512"
},
{
"name": "Haxe",
"bytes": "447"
},
{
"name": "HiveQL",
"bytes": "43"
},
{
"name": "Io",
"bytes": "140"
},
{
"name": "JSONiq",
"bytes": "4"
},
{
"name": "Java",
"bytes": "471854"
},
{
"name": "JavaScript",
"bytes": "28075556"
},
{
"name": "Julia",
"bytes": "210"
},
{
"name": "Jupyter Notebook",
"bytes": "73168"
},
{
"name": "LSL",
"bytes": "2080"
},
{
"name": "Lean",
"bytes": "213"
},
{
"name": "Lex",
"bytes": "264449"
},
{
"name": "Liquid",
"bytes": "1883"
},
{
"name": "LiveScript",
"bytes": "5747"
},
{
"name": "Lua",
"bytes": "78382"
},
{
"name": "M4",
"bytes": "1377"
},
{
"name": "MATLAB",
"bytes": "203"
},
{
"name": "Makefile",
"bytes": "269655"
},
{
"name": "Mako",
"bytes": "3614942"
},
{
"name": "Mask",
"bytes": "597"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "Nix",
"bytes": "2212"
},
{
"name": "OCaml",
"bytes": "539"
},
{
"name": "Objective-C",
"bytes": "2672"
},
{
"name": "OpenSCAD",
"bytes": "333"
},
{
"name": "PHP",
"bytes": "662"
},
{
"name": "PLSQL",
"bytes": "31565"
},
{
"name": "PLpgSQL",
"bytes": "6006"
},
{
"name": "Pascal",
"bytes": "1412"
},
{
"name": "Perl",
"bytes": "4327"
},
{
"name": "PigLatin",
"bytes": "371"
},
{
"name": "PowerShell",
"bytes": "3204"
},
{
"name": "Python",
"bytes": "76440000"
},
{
"name": "R",
"bytes": "2445"
},
{
"name": "Roff",
"bytes": "95764"
},
{
"name": "Ruby",
"bytes": "1098"
},
{
"name": "Rust",
"bytes": "495"
},
{
"name": "Scala",
"bytes": "1541"
},
{
"name": "Scheme",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "190718"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "TSQL",
"bytes": "10013"
},
{
"name": "Tcl",
"bytes": "899"
},
{
"name": "TeX",
"bytes": "165743"
},
{
"name": "Thrift",
"bytes": "317058"
},
{
"name": "TypeScript",
"bytes": "1607"
},
{
"name": "VBA",
"bytes": "2884"
},
{
"name": "VBScript",
"bytes": "938"
},
{
"name": "VHDL",
"bytes": "830"
},
{
"name": "Vala",
"bytes": "485"
},
{
"name": "Verilog",
"bytes": "274"
},
{
"name": "Vim Snippet",
"bytes": "226931"
},
{
"name": "XQuery",
"bytes": "114"
},
{
"name": "XSLT",
"bytes": "521413"
},
{
"name": "Yacc",
"bytes": "2133855"
}
],
"symlink_target": ""
}
|
"""Support for Enigma2 media players."""
from openwebif.api import CreateDevice
import voluptuous as vol
from homeassistant.components.media_player import MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_TVSHOW,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SELECT_SOURCE,
SUPPORT_STOP,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_MUTE,
SUPPORT_VOLUME_SET,
SUPPORT_VOLUME_STEP,
)
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_SSL,
CONF_USERNAME,
STATE_OFF,
STATE_ON,
STATE_PLAYING,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA
ATTR_MEDIA_CURRENTLY_RECORDING = "media_currently_recording"
ATTR_MEDIA_DESCRIPTION = "media_description"
ATTR_MEDIA_END_TIME = "media_end_time"
ATTR_MEDIA_START_TIME = "media_start_time"
CONF_USE_CHANNEL_ICON = "use_channel_icon"
CONF_DEEP_STANDBY = "deep_standby"
CONF_MAC_ADDRESS = "mac_address"
CONF_SOURCE_BOUQUET = "source_bouquet"
DEFAULT_NAME = "Enigma2 Media Player"
DEFAULT_PORT = 80
DEFAULT_SSL = False
DEFAULT_USE_CHANNEL_ICON = False
DEFAULT_USERNAME = "root"
DEFAULT_PASSWORD = "dreambox"
DEFAULT_DEEP_STANDBY = False
DEFAULT_MAC_ADDRESS = ""
DEFAULT_SOURCE_BOUQUET = ""
SUPPORTED_ENIGMA2 = (
SUPPORT_VOLUME_SET
| SUPPORT_VOLUME_MUTE
| SUPPORT_TURN_OFF
| SUPPORT_NEXT_TRACK
| SUPPORT_STOP
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_VOLUME_STEP
| SUPPORT_TURN_ON
| SUPPORT_PAUSE
| SUPPORT_SELECT_SOURCE
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
vol.Optional(
CONF_USE_CHANNEL_ICON, default=DEFAULT_USE_CHANNEL_ICON
): cv.boolean,
vol.Optional(CONF_DEEP_STANDBY, default=DEFAULT_DEEP_STANDBY): cv.boolean,
vol.Optional(CONF_MAC_ADDRESS, default=DEFAULT_MAC_ADDRESS): cv.string,
vol.Optional(CONF_SOURCE_BOUQUET, default=DEFAULT_SOURCE_BOUQUET): cv.string,
}
)
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up of an enigma2 media player."""
if discovery_info:
# Discovery gives us the streaming service port (8001)
# which is not useful as OpenWebif never runs on that port.
# So use the default port instead.
config[CONF_PORT] = DEFAULT_PORT
config[CONF_NAME] = discovery_info["hostname"]
config[CONF_HOST] = discovery_info["host"]
config[CONF_USERNAME] = DEFAULT_USERNAME
config[CONF_PASSWORD] = DEFAULT_PASSWORD
config[CONF_SSL] = DEFAULT_SSL
config[CONF_USE_CHANNEL_ICON] = DEFAULT_USE_CHANNEL_ICON
config[CONF_MAC_ADDRESS] = DEFAULT_MAC_ADDRESS
config[CONF_DEEP_STANDBY] = DEFAULT_DEEP_STANDBY
config[CONF_SOURCE_BOUQUET] = DEFAULT_SOURCE_BOUQUET
device = CreateDevice(
host=config[CONF_HOST],
port=config.get(CONF_PORT),
username=config.get(CONF_USERNAME),
password=config.get(CONF_PASSWORD),
is_https=config[CONF_SSL],
prefer_picon=config.get(CONF_USE_CHANNEL_ICON),
mac_address=config.get(CONF_MAC_ADDRESS),
turn_off_to_deep=config.get(CONF_DEEP_STANDBY),
source_bouquet=config.get(CONF_SOURCE_BOUQUET),
)
add_devices([Enigma2Device(config[CONF_NAME], device)], True)
class Enigma2Device(MediaPlayerEntity):
"""Representation of an Enigma2 box."""
def __init__(self, name, device):
"""Initialize the Enigma2 device."""
self._name = name
self.e2_box = device
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def unique_id(self):
"""Return the unique ID for this entity."""
return self.e2_box.mac_address
@property
def state(self):
"""Return the state of the device."""
if self.e2_box.is_recording_playback:
return STATE_PLAYING
return STATE_OFF if self.e2_box.in_standby else STATE_ON
@property
def available(self):
"""Return True if the device is available."""
return not self.e2_box.is_offline
@property
def supported_features(self):
"""Flag of media commands that are supported."""
return SUPPORTED_ENIGMA2
def turn_off(self):
"""Turn off media player."""
self.e2_box.turn_off()
def turn_on(self):
"""Turn the media player on."""
self.e2_box.turn_on()
@property
def media_title(self):
"""Title of current playing media."""
return self.e2_box.current_service_channel_name
@property
def media_series_title(self):
"""Return the title of current episode of TV show."""
return self.e2_box.current_programme_name
@property
def media_channel(self):
"""Channel of current playing media."""
return self.e2_box.current_service_channel_name
@property
def media_content_id(self):
"""Service Ref of current playing media."""
return self.e2_box.current_service_ref
@property
def media_content_type(self):
"""Type of video currently playing."""
return MEDIA_TYPE_TVSHOW
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self.e2_box.muted
@property
def media_image_url(self):
"""Picon url for the channel."""
return self.e2_box.picon_url
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
self.e2_box.set_volume(int(volume * 100))
def volume_up(self):
"""Volume up the media player."""
self.e2_box.set_volume(int(self.e2_box.volume * 100) + 5)
def volume_down(self):
"""Volume down media player."""
self.e2_box.set_volume(int(self.e2_box.volume * 100) - 5)
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self.e2_box.volume
def media_stop(self):
"""Send stop command."""
self.e2_box.set_stop()
def media_play(self):
"""Play media."""
self.e2_box.toggle_play_pause()
def media_pause(self):
"""Pause the media player."""
self.e2_box.toggle_play_pause()
def media_next_track(self):
"""Send next track command."""
self.e2_box.set_channel_up()
def media_previous_track(self):
"""Send next track command."""
self.e2_box.set_channel_down()
def mute_volume(self, mute):
"""Mute or unmute."""
self.e2_box.mute_volume()
@property
def source(self):
"""Return the current input source."""
return self.e2_box.current_service_channel_name
@property
def source_list(self):
"""List of available input sources."""
return self.e2_box.source_list
def select_source(self, source):
"""Select input source."""
self.e2_box.select_source(self.e2_box.sources[source])
def update(self):
"""Update state of the media_player."""
self.e2_box.update()
@property
def extra_state_attributes(self):
"""Return device specific state attributes.
isRecording: Is the box currently recording.
currservice_fulldescription: Full program description.
        currservice_begin: start time, in the format '21:00'.
        currservice_end: end time, in the format '21:00'.
"""
if self.e2_box.in_standby:
return {}
return {
ATTR_MEDIA_CURRENTLY_RECORDING: self.e2_box.status_info["isRecording"],
ATTR_MEDIA_DESCRIPTION: self.e2_box.status_info[
"currservice_fulldescription"
],
ATTR_MEDIA_START_TIME: self.e2_box.status_info["currservice_begin"],
ATTR_MEDIA_END_TIME: self.e2_box.status_info["currservice_end"],
}
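# Hedged configuration sketch (keys mirror PLATFORM_SCHEMA above; the host
# value is a placeholder). In configuration.yaml:
#
#   media_player:
#     - platform: enigma2
#       host: 192.168.1.20
#       port: 80
#       username: root
#       password: dreambox
#       ssl: false
#       source_bouquet: "Favourites (TV)"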
|
{
"content_hash": "9201f7fae9fc447ce2461e1fb3b7dc22",
"timestamp": "",
"source": "github",
"line_count": 271,
"max_line_length": 85,
"avg_line_length": 30.911439114391143,
"alnum_prop": 0.6373403366360272,
"repo_name": "FreekingDean/home-assistant",
"id": "1c32b1ab805272c3b7c8593a9b1c385e93812605",
"size": "8377",
"binary": false,
"copies": "9",
"ref": "refs/heads/dev",
"path": "homeassistant/components/enigma2/media_player.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2335"
},
{
"name": "Python",
"bytes": "36746639"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
}
|
"""Ops to manipulate lists of tensors."""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_list_ops
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_list_ops import *
# pylint: enable=wildcard-import
from tensorflow.python.util.lazy_loader import LazyLoader
# Loaded lazily to break the circular import chain:
# list_ops -> control_flow_ops -> tensor_array_ops -> list_ops
control_flow_ops = LazyLoader(
"control_flow_ops", globals(),
"tensorflow.python.ops.control_flow_ops")
ops.NotDifferentiable("TensorListConcatLists")
ops.NotDifferentiable("TensorListElementShape")
ops.NotDifferentiable("TensorListLength")
ops.NotDifferentiable("TensorListPushBackBatch")
def empty_tensor_list(element_shape,
element_dtype,
max_num_elements=None,
name=None):
if max_num_elements is None:
max_num_elements = -1
return gen_list_ops.empty_tensor_list(
element_shape=_build_element_shape(element_shape),
element_dtype=element_dtype,
max_num_elements=max_num_elements,
name=name)
def tensor_list_reserve(element_shape, num_elements, element_dtype, name=None):
return gen_list_ops.tensor_list_reserve(
element_shape=_build_element_shape(element_shape),
num_elements=num_elements,
element_dtype=element_dtype,
name=name)
def tensor_list_from_tensor(tensor, element_shape, name=None):
return gen_list_ops.tensor_list_from_tensor(
tensor=tensor,
element_shape=_build_element_shape(element_shape),
name=name)
def tensor_list_get_item(input_handle, index, element_dtype, element_shape=None,
name=None):
return gen_list_ops.tensor_list_get_item(
input_handle=input_handle,
index=index,
element_shape=_build_element_shape(element_shape),
element_dtype=element_dtype,
name=name)
def tensor_list_pop_back(input_handle, element_dtype, name=None):
return gen_list_ops.tensor_list_pop_back(
input_handle=input_handle,
element_shape=-1,
element_dtype=element_dtype,
name=name)
def tensor_list_gather(input_handle, indices, element_dtype, name=None):
return gen_list_ops.tensor_list_gather(
input_handle=input_handle,
indices=indices,
element_shape=-1,
element_dtype=element_dtype,
name=name)
def tensor_list_scatter(tensor, indices, element_shape, name=None):
return gen_list_ops.tensor_list_scatter_v2(
tensor=tensor,
indices=indices,
element_shape=_build_element_shape(element_shape),
num_elements=-1,
name=name)
def tensor_list_stack(input_handle, element_dtype, num_elements=-1, name=None):
return gen_list_ops.tensor_list_stack(
input_handle=input_handle,
element_shape=-1,
element_dtype=element_dtype,
num_elements=num_elements,
name=name)
def tensor_list_concat(input_handle, element_dtype, element_shape=None,
name=None):
# Ignore the lengths output of TensorListConcat. It is only used during
# gradient computation.
return gen_list_ops.tensor_list_concat_v2(
input_handle=input_handle,
element_dtype=element_dtype,
element_shape=_build_element_shape(element_shape),
leading_dims=ops.convert_to_tensor([], dtype=dtypes.int64),
name=name)[0]
def tensor_list_split(tensor, element_shape, lengths, name=None):
return gen_list_ops.tensor_list_split(
tensor=tensor,
element_shape=_build_element_shape(element_shape),
lengths=lengths,
name=name)
def tensor_list_set_item(input_handle,
index,
item,
resize_if_index_out_of_bounds=False,
name=None):
"""Sets `item` at `index` in input list."""
if resize_if_index_out_of_bounds:
input_list_size = gen_list_ops.tensor_list_length(input_handle)
# TODO(srbs): This could cause some slowdown. Consider fusing resize
# functionality in the SetItem op.
input_handle = control_flow_ops.cond(
index >= input_list_size,
lambda: gen_list_ops.tensor_list_resize( # pylint: disable=g-long-lambda
input_handle, index + 1),
lambda: input_handle)
return gen_list_ops.tensor_list_set_item(
input_handle=input_handle, index=index, item=item, name=name)
@ops.RegisterGradient("TensorListPushBack")
def _PushBackGrad(op, dresult):
return gen_list_ops.tensor_list_pop_back(
dresult,
element_shape=array_ops.shape(op.inputs[1]),
element_dtype=op.get_attr("element_dtype"))
@ops.RegisterGradient("TensorListPopBack")
def _PopBackGrad(op, dlist, delement):
if dlist is None:
dlist = empty_tensor_list(
element_dtype=delement.dtype,
element_shape=gen_list_ops.tensor_list_element_shape(
op.outputs[0], shape_type=dtypes.int32))
return gen_list_ops.tensor_list_push_back(dlist, delement), None
@ops.RegisterGradient("TensorListStack")
def _TensorListStackGrad(unused_op, dtensor):
return tensor_list_from_tensor(dtensor, element_shape=dtensor.shape[1:]), None
@ops.RegisterGradient("TensorListConcat")
@ops.RegisterGradient("TensorListConcatV2")
def _TensorListConcatGrad(op, dtensor, unused_dlengths):
"""Gradient function for TensorListConcat."""
dlist = tensor_list_split(
dtensor,
element_shape=gen_list_ops.tensor_list_element_shape(
op.inputs[0], shape_type=dtypes.int32),
lengths=op.outputs[1])
if op.type == "TensorListConcatV2":
return dlist, None, None
else:
return dlist
@ops.RegisterGradient("TensorListSplit")
def _TensorListSplitGrad(op, dlist):
tensor, _, lengths = op.inputs
element_shape = array_ops.slice(array_ops.shape(tensor), [1], [-1])
element_shape = array_ops.concat([[-1], element_shape], axis=0)
return gen_list_ops.tensor_list_concat_v2(
dlist,
element_shape=element_shape,
leading_dims=lengths,
element_dtype=op.inputs[0].dtype)[0], None, None
@ops.RegisterGradient("TensorListFromTensor")
def _TensorListFromTensorGrad(op, dlist):
"""Gradient for TensorListFromTensor."""
t = op.inputs[0]
if t.shape.dims and t.shape.dims[0].value is not None:
num_elements = t.shape.dims[0].value
else:
num_elements = None
if dlist is None:
dlist = empty_tensor_list(
element_dtype=t.dtype,
element_shape=gen_list_ops.tensor_list_element_shape(
op.outputs[0], shape_type=dtypes.int32))
tensor_grad = gen_list_ops.tensor_list_stack(
dlist,
element_shape=array_ops.slice(array_ops.shape(t), [1], [-1]),
element_dtype=t.dtype,
num_elements=num_elements)
shape_grad = None
return tensor_grad, shape_grad
@ops.RegisterGradient("TensorListGetItem")
def _TensorListGetItemGrad(op, ditem):
"""Gradient for TensorListGetItem."""
list_size = gen_list_ops.tensor_list_length(op.inputs[0])
list_grad = gen_list_ops.tensor_list_set_item(
gen_list_ops.tensor_list_reserve(
gen_list_ops.tensor_list_element_shape(op.inputs[0],
shape_type=dtypes.int32),
list_size, element_dtype=ditem.dtype),
index=op.inputs[1],
item=ditem)
index_grad = None
element_shape_grad = None
return list_grad, index_grad, element_shape_grad
@ops.RegisterGradient("TensorListSetItem")
def _TensorListSetItemGrad(op, dlist):
"""Gradient function for TensorListSetItem."""
_, index, item = op.inputs
list_grad = gen_list_ops.tensor_list_set_item(
dlist, index=index, item=array_ops.zeros_like(item))
index_grad = None
element_grad = tensor_list_get_item(
dlist,
index,
element_shape=array_ops.shape(item),
element_dtype=item.dtype)
return list_grad, index_grad, element_grad
@ops.RegisterGradient("TensorListResize")
def _TensorListResizeGrad(op, dlist):
input_list, _ = op.inputs
input_list_size = gen_list_ops.tensor_list_length(input_list)
return gen_list_ops.tensor_list_resize(dlist, input_list_size), None
@ops.RegisterGradient("TensorListGather")
def _TensorListGatherGrad(op, dtensor):
"""Gradient function for TensorListGather."""
input_list, indices, _ = op.inputs
dlist = gen_list_ops.tensor_list_scatter_v2(
tensor=dtensor,
indices=indices,
element_shape=gen_list_ops.tensor_list_element_shape(
input_list, shape_type=dtypes.int32),
num_elements=gen_list_ops.tensor_list_length(input_list))
return dlist, None, None
@ops.RegisterGradient("TensorListScatter")
@ops.RegisterGradient("TensorListScatterV2")
def _TensorListScatterGrad(op, dlist):
"""Gradient function for TensorListScatter."""
tensor = op.inputs[0]
indices = op.inputs[1]
dtensor = gen_list_ops.tensor_list_gather(
dlist,
indices,
element_shape=array_ops.slice(array_ops.shape(tensor), [1], [-1]),
element_dtype=tensor.dtype)
if op.type == "TensorListScatterV2":
return dtensor, None, None, None
else:
return dtensor, None, None
def _build_element_shape(shape):
"""Converts shape to a format understood by list_ops for element_shape.
If `shape` is already a `Tensor` it is returned as-is. We do not perform a
type check here.
If shape is None or a TensorShape with unknown rank, -1 is returned.
  If shape is a scalar, an int32 tensor with an empty list is returned. Note
  we convert it with an explicit int32 dtype, since ops.convert_to_tensor
  would otherwise convert it to a float32, which is not a valid type for
  element_shape.
If shape is a sequence of dims, None's in the list are replaced with -1. We
do not check the dtype of the other dims.
Args:
shape: Could be None, Tensor, TensorShape or a list of dims (each dim could
be a None, scalar or Tensor).
Returns:
A None-free shape that can be converted to a tensor.
"""
if isinstance(shape, ops.Tensor):
return shape
if isinstance(shape, tensor_shape.TensorShape):
# `TensorShape.as_list` requires rank to be known.
shape = shape.as_list() if shape else None
# Shape is unknown.
if shape is None:
return -1
# Shape is a scalar.
if not shape:
return ops.convert_to_tensor(shape, dtype=dtypes.int32)
# Shape is a sequence of dimensions. Convert None dims to -1.
def convert(val):
if val is None:
return -1
if isinstance(val, ops.Tensor):
return val
if isinstance(val, tensor_shape.Dimension):
return val.value if val.value is not None else -1
return val
return [convert(d) for d in shape]
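# Hedged sketch of _build_element_shape's conversions (values follow the
# docstring above; illustrative only):
#   _build_element_shape(None)        -> -1 (unknown shape)
#   _build_element_shape([])          -> int32 tensor for a scalar element
#   _build_element_shape([None, 3])   -> [-1, 3] (None dims become -1)
#   _build_element_shape(a_tensor)    -> a_tensor (returned as-is)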
|
{
"content_hash": "9694636191d98783e08dc3c3d55fb5d2",
"timestamp": "",
"source": "github",
"line_count": 327,
"max_line_length": 81,
"avg_line_length": 33.21406727828746,
"alnum_prop": 0.6888868428321517,
"repo_name": "jendap/tensorflow",
"id": "87409eb2ac6f962caacc566c4a6a9fd7e0e3c184",
"size": "11550",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tensorflow/python/ops/list_ops.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "2867"
},
{
"name": "Batchfile",
"bytes": "14734"
},
{
"name": "C",
"bytes": "606044"
},
{
"name": "C#",
"bytes": "8446"
},
{
"name": "C++",
"bytes": "55619540"
},
{
"name": "CMake",
"bytes": "207169"
},
{
"name": "Dockerfile",
"bytes": "78675"
},
{
"name": "Go",
"bytes": "1383418"
},
{
"name": "HTML",
"bytes": "4680118"
},
{
"name": "Java",
"bytes": "900190"
},
{
"name": "Jupyter Notebook",
"bytes": "2510235"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "77367"
},
{
"name": "Objective-C",
"bytes": "16140"
},
{
"name": "Objective-C++",
"bytes": "102889"
},
{
"name": "PHP",
"bytes": "14644"
},
{
"name": "Pascal",
"bytes": "399"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "45358371"
},
{
"name": "RobotFramework",
"bytes": "891"
},
{
"name": "Ruby",
"bytes": "838"
},
{
"name": "Shell",
"bytes": "530065"
},
{
"name": "Smarty",
"bytes": "25609"
}
],
"symlink_target": ""
}
|
"""
Compile a very strict subset of Python (in which the only types are floats)
to WASM.
"""
from time import time as perf_counter  # perf_counter is not available in pypy3
import ast
import wasmfun as wf
class Context:
def __init__(self):
self.instructions = []
self.names = {}
self._name_counter = 0
self._block_stack = []
def name_idx(self, name):
if name not in self.names:
self.names[name] = self._name_counter
self._name_counter += 1
return self.names[name]
def new_stub(self):
name = 'stub' + str(self._name_counter)
return self.name_idx(name)
def push_block(self, kind):
assert kind in ('if', 'for', 'while')
self._block_stack.append(kind)
def pop_block(self, kind):
assert self._block_stack.pop(-1) == kind
def get_block_level(self):
for i, kind in enumerate(reversed(self._block_stack)):
if kind in ('for', 'while'):
return i
def simplepy2wasm(code):
""" Compile Python code to wasm, by using Python's ast parser
and compiling a very specific subset to WASM instructions.
"""
# Verify / convert input
if isinstance(code, ast.AST):
root = code
elif isinstance(code, str):
root = ast.parse(code)
else:
raise TypeError('simplepy2wasm() requires (str) code or AST.')
if not isinstance(root, ast.Module):
        raise ValueError('simplepy2wasm() expected the root node to be an ast.Module.')
# Compile to instructions
ctx = Context()
for node in root.body:
_compile_expr(node, ctx, False)
locals = ['f64' for i in ctx.names]
# Produce wasm
module = wf.Module(
wf.ImportedFuncion('print_ln', ['f64'], [], 'js', 'print_ln'),
wf.ImportedFuncion('perf_counter', [], ['f64'], 'js', 'perf_counter'),
wf.Function('$main', [], [], locals, ctx.instructions),
)
return module
def _compile_expr(node, ctx, push_stack):
if isinstance(node, ast.Expr):
_compile_expr(node.value, ctx, push_stack)
elif isinstance(node, ast.Assign):
if not (len(node.targets) == 1 and isinstance(node.targets[0], ast.Name)):
raise SyntaxError('Unsupported assignment at line', node.lineno)
idx = ctx.name_idx(node.targets[0].id)
_compile_expr(node.value, ctx, True)
ctx.instructions.append(('set_local', idx))
assert not push_stack
elif isinstance(node, ast.Name):
assert push_stack
ctx.instructions.append(('get_local', ctx.name_idx(node.id)))
elif isinstance(node, ast.Num):
ctx.instructions.append(('f64.const', node.n))
elif isinstance(node, ast.UnaryOp):
_compile_expr(node.operand, ctx, True)
if isinstance(node.op, ast.USub):
ctx.instructions.append(('f64.neg'))
else:
raise SyntaxError('Unsupported unary operator: %s' % node.op.__class__.__name__)
elif isinstance(node, ast.BinOp):
_compile_expr(node.left, ctx, True)
_compile_expr(node.right, ctx, True)
if isinstance(node.op, ast.Add):
ctx.instructions.append(('f64.add'))
elif isinstance(node.op, ast.Sub):
ctx.instructions.append(('f64.sub'))
elif isinstance(node.op, ast.Mult):
ctx.instructions.append(('f64.mul'))
elif isinstance(node.op, ast.Div):
ctx.instructions.append(('f64.div'))
elif isinstance(node.op, ast.Mod):
# todo: this is fragile. E.g. for negative numbers
_compile_expr(node.left, ctx, True) # push again
_compile_expr(node.right, ctx, True)
ctx.instructions.append(('f64.div'))
ctx.instructions.append(('f64.floor'))
ctx.instructions.append(('f64.mul')) # consumes last right
ctx.instructions.append(('f64.sub')) # consumes last left
elif isinstance(node.op, ast.FloorDiv):
ctx.instructions.append(('f64.div'))
ctx.instructions.append(('f64.floor')) # not trunc
else:
            raise SyntaxError('Unsupported binary op: %s' % node.op.__class__.__name__)
if not push_stack:
ctx.instructions.append(('drop'))
elif isinstance(node, ast.Compare):
if len(node.ops) != 1:
            raise SyntaxError('Only binary comparisons (a single comparator) are supported.')
_compile_expr(node.left, ctx, True)
_compile_expr(node.comparators[0], ctx, True)
op = node.ops[0]
if isinstance(op, ast.Eq):
ctx.instructions.append(('f64.eq'))
elif isinstance(op, ast.NotEq):
ctx.instructions.append(('f64.ne'))
elif isinstance(op, ast.Gt):
ctx.instructions.append(('f64.gt'))
elif isinstance(op, ast.Lt):
ctx.instructions.append(('f64.lt'))
elif isinstance(op, ast.GtE):
ctx.instructions.append(('f64.ge'))
elif isinstance(op, ast.LtE):
ctx.instructions.append(('f64.le'))
else:
raise SyntaxError('Unsupported operand: %s' % op)
elif isinstance(node, ast.If):
_compile_expr(node.test, ctx, True)
        assert not push_stack  # an if-statement is not an expression in Python
ctx.push_block('if')
ctx.instructions.append(('if', 'emptyblock'))
for e in node.body:
_compile_expr(e, ctx, False)
if node.orelse:
ctx.instructions.append(('else', ))
for e in node.orelse:
_compile_expr(e, ctx, False)
ctx.instructions.append(('end', ))
ctx.pop_block('if')
elif isinstance(node, ast.For):
# Check whether this is the kind of simple for-loop that we support
if not (isinstance(node.iter, ast.Call) and node.iter.func.id == 'range'):
raise SyntaxError('For-loops are limited to range().')
if node.orelse:
raise SyntaxError('For-loops do not support orelse.')
if not isinstance(node.target, ast.Name):
            raise SyntaxError('For-loop target must be a single name.')
# Prepare start, stop, step
start_stub = ctx.new_stub()
end_stub = ctx.new_stub()
step_stub = ctx.new_stub()
if len(node.iter.args) == 1:
ctx.instructions.append(('f64.const', 0))
_compile_expr(node.iter.args[0], ctx, True)
ctx.instructions.append(('f64.const', 1))
elif len(node.iter.args) == 2:
_compile_expr(node.iter.args[0], ctx, True)
_compile_expr(node.iter.args[1], ctx, True)
ctx.instructions.append(('f64.const', 1))
elif len(node.iter.args) == 3:
_compile_expr(node.iter.args[0], ctx, True)
_compile_expr(node.iter.args[1], ctx, True)
_compile_expr(node.iter.args[2], ctx, True)
else:
raise SyntaxError('range() should have 1, 2, or 3 args')
ctx.instructions.append(('set_local', step_stub)) # reversed order, pop from stack
ctx.instructions.append(('set_local', end_stub))
ctx.instructions.append(('set_local', start_stub))
# Body
target = ctx.name_idx(node.target.id)
ctx.push_block('for')
for i in [('get_local', start_stub), ('set_local', target), # Init target
('block', 'emptyblock'), ('loop', 'emptyblock'), # enter loop
('get_local', target), ('get_local', end_stub), ('f64.ge'), ('br_if', 1), # break (level 2)
]:
ctx.instructions.append(i)
for subnode in node.body:
_compile_expr(subnode, ctx, False)
for i in [('get_local', target), ('get_local', step_stub), ('f64.add'), ('set_local', target), # next iter
('br', 0), # loop
('end'), ('end'), # end of loop and outer block
]:
ctx.instructions.append(i)
ctx.pop_block('for')
elif isinstance(node, ast.While):
        # Check whether this is the kind of simple while-loop that we support
if node.orelse:
raise SyntaxError('While-loops do not support orelse.')
# Body
ctx.push_block('while')
for i in [('block', 'emptyblock'), ('loop', 'emptyblock'), # enter loop (outer block for break)
]:
ctx.instructions.append(i)
for subnode in node.body:
_compile_expr(subnode, ctx, False)
_compile_expr(node.test, ctx, True)
for i in [('br_if', 0), # loop
('end'), ('end'), # end of loop
]:
ctx.instructions.append(i)
ctx.pop_block('while')
elif isinstance(node, ast.Pass):
pass
elif isinstance(node, ast.Continue):
ctx.instructions.append(('br', ctx.get_block_level()))
elif isinstance(node, ast.Break):
ctx.instructions.append(('br', ctx.get_block_level() + 1))
elif isinstance(node, ast.Return):
assert node.value is not None
_compile_expr(node.value, ctx, True)
ctx.instructions.append(('return', ))
elif isinstance(node, ast.Call):
if not isinstance(node.func, ast.Name):
raise SyntaxError('Only support simple function names')
if node.keywords:
raise SyntaxError('No support for keyword args')
name = node.func.id
if name == 'print':
assert len(node.args) == 1, 'print() accepts exactly one argument'
_compile_expr(node.args[0], ctx, True)
ctx.instructions.append(('call', 0))
elif name == 'perf_counter':
assert len(node.args) == 0, 'perf_counter() accepts exactly zero arguments'
ctx.instructions.append(('call', 1))
else:
raise SyntaxError('Not a supported function: %s' % name)
else:
raise SyntaxError('Unsupported syntax: %s' % node.__class__.__name__)
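# ---------------------------------------------------------------------------
# Hedged usage sketch (illustrative; assumes the wasmfun package from this
# repository is importable and that wf.Module exposes to_bytes()):
if __name__ == '__main__':
    EXAMPLE = """
total = 0
for i in range(10):
    total = total + i
print(total)
"""
    module = simplepy2wasm(EXAMPLE)
    print('compiled to %i bytes of wasm' % len(module.to_bytes()))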
|
{
"content_hash": "657092a61ee5c11d65212b3d46b283b2",
"timestamp": "",
"source": "github",
"line_count": 255,
"max_line_length": 115,
"avg_line_length": 39.51764705882353,
"alnum_prop": 0.5693162647613377,
"repo_name": "almarklein/wasmfun",
"id": "1aeb1ef1d51b6773c45e0ed7651e5256010cef1d",
"size": "10077",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "simplepy/simplepy.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Brainfuck",
"bytes": "3797"
},
{
"name": "HTML",
"bytes": "408911"
},
{
"name": "JavaScript",
"bytes": "1581"
},
{
"name": "Julia",
"bytes": "873"
},
{
"name": "Jupyter Notebook",
"bytes": "62487"
},
{
"name": "Python",
"bytes": "123337"
}
],
"symlink_target": ""
}
|
from wtforms.fields import BooleanField, FileField, SelectField, TextAreaField
from wtforms.fields.html5 import EmailField
from wtforms.validators import DataRequired, Email, InputRequired, Optional, ValidationError
from indico.util.i18n import _
from indico.util.placeholders import get_missing_placeholders, render_placeholder_info
from indico.web.forms.base import IndicoForm
from indico.web.forms.fields import IndicoRadioField
from indico.web.forms.validators import UsedIf
from indico.web.forms.widgets import CKEditorWidget
class AgreementForm(IndicoForm):
    agreed = IndicoRadioField(_("Do you agree with the statement above?"), [InputRequired()],
coerce=lambda x: bool(int(x)), choices=[(1, _("I agree")), (0, _("I disagree"))])
reason = TextAreaField(_("Reason"))
class AgreementEmailForm(IndicoForm):
from_address = SelectField(_("From"), [DataRequired()])
cc_addresses = EmailField(_("CC"), [Optional(), Email()],
description=_("Warning: this email address will be able to sign the agreement!"))
body = TextAreaField(_("Email body"), widget=CKEditorWidget(simple=True))
def __init__(self, *args, **kwargs):
self._definition = kwargs.pop('definition')
event = kwargs.pop('event')
super().__init__(*args, **kwargs)
self.from_address.choices = list(event.get_allowed_sender_emails().items())
self.body.description = render_placeholder_info('agreement-email', definition=self._definition, agreement=None)
def validate_body(self, field):
missing = get_missing_placeholders('agreement-email', field.data, definition=self._definition, agreement=None)
if missing:
raise ValidationError(_('Missing placeholders: {}').format(', '.join(missing)))
class AgreementAnswerSubmissionForm(IndicoForm):
answer = IndicoRadioField(_("Answer"), [InputRequired()], coerce=lambda x: bool(int(x)),
choices=[(1, _("Agreement")), (0, _("Disagreement"))])
document = FileField(_("Document"), [UsedIf(lambda form, field: form.answer.data), DataRequired()])
upload_confirm = BooleanField(_("I confirm that I'm uploading a document that clearly shows this person's answer"),
[UsedIf(lambda form, field: form.answer.data), DataRequired()])
understand = BooleanField(_("I understand that I'm answering the agreement on behalf of this person"),
[DataRequired()], description=_("This answer is legally binding and can't be changed "
"afterwards."))
|
{
"content_hash": "eeb8aeaa7d9f4bf8ae0be0b8c1799f14",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 119,
"avg_line_length": 57.80434782608695,
"alnum_prop": 0.6615268898081986,
"repo_name": "pferreir/indico",
"id": "36e0560e797c608a4142a603885b759d94beba3f",
"size": "2873",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "indico/modules/events/agreements/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "34704"
},
{
"name": "HTML",
"bytes": "1394116"
},
{
"name": "JavaScript",
"bytes": "2078347"
},
{
"name": "Mako",
"bytes": "1527"
},
{
"name": "Python",
"bytes": "4993798"
},
{
"name": "SCSS",
"bytes": "475126"
},
{
"name": "Shell",
"bytes": "3877"
},
{
"name": "TeX",
"bytes": "23327"
},
{
"name": "XSLT",
"bytes": "1504"
}
],
"symlink_target": ""
}
|
import os
import time
import hashlib
import warnings
from tempfile import mkdtemp
from shutil import rmtree
from twisted.trial import unittest
from twisted.internet import defer
from scrapy.contrib.pipeline.files import FilesPipeline, FSFilesStore
from scrapy.item import Item, Field
from scrapy.http import Request, Response
from scrapy.settings import Settings
from tests import mock
def _mocked_download_func(request, info):
response = request.meta.get('response')
return response() if callable(response) else response
class FilesPipelineTestCase(unittest.TestCase):
def setUp(self):
self.tempdir = mkdtemp()
self.pipeline = FilesPipeline.from_settings(Settings({'FILES_STORE': self.tempdir}))
self.pipeline.download_func = _mocked_download_func
self.pipeline.open_spider(None)
def tearDown(self):
rmtree(self.tempdir)
def test_file_path(self):
file_path = self.pipeline.file_path
self.assertEqual(file_path(Request("https://dev.mydeco.com/mydeco.pdf")),
'full/c9b564df929f4bc635bdd19fde4f3d4847c757c5.pdf')
self.assertEqual(file_path(Request("http://www.maddiebrown.co.uk///catalogue-items//image_54642_12175_95307.txt")),
'full/4ce274dd83db0368bafd7e406f382ae088e39219.txt')
self.assertEqual(file_path(Request("https://dev.mydeco.com/two/dirs/with%20spaces%2Bsigns.doc")),
'full/94ccc495a17b9ac5d40e3eabf3afcb8c2c9b9e1a.doc')
self.assertEqual(file_path(Request("http://www.dfsonline.co.uk/get_prod_image.php?img=status_0907_mdm.jpg")),
'full/4507be485f38b0da8a0be9eb2e1dfab8a19223f2.jpg')
self.assertEqual(file_path(Request("http://www.dorma.co.uk/images/product_details/2532/")),
'full/97ee6f8a46cbbb418ea91502fd24176865cf39b2')
self.assertEqual(file_path(Request("http://www.dorma.co.uk/images/product_details/2532")),
'full/244e0dd7d96a3b7b01f54eded250c9e272577aa1')
self.assertEqual(file_path(Request("http://www.dorma.co.uk/images/product_details/2532"),
response=Response("http://www.dorma.co.uk/images/product_details/2532"),
info=object()),
'full/244e0dd7d96a3b7b01f54eded250c9e272577aa1')
def test_fs_store(self):
assert isinstance(self.pipeline.store, FSFilesStore)
self.assertEqual(self.pipeline.store.basedir, self.tempdir)
path = 'some/image/key.jpg'
fullpath = os.path.join(self.tempdir, 'some', 'image', 'key.jpg')
self.assertEqual(self.pipeline.store._get_filesystem_path(path), fullpath)
@defer.inlineCallbacks
def test_file_not_expired(self):
item_url = "http://example.com/file.pdf"
item = _create_item_with_files(item_url)
patchers = [
mock.patch.object(FilesPipeline, 'inc_stats', return_value=True),
mock.patch.object(FSFilesStore, 'stat_file', return_value={
'checksum': 'abc', 'last_modified': time.time()}),
mock.patch.object(FilesPipeline, 'get_media_requests',
return_value=[_prepare_request_object(item_url)])
]
for p in patchers:
p.start()
result = yield self.pipeline.process_item(item, None)
self.assertEqual(result['files'][0]['checksum'], 'abc')
for p in patchers:
p.stop()
@defer.inlineCallbacks
def test_file_expired(self):
item_url = "http://example.com/file2.pdf"
item = _create_item_with_files(item_url)
patchers = [
mock.patch.object(FSFilesStore, 'stat_file', return_value={
'checksum': 'abc',
'last_modified': time.time() - (FilesPipeline.EXPIRES * 60 * 60 * 24 * 2)}),
mock.patch.object(FilesPipeline, 'get_media_requests',
return_value=[_prepare_request_object(item_url)]),
mock.patch.object(FilesPipeline, 'inc_stats', return_value=True)
]
for p in patchers:
p.start()
result = yield self.pipeline.process_item(item, None)
self.assertNotEqual(result['files'][0]['checksum'], 'abc')
for p in patchers:
p.stop()
class DeprecatedFilesPipeline(FilesPipeline):
def file_key(self, url):
media_guid = hashlib.sha1(url).hexdigest()
media_ext = os.path.splitext(url)[1]
return 'empty/%s%s' % (media_guid, media_ext)
class DeprecatedFilesPipelineTestCase(unittest.TestCase):
def setUp(self):
self.tempdir = mkdtemp()
def init_pipeline(self, pipeline_class):
self.pipeline = pipeline_class.from_settings(Settings({'FILES_STORE': self.tempdir}))
self.pipeline.download_func = _mocked_download_func
self.pipeline.open_spider(None)
def test_default_file_key_method(self):
self.init_pipeline(FilesPipeline)
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
self.assertEqual(self.pipeline.file_key("https://dev.mydeco.com/mydeco.pdf"),
'full/c9b564df929f4bc635bdd19fde4f3d4847c757c5.pdf')
self.assertEqual(len(w), 1)
self.assertTrue('file_key(url) method is deprecated' in str(w[-1].message))
def test_overridden_file_key_method(self):
self.init_pipeline(DeprecatedFilesPipeline)
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
self.assertEqual(self.pipeline.file_path(Request("https://dev.mydeco.com/mydeco.pdf")),
'empty/c9b564df929f4bc635bdd19fde4f3d4847c757c5.pdf')
self.assertEqual(len(w), 1)
self.assertTrue('file_key(url) method is deprecated' in str(w[-1].message))
def tearDown(self):
rmtree(self.tempdir)
class FilesPipelineTestCaseFields(unittest.TestCase):
def test_item_fields_default(self):
from scrapy.contrib.pipeline.files import FilesPipeline
class TestItem(Item):
name = Field()
file_urls = Field()
files = Field()
url = 'http://www.example.com/files/1.txt'
item = TestItem({'name': 'item1', 'file_urls': [url]})
pipeline = FilesPipeline.from_settings(Settings({'FILES_STORE': 's3://example/files/'}))
requests = list(pipeline.get_media_requests(item, None))
self.assertEqual(requests[0].url, url)
results = [(True, {'url': url})]
pipeline.item_completed(results, item, None)
self.assertEqual(item['files'], [results[0][1]])
def test_item_fields_override_settings(self):
from scrapy.contrib.pipeline.files import FilesPipeline
class TestItem(Item):
name = Field()
files = Field()
stored_file = Field()
url = 'http://www.example.com/files/1.txt'
item = TestItem({'name': 'item1', 'files': [url]})
pipeline = FilesPipeline.from_settings(Settings({'FILES_STORE': 's3://example/files/',
'FILES_URLS_FIELD': 'files', 'FILES_RESULT_FIELD': 'stored_file'}))
requests = list(pipeline.get_media_requests(item, None))
self.assertEqual(requests[0].url, url)
results = [(True, {'url': url})]
pipeline.item_completed(results, item, None)
self.assertEqual(item['stored_file'], [results[0][1]])
class ItemWithFiles(Item):
file_urls = Field()
files = Field()
def _create_item_with_files(*files):
item = ItemWithFiles()
item['file_urls'] = files
return item
def _prepare_request_object(item_url):
return Request(
item_url,
meta={'response': Response(item_url, status=200, body='data')})
if __name__ == "__main__":
unittest.main()
|
{
"content_hash": "f4fb1e4dbbf6b5ca07f0df8b0a419594",
"timestamp": "",
"source": "github",
"line_count": 194,
"max_line_length": 123,
"avg_line_length": 40.845360824742265,
"alnum_prop": 0.6296062594649167,
"repo_name": "ramiro/scrapy",
"id": "0a1737c44945dd2fb3da03e2c03c09264c84b25f",
"size": "7924",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_pipeline_files.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Groff",
"bytes": "2008"
},
{
"name": "HTML",
"bytes": "1809"
},
{
"name": "Python",
"bytes": "1237354"
},
{
"name": "Shell",
"bytes": "673"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('contacts', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='field',
name='is_active',
field=models.BooleanField(default=True, help_text='Whether this field is active'),
),
migrations.AddField(
model_name='field',
name='value_type',
field=models.CharField(default='T', max_length=1, verbose_name='Value data type'),
preserve_default=False,
),
]
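# Hedged usage note (standard Django management command, not part of this
# migration): `python manage.py migrate contacts` applies the two AddField
# operations above.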
|
{
"content_hash": "a6a797c1460fb19bfec0c67da84b1ae9",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 94,
"avg_line_length": 26.916666666666668,
"alnum_prop": 0.5820433436532507,
"repo_name": "xkmato/casepro",
"id": "5f62eba3b02940928bb80420c990bd34e5580f97",
"size": "670",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "casepro/contacts/migrations/0002_auto_20160208_1517.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "3475"
},
{
"name": "CoffeeScript",
"bytes": "220522"
},
{
"name": "HTML",
"bytes": "104527"
},
{
"name": "PLpgSQL",
"bytes": "6012"
},
{
"name": "Python",
"bytes": "878626"
}
],
"symlink_target": ""
}
|
import pytest
from _plotly_utils.basevalidators import BaseDataValidator
from plotly.graph_objs import Scatter, Bar, Box
# Fixtures
# --------
@pytest.fixture()
def validator():
return BaseDataValidator(
class_strs_map={"scatter": "Scatter", "bar": "Bar", "box": "Box"},
plotly_name="prop",
parent_name="parent",
set_uid=True,
)
@pytest.fixture()
def validator_nouid():
return BaseDataValidator(
class_strs_map={"scatter": "Scatter", "bar": "Bar", "box": "Box"},
plotly_name="prop",
parent_name="parent",
set_uid=False,
)
# Tests
# -----
def test_acceptance(validator):
val = [Scatter(mode="lines"), Box(fillcolor="yellow")]
res = validator.validate_coerce(val)
res_present = validator.present(res)
assert isinstance(res, list)
assert isinstance(res_present, tuple)
assert isinstance(res_present[0], Scatter)
assert res_present[0].type == "scatter"
assert res_present[0].mode == "lines"
assert isinstance(res_present[1], Box)
assert res_present[1].type == "box"
assert res_present[1].fillcolor == "yellow"
# Make sure UIDs are actually unique
assert res_present[0].uid != res_present[1].uid
def test_acceptance_dict(validator):
val = (dict(type="scatter", mode="lines"), dict(type="box", fillcolor="yellow"))
res = validator.validate_coerce(val)
res_present = validator.present(res)
assert isinstance(res, list)
assert isinstance(res_present, tuple)
assert isinstance(res_present[0], Scatter)
assert res_present[0].type == "scatter"
assert res_present[0].mode == "lines"
assert isinstance(res_present[1], Box)
assert res_present[1].type == "box"
assert res_present[1].fillcolor == "yellow"
# Make sure UIDs are actually unique
assert res_present[0].uid != res_present[1].uid
def test_default_is_scatter(validator):
val = [dict(mode="lines")]
res = validator.validate_coerce(val)
res_present = validator.present(res)
assert isinstance(res, list)
assert isinstance(res_present, tuple)
assert isinstance(res_present[0], Scatter)
assert res_present[0].type == "scatter"
assert res_present[0].mode == "lines"
def test_rejection_type(validator):
val = 37
with pytest.raises(ValueError) as validation_failure:
validator.validate_coerce(val)
assert "Invalid element(s)" in str(validation_failure.value)
def test_rejection_element_type(validator):
val = [42]
with pytest.raises(ValueError) as validation_failure:
validator.validate_coerce(val)
assert "Invalid element(s)" in str(validation_failure.value)
def test_rejection_element_attr(validator):
val = [dict(type="scatter", bogus=99)]
with pytest.raises(ValueError) as validation_failure:
validator.validate_coerce(val)
assert (
"Invalid property specified for object of type "
+ "plotly.graph_objs.Scatter: 'bogus'"
in str(validation_failure.value)
)
def test_rejection_element_tracetype(validator):
val = [dict(type="bogus", a=4)]
with pytest.raises(ValueError) as validation_failure:
validator.validate_coerce(val)
assert "Invalid element(s)" in str(validation_failure.value)
def test_skip_invalid(validator_nouid):
val = (
dict(
type="scatter",
mode="lines",
marker={"color": "green", "bogus": 23},
line="bad_value",
),
dict(type="box", fillcolor="yellow", bogus=111),
dict(type="bogus", mode="lines+markers", x=[2, 1, 3]),
)
expected = [
dict(type="scatter", mode="lines", marker={"color": "green"}),
dict(type="box", fillcolor="yellow"),
dict(type="scatter", mode="lines+markers", x=[2, 1, 3]),
]
res = validator_nouid.validate_coerce(val, skip_invalid=True)
assert [el.to_plotly_json() for el in res] == expected
|
{
"content_hash": "626377262e9c7aca1031f98fb6d4083a",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 84,
"avg_line_length": 28.04964539007092,
"alnum_prop": 0.643236409608091,
"repo_name": "plotly/python-api",
"id": "f8d50b28da46409a89735dae4d65dbc89dfb5ac5",
"size": "3955",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "packages/python/plotly/_plotly_utils/tests/validators/test_basetraces_validator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "6870"
},
{
"name": "Makefile",
"bytes": "1708"
},
{
"name": "Python",
"bytes": "823245"
},
{
"name": "Shell",
"bytes": "3238"
}
],
"symlink_target": ""
}
|
#!/usr/bin/env python2
#
# Copyright (c) 2009, Roboterclub Aachen e.V.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Roboterclub Aachen e.V. nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY ROBOTERCLUB AACHEN E.V. ''AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL ROBOTERCLUB AACHEN E.V. BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import string
import textwrap
import time
import re
from SCons.Script import *
import posixpath
import traceback
generationBlockString = """/*
* WARNING: This file is generated automatically, do not edit!
* Please modify the corresponding *.in file instead and rebuild this file.
*/
// ----------------------------------------------------------------------------"""
# -----------------------------------------------------------------------------
def template_action(target, source, env):
if not env.has_key('substitutions'):
raise SCons.Errors.UserError, "Use 'Template(..., substitutions = ...)'"
source = source[0].abspath
target = target[0].abspath
output = string.Template(open(source, 'r').read()).safe_substitute(env['substitutions'])
open(target, 'w').write(output)
return 0
# -----------------------------------------------------------------------------
def jinja2_template_action(target, source, env):
if not env.has_key('substitutions'):
raise SCons.Errors.UserError, "Use 'Jinja2Template(..., substitutions = ...)'"
try:
import jinja2
except ImportError:
env.Error("To use this functionality you need to install the jinja2 template engine")
Exit(1)
globals = {
'time': time.strftime("%d %b %Y, %H:%M:%S", time.localtime()),
'match': re.match,
'generation_block': generationBlockString,
}
	def filter_wordwrap(value, width=79):
		# avoid shadowing the built-in 'str'
		return '\n\n'.join([textwrap.fill(paragraph, width) for paragraph in value.split('\n\n')])
def filter_indent(value, level=0):
return ('\n' + '\t' * level).join(value.split('\n'))
def filter_pad(value, min_width):
tab_width = 4
tab_count = (min_width/tab_width - len(value)/tab_width) + 1
return value + ('\t' * tab_count)
def filter_split(value, delimiter):
return value.split(delimiter)
def filter_values(lst, key):
"""
Goes through the list of dictionaries and
adds all the values of a certain key
to a list which is thus returned
"""
values = []
for item in lst:
if isinstance(item, dict) and key in item:
if item[key] not in values:
values.append(item[key])
return values
# Overwrite jinja2 Environment in order to enable relative paths
# since this runs locally that should not be a security concern
# Code from:
# http://stackoverflow.com/questions/8512677/how-to-include-a-template-with-relative-path-in-jinja2
class RelEnvironment(jinja2.Environment):
"""Override join_path() to enable relative template paths.
Take care of paths. Jinja seems to use '/' as path separator in
templates.
"""
def join_path(self, template, parent):
d = posixpath.join(posixpath.dirname(parent), template)
return posixpath.normpath(d)
# path, filename = os.path.split(source[0].path)
path = env['XPCC_ROOTPATH']
filename = os.path.relpath(source[0].path, path)
loader = RelEnvironment(loader = jinja2.FileSystemLoader(path), extensions=['jinja2.ext.do'])
if 'XPCC_JINJA2_FILTER' in env:
loader.filters = dict(loader.filters.items() +
env['XPCC_JINJA2_FILTER'].items())
loader.filters['xpcc.wordwrap'] = filter_wordwrap
loader.filters['xpcc.indent'] = filter_indent
loader.filters['xpcc.pad'] = filter_pad
loader.filters['xpcc.values'] = filter_values
loader.filters['split'] = filter_split # not XPCC specific
if 'XPCC_JINJA2_TEST' in env:
loader.tests = dict(loader.tests.items() +
env['XPCC_JINJA2_TEST'].items())
# Jinja2 Line Statements
loader.line_statement_prefix = '%%'
loader.line_comment_prefix = '%#'
try:
#convert native path format of filename into '/' separated jinja relative path
template = loader.get_template(filename.replace('\\', '/'), globals=globals)
	except Exception as e:
		# jinja may raise a TemplateNotFound error, which is a subclass of
		# EnvironmentError with e.errno set to None. SCons/Errors.py handles
		# instances of EnvironmentError specially in convert_to_BuildError,
		# but that code expects a non-zero errno (system-generated
		# EnvironmentErrors always carry one). With errno == None, the
		# status computed in SCons/Action.py's __call__ ends up None as well
		# and the exception is silently lost.
		# Solution: raise a plain Exception, which is not a subclass of
		# EnvironmentError. Because Python 2 cannot chain exceptions, print
		# the original traceback and include e in the message.
		traceback.print_exc()
		raise Exception('Failed to retrieve Template', e)
output = template.render(env['substitutions']).encode('utf-8')
open(target[0].path, 'w').write(output)
def template_emitter(target, source, env):
Depends(target, SCons.Node.Python.Value(env['substitutions']))
return target, source
def template_string(target, source, env):
return "Template: '%s' to '%s'" % (str(source[0]), str(target[0]))
def template_add_test(env, test_name, test_function, alias='template_add_test'):
if 'XPCC_JINJA2_TEST' not in env:
env['XPCC_JINJA2_TEST'] = {}
env['XPCC_JINJA2_TEST'][test_name] = test_function
def template_add_filter(env, filter_name, filter_function, alias='template_add_filter'):
if 'XPCC_JINJA2_FILTER' not in env:
env['XPCC_JINJA2_FILTER'] = {}
env['XPCC_JINJA2_FILTER'][filter_name] = filter_function
# -----------------------------------------------------------------------------
includeExpression = re.compile(r"(\{%|%%)\s+(import|include)\s+'(?P<file>\S+)'")
def find_includes(file):
""" Find include directives in an .in file """
files = []
for line in open(file).readlines():
match = includeExpression.search(line)
if match:
filename = match.group('file')
if not os.path.isabs(filename):
filename = os.path.join(os.path.dirname(os.path.abspath(file)), filename)
files.append(filename)
return files
def in_include_scanner(node, env, path, arg=None):
""" Generates the dependencies for the .in files """
abspath, targetFilename = os.path.split(node.get_abspath())
stack = [targetFilename]
dependencies = [targetFilename]
while stack:
nextFile = stack.pop()
files = find_includes(os.path.join(abspath, nextFile))
for file in files:
if file not in dependencies:
stack.append(file)
# env.Debug(".in include scanner found %s" % file)
dependencies.extend(files)
dependencies.remove(targetFilename)
return dependencies
# -----------------------------------------------------------------------------
def generate(env, **kw):
env.Append(
BUILDERS = {
'Template': env.Builder(
action = env.Action(template_action, template_string),
emitter = template_emitter,
src_suffix = '.in',
source_scanner =
SCons.Script.Scanner(
function = in_include_scanner,
skeys = ['.in']),
single_source = True
),
'Jinja2Template': env.Builder(
action = env.Action(jinja2_template_action, template_string),
emitter = template_emitter,
src_suffix = '.in',
source_scanner =
SCons.Script.Scanner(
function = in_include_scanner,
skeys = ['.in']),
single_source = True
),
})
env.AddMethod(template_add_test, 'AddTemplateJinja2Test')
env.AddMethod(template_add_filter, 'AddTemplateJinja2Filter')
def exists(env):
return True
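
# -----------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of this tool; the target, source
# and substitution names below are hypothetical). After generate() has run,
# an SConscript can invoke the two builders registered above; both actions
# require the 'substitutions' keyword:
#
#   env.Template('config.hpp', 'config.hpp.in',
#                substitutions={'version': '1.0'})
#   env.Jinja2Template('driver.cpp', 'driver.cpp.in',
#                      substitutions={'pins': [1, 2, 3]})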
|
{
"content_hash": "cb13640bdc0039ef8ffe80691b1ff69c",
"timestamp": "",
"source": "github",
"line_count": 229,
"max_line_length": 152,
"avg_line_length": 37.842794759825324,
"alnum_prop": 0.6848603738749135,
"repo_name": "dergraaf/xpcc",
"id": "5f70c530495159f9ad3dd0dba898b23e6d5fdd03",
"size": "8666",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "scons/site_tools/template.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "54265378"
},
{
"name": "C++",
"bytes": "3595390"
},
{
"name": "Objective-C",
"bytes": "24080"
},
{
"name": "Python",
"bytes": "177057"
},
{
"name": "Shell",
"bytes": "1071"
}
],
"symlink_target": ""
}
|
'''OpenGL extension EXT.shader_image_load_store
This module customises the behaviour of the
OpenGL.raw.GL.EXT.shader_image_load_store to provide a more
Python-friendly API
Overview (from the spec)
This extension provides GLSL built-in functions allowing shaders to load
from, store to, and perform atomic read-modify-write operations to a
single level of a texture object from any shader stage. These built-in
functions are named imageLoad(), imageStore(), and imageAtomic*(),
respectively, and accept integer texel coordinates to identify the texel
accessed. The extension adds the notion of "image units" to the OpenGL
API, to which texture levels are bound for access by the GLSL built-in
functions. To allow shaders to specify the image unit to access, GLSL
provides a new set of data types ("image*") similar to samplers. Each
image variable is assigned an integer value to identify an image unit to
access, which is specified using Uniform*() APIs in a manner similar to
samplers. For implementations supporting the NV_gpu_program5 extensions,
assembly language instructions to perform image loads, stores, and atomics
are also provided.
This extension also provides the capability to explicitly enable "early"
per-fragment tests, where operations like depth and stencil testing are
performed prior to fragment shader execution. In unextended OpenGL,
fragment shaders never have any side effects and implementations can
sometimes perform per-fragment tests and discard some fragments prior to
executing the fragment shader. Since this extension allows fragment
shaders to write to texture and buffer object memory using the built-in
image functions, such optimizations could lead to non-deterministic
results. To avoid this, implementations supporting this extension may not
perform such optimizations on shaders having such side effects. However,
enabling early per-fragment tests guarantees that such tests will be
performed prior to fragment shader execution, and ensures that image
stores and atomics will not be performed by fragment shader invocations
where these per-fragment tests fail.
Finally, this extension provides both a GLSL built-in function and an
OpenGL API function allowing applications some control over the ordering
of image loads, stores, and atomics relative to other OpenGL pipeline
operations accessing the same memory. Because the extension provides the
ability to perform random accesses to texture or buffer object memory,
such accesses are not easily tracked by the OpenGL driver. To avoid the
need for heavy-handed synchronization at the driver level, this extension
requires manual synchronization. The MemoryBarrierEXT() OpenGL API
function allows applications to specify a bitfield indicating the set of
OpenGL API operations to synchronize relative to shader memory access.
The memoryBarrier() GLSL built-in function provides a synchronization
point within a given shader invocation to ensure that all memory accesses
performed prior to the synchronization point complete prior to any started
after the synchronization point.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/EXT/shader_image_load_store.txt
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions, wrapper
from OpenGL.GL import glget
import ctypes
from OpenGL.raw.GL.EXT.shader_image_load_store import *
### END AUTOGENERATED SECTION
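
# A minimal usage sketch (illustrative only; it assumes a current GL context
# and a shader-compilation helper, neither of which this module provides).
# The GLSL string uses the imageLoad()/imageStore() built-ins described in
# the overview above; glMemoryBarrierEXT is re-exported by the raw module
# imported here.
#
#   FRAGMENT_SHADER = """
#   #extension GL_EXT_shader_image_load_store : enable
#   layout(size4x32) uniform image2D img;
#   void main() {
#       vec4 texel = imageLoad(img, ivec2(gl_FragCoord.xy));
#       imageStore(img, ivec2(gl_FragCoord.xy), texel * 0.5);
#   }
#   """
#   # After draw calls that store through image units, synchronize before
#   # anything else reads the same memory:
#   glMemoryBarrierEXT(GL_ALL_BARRIER_BITS_EXT)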
|
{
"content_hash": "94ed145a0ef7ef99cbdd3ae80296e0ab",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 75,
"avg_line_length": 56.274193548387096,
"alnum_prop": 0.8091143594153053,
"repo_name": "frederica07/Dragon_Programming_Process",
"id": "eee3b184433a5321ee653a17431e5fdad6027e13",
"size": "3489",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PyOpenGL-3.0.2/OpenGL/GL/EXT/shader_image_load_store.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Makefile",
"bytes": "1548"
},
{
"name": "Python",
"bytes": "2558317"
}
],
"symlink_target": ""
}
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns(
'record.views',
# Examples:
# url(r'^$', 'dzhops.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^list/$', 'record', name='record_list'),
url(r'^detail/$', 'recordDetail', name='record_detail'),
)
|
{
"content_hash": "7ef71a5f61be3b84e07dff011cca05bb",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 60,
"avg_line_length": 31.416666666666668,
"alnum_prop": 0.6392572944297082,
"repo_name": "Hasal/dzhops",
"id": "c01dff86a35a8d632dd6189faa7c80a0e1816fcd",
"size": "401",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "record/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "4410"
},
{
"name": "HTML",
"bytes": "61984"
},
{
"name": "JavaScript",
"bytes": "64299"
},
{
"name": "Python",
"bytes": "95576"
}
],
"symlink_target": ""
}
|
def sequentialSearch(alist, item):
pos = 0
found = False
while pos < len(alist) and not found:
if alist[pos] == item:
found = True
else:
pos = pos+1
return found
testlist = [1, 2, 32, 8, 17, 19, 42, 13, 0]
print(sequentialSearch(testlist, 3))
print(sequentialSearch(testlist, 13))
|
{
"content_hash": "71caadfcde852332192851cd201fce71",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 43,
"avg_line_length": 22.866666666666667,
"alnum_prop": 0.5743440233236151,
"repo_name": "robin1885/algorithms-exercises-using-python",
"id": "7d0f9541364c6b6dc7ec61901b12eba83c3aea5c",
"size": "343",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "source-code-from-author-book/Chapter5/sequentialsearch.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "182896"
}
],
"symlink_target": ""
}
|
from analytics.flask_api_server import app
if __name__ == "__main__":
app.run()
|
{
"content_hash": "2933e631ca1f2a5c9634df8855a3f7c2",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 42,
"avg_line_length": 21.25,
"alnum_prop": 0.611764705882353,
"repo_name": "codeforfrankfurt/PolBotCheck",
"id": "fa7369d0c5fb695c80805a575bb8e697277444e9",
"size": "85",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webapi/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "641"
},
{
"name": "HTML",
"bytes": "1137"
},
{
"name": "JavaScript",
"bytes": "11044"
},
{
"name": "Python",
"bytes": "30606"
},
{
"name": "Shell",
"bytes": "408"
}
],
"symlink_target": ""
}
|
"""Implementation of python-orcid library."""
from bs4 import BeautifulSoup
import requests
import simplejson as json
import sys
from lxml import etree
if sys.version_info[0] == 2:
from urllib import urlencode
string_types = basestring,
else:
from urllib.parse import urlencode
string_types = str,
SEARCH_VERSION = "/v2.0"
VERSION = "/v2.0"
__version__ = "1.0.3"
class PublicAPI(object):
"""Public API."""
TYPES_WITH_PUTCODES = set(['address',
'education',
'email',
'employment',
'external-identifier',
'funding',
'keywords',
'other-names',
'peer-review',
'researcher-urls',
'work'])
TYPES_WITH_MULTIPLE_PUTCODES = set(['works'])
def __init__(self, institution_key, institution_secret, sandbox=False,
timeout=None, do_store_raw_response=False):
"""Initialize public API.
Parameters
----------
:param institution_key: string
The ORCID key given to the institution
:param institution_secret: string
The ORCID secret given to the institution
:param sandbox: boolean
Should the sandbox be used. False (default) indicates production
mode.
:param timeout: float or tuple
The request timeout in seconds. If None, no timeout is used. See
`requests documentation
<http://docs.python-requests.org/en/master/user/advanced/#timeouts>`_
for more information.
"""
self._key = institution_key
self._secret = institution_secret
self._timeout = timeout
self.raw_response = None
self.do_store_raw_response = do_store_raw_response
if sandbox:
self._host = "sandbox.orcid.org"
self._login_or_register_endpoint = \
"https://sandbox.orcid.org/oauth/authorize"
self._login_url = \
"https://sandbox.orcid.org/oauth/custom/login.json"
self._token_url = "https://api.sandbox.orcid.org/oauth/token"
self._endpoint = "https://pub.sandbox.orcid.org"
else:
self._host = "orcid.org"
self._login_or_register_endpoint = \
"https://orcid.org/oauth/authorize"
self._login_url = \
'https://orcid.org/oauth/custom/login.json'
self._token_url = "https://api.orcid.org/oauth/token"
self._endpoint = "https://pub.orcid.org"
def get_login_url(self, scope, redirect_uri, state=None,
family_names=None, given_names=None, email=None,
lang=None, show_login=None):
"""Return a URL for a user to login/register with ORCID.
Parameters
----------
:param scope: string or iterable of strings
The scope(s) of the authorization request.
For example '/authenticate'
:param redirect_uri: string
The URI to which the user's browser should be redirected after the
login.
:param state: string
An arbitrary token to prevent CSRF. See the OAuth 2 docs for
details.
:param family_names: string
The user's family name, used to fill the registration form.
:param given_names: string
The user's given name, used to fill the registration form.
:param email: string
The user's email address, used to fill the sign-in or registration
form.
:param lang: string
The language in which to display the authorization page.
:param show_login: bool
Determines whether the log-in or registration form will be shown by
default.
Returns
-------
:returns: string
The URL ready to be offered as a link to the user.
"""
if not isinstance(scope, string_types):
scope = " ".join(sorted(set(scope)))
data = [("client_id", self._key), ("scope", scope),
("response_type", "code"), ("redirect_uri", redirect_uri)]
if state:
data.append(("state", state))
if family_names:
data.append(("family_names", family_names.encode("utf-8")))
if given_names:
data.append(("given_names", given_names.encode("utf-8")))
if email:
data.append(("email", email))
if lang:
data.append(("lang", lang))
if show_login is not None:
data.append(("show_login", "true" if show_login else "false"))
return self._login_or_register_endpoint + "?" + urlencode(data)
def search(self, query, method="lucene", start=None,
rows=None, access_token=None):
"""Search the ORCID database.
Parameters
----------
:param query: string
Query in line with the chosen method.
:param method: string
One of 'lucene', 'edismax', 'dismax'
:param start: string
Index of the first record requested. Use for pagination.
:param rows: string
Number of records requested. Use for pagination.
:param access_token: string
If obtained before, the access token to use to pass through
authorization. Note that if this argument is not provided,
the function will take more time.
Returns
-------
:returns: dict
Search result with error description available. The results can
be obtained by accessing key 'result'. To get the number
of all results, access the key 'num-found'.
"""
        if access_token is None:
            access_token = self.get_search_token_from_orcid()
headers = {'Accept': 'application/orcid+json',
'Authorization': 'Bearer %s' % access_token}
return self._search(query, method, start, rows, headers,
self._endpoint)
def search_generator(self, query, method="lucene",
pagination=10, access_token=None):
"""Search the ORCID database with a generator.
The generator will yield every result.
Parameters
----------
:param query: string
Query in line with the chosen method.
:param method: string
One of 'lucene', 'edismax', 'dismax'
:param pagination: integer
How many papers should be fetched with the request.
:param access_token: string
If obtained before, the access token to use to pass through
authorization. Note that if this argument is not provided,
the function will take more time.
Yields
-------
:yields: dict
Single profile from the search results.
"""
        if access_token is None:
            access_token = self.get_search_token_from_orcid()
headers = {'Accept': 'application/orcid+json',
'Authorization': 'Bearer %s' % access_token}
index = 0
while True:
paginated_result = self._search(query, method, index, pagination,
headers, self._endpoint)
if not paginated_result['result']:
return
for result in paginated_result['result']:
yield result
index += pagination
def get_search_token_from_orcid(self, scope='/read-public'):
"""Get a token for searching ORCID records.
Parameters
----------
:param scope: string
/read-public or /read-member
Returns
-------
:returns: string
The token.
"""
payload = {'client_id': self._key,
'client_secret': self._secret,
'scope': scope,
'grant_type': 'client_credentials'
}
url = "%s/oauth/token" % self._endpoint
headers = {'Accept': 'application/json'}
response = requests.post(url, data=payload, headers=headers,
timeout=self._timeout)
response.raise_for_status()
if self.do_store_raw_response:
self.raw_response = response
return response.json()['access_token']
def get_token(self, user_id, password, redirect_uri,
scope='/read-limited'):
"""Get the token.
Parameters
----------
:param user_id: string
The id of the user used for authentication.
:param password: string
The user password.
:param redirect_uri: string
The redirect uri of the institution.
:param scope: string
The desired scope. For example '/activities/update',
'/read-limited', etc.
Returns
-------
:returns: string
The token.
"""
response = self._authenticate(user_id, password, redirect_uri,
scope)
return response['access_token']
def get_token_from_authorization_code(self,
authorization_code, redirect_uri):
"""Like `get_token`, but using an OAuth 2 authorization code.
Use this method if you run a webserver that serves as an endpoint for
the redirect URI. The webserver can retrieve the authorization code
from the URL that is requested by ORCID.
Parameters
----------
:param redirect_uri: string
The redirect uri of the institution.
:param authorization_code: string
The authorization code.
Returns
-------
:returns: dict
All data of the access token. The access token itself is in the
``"access_token"`` key.
"""
token_dict = {
"client_id": self._key,
"client_secret": self._secret,
"grant_type": "authorization_code",
"code": authorization_code,
"redirect_uri": redirect_uri,
}
response = requests.post(self._token_url, data=token_dict,
headers={'Accept': 'application/json'},
timeout=self._timeout)
response.raise_for_status()
if self.do_store_raw_response:
self.raw_response = response
return json.loads(response.text)
def read_record_public(self, orcid_id, request_type, token, put_code=None,
accept_type='application/orcid+json'):
"""Get the public info about the researcher.
Parameters
----------
:param orcid_id: string
Id of the queried author.
:param request_type: string
For example: 'record'.
See https://members.orcid.org/api/tutorial/read-orcid-records
for possible values.
:param token: string
Token received from OAuth 2 3-legged authorization.
:param put_code: string | list of strings
The id of the queried work. In case of 'works' request_type
might be a list of strings
:param accept_type: expected MIME type of received data
Returns
-------
:returns: dict | lxml.etree._Element
Record(s) in JSON-compatible dictionary representation or
in XML E-tree, depending on accept_type specified.
"""
return self._get_info(orcid_id, self._get_public_info, request_type,
token, put_code, accept_type)
def _authenticate(self, user_id, password, redirect_uri, scope):
session = requests.session()
session.get('https://' + self._host + '/signout',
timeout=self._timeout)
params = {
'client_id': self._key,
'response_type': 'code',
'scope': scope,
'redirect_uri': redirect_uri
}
response = session.get(self._login_or_register_endpoint,
params=params,
headers={'Host': self._host},
timeout=self._timeout)
response.raise_for_status()
soup = BeautifulSoup(response.content, 'html5lib')
csrf = soup.find(attrs={'name': '_csrf'}).attrs['content']
headers = {
'Host': self._host,
'Origin': 'https://' + self._host,
'Content-Type': 'application/json;charset=UTF-8',
'X-CSRF-TOKEN': csrf
}
data = {
"userName": user_id,
"password": password,
"approved": True,
"persistentTokenEnabled": True,
"redirectUrl": None
}
response = session.post(
self._login_url,
data=json.dumps(data),
headers=headers
)
response.raise_for_status()
uri = json.loads(response.text)['redirectUrl']
authorization_code = uri[uri.rfind('=') + 1:]
return self.get_token_from_authorization_code(authorization_code,
redirect_uri)
def _get_info(self, orcid_id, function, request_type, token,
put_code=None, accept_type='application/orcid+json'):
if request_type in self.TYPES_WITH_PUTCODES and not put_code:
raise ValueError("""In order to fetch specific record,
please specify the 'put_code' argument.""")
elif request_type not in self.TYPES_WITH_PUTCODES and \
request_type not in self.TYPES_WITH_MULTIPLE_PUTCODES \
and isinstance(put_code, str):
raise ValueError("""In order to fetch a summary, the
'put_code' argument is redundant.""")
elif request_type in self.TYPES_WITH_MULTIPLE_PUTCODES \
and put_code is not None and not isinstance(put_code, list):
raise ValueError("""In order to fetch multiple records,
the 'put_code' should be a list.""")
response = function(orcid_id, request_type, token,
put_code, accept_type)
response.raise_for_status()
if self.do_store_raw_response:
self.raw_response = response
return self._deserialize_by_content_type(response.content, accept_type)
def _get_public_info(self, orcid_id, request_type, access_token, put_code,
accept_type):
request_url = '%s/%s/%s' % (self._endpoint + VERSION,
orcid_id, request_type)
if put_code:
if request_type in self.TYPES_WITH_MULTIPLE_PUTCODES:
request_url += '/%s' % ','.join(put_code)
else:
request_url += '/%s' % put_code
headers = {'Accept': accept_type,
'Authorization': 'Bearer %s' % access_token}
return requests.get(request_url, headers=headers,
timeout=self._timeout)
def _search(self, query, method, start, rows, headers,
endpoint):
url = endpoint + SEARCH_VERSION + \
"/search/?defType=" + method + "&q=" + query
if start:
url += "&start=%s" % start
if rows:
url += "&rows=%s" % rows
response = requests.get(url, headers=headers,
timeout=self._timeout)
response.raise_for_status()
if self.do_store_raw_response:
self.raw_response = response
return response.json()
def _deserialize_by_content_type(self, data, content_type):
if content_type == 'application/orcid+json':
return json.loads(data)
if content_type == 'application/orcid+xml':
return etree.XML(data)
raise NotImplementedError('No deserializer for content of type %s'
% content_type)
class MemberAPI(PublicAPI):
"""Member API."""
def __init__(self, institution_key, institution_secret, sandbox=False,
timeout=None, do_store_raw_response=False):
"""Initialize member API.
Parameters
----------
:param institution_key: string
The ORCID key given to the institution
:param institution_secret: string
The ORCID secret given to the institution
:param sandbox: boolean
Should the sandbox be used. False (default) indicates production
mode.
:param timeout: float or tuple
The request timeout in seconds. If None, no timeout is used. See
`requests documentation
<http://docs.python-requests.org/en/master/user/advanced/#timeouts>`_
for more information.
"""
super(MemberAPI, self).__init__(institution_key,
institution_secret, sandbox, timeout)
self.raw_response = None
self.do_store_raw_response = do_store_raw_response
if sandbox:
self._endpoint = "https://api.sandbox.orcid.org"
self._auth_url = 'https://sandbox.orcid.org/signin/auth.json'
self._authorize_url = \
'https://sandbox.orcid.org/oauth/custom/authorize.json'
else:
self._endpoint = "https://api.orcid.org"
self._auth_url = 'https://orcid.org/signin/auth.json'
self._authorize_url = \
'https://orcid.org/oauth/custom/authorize.json'
def add_record(self, orcid_id, token, request_type, data,
content_type='application/orcid+json'):
"""Add a record to a profile.
Parameters
----------
:param orcid_id: string
Id of the author.
:param token: string
Token received from OAuth 2 3-legged authorization.
:param request_type: string
One of 'activities', 'education', 'employment', 'funding',
'peer-review', 'work'.
:param data: dict | lxml.etree._Element
The record in Python-friendly format, as either JSON-compatible
dictionary (content_type == 'application/orcid+json') or
XML (content_type == 'application/orcid+xml')
:param content_type: string
MIME type of the passed record.
Returns
-------
:returns: string
Put-code of the new work.
"""
return self._update_activities(orcid_id, token, requests.post,
request_type, data,
content_type=content_type)
def get_token(self, user_id, password, redirect_uri,
scope='/activities/update'):
"""Get the token.
Parameters
----------
:param user_id: string
The id of the user used for authentication.
:param password: string
The user password.
:param redirect_uri: string
The redirect uri of the institution.
:param scope: string
The desired scope. For example '/activities/update',
'/read-limited', etc.
Returns
-------
:returns: string
The token.
"""
return super(MemberAPI, self).get_token(user_id, password,
redirect_uri, scope)
def get_user_orcid(self, user_id, password, redirect_uri):
"""Get the user orcid from authentication process.
Parameters
----------
:param user_id: string
The id of the user used for authentication.
:param password: string
The user password.
:param redirect_uri: string
The redirect uri of the institution.
Returns
-------
:returns: string
The orcid.
"""
response = self._authenticate(user_id, password, redirect_uri,
'/authenticate')
return response['orcid']
def read_record_member(self, orcid_id, request_type, token, put_code=None,
accept_type='application/orcid+json'):
"""Get the member info about the researcher.
Parameters
----------
:param orcid_id: string
Id of the queried author.
:param request_type: string
For example: 'record'.
            See https://members.orcid.org/api/tutorial/read-orcid-records
            for possible values.
:param token: string
Token received from OAuth 2 3-legged authorization.
:param put_code: string | list of strings
The id of the queried work. In case of 'works' request_type
might be a list of strings
:param accept_type: expected MIME type of received data
Returns
-------
:returns: dict | lxml.etree._Element
Record(s) in JSON-compatible dictionary representation or
in XML E-tree, depending on accept_type specified.
"""
return self._get_info(orcid_id, self._get_member_info, request_type,
token, put_code, accept_type)
def remove_record(self, orcid_id, token, request_type, put_code):
"""Add a record to a profile.
Parameters
----------
:param orcid_id: string
Id of the author.
:param token: string
Token received from OAuth 2 3-legged authorization.
:param request_type: string
One of 'activities', 'education', 'employment', 'funding',
'peer-review', 'work'.
:param put_code: string
The id of the record. Can be retrieved using read_record_* method.
In the result of it, it will be called 'put-code'.
"""
self._update_activities(orcid_id, token, requests.delete, request_type,
put_code=put_code)
def search(self, query, method="lucene", start=None, rows=None,
access_token=None):
"""Search the ORCID database.
Parameters
----------
:param query: string
Query in line with the chosen method.
:param method: string
One of 'lucene', 'edismax', 'dismax'
:param start: string
Index of the first record requested. Use for pagination.
:param rows: string
Number of records requested. Use for pagination.
:param access_token: string
If obtained before, the access token to use to pass through
authorization. Note that if this argument is not provided,
the function will take more time.
Returns
-------
:returns: dict
Search result with error description available. The results can
be obtained by accessing key 'result'.
To get the number of all results, access the key 'num-found'.
"""
        if access_token is None:
            access_token = self.get_search_token_from_orcid()
headers = {'Accept': 'application/orcid+json',
'Authorization': 'Bearer %s' % access_token}
return self._search(query, method, start, rows, headers,
self._endpoint)
def search_generator(self, query, method="lucene", pagination=10,
access_token=None):
"""Search the ORCID database with a generator.
The generator will yield every result.
Parameters
----------
:param query: string
Query in line with the chosen method.
:param method: string
One of 'lucene', 'edismax', 'dismax'
:param pagination: integer
How many papers should be fetched with the request.
:param access_token: string
If obtained before, the access token to use to pass through
authorization. Note that if this argument is not provided,
the function will take more time.
"""
        if access_token is None:
            access_token = self.get_search_token_from_orcid()
headers = {'Accept': 'application/orcid+json',
'Authorization': 'Bearer %s' % access_token}
index = 0
while True:
paginated_result = self._search(query, method, index, pagination,
headers, self._endpoint)
if not paginated_result['result']:
return
for result in paginated_result['result']:
yield result
index += pagination
def update_record(self, orcid_id, token, request_type, data, put_code,
content_type='application/orcid+json'):
"""Add a record to a profile.
Parameters
----------
:param orcid_id: string
Id of the author.
:param token: string
Token received from OAuth 2 3-legged authorization.
:param request_type: string
One of 'activities', 'education', 'employment', 'funding',
'peer-review', 'work'.
:param data: dict | lxml.etree._Element
The record in Python-friendly format, as either JSON-compatible
dictionary (content_type == 'application/orcid+json') or
XML (content_type == 'application/orcid+xml')
:param put_code: string
The id of the record. Can be retrieved using read_record_* method.
In the result of it, it will be called 'put-code'.
:param content_type: string
MIME type of the data being sent.
"""
self._update_activities(orcid_id, token, requests.put, request_type,
data, put_code, content_type)
def _get_member_info(self, orcid_id, request_type, access_token, put_code,
accept_type):
request_url = '%s/%s/%s' % (self._endpoint + VERSION,
orcid_id, request_type)
if put_code:
if request_type in self.TYPES_WITH_MULTIPLE_PUTCODES:
request_url += '/%s' % ','.join(put_code)
else:
request_url += '/%s' % put_code
headers = {'Accept': accept_type,
'Authorization': 'Bearer %s' % access_token}
return requests.get(request_url, headers=headers,
timeout=self._timeout)
def _update_activities(self, orcid_id, token, method, request_type,
data=None, put_code=None,
content_type='application/orcid+json'):
url = "%s/%s/%s" % (self._endpoint + VERSION, orcid_id,
request_type)
if put_code:
url += ('/%s' % put_code)
if data is not None:
self._add_put_code_by_content_type(content_type, data,
put_code)
headers = {'Accept': 'application/orcid+json',
'Content-Type': content_type,
'Authorization': 'Bearer ' + token}
if method == requests.delete:
response = method(url, headers=headers, timeout=self._timeout)
else:
xml = self._serialize_by_content_type(data, content_type)
response = method(url, xml, headers=headers, timeout=self._timeout)
response.raise_for_status()
if self.do_store_raw_response:
self.raw_response = response
if 'location' in response.headers:
# Return the new put-code
return response.headers['location'].split('/')[-1]
def _add_put_code_by_content_type(self, content_type, data, put_code):
if content_type == 'application/orcid+json':
data['put-code'] = put_code
elif content_type == 'application/orcid+xml':
data.attrib['put-code'] = '%s' % put_code
else:
raise NotImplementedError('Cannot add to content of type %s'
% content_type)
def _serialize_by_content_type(self, data, content_type):
if content_type == 'application/orcid+json':
return json.dumps(data)
if content_type == 'application/orcid+xml':
return etree.tostring(data)
raise NotImplementedError('No serializer for content of type %s'
% content_type)
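
# ------------------------------------------------------------------------
# Usage sketch (illustrative only; the client credentials, ORCID iD and
# redirect URI are placeholders). A typical 3-legged flow with PublicAPI:
#
#   api = PublicAPI('APP-XXXXXXXX', 'client-secret', sandbox=True)
#   url = api.get_login_url('/authenticate', 'https://example.org/callback')
#   # ... send the user to `url`; ORCID redirects back with ?code=<code> ...
#   token = api.get_token_from_authorization_code(
#       code, 'https://example.org/callback')['access_token']
#   record = api.read_record_public('0000-0002-1825-0097', 'record', token)
#
#   # Searching does not need a user token; one is fetched automatically:
#   results = api.search('family-name:Einstein', rows=5)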
|
{
"content_hash": "0ecd092ebb99a43fd99998c08f26d1cd",
"timestamp": "",
"source": "github",
"line_count": 759,
"max_line_length": 81,
"avg_line_length": 38.37022397891963,
"alnum_prop": 0.541084366308416,
"repo_name": "ORCID/python-orcid",
"id": "9bd79ff82c1cf5c49e8eb3b097f781f1a25a4428",
"size": "29123",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "orcid/orcid.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "46248"
}
],
"symlink_target": ""
}
|
import csv
import numpy as np
from sklearn.svm import SVR
import matplotlib.pyplot as plt
# plt.switch_backend('newbackend')
dates = []
prices = []
def get_data(filename):
with open(filename, 'r') as csvfile:
csvFileReader = csv.reader(csvfile)
next(csvFileReader) # skipping column names
for row in csvFileReader:
dates.append(int(row[0].split('/')[0]))
prices.append(float(row[1]))
return
def predict_price(dates, prices, x):
dates = np.reshape(dates,(len(dates), 1)) # converting to matrix of n X 1
svr_lin = SVR(kernel= 'linear', C= 1e3)
svr_poly = SVR(kernel= 'poly', C= 1e3, degree= 2)
svr_rbf = SVR(kernel= 'rbf', C= 1e3, gamma= 0.1) # defining the support vector regression models
svr_rbf.fit(dates, prices) # fitting the data points in the models
svr_lin.fit(dates, prices)
svr_poly.fit(dates, prices)
plt.scatter(dates, prices, color= 'black', label= 'Data') # plotting the initial datapoints
plt.plot(dates, svr_rbf.predict(dates), color= 'red', label= 'RBF model') # plotting the line made by the RBF kernel
plt.plot(dates,svr_lin.predict(dates), color= 'green', label= 'Linear model') # plotting the line made by linear kernel
plt.plot(dates,svr_poly.predict(dates), color= 'blue', label= 'Polynomial model') # plotting the line made by polynomial kernel
plt.xlabel('Date')
plt.ylabel('Price')
plt.title('Support Vector Regression')
plt.legend()
plt.show()
	x = np.array([[x]])  # predict() expects a 2-D array, not a scalar
	return svr_rbf.predict(x)[0], svr_lin.predict(x)[0], svr_poly.predict(x)[0]
get_data('../../data/nvda.csv')
print(dates)
print(prices)
# get_data('aapl.csv') # calling get_data method by passing the csv file to it
#print "Dates- ", dates
#print "Prices- ", prices
# predicted_price = predict_price(dates, prices, 29)
|
{
"content_hash": "30c07553a20475a92c6ac8bdff33a16c",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 128,
"avg_line_length": 35.224489795918366,
"alnum_prop": 0.7056778679026651,
"repo_name": "stephen2run/EcoDataLearn",
"id": "6326ac398bfcbb8547c7a9c893febafd28cb392d",
"size": "1726",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/tool/predictingprices.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "8027"
}
],
"symlink_target": ""
}
|
import gnupg
import shutil
import os
import tarfile
def sanitise_keys(keys):
sanitised_all = {}
for key in keys:
sanitised_key = {}
sanitised_key['date'] = key['date']
sanitised_key['expires'] = key['expires']
sanitised_key['fingerprint'] = key['fingerprint']
sanitised_key['length'] = key['length']
sanitised_key['uid'] = key['uids'][0]
sanitised_all[key['keyid']] = sanitised_key
return sanitised_all
def user_to_key_dict(private_keys):
user_key = {}
for key in private_keys:
user_key[key['uids'][0]] = key['keyid']
return user_key
#print sanitise_keys(keys)
true_gpg_path = os.path.expanduser('~/.gnupg')  # gpg does not expand '~' itself
tmp_home = 'tmpgpg/'
def generate_gpg_key(real_name, nickname, email, passphrase, key_length = 2048, key_type = "RSA", expire_date = "1y"):
gpg = gnupg.GPG(gnupghome = true_gpg_path)
return gpg.gen_key(gpg.gen_key_input(key_type = key_type, key_length = key_length, name_real = real_name, name_comment = nickname, name_email = email, expire_date = expire_date, passphrase = passphrase))
def private_keys_users():
gpg = gnupg.GPG(gnupghome = true_gpg_path)
return user_to_key_dict(gpg.list_keys(True))
def private_keys_details():
gpg = gnupg.GPG(gnupghome = true_gpg_path)
return sanitise_keys(gpg.list_keys(True))
def public_keys_details():
gpg = gnupg.GPG(gnupghome = true_gpg_path)
return sanitise_keys(gpg.list_keys(False))
def tmp_public_keys_details():
gpg = gnupg.GPG(gnupghome = tmp_home)
return sanitise_keys(gpg.list_keys(False))
def sign_video(video_filepath, passphrase, keyid):
gpg = gnupg.GPG(gnupghome = true_gpg_path)
with open(video_filepath, "rb") as stream:
signed = gpg.sign_file(stream, keyid = keyid, passphrase = passphrase, detach = True)
print dir(signed)
print signed.stderr
signature_path = video_filepath + ".signature"
with open(signature_path, "wb") as video_signature:
video_signature.write(signed.data)
return signature_path
#result_file = open(video_filepath + ".sig","wb")
#result_file.write(signed.data)
#result_file.close()
def create_vaida(video_filepath, passphrase, keyid):
#TODO slashes -> windows (throughout)
signature_path = sign_video(video_filepath, passphrase, keyid)
with tarfile.open(name=video_filepath.split("/")[-1] + ".vaida", mode='w', fileobj=None, bufsize=10240) as tar:
gpg = gnupg.GPG(gnupghome = true_gpg_path)
armored_key = gpg.export_keys(keyid)
with open ("pubkey", "wb") as pubkey:
pubkey.write(armored_key)
tar.add(video_filepath, arcname = "video")
tar.add("pubkey", arcname = "pubkey")
tar.add(signature_path, arcname = "signature")
def untar_verify_vaida(vaida_path):
_clear_temp()
gpg = gnupg.GPG(gnupghome = tmp_home)
with tarfile.open(name = vaida_path, mode = "r") as tar:
print tar.getnames()
tar.extractall(tmp_home)
with open(tmp_home + "pubkey", "rb") as pubkey:
imported = gpg.import_keys(pubkey.read())
print dir(imported)
verification = gpg.verify_file(open(tmp_home + tar.getnames()[2], "rb"), tmp_home + tar.getnames()[0])
print verification.valid
print verification.stderr
#print dir(verification)
dicto = tmp_public_keys_details()
for key in dicto:
expiration = dicto[key]["expires"]
return (verification.valid, imported.fingerprints[0], os.path.abspath(tmp_home + "/video"), expiration)
def _clear_temp():
if os.path.isdir(tmp_home):
shutil.rmtree(tmp_home)
def add_tmp_to_keyring():
gpg = gnupg.GPG(gnupghome = true_gpg_path)
with open(tmp_home + "pubkey", "rb") as pubkey:
trusted = gpg.import_keys(pubkey.read())
print dir(trusted)
print trusted.stderr
_clear_temp()
#create_vaida("/home/picrin/programming/VAIDA/Honey_Sample_G.avi", "dirty loondry boundry. stash/", u"D98029C596F20E5D")
#print verify_vaida("/home/picrin/programming/VAIDA/backend/Honey_Sample_G.avi.vaida.tar")
#add_pub_keyring()
#print public_keys_details()
#print private_keys_details()
#generate_gpg_key("Hugh McGrade (do not trust)", "hmg (do not trust)", "hugh@mcgrade.ac.uk (do not trust)", "dirty loondry boundry. stash/")
#create_vaida("/home/hugh/Documents/VAIDA/UI/video004.mp4", "dirty loondry boundry. stash/", u"62CDD87039635942")
#untar_verify_vaida("video004.mp4.vaida")
print public_keys_details()
print private_keys_details()
|
{
"content_hash": "1065c2f92c6f9f5d09ea45cc5c76cbbd",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 207,
"avg_line_length": 38.76068376068376,
"alnum_prop": 0.6606394707828004,
"repo_name": "picrin/VAIDA_OLD",
"id": "f5cfb5886702782170b2bdd21110f41d3e8120ee",
"size": "4535",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "UI/gpglib.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "45873"
},
{
"name": "Shell",
"bytes": "29"
}
],
"symlink_target": ""
}
|
from six import PY3
import sys
if PY3:
from xmlrpc.server import SimpleXMLRPCServer
else:
from SimpleXMLRPCServer import SimpleXMLRPCServer
from remoteserver import announce_port
class Documentation(SimpleXMLRPCServer):
def __init__(self, port=8270, port_file=None):
SimpleXMLRPCServer.__init__(self, ('127.0.0.1', int(port)))
self.register_function(self.get_keyword_names)
self.register_function(self.get_keyword_documentation)
self.register_function(self.run_keyword)
announce_port(self.socket, port_file)
self.serve_forever()
def get_keyword_names(self):
return ['Empty', 'Single line', 'Multi line']
def get_keyword_documentation(self, name):
if name == 'Single line':
return 'Single line documentation'
if name == 'Multi line':
return 'Multi\nline\ndocumentation\n'
return ''
def run_keyword(self, name, args):
return {'status': 'PASS'}
if __name__ == '__main__':
Documentation(*sys.argv[1:])
|
{
"content_hash": "01b61f5a107edad9100415744920de69",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 67,
"avg_line_length": 28.324324324324323,
"alnum_prop": 0.6498091603053435,
"repo_name": "userzimmermann/robotframework-python3",
"id": "2a615436dcd939a64af6c584db109bf87f4979e9",
"size": "1048",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "atest/testdata/standard_libraries/remote/documentation.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "16539"
},
{
"name": "HTML",
"bytes": "1011996"
},
{
"name": "Java",
"bytes": "58737"
},
{
"name": "JavaScript",
"bytes": "159003"
},
{
"name": "Python",
"bytes": "2018310"
},
{
"name": "RobotFramework",
"bytes": "4288"
},
{
"name": "Shell",
"bytes": "1093"
}
],
"symlink_target": ""
}
|